├── .gitignore ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── README.md ├── build ├── docs ├── Makefile ├── api.rst ├── changes.rst ├── conf.py └── index.rst ├── examples ├── chat │ ├── client │ │ └── client.py │ ├── messages │ │ ├── chat.proto │ │ ├── chat_grpc.py │ │ └── chat_pb2.py │ └── server │ │ └── main.py ├── cluster_grain_hello_world │ ├── messages │ │ ├── protos.proto │ │ ├── protos.py │ │ ├── protos_grpc.py │ │ └── protos_pb2.py │ ├── node_1 │ │ └── node_1.py │ └── node_2 │ │ └── node_2.py ├── cluster_hello_world │ ├── messages │ │ ├── protos.proto │ │ ├── protos_grpc.py │ │ └── protos_pb2.py │ ├── node_1 │ │ └── node_1.py │ └── node_2 │ │ └── node_2.py ├── context_decorators │ └── context_decorators.py ├── futures │ └── futures.py ├── hello_world │ └── hello_world.py ├── lifecycle_events │ └── lifecycle_events.py ├── mailbox_benchmark │ └── mailbox_benchmark.py ├── middleware │ └── middleware.py ├── patterns │ └── saga │ │ ├── account.py │ │ ├── account_proxy.py │ │ ├── factories │ │ └── transfer_factory.py │ │ ├── in_memory_provider.py │ │ ├── internal │ │ └── for_with_progress.py │ │ ├── messages.py │ │ ├── runner.py │ │ ├── saga.py │ │ └── transfer_process.py ├── persistence │ ├── messages │ │ ├── protos.proto │ │ ├── protos_grpc.py │ │ └── protos_pb2.py │ └── persistence │ │ └── persistence.py ├── receive_timeout │ └── receive_timeout.py ├── remote_benchmark │ ├── messages │ │ ├── protos.proto │ │ ├── protos_grpc.py │ │ └── protos_pb2.py │ ├── node_1 │ │ └── node_1.py │ └── node_2 │ │ └── node_2.py ├── router │ └── router.py ├── schedulers │ └── simple_scheduler.py ├── spawn_benchmark │ └── spawn_benchmark.py └── supervision │ ├── escalate_supervision.py │ └── supervision.py ├── protoactor ├── __init__.py ├── actor │ ├── __init__.py │ ├── actor.py │ ├── actor_context.py │ ├── behavior.py │ ├── cancel_token.py │ ├── context_decorator.py │ ├── event_stream.py │ ├── exceptions.py │ ├── log.py │ ├── message_envelope.py │ ├── message_header.py │ ├── messages.py │ ├── persistence.py │ ├── process.py │ ├── props.py │ ├── protos.proto │ ├── protos_grpc.py │ ├── protos_pb2.py │ ├── restart_statistics.py │ ├── supervision.py │ └── utils.py ├── cluster │ ├── __init__.py │ ├── grain_call_options.py │ ├── hash_algorithms │ │ └── fnv1a32.py │ ├── member_status.py │ ├── member_status_events.py │ ├── member_strategy.py │ ├── messages.py │ ├── pid_cache.py │ ├── protos.proto │ ├── protos_grpc.py │ ├── protos_pb2.py │ ├── providers │ │ ├── abstract_cluster_provider.py │ │ ├── consul │ │ │ ├── consul_client.py │ │ │ ├── consul_provider.py │ │ │ └── startconsul.bat │ │ └── single_remote_instance │ │ │ ├── protos.proto │ │ │ ├── protos_grpc.py │ │ │ ├── protos_pb2.py │ │ │ └── single_remote_instance_provider.py │ ├── rendezvous.py │ ├── round_robin.py │ └── сluster.py ├── mailbox │ ├── __init__.py │ ├── dispatcher.py │ ├── mailbox.py │ ├── mailbox_statistics.py │ └── queue.py ├── persistence │ ├── __init__.py │ ├── messages.py │ ├── persistence.py │ ├── providers │ │ ├── __init__.py │ │ ├── abstract_provider.py │ │ └── in_memory_provider.py │ └── snapshot_strategies │ │ ├── abstract_snapshot_strategy.py │ │ ├── event_type_strategy.py │ │ ├── interval_strategy.py │ │ └── time_strategy.py ├── remote │ ├── __init__.py │ ├── exceptions.py │ ├── messages.py │ ├── protos_remote.proto │ ├── protos_remote_grpc.py │ ├── protos_remote_pb2.py │ ├── remote.py │ ├── response.py │ └── serialization.py ├── router │ ├── __init__.py │ ├── broadcast_router.py │ ├── consistent_hash_group_router.py │ ├── hash.py 
│ ├── messages.py │ ├── random_router.py │ ├── round_robin_router.py │ ├── router.py │ ├── router_actor.py │ ├── router_config.py │ ├── router_process.py │ └── router_state.py ├── schedulers │ ├── __init__.py │ └── simple_scheduler.py ├── tracing │ ├── __init__.py │ └── opentracing │ │ ├── open_tracing_decorator.py │ │ ├── open_tracing_factory.py │ │ ├── open_tracing_helper.py │ │ └── open_tracing_middleware.py ├── utils │ ├── __init__.py │ └── async_timer.py └── сluster │ ├── __init__.py │ ├── grain_call_options.py │ ├── hash_algorithms │ └── fnv1a32.py │ ├── member_status.py │ ├── member_status_events.py │ ├── member_strategy.py │ ├── messages.py │ ├── pid_cache.py │ ├── protos.proto │ ├── protos_grpc.py │ ├── protos_pb2.py │ ├── providers │ ├── abstract_cluster_provider.py │ ├── consul │ │ ├── consul_client.py │ │ ├── consul_provider.py │ │ └── startconsul.bat │ └── single_remote_instance │ │ ├── protos.proto │ │ ├── protos_grpc.py │ │ ├── protos_pb2.py │ │ └── single_remote_instance_provider.py │ ├── rendezvous.py │ ├── round_robin.py │ └── сluster.py ├── protobuf ├── __init__.py ├── proto_grain_generator │ ├── grain_gen.py │ ├── proto.py │ └── proto_grain_generator.py └── templates │ └── template.txt ├── pylintrc ├── requirements.dev.txt ├── requirements.txt ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── actor │ ├── __init__.py │ ├── test_actor.py │ ├── test_behavior.py │ ├── test_cancel_token.py │ ├── test_deps.py │ ├── test_event_stream.py │ ├── test_local_context.py │ ├── test_message_envelope.py │ ├── test_persistance.py │ ├── test_pid.py │ ├── test_process.py │ ├── test_process_registry.py │ ├── test_props.py │ ├── test_restart_statistics.py │ ├── test_supervision.py │ └── test_utils.py ├── cluster │ ├── __init__.py │ └── hash_algorithms │ │ └── test_fnv1a32.py ├── mailbox │ ├── __init__.py │ ├── test_mailbox.py │ └── test_mailbox_queue.py ├── persistence │ ├── __init__.py │ ├── snapshot_strategies │ │ ├── test_event_type_strategy.py │ │ ├── test_interval_strategy.py │ │ └── test_time_strategy.py │ ├── test_example_persistent_actor.py │ └── test_persistence_with_snapshot_strategies.py ├── protobuf │ └── proto_grain_generator │ │ ├── messages │ │ ├── protos.proto │ │ ├── protos.py │ │ ├── protos_grpc.py │ │ └── protos_pb2.py │ │ └── test_proto_grain_generator.py ├── remote │ ├── __init__.py │ ├── messages │ │ ├── __init__.py │ │ ├── protos.proto │ │ └── protos_pb2.py │ ├── node │ │ ├── __init__.py │ │ └── node.py │ ├── remote_manager.py │ ├── test_remote.py │ └── test_serialization.py ├── router │ ├── __init__.py │ ├── test_broadcast_group.py │ ├── test_consistent_hash_group.py │ ├── test_random_group.py │ └── test_round_robin_group.py ├── test_fixtures │ ├── mock_mailbox.py │ └── test_mailbox.py └── test_utils │ └── async_mock.py └── tox.ini /.gitignore: -------------------------------------------------------------------------------- 1 | #IDEs and Text Editros 2 | .idea/ 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *,cover 49 | .hypothesis/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # IPython Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # dotenv 82 | .env 83 | 84 | # virtualenv 85 | venv/ 86 | ENV/ 87 | *.venv 88 | *.env 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | 93 | # Rope project settings 94 | .ropeproject 95 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | 3 | language: python 4 | python: 5 | - 3.8 6 | 7 | env: 8 | - TOX_ENV=py3 9 | - TOX_ENV=docs 10 | 11 | install: 12 | - "pip3 install tox" 13 | 14 | script: 15 | - "tox -e $TOX_ENV" 16 | 17 | notifications: 18 | email: false 19 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.md 3 | recursive-include tests *.py 4 | recursive-include protoactor *.py 5 | include .coveragerc 6 | include .travis.yml 7 | include pylintrc 8 | include tox.ini 9 | 10 | prune docs/_build 11 | prune tests/__pycache__ 12 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # protoactor-python 2 | Proto Actor - Ultra fast distributed actors 3 | 4 | [![Join the chat at https://gitter.im/AsynkronIT/protoactor](https://badges.gitter.im/AsynkronIT/protoactor.svg)](https://gitter.im/AsynkronIT/protoactor?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 5 | 6 | [![Build Status](https://travis-ci.org/AsynkronIT/protoactor-python.svg?branch=master)](https://travis-ci.org/AsynkronIT/protoactor-python) 7 | 8 | # Getting Started 9 | 10 | Ensure you have Python 3.6 with pip installed. 11 | 12 | ``` 13 | $ pip3 install virtualenv 14 | $ virtualenv dev.venv 15 | $ source dev.venv/bin/activate # on POSIX systems 16 | $ .\dev.venv\Scripts\activate.ps1 # on Windows 17 | $ pip3 install -r requirements.dev.txt 18 | ``` 19 | ## Run mypy 20 | 21 | ``` 22 | mypy --python-version 3.6 --fast-parser -p protoactor 23 | ``` 24 | 25 | ## Run tests 26 | 27 | ``` 28 | $ tox 29 | ``` 30 | -------------------------------------------------------------------------------- /build: -------------------------------------------------------------------------------- 1 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. protoactor/actor/protos.proto 2 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. protoactor/remote/protos_remote.proto 3 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. protoactor/сluster/protos.proto 4 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. 
protoactor/сluster/providers/single_remote_instance/protos.proto 5 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. examples/cluster_hello_world/messages/protos.proto 6 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. examples/cluster_grain_hello_world/messages/protos.proto 7 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. tests/protobuf/proto_grain_generator/messages/protos.proto 8 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. examples/chat/messages/chat.proto 9 | python3 -m grpc_tools.protoc -I. --python_out=. --python_grpc_out=. examples/persistence/messages/protos.proto 10 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | 15 | .PHONY: help clean html dirhtml epub latexpdf man changes linkcheck doctest 16 | 17 | help: 18 | @echo "Please use \`make ' where is one of" 19 | @echo " html to make standalone HTML files" 20 | @echo " dirhtml to make HTML files named index.html in directories" 21 | @echo " epub to make an epub" 22 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 23 | @echo " linkcheck to check for errors" 24 | @echo " man to make manual pages" 25 | @echo " changes to make an overview of all changed/added/deprecated items" 26 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 27 | 28 | clean: 29 | -rm -rf $(BUILDDIR)/* 30 | 31 | html: 32 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 33 | @echo 34 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 35 | 36 | dirhtml: 37 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 38 | @echo 39 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 40 | 41 | epub: 42 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 43 | @echo 44 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 45 | 46 | latexpdf: 47 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 48 | @echo "Running LaTeX files through pdflatex..." 49 | make -C $(BUILDDIR)/latex all-pdf 50 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 51 | 52 | man: 53 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 54 | @echo 55 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 56 | 57 | changes: 58 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 59 | @echo 60 | @echo "The overview file is in $(BUILDDIR)/changes." 61 | 62 | linkcheck: 63 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 64 | @echo 65 | @echo "Link check complete; look for any errors in the above output " \ 66 | "or in $(BUILDDIR)/linkcheck/output.txt." 67 | 68 | doctest: 69 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 70 | @echo "Testing of doctests in the sources finished, look at the " \ 71 | "results in $(BUILDDIR)/doctest/output.txt." 
72 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | Protoactor API 3 | ============== 4 | 5 | .. module: protoactor 6 | .. attribute:: __version__ -------------------------------------------------------------------------------- /docs/changes.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Changes 3 | ======= -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """Protoactor documentation build configuration file""" 4 | 5 | from __future__ import unicode_literals 6 | 7 | import os 8 | import re 9 | import sys 10 | 11 | 12 | # -- Workarounds to have autodoc generate API docs ---------------------------- 13 | 14 | sys.path.insert(0, os.path.abspath('..')) 15 | 16 | 17 | # -- General configuration ---------------------------------------------------- 18 | 19 | needs_sphinx = '1.0' 20 | 21 | extensions = [ 22 | 'sphinx.ext.autodoc', 23 | 'sphinx.ext.extlinks', 24 | 'sphinx.ext.intersphinx', 25 | 'sphinx.ext.viewcode', 26 | ] 27 | 28 | templates_path = ['_templates'] 29 | source_suffix = '.rst' 30 | master_doc = 'index' 31 | 32 | project = u'Protoactor' 33 | 34 | 35 | def get_version(): 36 | init_py = open('../protoactor/__init__.py').read() 37 | # TODO: make this work with both single and double quotes 38 | metadata = dict(re.findall("__([a-z]+)__ = \"([^\"]+)\"", init_py)) 39 | return metadata['version'] 40 | 41 | 42 | release = get_version() 43 | version = '.'.join(release.split('.')[:2]) 44 | 45 | exclude_patterns = ['_build'] 46 | 47 | pygments_style = 'sphinx' 48 | 49 | modindex_common_prefix = ['protoactor.'] 50 | 51 | 52 | # -- Options for HTML output -------------------------------------------------- 53 | 54 | html_theme = 'default' 55 | html_static_path = ['_static'] 56 | 57 | html_use_modindex = True 58 | html_use_index = True 59 | html_split_index = False 60 | html_show_sourcelink = True 61 | 62 | htmlhelp_basename = 'Protoactor' 63 | 64 | 65 | # -- Options for LaTeX output ------------------------------------------------- 66 | 67 | latex_documents = [ 68 | ( 69 | 'index', 70 | 'protoactor.tex', 71 | 'Protoactor Documentation', 72 | 'manual', 73 | ), 74 | ] 75 | 76 | 77 | # -- Options for manual page output ------------------------------------------- 78 | 79 | man_pages = [] 80 | 81 | 82 | # -- Options for autodoc extension -------------------------------------------- 83 | 84 | autodoc_member_order = 'bysource' 85 | 86 | 87 | # -- Options for intersphinx extension ---------------------------------------- 88 | 89 | intersphinx_mapping = { 90 | 'python': ('http://docs.python.org/2', None), 91 | } 92 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Table of contents 2 | ================= 3 | 4 | .. 
toctree:: 5 | 6 | api 7 | changes -------------------------------------------------------------------------------- /examples/chat/client/client.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | import opentracing 5 | from jaeger_client import Tracer, Config, Span 6 | 7 | from examples.chat.messages.chat_pb2 import Connected, SayResponse, NickResponse, Connect, NickRequest, SayRequest, \ 8 | DESCRIPTOR 9 | from protoactor.actor import PID 10 | from protoactor.actor.actor_context import AbstractContext, GlobalRootContext, RootContext 11 | from protoactor.actor.message_header import MessageHeader 12 | from protoactor.actor.props import Props 13 | from protoactor.remote.remote import Remote 14 | from protoactor.remote.serialization import Serialization 15 | from protoactor.tracing.opentracing import open_tracing_middleware 16 | from protoactor.tracing.opentracing.open_tracing_factory import OpenTracingFactory 17 | 18 | 19 | async def process_message(ctx: AbstractContext): 20 | msg = ctx.message 21 | if isinstance(msg, Connected): 22 | print(msg.message) 23 | elif isinstance(msg, SayResponse): 24 | print(f'{msg.user_name} {msg.message}') 25 | elif isinstance(msg, NickResponse): 26 | print(f'{msg.old_user_name} {msg.new_user_name}') 27 | 28 | 29 | async def start(argv): 30 | tracer = init_jaeger_tracer() 31 | opentracing.set_global_tracer(tracer) 32 | 33 | middleware = open_tracing_middleware.open_tracing_sender_middleware(tracer) 34 | 35 | Serialization().register_file_descriptor(DESCRIPTOR) 36 | Remote().start("127.0.0.1", 12001) 37 | server = PID(address='127.0.0.1:8000', id='chatserver') 38 | context = RootContext(MessageHeader(), [middleware]) 39 | 40 | props = OpenTracingFactory.get_props_with_open_tracing(Props.from_func(process_message), span_setup, span_setup, 41 | tracer) 42 | 43 | client = context.spawn(props) 44 | await context.send(server, Connect(sender=client)) 45 | 46 | nick = 'Alex' 47 | while True: 48 | text = input() 49 | if text == '/exit': 50 | return 51 | elif text.startswith('/nick '): 52 | new_nick = text.split(' ')[1] 53 | await context.send(server, NickRequest(old_user_name=nick, new_user_name=new_nick)) 54 | nick = new_nick 55 | else: 56 | await context.send(server, SayRequest(user_name=nick, message=text)) 57 | 58 | 59 | def span_setup(span: Span, message: any): 60 | if message is not None: 61 | span.log_kv({'message': str(message)}) 62 | 63 | 64 | def init_jaeger_tracer(service_name='proto.chat.client'): 65 | config = Config(config={'sampler': { 66 | 'type': 'const', 67 | 'param': 1, 68 | }, 69 | 'logging': True, }, service_name=service_name, validate=True) 70 | return config.initialize_tracer() 71 | 72 | 73 | def main(argv): 74 | loop = asyncio.get_event_loop() 75 | loop.run_until_complete(start(argv)) 76 | loop.close() 77 | 78 | 79 | if __name__ == "__main__": 80 | main(sys.argv[1:]) 81 | -------------------------------------------------------------------------------- /examples/chat/messages/chat.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package messages; 3 | import "protoactor/actor/protos.proto"; 4 | 5 | message Connect 6 | { 7 | actor.PID sender = 1; 8 | } 9 | 10 | message Connected { 11 | string message = 1; 12 | } 13 | 14 | message SayRequest { 15 | string user_name = 1; 16 | string message = 2; 17 | } 18 | 19 | message SayResponse { 20 | string user_name = 1; 21 | string message = 2; 22 | } 23 | 24 | message 
NickRequest { 25 | string old_user_name = 1; 26 | string new_user_name = 2; 27 | } 28 | 29 | message NickResponse { 30 | string old_user_name = 1; 31 | string new_user_name = 2; 32 | } -------------------------------------------------------------------------------- /examples/chat/messages/chat_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 2 | # source: examples/chat/messages/chat.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /examples/chat/server/main.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | import opentracing 5 | from jaeger_client import Span, Config 6 | 7 | from examples.chat.messages.chat_pb2 import DESCRIPTOR, Connect, SayRequest, NickRequest, Connected, SayResponse, \ 8 | NickResponse 9 | from protoactor.actor.actor_context import AbstractContext, GlobalRootContext, RootContext 10 | from protoactor.actor.props import Props 11 | from protoactor.remote.remote import Remote 12 | from protoactor.remote.serialization import Serialization 13 | from protoactor.tracing.opentracing.open_tracing_factory import OpenTracingFactory 14 | 15 | from protoactor.actor.actor_context import RootContext 16 | 17 | 18 | async def main(): 19 | tracer = init_jaeger_tracer() 20 | opentracing.set_global_tracer(tracer) 21 | 22 | context = RootContext() 23 | 24 | Serialization().register_file_descriptor(DESCRIPTOR) 25 | Remote().start("127.0.0.1", 8000) 26 | clients = [] 27 | 28 | async def process_message(ctx: AbstractContext): 29 | msg = ctx.message 30 | if isinstance(msg, Connect): 31 | print(f'Client {msg.sender} connected') 32 | clients.append(msg.sender) 33 | await ctx.send(msg.sender, Connected(message='Welcome!')) 34 | elif isinstance(msg, SayRequest): 35 | for client in clients: 36 | await ctx.send(client, SayResponse(user_name=msg.user_name, message=msg.message)) 37 | elif isinstance(msg, NickRequest): 38 | for client in clients: 39 | await ctx.send(client, NickResponse(old_user_name=msg.old_user_name, new_user_name=msg.new_user_name)) 40 | 41 | props = OpenTracingFactory.get_props_with_open_tracing(Props.from_func(process_message), span_setup, span_setup) 42 | context.spawn_named(props, 'chatserver') 43 | 44 | input() 45 | 46 | 47 | def init_jaeger_tracer(service_name='proto.chat.server'): 48 | config = Config(config={'sampler': { 49 | 'type': 'const', 50 | 'param': 1, 51 | }, 52 | 'logging': True, }, service_name=service_name, validate=True) 53 | return config.initialize_tracer() 54 | 55 | 56 | def span_setup(span: Span, message: any): 57 | if message is not None: 58 | span.log_kv({'message': str(message)}) 59 | 60 | 61 | if __name__ == "__main__": 62 | loop = asyncio.get_event_loop() 63 | loop.run_until_complete(main()) 64 | -------------------------------------------------------------------------------- /examples/cluster_grain_hello_world/messages/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package messages; 3 | 4 | message HelloRequest {} 5 | message HelloResponse { 6 | string message=1; 7 | } 8 | 9 | service HelloGrain { 10 | rpc SayHello(HelloRequest) returns (HelloResponse) {} 11 | } -------------------------------------------------------------------------------- /examples/cluster_grain_hello_world/messages/protos_grpc.py: 
-------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 2 | # source: examples/cluster_grain_hello_world/messages/protos.proto 3 | # plugin: grpclib.plugin.main 4 | import abc 5 | import typing 6 | 7 | import grpclib.const 8 | import grpclib.client 9 | if typing.TYPE_CHECKING: 10 | import grpclib.server 11 | 12 | import examples.cluster_grain_hello_world.messages.protos_pb2 13 | 14 | 15 | class HelloGrainBase(abc.ABC): 16 | 17 | @abc.abstractmethod 18 | async def SayHello(self, stream: 'grpclib.server.Stream[examples.cluster_grain_hello_world.messages.protos_pb2.HelloRequest, examples.cluster_grain_hello_world.messages.protos_pb2.HelloResponse]') -> None: 19 | pass 20 | 21 | def __mapping__(self) -> typing.Dict[str, grpclib.const.Handler]: 22 | return { 23 | '/messages.HelloGrain/SayHello': grpclib.const.Handler( 24 | self.SayHello, 25 | grpclib.const.Cardinality.UNARY_UNARY, 26 | examples.cluster_grain_hello_world.messages.protos_pb2.HelloRequest, 27 | examples.cluster_grain_hello_world.messages.protos_pb2.HelloResponse, 28 | ), 29 | } 30 | 31 | 32 | class HelloGrainStub: 33 | 34 | def __init__(self, channel: grpclib.client.Channel) -> None: 35 | self.SayHello = grpclib.client.UnaryUnaryMethod( 36 | channel, 37 | '/messages.HelloGrain/SayHello', 38 | examples.cluster_grain_hello_world.messages.protos_pb2.HelloRequest, 39 | examples.cluster_grain_hello_world.messages.protos_pb2.HelloResponse, 40 | ) 41 | -------------------------------------------------------------------------------- /examples/cluster_grain_hello_world/node_1/node_1.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | from examples.cluster_grain_hello_world.messages.protos import Grains 5 | from examples.cluster_grain_hello_world.messages.protos_pb2 import DESCRIPTOR, HelloRequest 6 | from protoactor.remote.serialization import Serialization 7 | from protoactor.cluster.providers.consul.consul_client import ConsulClientConfiguration 8 | from protoactor.cluster.providers.consul.consul_provider import ConsulProvider 9 | from protoactor.cluster.сluster import Cluster 10 | 11 | 12 | async def start(argv): 13 | Serialization().register_file_descriptor(DESCRIPTOR) 14 | await Cluster.start('MyCluster', '127.0.0.1', 12001, 15 | ConsulProvider(ConsulClientConfiguration(f'http://192.168.1.35:8500/'))) 16 | 17 | client = Grains.hello_grain("Roger") 18 | res = await client.say_hello(HelloRequest()) 19 | print(res.message) 20 | input() 21 | 22 | res = await client.say_hello(HelloRequest()) 23 | print(res.message) 24 | input() 25 | 26 | print('Shutting Down...') 27 | await Cluster.shutdown() 28 | 29 | def main(argv): 30 | loop = asyncio.get_event_loop() 31 | loop.run_until_complete(start(argv)) 32 | loop.close() 33 | 34 | if __name__ == "__main__": 35 | main(sys.argv[1:]) -------------------------------------------------------------------------------- /examples/cluster_grain_hello_world/node_2/node_2.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | from examples.cluster_grain_hello_world.messages.protos import AbstractHelloGrain, Grains 5 | from examples.cluster_grain_hello_world.messages.protos_pb2 import DESCRIPTOR, HelloRequest, HelloResponse 6 | from protoactor.remote.serialization import Serialization 7 | from protoactor.cluster.providers.consul.consul_client import ConsulClientConfiguration 8 | 
from protoactor.cluster.providers.consul.consul_provider import ConsulProvider 9 | from protoactor.cluster.сluster import Cluster 10 | 11 | 12 | class HelloGrain(AbstractHelloGrain): 13 | async def say_hello(self, request: HelloRequest) -> HelloResponse: 14 | return HelloResponse(message='Hello from typed grain') 15 | 16 | 17 | async def start(argv): 18 | Serialization().register_file_descriptor(DESCRIPTOR) 19 | 20 | Grains.hello_grain_factory(HelloGrain()) 21 | 22 | await Cluster.start('MyCluster', '192.168.1.129', 12000, 23 | ConsulProvider(ConsulClientConfiguration(f'http://192.168.1.35:8500/'))) 24 | 25 | 26 | await asyncio.sleep(10000) 27 | input() 28 | print('Shutting Down...') 29 | await Cluster.shutdown() 30 | 31 | 32 | def main(argv): 33 | loop = asyncio.get_event_loop() 34 | loop.run_until_complete(start(argv)) 35 | loop.close() 36 | 37 | 38 | if __name__ == "__main__": 39 | main(sys.argv[1:]) 40 | -------------------------------------------------------------------------------- /examples/cluster_hello_world/messages/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package messages; 3 | 4 | message HelloRequest {} 5 | message HelloResponse { 6 | string message=1; 7 | } -------------------------------------------------------------------------------- /examples/cluster_hello_world/messages/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 2 | # source: examples/cluster_hello_world/messages/protos.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /examples/cluster_hello_world/messages/protos_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: examples/cluster_hello_world/messages/protos.proto 4 | 5 | import sys 6 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 7 | from google.protobuf import descriptor as _descriptor 8 | from google.protobuf import message as _message 9 | from google.protobuf import reflection as _reflection 10 | from google.protobuf import symbol_database as _symbol_database 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | 17 | 18 | DESCRIPTOR = _descriptor.FileDescriptor( 19 | name='examples/cluster_hello_world/messages/protos.proto', 20 | package='messages', 21 | syntax='proto3', 22 | serialized_options=None, 23 | serialized_pb=_b('\n2examples/cluster_hello_world/messages/protos.proto\x12\x08messages\"\x0e\n\x0cHelloRequest\" \n\rHelloResponse\x12\x0f\n\x07message\x18\x01 \x01(\tb\x06proto3') 24 | ) 25 | 26 | 27 | 28 | 29 | _HELLOREQUEST = _descriptor.Descriptor( 30 | name='HelloRequest', 31 | full_name='messages.HelloRequest', 32 | filename=None, 33 | file=DESCRIPTOR, 34 | containing_type=None, 35 | fields=[ 36 | ], 37 | extensions=[ 38 | ], 39 | nested_types=[], 40 | enum_types=[ 41 | ], 42 | serialized_options=None, 43 | is_extendable=False, 44 | syntax='proto3', 45 | extension_ranges=[], 46 | oneofs=[ 47 | ], 48 | serialized_start=64, 49 | serialized_end=78, 50 | ) 51 | 52 | 53 | _HELLORESPONSE = _descriptor.Descriptor( 54 | name='HelloResponse', 55 | full_name='messages.HelloResponse', 56 | filename=None, 57 | file=DESCRIPTOR, 58 | containing_type=None, 59 | fields=[ 60 | _descriptor.FieldDescriptor( 61 | name='message', full_name='messages.HelloResponse.message', index=0, 62 | number=1, type=9, cpp_type=9, label=1, 63 | has_default_value=False, default_value=_b("").decode('utf-8'), 64 | message_type=None, enum_type=None, containing_type=None, 65 | is_extension=False, extension_scope=None, 66 | serialized_options=None, file=DESCRIPTOR), 67 | ], 68 | extensions=[ 69 | ], 70 | nested_types=[], 71 | enum_types=[ 72 | ], 73 | serialized_options=None, 74 | is_extendable=False, 75 | syntax='proto3', 76 | extension_ranges=[], 77 | oneofs=[ 78 | ], 79 | serialized_start=80, 80 | serialized_end=112, 81 | ) 82 | 83 | DESCRIPTOR.message_types_by_name['HelloRequest'] = _HELLOREQUEST 84 | DESCRIPTOR.message_types_by_name['HelloResponse'] = _HELLORESPONSE 85 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 86 | 87 | HelloRequest = _reflection.GeneratedProtocolMessageType('HelloRequest', (_message.Message,), { 88 | 'DESCRIPTOR' : _HELLOREQUEST, 89 | '__module__' : 'examples.cluster_hello_world.messages.protos_pb2' 90 | # @@protoc_insertion_point(class_scope:messages.HelloRequest) 91 | }) 92 | _sym_db.RegisterMessage(HelloRequest) 93 | 94 | HelloResponse = _reflection.GeneratedProtocolMessageType('HelloResponse', (_message.Message,), { 95 | 'DESCRIPTOR' : _HELLORESPONSE, 96 | '__module__' : 'examples.cluster_hello_world.messages.protos_pb2' 97 | # @@protoc_insertion_point(class_scope:messages.HelloResponse) 98 | }) 99 | _sym_db.RegisterMessage(HelloResponse) 100 | 101 | 102 | # @@protoc_insertion_point(module_scope) 103 | -------------------------------------------------------------------------------- /examples/cluster_hello_world/node_1/node_1.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import getopt 3 | import sys 4 | from collections import namedtuple 5 | from datetime import timedelta 6 | 7 | from examples.cluster_hello_world.messages.protos_pb2 import 
DESCRIPTOR, HelloRequest 8 | from protoactor.actor.actor_context import RootContext 9 | from protoactor.remote.response import ResponseStatusCode 10 | from protoactor.remote.serialization import Serialization 11 | from protoactor.cluster.providers.consul.consul_client import ConsulClientConfiguration 12 | from protoactor.cluster.providers.consul.consul_provider import ConsulProvider 13 | from protoactor.cluster.providers.single_remote_instance.single_remote_instance_provider import \ 14 | SingleRemoteInstanceProvider 15 | from protoactor.cluster.сluster import Cluster 16 | 17 | Node1Config = namedtuple('Node1Config', 'server_name consul_url start_consul') 18 | 19 | 20 | async def start(argv): 21 | context = RootContext() 22 | Serialization().register_file_descriptor(DESCRIPTOR) 23 | parsed_args = parse_args(argv) 24 | 25 | # await Cluster.start('MyCluster', parsed_args.server_name, 12002, SingleRemoteInstanceProvider('192.168.1.72', 12000)) 26 | 27 | await Cluster.start('MyCluster', parsed_args.server_name, 12001, 28 | ConsulProvider(ConsulClientConfiguration(f'http://{parsed_args.consul_url}:8500/'))) 29 | 30 | pid, sc = await Cluster.get_async("TheName", "HelloKind") 31 | while sc != ResponseStatusCode.OK: 32 | await asyncio.sleep(0.5) 33 | pid, sc = await Cluster.get_async("TheName", "HelloKind") 34 | 35 | res = await context.request_future(pid, HelloRequest()) 36 | print(res.message) 37 | await asyncio.sleep(timedelta(days=180).total_seconds()) 38 | print('Shutting Down...') 39 | await Cluster.shutdown() 40 | 41 | def parse_args(argv): 42 | opts, args = getopt.getopt(argv, ["server name=", "consul url=", "start consul="]) 43 | if len(opts) > 0: 44 | Node1Config(opts[0][0], opts[1][0], True) 45 | return Node1Config('192.168.1.35', '127.0.0.1', True) 46 | 47 | 48 | def main(argv): 49 | loop = asyncio.get_event_loop() 50 | loop.run_until_complete(start(argv)) 51 | loop.close() 52 | 53 | 54 | if __name__ == "__main__": 55 | main(sys.argv[1:]) 56 | -------------------------------------------------------------------------------- /examples/cluster_hello_world/node_2/node_2.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import getopt 3 | import sys 4 | from collections import namedtuple 5 | from datetime import timedelta 6 | 7 | from examples.cluster_hello_world.messages.protos_pb2 import DESCRIPTOR, HelloRequest, HelloResponse 8 | from protoactor.actor.actor_context import RootContext, AbstractContext 9 | from protoactor.actor.props import Props 10 | from protoactor.remote.remote import Remote 11 | from protoactor.remote.serialization import Serialization 12 | from protoactor.cluster.providers.consul.consul_client import ConsulClientConfiguration 13 | from protoactor.cluster.providers.consul.consul_provider import ConsulProvider 14 | from protoactor.cluster.providers.single_remote_instance.single_remote_instance_provider import \ 15 | SingleRemoteInstanceProvider 16 | from protoactor.cluster.сluster import Cluster 17 | 18 | Node2Config = namedtuple('Node1Config', 'server_name consul_url') 19 | 20 | 21 | async def start(argv): 22 | Serialization().register_file_descriptor(DESCRIPTOR) 23 | 24 | async def fn(ctx: AbstractContext): 25 | if isinstance(ctx.message, HelloRequest): 26 | await ctx.respond(HelloResponse(message='Hello from node 2')) 27 | 28 | props = Props.from_func(fn) 29 | parsed_args = parse_args(argv) 30 | Remote().register_known_kind("HelloKind", props) 31 | 32 | # await Cluster.start('MyCluster', 
parsed_args.server_name, 12000, 33 | # SingleRemoteInstanceProvider(parsed_args.server_name, 12000)) 34 | 35 | await Cluster.start('MyCluster', parsed_args.server_name, 12000, 36 | ConsulProvider(ConsulClientConfiguration(f'http://{parsed_args.consul_url}:8500/'))) 37 | 38 | await asyncio.sleep(timedelta(days=180).total_seconds()) 39 | print('Shutting Down...') 40 | await Cluster.shutdown() 41 | 42 | def parse_args(argv): 43 | opts, args = getopt.getopt(argv, ["server name=", "consul url=", "start consul="]) 44 | if len(opts) > 0: 45 | Node2Config(opts[0][0], opts[1][0]) 46 | return Node2Config('192.168.1.129', '192.168.1.35') 47 | 48 | def main(argv): 49 | loop = asyncio.get_event_loop() 50 | loop.run_until_complete(start(argv)) 51 | loop.close() 52 | 53 | 54 | if __name__ == "__main__": 55 | main(sys.argv[1:]) 56 | -------------------------------------------------------------------------------- /examples/context_decorators/context_decorators.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import timedelta 3 | 4 | from protoactor.actor import PID 5 | from protoactor.actor.actor_context import AbstractRootContext, AbstractContext, RootContext 6 | from protoactor.actor.cancel_token import CancelToken 7 | from protoactor.actor.context_decorator import RootContextDecorator, ActorContextDecorator 8 | from protoactor.actor.props import Props 9 | 10 | 11 | class LoggingRootDecorator(RootContextDecorator): 12 | def __init__(self, context: AbstractRootContext): 13 | super().__init__(context) 14 | 15 | async def request_future(self, target: PID, message: object, timeout: timedelta = None, 16 | cancellation_token: CancelToken = None) -> asyncio.Future: 17 | print('Enter request future') 18 | res = await super().request_future(target, message) 19 | print('Exit request future') 20 | return res 21 | 22 | 23 | class LoggingDecorator(ActorContextDecorator): 24 | def __init__(self, context: AbstractContext, logger_name: str): 25 | super().__init__(context) 26 | self._logger_name = logger_name 27 | 28 | async def respond(self, message: object): 29 | print(f'{self._logger_name} : Enter respond') 30 | await super().respond(message) 31 | print(f'{self._logger_name} : Exit respond') 32 | 33 | 34 | async def main(): 35 | context = LoggingRootDecorator(RootContext()) 36 | 37 | async def fn(context: AbstractContext): 38 | message = context.message 39 | if isinstance(message, str): 40 | print(f'Inside Actor: {message}') 41 | await context.respond("Yo!") 42 | 43 | props = Props.from_func(fn).with_context_decorator([lambda c: LoggingDecorator(c, 'logger1'), 44 | lambda c: LoggingDecorator(c, 'logger2')]) 45 | pid = context.spawn(props) 46 | 47 | res = await context.request_future(pid, 'Hello') 48 | print(f'Got result {res}') 49 | input() 50 | 51 | 52 | if __name__ == "__main__": 53 | asyncio.run(main()) 54 | -------------------------------------------------------------------------------- /examples/futures/futures.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from protoactor.actor.props import Props 4 | from protoactor.actor.actor_context import RootContext 5 | 6 | 7 | class HelloMessage: 8 | def __init__(self, text: str): 9 | self.text = text 10 | 11 | 12 | async def hello_function(context): 13 | message = context.message 14 | if isinstance(message, HelloMessage): 15 | await context.respond("hey") 16 | 17 | 18 | async def main(): 19 | context = RootContext() 20 | props = 
Props.from_func(hello_function) 21 | pid = context.spawn(props) 22 | 23 | reply = await context.request_future(pid, HelloMessage('Hello')) 24 | print(reply) 25 | 26 | 27 | if __name__ == "__main__": 28 | loop = asyncio.get_event_loop() 29 | loop.run_until_complete(main()) 30 | -------------------------------------------------------------------------------- /examples/hello_world/hello_world.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from protoactor.actor.props import Props 4 | from protoactor.actor.actor_context import Actor, AbstractContext, RootContext 5 | 6 | 7 | class HelloMessage: 8 | def __init__(self, text: str): 9 | self.text = text 10 | 11 | 12 | class HelloActor(Actor): 13 | async def receive(self, context: AbstractContext) -> None: 14 | message = context.message 15 | if isinstance(message, HelloMessage): 16 | print(message.text) 17 | 18 | 19 | async def main(): 20 | context = RootContext() 21 | props = Props.from_producer(HelloActor) 22 | pid = context.spawn(props) 23 | 24 | await context.send(pid, HelloMessage('Hello World!')) 25 | input() 26 | 27 | 28 | if __name__ == "__main__": 29 | loop = asyncio.get_event_loop() 30 | loop.run_until_complete(main()) 31 | -------------------------------------------------------------------------------- /examples/lifecycle_events/lifecycle_events.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import opentracing 4 | from jaeger_client import Config 5 | 6 | from protoactor.actor.actor import Actor 7 | from protoactor.actor.actor_context import RootContext, AbstractContext, GlobalRootContext 8 | from protoactor.actor.event_stream import GlobalEventStream 9 | from protoactor.actor.messages import DeadLetterEvent, Started, Stopping, Stopped, Restarting 10 | from protoactor.actor.props import Props 11 | from protoactor.tracing.opentracing.open_tracing_factory import OpenTracingFactory 12 | from protoactor.tracing.opentracing.open_tracing_middleware import open_tracing_sender_middleware 13 | 14 | 15 | class Hello: 16 | def __init__(self, who: str): 17 | self.who = who 18 | 19 | 20 | class ChildActor(Actor): 21 | async def receive(self, context: AbstractContext) -> None: 22 | message = context.message 23 | if isinstance(message, Hello): 24 | print(f"Hello {message.who}") 25 | elif isinstance(message, Started): 26 | print(f"Started, initialize actor here") 27 | elif isinstance(message, Stopping): 28 | print(f"Stopping, actor is about shut down") 29 | elif isinstance(message, Stopped): 30 | print(f"Stopped, actor and it's children are stopped") 31 | elif isinstance(message, Restarting): 32 | print(f"Restarting, actor is about restart") 33 | 34 | 35 | async def main(): 36 | tracer = init_jaeger_tracer() 37 | opentracing.set_global_tracer(tracer) 38 | GlobalEventStream.subscribe(process_dead_letter_event, DeadLetterEvent) 39 | 40 | context = RootContext(middleware=[open_tracing_sender_middleware()]) 41 | 42 | props = Props.from_producer(lambda: ChildActor()) 43 | props = OpenTracingFactory.get_props_with_open_tracing(props) 44 | 45 | actor = context.spawn(props) 46 | await context.send(actor, Hello(who="Alex")) 47 | 48 | await asyncio.sleep(1) 49 | await GlobalRootContext.stop_future(actor) 50 | 51 | input() 52 | 53 | async def process_dead_letter_event(msg: DeadLetterEvent) -> None: 54 | if msg.message is not None: 55 | print(f"DeadLetter from {msg.sender} to {msg.pid} : {type(msg.message).__name__} = '{str(msg.message)}'") 
56 | else: 57 | print(f"DeadLetter from {msg.sender} to {msg.pid}") 58 | 59 | 60 | def init_jaeger_tracer(service_name='proto.example.lifecycle_events'): 61 | config = Config(config={'sampler': { 62 | 'type': 'const', 63 | 'param': 1, 64 | }, 65 | 'logging': True, }, service_name=service_name, validate=True) 66 | return config.initialize_tracer() 67 | 68 | 69 | if __name__ == "__main__": 70 | loop = asyncio.get_event_loop() 71 | loop.run_until_complete(main()) 72 | -------------------------------------------------------------------------------- /examples/mailbox_benchmark/mailbox_benchmark.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import cProfile 3 | from typing import Callable 4 | 5 | from protoactor.actor.actor_context import AbstractContext, GlobalRootContext 6 | from protoactor.actor.props import Props 7 | from protoactor.mailbox.mailbox import AbstractMailbox, DefaultMailbox 8 | from protoactor.mailbox.queue import UnboundedMailboxQueue 9 | 10 | 11 | async def process_message(ctx: AbstractContext): 12 | if isinstance(ctx.message, str): 13 | await ctx.respond('done') 14 | 15 | 16 | async def run_test(mailbox: Callable[..., AbstractMailbox]): 17 | props = Props.from_func(process_message) \ 18 | .with_mailbox(mailbox) 19 | 20 | pid = GlobalRootContext.spawn(props) 21 | for i in range(10000): 22 | await GlobalRootContext.send(pid, i) 23 | await GlobalRootContext.request_future(pid, 'stop') 24 | 25 | 26 | async def main(): 27 | pr = cProfile.Profile() 28 | 29 | pr.enable() 30 | await run_test(lambda: DefaultMailbox(UnboundedMailboxQueue(), UnboundedMailboxQueue(), [])) 31 | pr.disable() 32 | 33 | pr.print_stats(sort='time') 34 | 35 | 36 | if __name__ == "__main__": 37 | loop = asyncio.get_event_loop() 38 | loop.run_until_complete(main()) 39 | -------------------------------------------------------------------------------- /examples/patterns/saga/account_proxy.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from typing import Callable, Any 3 | 4 | from examples.patterns.saga.messages import OK, Refused, InsufficientFunds, InternalServerError, ServiceUnavailable 5 | from protoactor.actor import PID 6 | from protoactor.actor.actor import Actor 7 | from protoactor.actor.actor_context import AbstractContext 8 | from protoactor.actor.messages import Started, ReceiveTimeout 9 | 10 | 11 | class AccountProxy(Actor): 12 | def __init__(self,target:PID, create_message:Callable[[PID], Any]): 13 | self._target = target 14 | self._create_message = create_message 15 | 16 | async def receive(self, context: AbstractContext) -> None: 17 | msg = context.message 18 | if isinstance(msg, Started): 19 | # imagine this is some sort of remote call rather than a local actor call 20 | await self._target.tell(self._create_message(context.my_self)) 21 | context.set_receive_timeout(timedelta(milliseconds=100)) 22 | elif isinstance(msg, OK): 23 | context.cancel_receive_timeout() 24 | await context.parent.tell(msg) 25 | elif isinstance(msg, Refused): 26 | context.cancel_receive_timeout() 27 | await context.parent.tell(msg) 28 | # This emulates a failed remote call 29 | elif isinstance(msg, (InsufficientFunds, 30 | InternalServerError, 31 | ReceiveTimeout, 32 | ServiceUnavailable)): 33 | raise Exception() -------------------------------------------------------------------------------- /examples/patterns/saga/factories/transfer_factory.py: 
-------------------------------------------------------------------------------- 1 | from examples.patterns.saga.transfer_process import TransferProcess 2 | from protoactor.actor import PID 3 | from protoactor.actor.actor_context import AbstractContext 4 | from protoactor.actor.props import Props 5 | from protoactor.actor.supervision import OneForOneStrategy, SupervisorDirective 6 | from protoactor.persistence.providers.abstract_provider import AbstractProvider 7 | 8 | 9 | class TransferFactory: 10 | def __init__(self, context: AbstractContext, provider: AbstractProvider, availability: float, retry_attempts: int): 11 | self._context = context 12 | self._provider = provider 13 | self._availability = availability 14 | self._retry_attempts = retry_attempts 15 | 16 | def create_transfer(self, actor_name: str, from_account: PID, to_account: PID, amount: float, 17 | persistence_id: str) -> PID: 18 | transfer_props = Props.from_producer( 19 | lambda: TransferProcess(from_account, to_account, amount, self._provider, persistence_id, 20 | self._availability)).with_child_supervisor_strategy( 21 | OneForOneStrategy(lambda pid, reason: SupervisorDirective.Restart, self._retry_attempts, None)) 22 | transfer = self._context.spawn_named(transfer_props, actor_name) 23 | return transfer 24 | -------------------------------------------------------------------------------- /examples/patterns/saga/in_memory_provider.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple, Callable 2 | 3 | from protoactor.persistence.messages import Snapshot 4 | from protoactor.persistence.providers.abstract_provider import AbstractProvider 5 | 6 | 7 | class InMemoryProvider(AbstractProvider): 8 | def __init__(self): 9 | self._events = {} 10 | 11 | async def get_snapshot(self, actor_name: str) -> Tuple[any, int]: 12 | return Snapshot, 0 13 | 14 | async def get_events(self, actor_name: str, index_start: int, index_end: int, 15 | callback: Callable[[any], None]) -> int: 16 | if events := self._events.get(actor_name): 17 | for e in events: 18 | if index_start <= e.key <= index_end: 19 | callback(e.value) 20 | return 0 21 | 22 | async def persist_event(self, actor_name: str, index: int, event: any) -> int: 23 | events = self._events.setdefault(actor_name, {}) 24 | next_event_index = 1 25 | if len(events) != 0: 26 | next_event_index = list(events.items())[-1][0] + 1 27 | events[next_event_index] = event 28 | return 0 29 | 30 | async def persist_snapshot(self, actor_name: str, index: int, snapshot: any) -> None: 31 | pass 32 | 33 | async def delete_events(self, actor_name: str, inclusive_to_index: int) -> None: 34 | events = self._events.get(actor_name) 35 | if events is None: 36 | pass 37 | events_to_remove = list(filter(lambda s: s.key <= inclusive_to_index, events.items())) 38 | for event in events_to_remove: 39 | del self._events[event.key] 40 | 41 | async def delete_snapshots(self, actor_name: str, inclusive_to_index: int) -> None: 42 | pass 43 | -------------------------------------------------------------------------------- /examples/patterns/saga/internal/for_with_progress.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | 4 | class ForWithProgress: 5 | def __init__(self, total: int, every_nth: int, run_both_on_every: bool, run_on_start: bool): 6 | self._total = total 7 | self._every_nth = every_nth 8 | self._run_both_on_every = run_both_on_every 9 | self._run_on_start = run_on_start 10 | 11 | 
def every_nth(self, every_nth_action: Callable[[int], None], every_action: Callable[[int, bool], None])->None: 12 | def must_run_nth(current: int) -> bool: 13 | if current == 0 and self._run_on_start: 14 | return True 15 | if current == 0: 16 | return False 17 | return current % self._every_nth == 0 18 | 19 | for i in range(self._total+1): 20 | must = must_run_nth(i) 21 | if must: 22 | every_nth_action(i) 23 | if must and not self._run_both_on_every: 24 | continue 25 | every_action(i, must) -------------------------------------------------------------------------------- /examples/patterns/saga/messages.py: -------------------------------------------------------------------------------- 1 | import decimal 2 | 3 | from protoactor.actor import PID 4 | 5 | 6 | class AccountCredited: 7 | pass 8 | 9 | 10 | class AccountDebited: 11 | pass 12 | 13 | 14 | class ChangeBalance: 15 | def __init__(self, amount: decimal, reply_to: PID): 16 | self.amount = amount 17 | self.reply_to = reply_to 18 | 19 | 20 | class Credit(ChangeBalance): 21 | def __init__(self, amount: decimal, reply_to: PID): 22 | super().__init__(amount, reply_to) 23 | 24 | 25 | class CreditRefused: 26 | pass 27 | 28 | 29 | class Debit(ChangeBalance): 30 | def __init__(self, amount: decimal, reply_to: PID): 31 | super().__init__(amount, reply_to) 32 | 33 | 34 | class DebitRolledBack: 35 | pass 36 | 37 | 38 | class EscalateTransfer: 39 | def __init__(self, message: str): 40 | self._message = message 41 | 42 | @property 43 | def message(self): 44 | return self._message 45 | 46 | def __str__(self): 47 | return f'{self.__class__.__module__}.{self.__class__.__name__}: {self._message}' 48 | 49 | 50 | class Result(): 51 | def __init__(self, pid: PID): 52 | self.pid = pid 53 | 54 | 55 | class FailedAndInconsistent(Result): 56 | def __init__(self, pid: PID): 57 | super().__init__(pid) 58 | 59 | 60 | class FailedButConsistentResult(Result): 61 | def __init__(self, pid: PID): 62 | super().__init__(pid) 63 | 64 | 65 | class GetBalance: 66 | pass 67 | 68 | 69 | class InsufficientFunds: 70 | pass 71 | 72 | 73 | class InternalServerError: 74 | pass 75 | 76 | 77 | class OK: 78 | pass 79 | 80 | 81 | class Refused: 82 | pass 83 | 84 | 85 | class ServiceUnavailable: 86 | pass 87 | 88 | 89 | class StatusUnknown: 90 | pass 91 | 92 | 93 | class SuccessResult(Result): 94 | def __init__(self, pid: PID): 95 | super().__init__(pid) 96 | 97 | 98 | class TransferCompleted: 99 | def __init__(self, from_id: PID, from_balance: decimal, to: PID, to_balance: decimal): 100 | self.from_id = from_id 101 | self.from_balance = from_balance 102 | self.to = to 103 | self.to_balance = to_balance 104 | 105 | def __str__(self): 106 | return f'{self.__class__.__module__}.{self.__class__.__name__}: {self.from_id.id} balance is ' \ 107 | f'{self.from_balance}, {self.to.id} balance is {self.to_balance}' 108 | 109 | 110 | class TransferFailed(): 111 | def __init__(self, reason: str): 112 | self.reason = reason 113 | 114 | def __str__(self): 115 | return f'{self.__class__.__module__}.{self.__class__.__name__}: {self.reason}' 116 | 117 | 118 | class TransferStarted: 119 | pass 120 | 121 | 122 | class UnknownResult(Result): 123 | def __init__(self, pid: PID): 124 | super().__init__(pid) 125 | -------------------------------------------------------------------------------- /examples/patterns/saga/saga.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from examples.patterns.saga.runner import Runner 4 | from 
protoactor.actor.actor_context import RootContext 5 | from protoactor.actor.props import Props 6 | from protoactor.actor.supervision import OneForOneStrategy, SupervisorDirective 7 | 8 | 9 | async def main(): 10 | context = RootContext() 11 | number_of_transfers = 5 12 | interval_between_console_updates = 1 13 | uptime = 99.99 14 | retry_attempts = 0 15 | refusal_probability = 0.01 16 | busy_probability = 0.01 17 | verbose = False 18 | 19 | props = Props.from_producer(lambda: Runner(number_of_transfers, interval_between_console_updates, uptime, 20 | refusal_probability, busy_probability, retry_attempts, 21 | verbose)).with_child_supervisor_strategy( 22 | OneForOneStrategy(lambda pid, reason: SupervisorDirective.Restart, retry_attempts, None) 23 | ) 24 | 25 | print('Spawning runner') 26 | context.spawn_named(props, 'runner') 27 | input() 28 | 29 | 30 | if __name__ == "__main__": 31 | asyncio.run(main()) 32 | -------------------------------------------------------------------------------- /examples/persistence/messages/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package messages; 3 | 4 | message RenameCommand { 5 | string name = 1; 6 | } 7 | message RenameEvent { 8 | string name = 1; 9 | } 10 | message State { 11 | string Name = 1; 12 | } -------------------------------------------------------------------------------- /examples/persistence/messages/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 2 | # source: examples/persistence/messages/protos.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /examples/receive_timeout/receive_timeout.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import itertools 3 | from datetime import timedelta, datetime 4 | 5 | from protoactor.actor.actor_context import RootContext, AbstractContext 6 | from protoactor.actor.messages import Started, ReceiveTimeout, AbstractNotInfluenceReceiveTimeout 7 | from protoactor.actor.props import Props 8 | 9 | 10 | class NoInfluence(AbstractNotInfluenceReceiveTimeout): 11 | pass 12 | 13 | 14 | async def main(): 15 | root_context = RootContext() 16 | counter = itertools.count() 17 | next(counter) 18 | 19 | async def fn(context: AbstractContext): 20 | msg = context.message 21 | if isinstance(msg, Started): 22 | print(f'{datetime.today().strftime("%Y-%m-%d-%H.%M.%S")} Started') 23 | context.set_receive_timeout(timedelta(seconds=1)) 24 | elif isinstance(msg, ReceiveTimeout): 25 | print(f'{datetime.today().strftime("%Y-%m-%d-%H.%M.%S")} ReceiveTimeout: {next(counter)}') 26 | elif isinstance(msg, NoInfluence): 27 | print(f'{datetime.today().strftime("%Y-%m-%d-%H.%M.%S")} Received a no-influence message') 28 | elif isinstance(msg, str): 29 | print(f'{datetime.today().strftime("%Y-%m-%d-%H.%M.%S")} Received message: {msg}') 30 | 31 | props = Props.from_func(fn) 32 | pid = root_context.spawn(props) 33 | 34 | for i in range(6): 35 | await root_context.send(pid, 'hello') 36 | await asyncio.sleep(0.5) 37 | 38 | print('Hit [return] to send no-influence messages') 39 | input() 40 | 41 | for i in range(6): 42 | await root_context.send(pid, NoInfluence()) 43 | await asyncio.sleep(0.5) 44 | 45 | print('Hit [return] to send a message to cancel the timeout') 46 | input() 47 | 48 | await root_context.send(pid, 'cancel') 49 | 50 
| print('Hit [return] to finish') 51 | input() 52 | 53 | 54 | if __name__ == "__main__": 55 | loop = asyncio.get_event_loop() 56 | loop.run_until_complete(main()) 57 | -------------------------------------------------------------------------------- /examples/remote_benchmark/messages/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package messages; 3 | import "protoactor/actor/protos.proto"; 4 | 5 | message Start {} 6 | message StartRemote { 7 | actor.PID Sender = 1; 8 | } 9 | message Ping {} 10 | message Pong {} -------------------------------------------------------------------------------- /examples/remote_benchmark/messages/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 2 | # source: examples/remote_benchmark/messages/protos.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /examples/remote_benchmark/node_1/node_1.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import datetime 3 | import time 4 | from threading import Event 5 | 6 | from examples.remote_benchmark.messages.protos_pb2 import Pong, DESCRIPTOR, StartRemote, Ping 7 | from protoactor.actor import PID 8 | from protoactor.actor.actor import Actor 9 | from protoactor.actor.actor_context import AbstractContext, RootContext 10 | from protoactor.actor.props import Props 11 | from protoactor.remote.remote import Remote 12 | from protoactor.remote.serialization import Serialization 13 | 14 | 15 | class LocalClient(Actor): 16 | def __init__(self, count: int, message_count: int, wg: Event, loop): 17 | self._count = count 18 | self._message_count = message_count 19 | self._wg = wg 20 | self._loop = loop 21 | 22 | async def notify(self) -> None: 23 | self._wg.set() 24 | 25 | async def receive(self, context: AbstractContext) -> None: 26 | message = context.message 27 | if isinstance(message, Pong): 28 | self._count += 1 29 | if self._count % 5 == 0: 30 | print(self._count) 31 | if self._count == self._message_count: 32 | asyncio.run_coroutine_threadsafe(self.notify(), self._loop) 33 | 34 | 35 | 36 | async def main(): 37 | context = RootContext() 38 | Serialization().register_file_descriptor(DESCRIPTOR) 39 | Remote().start("192.168.1.129", 12001) 40 | 41 | wg = asyncio.Event() 42 | message_count = 10000 43 | 44 | props = Props.from_producer(lambda: LocalClient(0, message_count, wg, asyncio.get_event_loop())) 45 | 46 | pid = context.spawn(props) 47 | remote = PID(address="192.168.1.77:12000", id="remote") 48 | 49 | await context.request_future(remote, StartRemote(Sender=pid)) 50 | 51 | start = datetime.datetime.now() 52 | print('Starting to send') 53 | for i in range(message_count): 54 | await context.send(remote, Ping()) 55 | await wg.wait() 56 | 57 | elapsed = datetime.datetime.now() - start 58 | print(f'Elapsed {elapsed}') 59 | 60 | t = message_count * 2.0 / elapsed.total_seconds() 61 | print(f'Throughput {t} msg / sec') 62 | 63 | input() 64 | 65 | 66 | if __name__ == "__main__": 67 | asyncio.run(main()) 68 | 69 | -------------------------------------------------------------------------------- /examples/remote_benchmark/node_2/node_2.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from examples.remote_benchmark.messages.protos_pb2 import DESCRIPTOR, 
StartRemote, Ping, Start, Pong 4 | from protoactor.actor import PID 5 | from protoactor.actor.actor import Actor 6 | from protoactor.actor.actor_context import RootContext, AbstractContext 7 | from protoactor.actor.props import Props 8 | from protoactor.remote.remote import Remote 9 | from protoactor.remote.serialization import Serialization 10 | 11 | 12 | class EchoActor(Actor): 13 | def __init__(self): 14 | self._sender: PID = None 15 | 16 | async def receive(self, context: AbstractContext) -> None: 17 | message = context.message 18 | if isinstance(message, StartRemote): 19 | self._sender = message.Sender 20 | await context.respond(Start()) 21 | elif isinstance(message, Ping): 22 | await context.send(self._sender, Pong()) 23 | 24 | 25 | async def main(): 26 | context = RootContext() 27 | Serialization().register_file_descriptor(DESCRIPTOR) 28 | Remote().start("192.168.1.77", 12000) 29 | context.spawn_named(Props.from_producer(lambda: EchoActor()), 'remote') 30 | input() 31 | 32 | 33 | if __name__ == "__main__": 34 | asyncio.run(main()) 35 | -------------------------------------------------------------------------------- /examples/spawn_benchmark/spawn_benchmark.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import cProfile 3 | from dataclasses import dataclass 4 | from typing import Optional 5 | 6 | from protoactor.actor import PID 7 | from protoactor.actor.actor import Actor 8 | from protoactor.actor.actor_context import RootContext, AbstractContext, GlobalRootContext 9 | from protoactor.actor.props import Props 10 | 11 | 12 | @dataclass 13 | class Request: 14 | div: int 15 | num: int 16 | size: int 17 | 18 | 19 | class MyActor(Actor): 20 | def __init__(self): 21 | self._replies: Optional[int] = None 22 | self._reply_to: Optional[PID] = None 23 | self._sum: int = 0 24 | 25 | async def receive(self, context: AbstractContext): 26 | msg = context.message 27 | if isinstance(msg, Request): 28 | if msg.size == 1: 29 | await context.respond(msg.num) 30 | await context.stop(context.my_self) 31 | return 32 | 33 | self._replies = msg.div 34 | self._reply_to = context.sender 35 | 36 | for i in range(msg.div): 37 | child = GlobalRootContext.spawn(props) 38 | await context.request(child, Request(num=msg.num + i * (msg.size // msg.div), 39 | size=msg.size // msg.div, 40 | div=msg.div)) 41 | elif isinstance(msg, int): 42 | self._sum += msg 43 | self._replies -= 1 44 | if self._replies == 0: 45 | await context.send(self._reply_to, self._sum) 46 | 47 | 48 | props = Props.from_producer(MyActor) 49 | 50 | 51 | async def main(): 52 | context = RootContext() 53 | pr = cProfile.Profile() 54 | while True: 55 | pid = context.spawn(props) 56 | pr.clear() 57 | pr.enable() 58 | response = await context.request_future(pid, Request(num=0, 59 | size=100, 60 | div=10)) 61 | pr.disable() 62 | pr.print_stats(sort='time') 63 | print(response) 64 | await context.stop_future(pid) 65 | await asyncio.sleep(0.5) 66 | 67 | 68 | if __name__ == "__main__": 69 | asyncio.run(main()) 70 | -------------------------------------------------------------------------------- /examples/supervision/escalate_supervision.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from protoactor.actor.actor_context import AbstractContext, RootContext 4 | from protoactor.actor.messages import Started 5 | from protoactor.actor.props import Props 6 | from protoactor.actor.protos_pb2 import Terminated 7 | from 
protoactor.actor.supervision import OneForOneStrategy, SupervisorDirective 8 | 9 | 10 | async def main(): 11 | async def child_fn(context: AbstractContext): 12 | print(f'{context.my_self.id}: MSG: {type(context.message)}') 13 | if isinstance(context.message, Started): 14 | raise Exception('child failure') 15 | 16 | child_props = Props.from_func(child_fn) 17 | 18 | async def root_fn(context: AbstractContext): 19 | print(f'{context.my_self.id}: MSG: {type(context.message)}') 20 | if isinstance(context.message, Started): 21 | context.spawn_named(child_props, 'child') 22 | elif isinstance(context.message, Terminated): 23 | print(f'Terminated {context.message.who}') 24 | 25 | root_props = Props.from_func(root_fn).with_child_supervisor_strategy( 26 | OneForOneStrategy(lambda pid, reason: SupervisorDirective.Escalate, 0, None)) 27 | 28 | root_context = RootContext() 29 | root_context.spawn_named(root_props, 'root') 30 | 31 | input() 32 | 33 | if __name__ == "__main__": 34 | asyncio.run(main()) 35 | -------------------------------------------------------------------------------- /protoactor/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | __version__ = "0.0.1" 4 | 5 | # from .protos_pb2 import PID 6 | # from .process_registry import ProcessRegistry 7 | # 8 | # 9 | # # from .process import ActorProcess, DeadLettersProcess 10 | # 11 | # 12 | # def __tell(self, message): 13 | # ProcessRegistry().get(self).send_user_message(self, message) 14 | # 15 | # 16 | # def __send_user_message(self, message): 17 | # ProcessRegistry().get(self).send_user_message(self, message) 18 | # 19 | # 20 | # def __send_system_message(self, message): 21 | # ProcessRegistry().get(self).send_system_message(self, message) 22 | # 23 | # 24 | # def __stop(self): 25 | # ProcessRegistry().get(self).stop(self) 26 | # 27 | # 28 | # PID.tell = __tell 29 | # PID.send_user_message = __send_user_message 30 | # PID.send_system_message = __send_system_message 31 | # PID.stop = __stop 32 | -------------------------------------------------------------------------------- /protoactor/actor/__init__.py: -------------------------------------------------------------------------------- 1 | from protoactor.actor.protos_pb2 import PID 2 | from protoactor.actor.process import ProcessRegistry 3 | 4 | 5 | async def __tell(self, message): 6 | await ProcessRegistry().get(self).send_user_message(self, message) 7 | 8 | 9 | async def __send_user_message(self, message): 10 | await ProcessRegistry().get(self).send_user_message(self, message) 11 | 12 | 13 | async def __send_system_message(self, message): 14 | await ProcessRegistry().get(self).send_system_message(self, message) 15 | 16 | 17 | async def __stop(self): 18 | await ProcessRegistry().get(self).stop(self) 19 | 20 | 21 | def __to_short_string(self): 22 | return self.address + '/' + self.id 23 | 24 | 25 | PID.tell = __tell 26 | PID.send_user_message = __send_user_message 27 | PID.send_system_message = __send_system_message 28 | PID.stop = __stop 29 | PID.to_short_string = __to_short_string 30 | -------------------------------------------------------------------------------- /protoactor/actor/actor.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod 2 | 3 | from protoactor.actor.actor_context import AbstractContext 4 | 5 | 6 | class Actor(): 7 | @abstractmethod 8 | async def receive(self, context: AbstractContext) -> None: 9 | pass 10 | 
11 | class EmptyActor(Actor): 12 | def __init__(self, receive): 13 | self._receive = receive 14 | 15 | async def receive(self, context: AbstractContext): 16 | await self._receive(context) 17 | 18 | -------------------------------------------------------------------------------- /protoactor/actor/behavior.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from protoactor.actor.actor_context import AbstractContext 4 | from protoactor.actor.utils import Stack 5 | 6 | 7 | class Behavior: 8 | def __init__(self, receive: asyncio.Future = None) -> None: 9 | self._behaviors = Stack() 10 | self.become(receive) 11 | 12 | def become(self, receive: object): 13 | self._behaviors.clear() 14 | self._behaviors.push(receive) 15 | 16 | def become_stacked(self, receive: object): 17 | self._behaviors.push(receive) 18 | 19 | def unbecome_stacked(self) -> None: 20 | self._behaviors.pop() 21 | 22 | def receive_async(self, context: AbstractContext) -> asyncio.Future: 23 | behavior = self._behaviors.peek() 24 | return behavior(context) 25 | -------------------------------------------------------------------------------- /protoactor/actor/event_stream.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Callable, Any 3 | from uuid import uuid4 4 | 5 | from protoactor.actor import log 6 | from protoactor.actor.messages import DeadLetterEvent 7 | 8 | from protoactor.mailbox.dispatcher import Dispatchers, AbstractDispatcher 9 | 10 | 11 | class Subscription(): 12 | def __init__(self, event_stream, action, dispatcher): 13 | self._event_stream = event_stream 14 | self._dispatcher = dispatcher 15 | self._action = action 16 | self._id = uuid4() 17 | 18 | @property 19 | def id(self): 20 | return self._id 21 | 22 | @property 23 | def dispatcher(self): 24 | return self._dispatcher 25 | 26 | @property 27 | def action(self): 28 | return self._action 29 | 30 | def unsubscribe(self): 31 | self._event_stream.unsubscribe(self._id) 32 | 33 | 34 | class EventStream(): 35 | def __init__(self): 36 | self._subscriptions = {} 37 | self._logger = log.create_logger(logging.INFO, context=EventStream) 38 | self.subscribe(self.__process_dead_letters, DeadLetterEvent) 39 | 40 | def subscribe(self, fun: Callable[..., Any], msg_type: type = None, 41 | dispatcher: AbstractDispatcher = Dispatchers().synchronous_dispatcher) -> Subscription: 42 | async def action(msg): 43 | if msg_type is None: 44 | await fun(msg) 45 | elif isinstance(msg, msg_type): 46 | await fun(msg) 47 | 48 | sub = Subscription(self, action, dispatcher) 49 | self._subscriptions[sub.id] = sub 50 | return sub 51 | 52 | async def publish(self, message: object) -> None: 53 | for sub in self._subscriptions.values(): 54 | try: 55 | await sub.action(message) 56 | except Exception: 57 | self._logger.exception('Exception has occurred when publishing a message.') 58 | 59 | def unsubscribe(self, uniq_id): 60 | del self._subscriptions[uniq_id] 61 | 62 | async def __process_dead_letters(self, message: DeadLetterEvent) -> None: 63 | self._logger.info(f'[DeadLetter] {message.pid.to_short_string()} got {type(message.message)}:{message.message} ' 64 | f'from {message.sender}') 65 | 66 | 67 | GlobalEventStream = EventStream() 68 | 69 | # class GlobalEventStream(metaclass=Singleton): 70 | # def __init__(self): 71 | # self.__instance = EventStream() 72 | # 73 | # @property 74 | # def instance(self): 75 | # return self.__instance 76 | 
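GlobalEventStream above is a module-level EventStream instance: handlers subscribe with an optional message-type filter, and publish delivers an event to every matching subscriber. A minimal usage sketch, following the repository's async-main convention; the SomethingHappened event class is illustrative and not part of the library:

import asyncio

from protoactor.actor.event_stream import GlobalEventStream


class SomethingHappened:
    # Illustrative event type used only for this sketch.
    def __init__(self, detail):
        self.detail = detail


async def main():
    async def handler(msg: SomethingHappened):
        print(f'received: {msg.detail}')

    # Only messages of the given type reach the handler; other events are ignored.
    subscription = GlobalEventStream.subscribe(handler, SomethingHappened)
    await GlobalEventStream.publish(SomethingHappened('example'))
    subscription.unsubscribe()


if __name__ == "__main__":
    asyncio.run(main())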
-------------------------------------------------------------------------------- /protoactor/actor/exceptions.py: -------------------------------------------------------------------------------- 1 | from protoactor.actor.protos_pb2 import PID 2 | 3 | 4 | class BaseCancelTokenException(Exception): 5 | """ 6 | Base exception class for the `asyncio-cancel-token` library. 7 | """ 8 | pass 9 | 10 | 11 | class EventLoopMismatch(BaseCancelTokenException): 12 | """ 13 | Raised when two different asyncio event loops are referenced, but must be equal 14 | """ 15 | pass 16 | 17 | 18 | class OperationCancelled(BaseCancelTokenException): 19 | """ 20 | Raised when an operation was cancelled. 21 | """ 22 | pass 23 | 24 | 25 | class ProcessNameExistException(Exception): 26 | def __init__(self, name: str, pid: PID): 27 | super().__init__('a Process with the name %s already exists' % name) 28 | self.name = name 29 | self.pid = pid 30 | -------------------------------------------------------------------------------- /protoactor/actor/log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | 4 | def create_logger(level: int, prefix: str = None, context: type = None): 5 | name = 'protoactor' 6 | if prefix is not None and context is None: 7 | new_name = f'{name}.{prefix}' 8 | logger = logging.getLogger(new_name) 9 | elif prefix is None and context is not None: 10 | if name in context.__module__: 11 | new_name = f'{context.__module__}.{context.__name__}'.lower() 12 | else: 13 | new_name = f'{name}.{context.__module__}.{context.__name__}'.lower() 14 | logger = logging.getLogger(new_name) 15 | else: 16 | logger = logging.getLogger(name) 17 | logger.setLevel(level) 18 | return logger 19 | 20 | -------------------------------------------------------------------------------- /protoactor/actor/message_envelope.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from protoactor.actor.message_header import MessageHeader 4 | 5 | is_import = False 6 | if is_import: 7 | from protoactor.actor import PID 8 | 9 | 10 | class MessageEnvelope: 11 | def __init__(self, message: object, sender: 'PID' = None, header: MessageHeader = None) -> None: 12 | self._message = message 13 | self._sender = sender 14 | self._header = header 15 | 16 | @property 17 | def message(self) -> object: 18 | return self._message 19 | 20 | @property 21 | def sender(self) -> 'PID': 22 | return self._sender 23 | 24 | @property 25 | def header(self) -> MessageHeader: 26 | return self._header 27 | 28 | @staticmethod 29 | def wrap(message: Any): 30 | if isinstance(message, MessageEnvelope): 31 | return message 32 | return MessageEnvelope(message) 33 | 34 | def with_sender(self, sender: 'PID'): 35 | return MessageEnvelope(self.message, sender, self.header) 36 | 37 | def with_message(self, message: Any): 38 | return MessageEnvelope(message, self.sender, self.header) 39 | 40 | def with_header(self, header: MessageHeader = None, key: str = None, value: str = None): 41 | if header is not None and key is None and value is None: 42 | return MessageEnvelope(self.message, self.sender, header) 43 | elif header is None and key is not None and value is not None: 44 | message_header = self.header 45 | if message_header is None: 46 | message_header = MessageHeader() 47 | header = message_header.extend(key=key, value=value) 48 | return MessageEnvelope(self.message, self.sender, header) 49 | else: 50 | raise ValueError('Incorrect input value') 51 | 52 
| def with_headers(self, items=None): 53 | message_header = self.header 54 | if message_header is None: 55 | message_header = MessageHeader() 56 | header = message_header.extend(items=items) 57 | return MessageEnvelope(self.message, self.sender, header) 58 | 59 | @staticmethod 60 | def unwrap(message: Any) -> Any: 61 | if isinstance(message, MessageEnvelope): 62 | return message.message, message.sender, message.header 63 | return message, None, None 64 | 65 | @staticmethod 66 | def unwrap_header(message: Any) -> MessageHeader: 67 | if isinstance(message, MessageEnvelope) and message.header is not None: 68 | return message.header 69 | return MessageHeader.empty() 70 | 71 | @staticmethod 72 | def unwrap_message(message: Any) -> Any: 73 | if isinstance(message, MessageEnvelope): 74 | return message.message 75 | else: 76 | return message 77 | 78 | @staticmethod 79 | def unwrap_sender(message: Any) -> 'PID': 80 | if isinstance(message, MessageEnvelope): 81 | return message.sender 82 | else: 83 | return None 84 | -------------------------------------------------------------------------------- /protoactor/actor/message_header.py: -------------------------------------------------------------------------------- 1 | import collections 2 | 3 | 4 | class MessageHeader(collections.Mapping): 5 | 6 | def __init__(self, data=None): 7 | if data is None: 8 | data = {} 9 | self._data = data 10 | 11 | def __getitem__(self, key): 12 | return self._data[key] 13 | 14 | def __len__(self): 15 | return len(self._data) 16 | 17 | def __iter__(self): 18 | return iter(self._data) 19 | 20 | def extend(self, key=None, value=None, items=None): 21 | if key is not None and value is not None: 22 | self._data[key] = value 23 | elif items is not None: 24 | self._data.update(items) 25 | return MessageHeader(self._data) 26 | 27 | @staticmethod 28 | def empty(): 29 | return MessageHeader() -------------------------------------------------------------------------------- /protoactor/actor/messages.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta 2 | from typing import Optional, Any 3 | 4 | from protoactor.actor.protos_pb2 import PID 5 | from protoactor.actor.restart_statistics import RestartStatistics 6 | from protoactor.actor.utils import Singleton 7 | 8 | 9 | class AbstractSystemMessage(): 10 | pass 11 | 12 | class AbstractNotInfluenceReceiveTimeout(metaclass=ABCMeta): 13 | pass 14 | 15 | class AutoReceiveMessage(metaclass=ABCMeta): 16 | pass 17 | 18 | 19 | class Restarting(metaclass=Singleton): 20 | pass 21 | 22 | 23 | class Restart(AbstractSystemMessage): 24 | def __init__(self, reason): 25 | self.reason = reason 26 | 27 | 28 | class Failure(AbstractSystemMessage): 29 | def __init__(self, who: PID, reason: Exception, crs: RestartStatistics, message: Any) -> None: 30 | self._who = who 31 | self._reason = reason 32 | self._crs = crs 33 | self._message = message 34 | 35 | @property 36 | def who(self) -> PID: 37 | return self._who 38 | 39 | @property 40 | def reason(self) -> Exception: 41 | return self._reason 42 | 43 | @property 44 | def restart_statistics(self) -> RestartStatistics: 45 | return self._crs 46 | 47 | @property 48 | def message(self) -> Any: 49 | return self._message 50 | 51 | 52 | class SystemMessage: 53 | pass 54 | 55 | 56 | class Stopping(AutoReceiveMessage): 57 | pass 58 | 59 | 60 | class Stopped(AutoReceiveMessage): 61 | pass 62 | 63 | 64 | class Started(AbstractSystemMessage): 65 | pass 66 | 67 | 68 | class ReceiveTimeout(AbstractSystemMessage, 
metaclass=Singleton): 69 | pass 70 | 71 | 72 | class NotInfluenceReceiveTimeout(AbstractSystemMessage): 73 | pass 74 | 75 | 76 | class PoisonPill(AbstractSystemMessage): 77 | pass 78 | 79 | 80 | class Continuation(SystemMessage): 81 | def __init__(self, fun, message): 82 | self.action = fun 83 | self.message = message 84 | 85 | 86 | class SuspendMailbox(SystemMessage): 87 | pass 88 | 89 | 90 | class ResumeMailbox(SystemMessage): 91 | pass 92 | 93 | 94 | class DeadLetterEvent: 95 | def __init__(self, pid: 'PID', message: object, sender: Optional['PID']) -> None: 96 | self._pid = pid 97 | self._message = message 98 | self._sender = sender 99 | 100 | @property 101 | def pid(self) -> 'PID': 102 | return self._pid 103 | 104 | @property 105 | def message(self) -> object: 106 | return self._message 107 | 108 | @property 109 | def sender(self) -> Optional['PID']: 110 | return self._sender 111 | -------------------------------------------------------------------------------- /protoactor/actor/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package actor; 3 | 4 | message PID { 5 | string address = 1; 6 | string id = 2; 7 | } 8 | 9 | //user messages 10 | message PoisonPill {} 11 | 12 | //system messages 13 | message Watch { 14 | PID watcher = 1; 15 | } 16 | 17 | message Unwatch { 18 | PID watcher = 1; 19 | } 20 | 21 | message Terminated { 22 | PID who = 1; 23 | bool address_terminated = 2; 24 | } 25 | 26 | message Stop {} 27 | -------------------------------------------------------------------------------- /protoactor/actor/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 2 | # source: protoactor/actor/protos.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /protoactor/actor/restart_statistics.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from typing import Optional 3 | 4 | 5 | class RestartStatistics: 6 | 7 | def __init__(self, failure_count: int, last_failure_time: Optional[datetime]) -> None: 8 | self.__failures_items = [] 9 | for i in range(0, failure_count): 10 | self.__failures_items.append(last_failure_time or datetime.now()) 11 | 12 | @property 13 | def failure_count(self) -> int: 14 | return len(self.__failures_items) 15 | 16 | def fail(self) -> None: 17 | self.__failures_items.append(datetime.now()) 18 | 19 | def reset(self) -> None: 20 | self.__failures_items.clear() 21 | 22 | def number_of_failures(self, within: timedelta) -> int: 23 | res = 0 24 | 25 | if within is not None: 26 | for failure_item in self.__failures_items: 27 | if datetime.now() - failure_item < within: 28 | res += 1 29 | else: 30 | res = len(self.__failures_items) 31 | 32 | return res 33 | -------------------------------------------------------------------------------- /protoactor/actor/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import sys 4 | from asyncio import Future 5 | from multiprocessing import RLock 6 | from typing import Callable 7 | 8 | 9 | class Singleton(type): 10 | _instances = {} 11 | _singleton_lock = RLock() 12 | 13 | def __call__(cls, *args, **kwargs): 14 | if cls not in cls._instances: 15 | with cls._singleton_lock: 16 | if cls not in cls._instances: 17 | 
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) 18 | return cls._instances[cls] 19 | 20 | def clear(cls): 21 | try: 22 | del Singleton._instances[cls] 23 | except KeyError: 24 | pass 25 | 26 | 27 | def python_version(): 28 | """Get the version of python.""" 29 | 30 | return sys.version_info[0] 31 | 32 | 33 | class Stack: 34 | def __init__(self) -> None: 35 | self.stack = list() 36 | 37 | def push(self, data: object) -> None: 38 | self.stack.append(data) 39 | 40 | def pop(self) -> object: 41 | if self.is_empty(): 42 | raise Exception("nothing to pop") 43 | return self.stack.pop(len(self.stack) - 1) 44 | 45 | def peek(self) -> Callable[[object], Future]: 46 | if self.is_empty(): 47 | raise Exception("Nothing to peek") 48 | return self.stack[len(self.stack) - 1] 49 | 50 | def clear(self) -> None: 51 | self.stack.clear() 52 | 53 | def is_empty(self) -> bool: 54 | return len(self.stack) == 0 55 | 56 | def __len__(self) -> int: 57 | return len(self.stack) -------------------------------------------------------------------------------- /protoactor/cluster/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/cluster/__init__.py -------------------------------------------------------------------------------- /protoactor/cluster/grain_call_options.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | 4 | class GrainCallOptions: 5 | def __init__(self): 6 | self.retry_count = 10 7 | self.retry_action = self.exponential_backoff 8 | 9 | @staticmethod 10 | async def exponential_backoff(i: int) -> None: 11 | i += 1 12 | await asyncio.sleep(i * i * 50) 13 | -------------------------------------------------------------------------------- /protoactor/cluster/hash_algorithms/fnv1a32.py: -------------------------------------------------------------------------------- 1 | class FNV1A32(): 2 | def __init__(self): 3 | self._fnv_prime = 0x01000193 4 | self._fnv_offset_basis = 0x811C9DC5 5 | self._hash = self._fnv_offset_basis 6 | self._uint32_max = 0x100000000 7 | 8 | def compute_hash(self, buffer: bytes) -> int: 9 | self._hash = self._fnv_offset_basis 10 | 11 | if buffer is None: 12 | raise ValueError('buffer is empty') 13 | 14 | for b in buffer: 15 | self._hash = self._hash ^ b 16 | self._hash = (self._hash * self._fnv_prime) % self._uint32_max 17 | 18 | return self._hash 19 | -------------------------------------------------------------------------------- /protoactor/cluster/member_status.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABCMeta 2 | from typing import List 3 | 4 | 5 | class AbstractMemberStatusValue(metaclass=ABCMeta): 6 | @abstractmethod 7 | def is_same(self, val: 'AbstractMemberStatusValue') -> bool: 8 | raise NotImplementedError("Should Implement this method") 9 | 10 | 11 | class AbstractMemberStatusValueSerializer(metaclass=ABCMeta): 12 | @abstractmethod 13 | def to_value_bytes(self, val: AbstractMemberStatusValue) -> bytes: 14 | raise NotImplementedError("Should Implement this method") 15 | 16 | @abstractmethod 17 | def from_value_bytes(self, val: bytes) -> AbstractMemberStatusValue: 18 | raise NotImplementedError("Should Implement this method") 19 | 20 | 21 | class MemberStatus: 22 | def __init__(self, member_id: str, host: str, port: int, kinds: List[str], alive: bool, 23 | status_value: 
AbstractMemberStatusValue): 24 | self._member_id = member_id 25 | if host is None: 26 | raise ValueError('host not set') 27 | self._host = host 28 | if kinds is None: 29 | raise ValueError('kinds not set') 30 | self._kinds = kinds 31 | self._port = port 32 | self._alive = alive 33 | self._status_value = status_value 34 | 35 | @property 36 | def address(self) -> str: 37 | return self._host + ':' + str(self._port) 38 | 39 | @property 40 | def member_id(self) -> str: 41 | return self._member_id 42 | 43 | @property 44 | def host(self) -> str: 45 | return self._host 46 | 47 | @property 48 | def port(self) -> int: 49 | return self._port 50 | 51 | @property 52 | def kinds(self) -> List[str]: 53 | return self._kinds 54 | 55 | @property 56 | def alive(self) -> bool: 57 | return self._alive 58 | 59 | @property 60 | def status_value(self) -> AbstractMemberStatusValue: 61 | return self._status_value 62 | 63 | 64 | class NullMemberStatusValueSerializer(AbstractMemberStatusValueSerializer): 65 | def to_value_bytes(self, val: AbstractMemberStatusValue) -> bytes: 66 | return None 67 | 68 | def from_value_bytes(self, val: bytes) -> AbstractMemberStatusValue: 69 | return None -------------------------------------------------------------------------------- /protoactor/cluster/member_status_events.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta 2 | from typing import List 3 | 4 | from protoactor.cluster.member_status import MemberStatus 5 | 6 | 7 | class ClusterTopologyEvent(): 8 | def __init__(self, statuses: List[MemberStatus]): 9 | if statuses is None: 10 | raise ValueError('statuses is empty') 11 | self._statuses = statuses 12 | 13 | @property 14 | def statuses(self) -> List[MemberStatus]: 15 | return self._statuses 16 | 17 | 18 | class AbstractMemberStatusEvent(metaclass=ABCMeta): 19 | def __init__(self, host: str, port: int, kinds: List[str]): 20 | if host is None: 21 | raise ValueError('host is none') 22 | self._host = host 23 | self._port = port 24 | 25 | if kinds is None: 26 | raise ValueError('kinds is none') 27 | self._kinds = kinds 28 | 29 | @property 30 | def address(self) -> str: 31 | return self._host + ":" + str(self._port) 32 | 33 | @property 34 | def host(self) -> str: 35 | return self._host 36 | 37 | @property 38 | def port(self) -> int: 39 | return self._port 40 | 41 | @property 42 | def kinds(self) -> List[str]: 43 | return self._kinds 44 | 45 | 46 | class MemberJoinedEvent(AbstractMemberStatusEvent): 47 | def __init__(self, host: str, port: int, kinds: List[str]): 48 | super().__init__(host, port, kinds) 49 | 50 | 51 | class MemberRejoinedEvent(AbstractMemberStatusEvent): 52 | def __init__(self, host: str, port: int, kinds: List[str]): 53 | super().__init__(host, port, kinds) 54 | 55 | 56 | class MemberLeftEvent(AbstractMemberStatusEvent): 57 | def __init__(self, host: str, port: int, kinds: List[str]): 58 | super().__init__(host, port, kinds) 59 | -------------------------------------------------------------------------------- /protoactor/cluster/member_strategy.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABCMeta 2 | from typing import List 3 | 4 | from protoactor.cluster.member_status import MemberStatus 5 | from protoactor.cluster.rendezvous import Rendezvous 6 | from protoactor.cluster.round_robin import RoundRobin 7 | 8 | 9 | class AbstractMemberStrategy(metaclass=ABCMeta): 10 | @abstractmethod 11 | def get_all_members(self) -> 
List[MemberStatus]: 12 | raise NotImplementedError("Should Implement this method") 13 | 14 | @abstractmethod 15 | def add_member(self, member: MemberStatus) -> None: 16 | raise NotImplementedError("Should Implement this method") 17 | 18 | @abstractmethod 19 | def update_member(self, member: MemberStatus) -> None: 20 | raise NotImplementedError("Should Implement this method") 21 | 22 | @abstractmethod 23 | def remove_member(self, member: MemberStatus) -> None: 24 | raise NotImplementedError("Should Implement this method") 25 | 26 | @abstractmethod 27 | def get_partition(self, key) -> str: 28 | raise NotImplementedError("Should Implement this method") 29 | 30 | @abstractmethod 31 | def get_activator(self) -> str: 32 | raise NotImplementedError("Should Implement this method") 33 | 34 | 35 | class SimpleMemberStrategy(AbstractMemberStrategy): 36 | def __init__(self): 37 | self._members = [] 38 | self._rdv = Rendezvous(self) 39 | self._rr = RoundRobin(self) 40 | 41 | def get_all_members(self) -> List[MemberStatus]: 42 | return self._members 43 | 44 | def add_member(self, member: MemberStatus) -> None: 45 | self._members.append(member) 46 | self._rdv.update_rdv() 47 | 48 | def update_member(self, member: MemberStatus) -> None: 49 | for i in range(len(self._members)): 50 | if self._members[i].address == member.address: 51 | self._members[i] = member 52 | 53 | def remove_member(self, member: MemberStatus) -> None: 54 | for i in range(len(self._members)): 55 | if self._members[i].address == member.address: 56 | del self._members[i] 57 | self._rdv.update_rdv() 58 | 59 | def get_partition(self, key) -> str: 60 | return self._rdv.get_node(key) 61 | 62 | def get_activator(self) -> str: 63 | return self._rr.get_node() 64 | 65 | -------------------------------------------------------------------------------- /protoactor/cluster/messages.py: -------------------------------------------------------------------------------- 1 | from protoactor.actor import PID 2 | 3 | 4 | class WatchPidRequest(): 5 | def __init__(self, pid: PID): 6 | self.pid = pid -------------------------------------------------------------------------------- /protoactor/cluster/pid_cache.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Tuple 3 | 4 | from protoactor.actor import log 5 | from protoactor.actor.actor_context import Actor, AbstractContext, GlobalRootContext 6 | from protoactor.actor.event_stream import GlobalEventStream 7 | from protoactor.actor.messages import Started 8 | from protoactor.actor.props import Props 9 | from protoactor.actor.protos_pb2 import Terminated, PID 10 | from protoactor.actor.supervision import Supervision 11 | from protoactor.actor.utils import Singleton 12 | from protoactor.cluster.member_status_events import AbstractMemberStatusEvent, MemberLeftEvent, MemberRejoinedEvent 13 | from protoactor.cluster.messages import WatchPidRequest 14 | 15 | 16 | class PidCache(metaclass=Singleton): 17 | def __init__(self): 18 | self._watcher = None 19 | self._cluster_topology_evn_sub = None 20 | self._cache = {} 21 | self._reverse_cache = {} 22 | 23 | async def setup(self) -> None: 24 | props = Props.from_producer(lambda: PidCacheWatcher()) \ 25 | .with_guardian_supervisor_strategy(Supervision.always_restart_strategy) 26 | 27 | self._watcher = GlobalRootContext.spawn_named(props, 'PidCacheWatcher') 28 | self._cluster_topology_evn_sub = GlobalEventStream.subscribe(self.process_member_status_event, 29 | type(AbstractMemberStatusEvent)) 30 | 31 
| async def stop(self) -> None: 32 | await GlobalRootContext.stop(self._watcher) 33 | GlobalEventStream.unsubscribe(self._cluster_topology_evn_sub.id) 34 | 35 | def process_member_status_event(self, evn: AbstractMemberStatusEvent) -> None: 36 | if isinstance(evn, (MemberLeftEvent, MemberRejoinedEvent)): 37 | self.remove_cache_by_member_address(evn.address) 38 | 39 | def get_cache(self, name: str) -> Tuple[PID, bool]: 40 | if name in self._cache: 41 | return self._cache[name], True 42 | return None, False 43 | 44 | async def add_cache(self, name: str, pid: PID) -> bool: 45 | if name not in self._cache: 46 | key = pid.to_short_string() 47 | self._cache[name] = pid 48 | self._reverse_cache[key] = name 49 | 50 | await GlobalRootContext.send(self._watcher, WatchPidRequest(pid)) 51 | return True 52 | return False 53 | 54 | def remove_cache_by_pid(self, pid: PID) -> None: 55 | key = pid.to_short_string() 56 | if key in self._reverse_cache: 57 | name = self._reverse_cache[key] 58 | del self._reverse_cache[key] 59 | del self._cache[name] 60 | 61 | def remove_cache_by_name(self, name: str) -> None: 62 | if name in self._cache: 63 | key = self._cache[name] 64 | del self._reverse_cache[key] 65 | del self._cache[name] 66 | 67 | def remove_cache_by_member_address(self, member_address: str) -> None: 68 | for name, pid in self._cache.items(): 69 | if pid.address == member_address: 70 | key = pid.to_short_string() 71 | del self._reverse_cache[key] 72 | del self._cache[name] 73 | 74 | 75 | class PidCacheWatcher(Actor): 76 | def __init__(self): 77 | self._logger = log.create_logger(logging.INFO, context=PidCacheWatcher) 78 | 79 | async def receive(self, context: AbstractContext) -> None: 80 | msg = context.message 81 | if isinstance(msg, Started): 82 | self._logger.debug('Started PidCacheWatcher') 83 | elif isinstance(msg, WatchPidRequest): 84 | await context.watch(msg.pid) 85 | elif isinstance(msg, Terminated): 86 | PidCache().remove_cache_by_pid(msg.who) 87 | -------------------------------------------------------------------------------- /protoactor/cluster/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package cluster; 3 | 4 | import "protoactor/actor/protos.proto"; 5 | 6 | message TakeOwnership { 7 | actor.PID pid = 1; 8 | string name = 2; 9 | } 10 | 11 | message GrainRequest { 12 | int32 method_index = 1; 13 | bytes message_data = 2; 14 | } 15 | 16 | message GrainResponse { 17 | bytes message_data = 1; 18 | } 19 | 20 | message GrainErrorResponse { 21 | string err = 1; 22 | } -------------------------------------------------------------------------------- /protoactor/cluster/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 
2 | # source: protoactor/cluster/protos.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /protoactor/cluster/providers/abstract_cluster_provider.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | from typing import List 3 | 4 | from protoactor.cluster.member_status import AbstractMemberStatusValue, AbstractMemberStatusValueSerializer 5 | 6 | 7 | class AbstractClusterProvider(metaclass=ABCMeta): 8 | @abstractmethod 9 | async def register_member_async(self, cluster_name: str, host: str, port: int, kinds: List[str], 10 | status_value: AbstractMemberStatusValue, 11 | serializer: AbstractMemberStatusValueSerializer) -> None: 12 | raise NotImplementedError("Should Implement this method") 13 | 14 | @abstractmethod 15 | async def monitor_member_status_changes(self) -> None: 16 | raise NotImplementedError("Should Implement this method") 17 | 18 | @abstractmethod 19 | async def update_member_status_value_async(self, status_value: AbstractMemberStatusValue) -> None: 20 | raise NotImplementedError("Should Implement this method") 21 | 22 | @abstractmethod 23 | async def deregister_member_async(self) -> None: 24 | raise NotImplementedError("Should Implement this method") 25 | 26 | @abstractmethod 27 | async def shutdown(self) -> None: 28 | raise NotImplementedError("Should Implement this method") 29 | -------------------------------------------------------------------------------- /protoactor/cluster/providers/consul/startconsul.bat: -------------------------------------------------------------------------------- 1 | consul agent -server -bootstrap -data-dir /tmp/consul -bind=127.0.0.1 -ui -client 0.0.0.0 -------------------------------------------------------------------------------- /protoactor/cluster/providers/single_remote_instance/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package singleremoteinstance; 3 | 4 | message GetKinds {} 5 | message GetKindsResponse { 6 | repeated string kinds=1; 7 | } 8 | -------------------------------------------------------------------------------- /protoactor/cluster/providers/single_remote_instance/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 2 | # source: protoactor/cluster/providers/single_remote_instance/protos.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /protoactor/cluster/providers/single_remote_instance/protos_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: protoactor/cluster/providers/single_remote_instance/protos.proto 4 | 5 | import sys 6 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 7 | from google.protobuf import descriptor as _descriptor 8 | from google.protobuf import message as _message 9 | from google.protobuf import reflection as _reflection 10 | from google.protobuf import symbol_database as _symbol_database 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | 17 | 18 | DESCRIPTOR = _descriptor.FileDescriptor( 19 | name='protoactor/cluster/providers/single_remote_instance/protos.proto', 20 | package='singleremoteinstance', 21 | syntax='proto3', 22 | serialized_options=None, 23 | serialized_pb=_b('\nAprotoactor/\xd1\x81luster/providers/single_remote_instance/protos.proto\x12\x14singleremoteinstance\"\n\n\x08GetKinds\"!\n\x10GetKindsResponse\x12\r\n\x05kinds\x18\x01 \x03(\tb\x06proto3') 24 | ) 25 | 26 | 27 | 28 | 29 | _GETKINDS = _descriptor.Descriptor( 30 | name='GetKinds', 31 | full_name='singleremoteinstance.GetKinds', 32 | filename=None, 33 | file=DESCRIPTOR, 34 | containing_type=None, 35 | fields=[ 36 | ], 37 | extensions=[ 38 | ], 39 | nested_types=[], 40 | enum_types=[ 41 | ], 42 | serialized_options=None, 43 | is_extendable=False, 44 | syntax='proto3', 45 | extension_ranges=[], 46 | oneofs=[ 47 | ], 48 | serialized_start=91, 49 | serialized_end=101, 50 | ) 51 | 52 | 53 | _GETKINDSRESPONSE = _descriptor.Descriptor( 54 | name='GetKindsResponse', 55 | full_name='singleremoteinstance.GetKindsResponse', 56 | filename=None, 57 | file=DESCRIPTOR, 58 | containing_type=None, 59 | fields=[ 60 | _descriptor.FieldDescriptor( 61 | name='kinds', full_name='singleremoteinstance.GetKindsResponse.kinds', index=0, 62 | number=1, type=9, cpp_type=9, label=3, 63 | has_default_value=False, default_value=[], 64 | message_type=None, enum_type=None, containing_type=None, 65 | is_extension=False, extension_scope=None, 66 | serialized_options=None, file=DESCRIPTOR), 67 | ], 68 | extensions=[ 69 | ], 70 | nested_types=[], 71 | enum_types=[ 72 | ], 73 | serialized_options=None, 74 | is_extendable=False, 75 | syntax='proto3', 76 | extension_ranges=[], 77 | oneofs=[ 78 | ], 79 | serialized_start=103, 80 | serialized_end=136, 81 | ) 82 | 83 | DESCRIPTOR.message_types_by_name['GetKinds'] = _GETKINDS 84 | DESCRIPTOR.message_types_by_name['GetKindsResponse'] = _GETKINDSRESPONSE 85 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 86 | 87 | GetKinds = _reflection.GeneratedProtocolMessageType('GetKinds', (_message.Message,), { 88 | 'DESCRIPTOR' : _GETKINDS, 89 | '__module__' : 'protoactor.cluster.providers.single_remote_instance.protos_pb2' 90 | # @@protoc_insertion_point(class_scope:singleremoteinstance.GetKinds) 91 | }) 92 | _sym_db.RegisterMessage(GetKinds) 93 | 94 | GetKindsResponse = _reflection.GeneratedProtocolMessageType('GetKindsResponse', (_message.Message,), { 95 | 'DESCRIPTOR' : _GETKINDSRESPONSE, 96 | '__module__' : 'protoactor.cluster.providers.single_remote_instance.protos_pb2' 97 | # @@protoc_insertion_point(class_scope:singleremoteinstance.GetKindsResponse) 98 | }) 99 | _sym_db.RegisterMessage(GetKindsResponse) 100 | 101 | 102 | # @@protoc_insertion_point(module_scope) 103 | -------------------------------------------------------------------------------- /protoactor/cluster/rendezvous.py: -------------------------------------------------------------------------------- 1 | from protoactor.cluster.hash_algorithms.fnv1a32 import FNV1A32 2 | 3 | is_import = 
False 4 | if is_import: 5 | from protoactor.cluster.member_strategy import AbstractMemberStrategy 6 | 7 | 8 | class Rendezvous: 9 | def __init__(self, member_strategy: 'AbstractMemberStrategy'): 10 | self._member_strategy = member_strategy 11 | self._hash_algorithm = FNV1A32() 12 | self._member_hashes = None 13 | self.update_rdv() 14 | 15 | def get_node(self, key: str): 16 | members = self._member_strategy.get_all_members() 17 | if members is None or len(members) == 0: 18 | return '' 19 | 20 | if len(members) == 1: 21 | return members[0].address 22 | 23 | key_bytes = key.encode() 24 | max_score = 0 25 | max_node = None 26 | 27 | for i in range(len(members)): 28 | member = members[i] 29 | if member.alive: 30 | hash_bytes = self._member_hashes[i] 31 | score = self._rdv_hash(hash_bytes, key_bytes) 32 | if score > max_score: 33 | max_score = score 34 | max_node = member 35 | 36 | if max_node is None: 37 | return '' 38 | else: 39 | return max_node.address 40 | 41 | def update_rdv(self): 42 | self._member_hashes = [member.address.encode() for member in self._member_strategy.get_all_members()] 43 | 44 | def _rdv_hash(self, node: bytes, key: bytes) -> int: 45 | hash_bytes = key + node 46 | return self._hash_algorithm.compute_hash(hash_bytes) 47 | -------------------------------------------------------------------------------- /protoactor/cluster/round_robin.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 3 | is_import = False 4 | if is_import: 5 | from protoactor.cluster.member_strategy import AbstractMemberStrategy 6 | 7 | 8 | class RoundRobin: 9 | def __init__(self, member_strategy: 'AbstractMemberStrategy'): 10 | self._val = 0 11 | self._lock = threading.RLock() 12 | self._member_strategy = member_strategy 13 | 14 | def get_node(self) -> str: 15 | members = self._member_strategy.get_all_members() 16 | count = len(members) 17 | 18 | if count == 0: 19 | return '' 20 | if count == 1: 21 | return members[0].address 22 | with self._lock: 23 | self._val += 1 24 | 25 | return members[self._val % count].address 26 | -------------------------------------------------------------------------------- /protoactor/mailbox/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/mailbox/__init__.py -------------------------------------------------------------------------------- /protoactor/mailbox/dispatcher.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import threading 3 | from abc import ABCMeta, abstractmethod 4 | from threading import Thread 5 | from typing import Callable 6 | 7 | from protoactor.actor.utils import Singleton 8 | 9 | 10 | class AbstractMessageInvoker(metaclass=ABCMeta): 11 | @abstractmethod 12 | def invoke_system_message(self, msg: object): 13 | raise NotImplementedError("Should Implement this method") 14 | 15 | @abstractmethod 16 | def invoke_user_message(self, msg: object): 17 | raise NotImplementedError("Should Implement this method") 18 | 19 | @abstractmethod 20 | def escalate_failure(self, reason: Exception, msg: object): 21 | raise NotImplementedError("Should Implement this method") 22 | 23 | 24 | class AbstractDispatcher(metaclass=ABCMeta): 25 | @property 26 | @abstractmethod 27 | def throughput(self) -> int: 28 | raise NotImplementedError("Should Implement this method") 29 | 30 | @abstractmethod 31 | def 
schedule(self, runner: Callable[..., asyncio.coroutine], **kwargs: ...): 32 | raise NotImplementedError("Should Implement this method") 33 | 34 | 35 | class Dispatchers(metaclass=Singleton): 36 | @property 37 | def default_dispatcher(self) -> AbstractDispatcher: 38 | return ThreadDispatcher() 39 | 40 | @property 41 | def synchronous_dispatcher(self) -> AbstractDispatcher: 42 | return SynchronousDispatcher() 43 | 44 | 45 | # class ThreadDispatcher(AbstractDispatcher): 46 | # def __init__(self, async_loop=None): 47 | # self.async_loop = async_loop 48 | # 49 | # @property 50 | # def throughput(self) -> int: 51 | # return 300 52 | # 53 | # def schedule(self, runner: Callable[..., asyncio.coroutine], **kwargs: ...): 54 | # t = Thread(target=self.__run_async, daemon=True, args=(runner, self.async_loop), kwargs=kwargs) 55 | # t.start() 56 | # 57 | # def __run_async(self, runner, async_loop, **kwargs): 58 | # async_loop_absent = async_loop is None 59 | # try: 60 | # if async_loop_absent: 61 | # async_loop = asyncio.new_event_loop() 62 | # asyncio.set_event_loop(async_loop) 63 | # async_loop.run_until_complete(runner(**kwargs)) 64 | # finally: 65 | # if async_loop_absent: 66 | # async_loop.close() 67 | 68 | 69 | class ThreadDispatcher(AbstractDispatcher): 70 | @property 71 | def throughput(self) -> int: 72 | return 300 73 | 74 | def schedule(self, runner: Callable[..., asyncio.coroutine], **kwargs: ...): 75 | t = Thread(target=self.__start_background_loop, args=(runner,), kwargs=kwargs, daemon=True) 76 | t.start() 77 | 78 | def __start_background_loop(self, runner, **kwargs): 79 | asyncio.run(runner(**kwargs)) 80 | 81 | 82 | class SynchronousDispatcher(AbstractDispatcher): 83 | def __init__(self, async_loop=None): 84 | self.async_loop = async_loop 85 | 86 | @property 87 | def throughput(self) -> int: 88 | return 300 89 | 90 | def schedule(self, runner: Callable[..., asyncio.coroutine], **kwargs: ...): 91 | loop = asyncio.new_event_loop() 92 | asyncio.set_event_loop(loop) 93 | 94 | thread = threading.Thread(target=loop.run_forever, daemon=True) 95 | thread.start() 96 | 97 | future = asyncio.run_coroutine_threadsafe(runner(**kwargs), loop) 98 | future.result() 99 | 100 | loop.call_soon_threadsafe(loop.stop) 101 | thread.join() 102 | -------------------------------------------------------------------------------- /protoactor/mailbox/mailbox_statistics.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | 3 | 4 | class AbstractMailBoxStatistics(metaclass=ABCMeta): 5 | @abstractmethod 6 | def mailbox_stated(self): 7 | raise NotImplementedError("Should Implement this method") 8 | 9 | @abstractmethod 10 | def message_posted(self, message): 11 | raise NotImplementedError("Should Implement this method") 12 | 13 | @abstractmethod 14 | def message_received(self, message): 15 | raise NotImplementedError("Should Implement this method") 16 | 17 | @abstractmethod 18 | def mailbox_empty(self): 19 | raise NotImplementedError("Should Implement this method") 20 | 21 | 22 | class MailBoxStatistics(AbstractMailBoxStatistics): 23 | def mailbox_stated(self): 24 | raise NotImplementedError("Should Implement this method") 25 | 26 | def message_posted(self, message): 27 | raise NotImplementedError("Should Implement this method") 28 | 29 | def message_received(self, message): 30 | raise NotImplementedError("Should Implement this method") 31 | 32 | def mailbox_empty(self): 33 | raise NotImplementedError("Should Implement this method") 
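The dispatcher module above exposes two strategies through the Dispatchers singleton: default_dispatcher runs a coroutine on a daemon thread with its own event loop, while synchronous_dispatcher blocks the caller until the coroutine has finished. A minimal sketch of scheduling work through the default dispatcher; the greet coroutine and its keyword argument are illustrative:

import time

from protoactor.mailbox.dispatcher import Dispatchers


async def greet(name):
    # Executed by asyncio.run() inside the dispatcher's daemon thread.
    print(f'hello, {name}')


# Keyword arguments passed to schedule() are forwarded to the coroutine.
Dispatchers().default_dispatcher.schedule(greet, name='world')

# Give the background thread a moment to run before the process exits.
time.sleep(0.1)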
-------------------------------------------------------------------------------- /protoactor/mailbox/queue.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABCMeta 2 | import queue 3 | from typing import Optional 4 | 5 | 6 | class AbstractQueue(metaclass=ABCMeta): 7 | @abstractmethod 8 | def has_messages(self) -> bool: 9 | raise NotImplementedError("Should Implement this method.") 10 | 11 | @abstractmethod 12 | def push(self, message: object): 13 | raise NotImplementedError("Should Implement this method.") 14 | 15 | @abstractmethod 16 | def pop(self) -> Optional[object]: 17 | raise NotImplementedError("Should Implement this method.") 18 | 19 | 20 | class UnboundedMailboxQueue(AbstractQueue): 21 | def __init__(self): 22 | self._messages = queue.Queue() 23 | 24 | def pop(self) -> Optional[object]: 25 | try: 26 | return self._messages.get_nowait() 27 | except queue.Empty: 28 | return None 29 | 30 | def push(self, message: object): 31 | self._messages.put_nowait(message) 32 | 33 | def has_messages(self) -> bool: 34 | return not self._messages.empty() 35 | -------------------------------------------------------------------------------- /protoactor/persistence/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/persistence/__init__.py -------------------------------------------------------------------------------- /protoactor/persistence/messages.py: -------------------------------------------------------------------------------- 1 | class Snapshot: 2 | def __init__(self, state: any, index: int): 3 | self.state = state 4 | self.index = index 5 | 6 | 7 | class RecoverSnapshot(Snapshot): 8 | def __init__(self, data: any, index: int): 9 | super().__init__(data, index) 10 | 11 | 12 | class PersistedSnapshot(Snapshot): 13 | def __init__(self, data: any, index: int): 14 | super().__init__(data, index) 15 | 16 | 17 | class Event: 18 | def __init__(self, data: any, index: int): 19 | self.data = data 20 | self.index = index 21 | 22 | 23 | class RecoverEvent(Event): 24 | def __init__(self, data: any, index: int): 25 | super().__init__(data, index) 26 | 27 | 28 | class ReplayEvent(Event): 29 | def __init__(self, data: any, index: int): 30 | super().__init__(data, index) 31 | 32 | 33 | class PersistedEvent(Event): 34 | def __init__(self, data: any, index: int): 35 | super().__init__(data, index) -------------------------------------------------------------------------------- /protoactor/persistence/providers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/persistence/providers/__init__.py -------------------------------------------------------------------------------- /protoactor/persistence/providers/abstract_provider.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABCMeta 2 | from typing import Callable, Tuple 3 | 4 | 5 | class AbstractSnapshotStore(metaclass=ABCMeta): 6 | @abstractmethod 7 | async def get_snapshot(self, actor_name: str) -> Tuple[any, int]: 8 | raise NotImplementedError("Should Implement this method") 9 | 10 | @abstractmethod 11 | async def persist_snapshot(self, actor_name: str, index: int, snapshot: any) -> None: 12 | raise 
NotImplementedError("Should Implement this method") 13 | 14 | @abstractmethod 15 | async def delete_snapshots(self, actor_name: str, inclusive_to_index: int) -> None: 16 | raise NotImplementedError("Should Implement this method") 17 | 18 | 19 | class AbstractEventStore(metaclass=ABCMeta): 20 | @abstractmethod 21 | async def get_events(self, actor_name: str, index_start: int, index_end: int, 22 | callback: Callable[[any], None]) -> int: 23 | raise NotImplementedError("Should Implement this method") 24 | 25 | @abstractmethod 26 | async def persist_event(self, actor_name: str, index: int, event: any) -> int: 27 | raise NotImplementedError("Should Implement this method") 28 | 29 | @abstractmethod 30 | async def delete_events(self, actor_name: str, inclusive_to_index: int) -> None: 31 | raise NotImplementedError("Should Implement this method") 32 | 33 | 34 | class AbstractProvider(AbstractEventStore, AbstractSnapshotStore, metaclass=ABCMeta): 35 | pass 36 | -------------------------------------------------------------------------------- /protoactor/persistence/providers/in_memory_provider.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import json 3 | from collections import OrderedDict 4 | from typing import Tuple, Callable, Dict 5 | 6 | from protoactor.persistence.providers.abstract_provider import AbstractProvider 7 | 8 | 9 | class InMemoryProvider(AbstractProvider): 10 | def __init__(self): 11 | self._events = {} 12 | self._snapshots = {} 13 | 14 | def get_snapshots(self, actor_id: str) -> Dict[str, str]: 15 | return self._snapshots[actor_id] 16 | 17 | async def get_snapshot(self, actor_name: str) -> Tuple[any, int]: 18 | if actor_name not in self._snapshots.keys(): 19 | self._snapshots[actor_name] = {} 20 | return None, 0 21 | 22 | ordered_snapshots = OrderedDict(self._snapshots[actor_name]) 23 | if len(ordered_snapshots) == 0: 24 | return None, 0 25 | else: 26 | snapshot = list(ordered_snapshots.items())[-1] 27 | return snapshot[1], snapshot[0] 28 | 29 | async def get_events(self, actor_name: str, index_start: int, index_end: int, 30 | callback: Callable[[any], None]) -> int: 31 | if actor_name in self._events.keys(): 32 | for value in [value for key, value in self._events[actor_name].items() if index_start <= key <= index_end]: 33 | callback(value) 34 | else: 35 | return 0 36 | 37 | async def persist_event(self, actor_name: str, index: int, event: any) -> int: 38 | events = self._events.setdefault(actor_name, {}) 39 | events[index] = event 40 | return 0 41 | 42 | async def persist_snapshot(self, actor_name: str, index: int, snapshot: any) -> None: 43 | snapshots = self._snapshots.setdefault(actor_name, {}) 44 | snapshot_copy = copy.deepcopy(snapshot) 45 | snapshots[index] = snapshot_copy 46 | 47 | async def delete_events(self, actor_name: str, inclusive_to_index: int) -> None: 48 | if actor_name in self._events.keys(): 49 | events_to_remove = [key for key, value in self._events[actor_name].items() if key <= inclusive_to_index] 50 | for key in events_to_remove: 51 | del self._events[actor_name][key] 52 | 53 | async def delete_snapshots(self, actor_name: str, inclusive_to_index: int) -> None: 54 | if actor_name in self._snapshots.keys(): 55 | snapshots_to_remove = [key for key, value in self._snapshots[actor_name].items() if 56 | key <= inclusive_to_index] 57 | for key in snapshots_to_remove: 58 | del self._snapshots[actor_name][key] 59 | -------------------------------------------------------------------------------- 
/protoactor/persistence/snapshot_strategies/abstract_snapshot_strategy.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABCMeta 2 | 3 | from protoactor.persistence.messages import PersistedEvent 4 | 5 | 6 | class AbstractSnapshotStrategy(metaclass=ABCMeta): 7 | @abstractmethod 8 | def should_take_snapshot(self, persisted_event: PersistedEvent) -> bool: 9 | raise NotImplementedError("Should Implement this method") 10 | -------------------------------------------------------------------------------- /protoactor/persistence/snapshot_strategies/event_type_strategy.py: -------------------------------------------------------------------------------- 1 | from protoactor.persistence.messages import PersistedEvent 2 | from protoactor.persistence.snapshot_strategies.abstract_snapshot_strategy import AbstractSnapshotStrategy 3 | 4 | 5 | class EventTypeStrategy(AbstractSnapshotStrategy): 6 | def __init__(self, event_type): 7 | self._event_type = event_type 8 | 9 | def should_take_snapshot(self, persisted_event: PersistedEvent) -> bool: 10 | return self._event_type == type(persisted_event.data) 11 | -------------------------------------------------------------------------------- /protoactor/persistence/snapshot_strategies/interval_strategy.py: -------------------------------------------------------------------------------- 1 | from protoactor.persistence.messages import PersistedEvent 2 | from protoactor.persistence.snapshot_strategies.abstract_snapshot_strategy import AbstractSnapshotStrategy 3 | 4 | 5 | class IntervalStrategy(AbstractSnapshotStrategy): 6 | def __init__(self, events_per_snapshot: int): 7 | self._events_per_snapshot = events_per_snapshot 8 | 9 | def should_take_snapshot(self, persisted_event: PersistedEvent) -> bool: 10 | return persisted_event.index % self._events_per_snapshot == 0 -------------------------------------------------------------------------------- /protoactor/persistence/snapshot_strategies/time_strategy.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from datetime import timedelta 3 | from typing import Callable 4 | 5 | from protoactor.persistence.messages import PersistedEvent 6 | from protoactor.persistence.snapshot_strategies.abstract_snapshot_strategy import AbstractSnapshotStrategy 7 | 8 | 9 | class TimeStrategy(AbstractSnapshotStrategy): 10 | def __init__(self, interval: timedelta, get_now: Callable[[], datetime.datetime] = None): 11 | self._interval = interval 12 | if get_now is None: 13 | self._get_now = lambda: datetime.datetime.now() 14 | else: 15 | self._get_now = get_now 16 | self._last_taken = self._get_now() 17 | 18 | def should_take_snapshot(self, persisted_event: PersistedEvent) -> bool: 19 | now = self._get_now() 20 | if (self._last_taken + self._interval) <= now: 21 | self._last_taken = now 22 | return True 23 | 24 | return False 25 | -------------------------------------------------------------------------------- /protoactor/remote/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/remote/__init__.py -------------------------------------------------------------------------------- /protoactor/remote/exceptions.py: -------------------------------------------------------------------------------- 1 | from protoactor.remote.response import ResponseStatusCode 2 | 3 | 4 | 
class ActivatorException(Exception): 5 | def __init__(self, code: int, do_not_throw: bool = False): 6 | self.code = code 7 | self.do_not_throw = do_not_throw 8 | 9 | 10 | class ActivatorUnavailableException(ActivatorException): 11 | def __init__(self): 12 | super().__init__(int(ResponseStatusCode.Unavailable), True) -------------------------------------------------------------------------------- /protoactor/remote/messages.py: -------------------------------------------------------------------------------- 1 | from protoactor.actor import PID 2 | 3 | 4 | class Endpoint: 5 | def __init__(self, watcher: PID, writer: PID): 6 | self.watcher = watcher 7 | self.writer = writer 8 | 9 | class EndpointConnectedEvent: 10 | def __init__(self, address): 11 | self.address = address 12 | 13 | 14 | class EndpointTerminatedEvent: 15 | def __init__(self, address): 16 | self.address = address 17 | 18 | 19 | class RemoteTerminate: 20 | def __init__(self, watcher: PID, watchee: PID): 21 | self.watcher = watcher 22 | self.watchee = watchee 23 | 24 | 25 | class RemoteWatch: 26 | def __init__(self, watcher: PID, watchee: PID): 27 | self.watcher = watcher 28 | self.watchee = watchee 29 | 30 | 31 | class RemoteUnwatch: 32 | def __init__(self, watcher: PID, watchee: PID): 33 | self.watcher = watcher 34 | self.watchee = watchee 35 | 36 | 37 | class RemoteDeliver: 38 | def __init__(self, header, message, target, sender, serializer_id): 39 | self.header = header 40 | self.message = message 41 | self.target = target 42 | self.sender = sender 43 | self.serializer_id = serializer_id 44 | 45 | 46 | class JsonMessage: 47 | def __init__(self, type_name, json): 48 | if type_name is None: 49 | raise TypeError("type_name") 50 | if json is None: 51 | raise TypeError("json") 52 | 53 | self.type_name = type_name 54 | self.json = json 55 | -------------------------------------------------------------------------------- /protoactor/remote/protos_remote.proto: -------------------------------------------------------------------------------- 1 |  2 | syntax = "proto3"; 3 | package remote; 4 | option csharp_namespace = "Proto.remote"; 5 | 6 | import "protoactor/actor/protos.proto"; 7 | 8 | message MessageBatch { 9 | repeated string type_names = 1; 10 | repeated string target_names = 2; 11 | repeated MessageEnvelope envelopes = 3; 12 | } 13 | 14 | message MessageEnvelope { 15 | int32 type_id = 1; 16 | bytes message_data = 2; 17 | int32 target = 3; 18 | actor.PID sender = 4; 19 | int32 serializer_id = 5; 20 | MessageHeader message_header = 6; 21 | } 22 | 23 | message MessageHeader { 24 | map header_data = 1; 25 | } 26 | 27 | message ActorPidRequest { 28 | string name = 1; 29 | string kind = 2; 30 | } 31 | 32 | message ActorPidResponse { 33 | actor.PID pid = 1; 34 | int32 status_code = 2; 35 | } 36 | 37 | message Unit {} 38 | 39 | message ConnectRequest {} 40 | 41 | message ConnectResponse { 42 | int32 default_serializer_id = 1; 43 | } 44 | 45 | service Remoting { 46 | rpc Connect(ConnectRequest) returns (ConnectResponse) {} 47 | rpc Receive (stream MessageBatch) returns (stream Unit) {} 48 | } -------------------------------------------------------------------------------- /protoactor/remote/protos_remote_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 
2 | # source: protoactor/remote/protos_remote.proto 3 | # plugin: grpclib.plugin.main 4 | import abc 5 | import typing 6 | 7 | import grpclib.const 8 | import grpclib.client 9 | if typing.TYPE_CHECKING: 10 | import grpclib.server 11 | 12 | import protoactor.actor.protos_pb2 13 | import protoactor.remote.protos_remote_pb2 14 | 15 | 16 | class RemotingBase(abc.ABC): 17 | 18 | @abc.abstractmethod 19 | async def Connect(self, stream: 'grpclib.server.Stream[protoactor.remote.protos_remote_pb2.ConnectRequest, protoactor.remote.protos_remote_pb2.ConnectResponse]') -> None: 20 | pass 21 | 22 | @abc.abstractmethod 23 | async def Receive(self, stream: 'grpclib.server.Stream[protoactor.remote.protos_remote_pb2.MessageBatch, protoactor.remote.protos_remote_pb2.Unit]') -> None: 24 | pass 25 | 26 | def __mapping__(self) -> typing.Dict[str, grpclib.const.Handler]: 27 | return { 28 | '/remote.Remoting/Connect': grpclib.const.Handler( 29 | self.Connect, 30 | grpclib.const.Cardinality.UNARY_UNARY, 31 | protoactor.remote.protos_remote_pb2.ConnectRequest, 32 | protoactor.remote.protos_remote_pb2.ConnectResponse, 33 | ), 34 | '/remote.Remoting/Receive': grpclib.const.Handler( 35 | self.Receive, 36 | grpclib.const.Cardinality.STREAM_STREAM, 37 | protoactor.remote.protos_remote_pb2.MessageBatch, 38 | protoactor.remote.protos_remote_pb2.Unit, 39 | ), 40 | } 41 | 42 | 43 | class RemotingStub: 44 | 45 | def __init__(self, channel: grpclib.client.Channel) -> None: 46 | self.Connect = grpclib.client.UnaryUnaryMethod( 47 | channel, 48 | '/remote.Remoting/Connect', 49 | protoactor.remote.protos_remote_pb2.ConnectRequest, 50 | protoactor.remote.protos_remote_pb2.ConnectResponse, 51 | ) 52 | self.Receive = grpclib.client.StreamStreamMethod( 53 | channel, 54 | '/remote.Remoting/Receive', 55 | protoactor.remote.protos_remote_pb2.MessageBatch, 56 | protoactor.remote.protos_remote_pb2.Unit, 57 | ) 58 | -------------------------------------------------------------------------------- /protoactor/remote/response.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class ResponseStatusCode(Enum): 5 | OK = 0 6 | Unavailable = 1 7 | Timeout = 2 8 | ProcessNameAlreadyExist = 3 9 | Error = 4 -------------------------------------------------------------------------------- /protoactor/router/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/router/__init__.py -------------------------------------------------------------------------------- /protoactor/router/broadcast_router.py: -------------------------------------------------------------------------------- 1 | from typing import List, Any, Iterable 2 | 3 | from protoactor.actor import PID 4 | from protoactor.actor.actor_context import GlobalRootContext 5 | from protoactor.actor.props import Props 6 | from protoactor.router.router_config import GroupRouterConfig, PoolRouterConfig 7 | from protoactor.router.router_state import RouterState 8 | 9 | 10 | class BroadcastGroupRouterConfig(GroupRouterConfig): 11 | def __init__(self, routees: List[PID]): 12 | super().__init__() 13 | self._routees = routees 14 | 15 | def create_router_state(self) -> RouterState: 16 | return BroadcastRouterState() 17 | 18 | 19 | class BroadcastPoolRouterConfig(PoolRouterConfig): 20 | def __init__(self, pool_size: int, routee_props: Props): 21 | super().__init__(pool_size, routee_props) 22 
| 23 | def create_router_state(self) -> RouterState: 24 | return BroadcastRouterState() 25 | 26 | 27 | class BroadcastRouterState(RouterState): 28 | def __init__(self): 29 | self._routees = None 30 | 31 | def get_routees(self) -> List[PID]: 32 | return list(self._routees) 33 | 34 | def set_routees(self, routees: Iterable[PID]) -> None: 35 | self._routees = routees 36 | 37 | async def route_message(self, message: Any) -> None: 38 | for pid in self._routees: 39 | await GlobalRootContext.send(pid, message) -------------------------------------------------------------------------------- /protoactor/router/consistent_hash_group_router.py: -------------------------------------------------------------------------------- 1 | from typing import List, Callable, Any 2 | 3 | from protoactor.actor import PID 4 | from protoactor.actor.actor_context import GlobalRootContext 5 | from protoactor.actor.message_envelope import MessageEnvelope 6 | from protoactor.actor.props import Props 7 | from protoactor.router.hash import HashRing 8 | from protoactor.router.messages import AbstractHashable 9 | from protoactor.router.router_config import GroupRouterConfig, PoolRouterConfig 10 | from protoactor.router.router_state import RouterState 11 | 12 | 13 | class ConsistentHashGroupRouterConfig(GroupRouterConfig): 14 | def __init__(self, hash_func: Callable[[str], int], replica_count: int, routees: List[PID]): 15 | super().__init__() 16 | if replica_count <= 0: 17 | raise ValueError('ReplicaCount must be greater than 0') 18 | 19 | self._hash_func = hash_func 20 | self._replica_count = replica_count 21 | self._routees = routees 22 | 23 | def create_router_state(self) -> RouterState: 24 | return ConsistentHashRouterState(self._hash_func, self._replica_count) 25 | 26 | 27 | class ConsistentHashPoolRouterConfig(PoolRouterConfig): 28 | def __init__(self, pool_size: int, routee_props: Props, hash_func: Callable[[str], int], replica_count: int): 29 | super().__init__(pool_size, routee_props) 30 | if replica_count <= 0: 31 | raise ValueError('ReplicaCount must be greater than 0') 32 | 33 | self._hash_func = hash_func 34 | self._replica_count = replica_count 35 | 36 | def create_router_state(self) -> RouterState: 37 | return ConsistentHashRouterState(self._hash_func, self._replica_count) 38 | 39 | 40 | class ConsistentHashRouterState(RouterState): 41 | def __init__(self, hash_func: Callable[[str], int], replica_count: int): 42 | self._hash_func = hash_func 43 | self._replica_count = replica_count 44 | self._hash_ring = None 45 | self._routee_map = None 46 | 47 | def get_routees(self) -> List[PID]: 48 | return list(self._routee_map.values()) 49 | 50 | def set_routees(self, routees: List[PID]) -> None: 51 | self._routee_map = {} 52 | nodes = [] 53 | 54 | for pid in routees: 55 | node_name = pid.to_short_string() 56 | nodes.append(node_name) 57 | self._routee_map[node_name] = pid 58 | 59 | self._hash_ring = HashRing(nodes, self._hash_func, self._replica_count) 60 | 61 | async def route_message(self, message: Any) -> None: 62 | msg, _, _ = MessageEnvelope.unwrap(message) 63 | if isinstance(msg, AbstractHashable): 64 | key = msg.hash_by() 65 | node = self._hash_ring.get_node(key) 66 | routee = self._routee_map[node] 67 | await GlobalRootContext.send(routee, message) 68 | else: 69 | raise AttributeError('Message of type %s does not implement AbstractHashable' % type(message).__name__) 70 | -------------------------------------------------------------------------------- /protoactor/router/hash.py: 
-------------------------------------------------------------------------------- 1 | import hashlib 2 | import struct 3 | from typing import List, Callable 4 | 5 | 6 | class MD5Hasher: 7 | @staticmethod 8 | def hash(hash_key: str) -> int: 9 | digest = hashlib.md5(hash_key.encode()).digest() 10 | hash_value = struct.unpack('i', digest[:4])[0] 11 | return hash_value 12 | 13 | 14 | class HashRing: 15 | def __init__(self, nodes: List[str], hash_func: Callable[[str], int], replica_count: int) -> None: 16 | self._ring = [] 17 | self._hash_func = hash_func 18 | 19 | for node in nodes: 20 | for count in range(replica_count): 21 | hash_key = str(count) + node 22 | self._ring.append((self._hash_func(hash_key), node)) 23 | 24 | self._ring.sort(key=lambda tup: tup[0]) 25 | 26 | def get_node(self, key: str) -> str: 27 | node = next((t for t in self._ring if t[0] > self._hash_func(key)), None) 28 | if node is not None: 29 | return node[1] 30 | return self._ring[0][1] 31 | -------------------------------------------------------------------------------- /protoactor/router/messages.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class RouterManagementMessage(ABC): 5 | pass 6 | 7 | 8 | class Routees: 9 | def __init__(self, pids): 10 | self.pids = pids 11 | 12 | 13 | class AddRoutee(RouterManagementMessage): 14 | def __init__(self, pid): 15 | self.pid = pid 16 | 17 | 18 | class RemoveRoutee(RouterManagementMessage): 19 | def __init__(self, pid): 20 | self.pid = pid 21 | 22 | 23 | class BroadcastMessage(RouterManagementMessage): 24 | def __init__(self, message): 25 | self.message = message 26 | 27 | 28 | class GetRoutees(RouterManagementMessage): 29 | def __init__(self): 30 | pass 31 | 32 | 33 | class AbstractHashable(ABC): 34 | @abstractmethod 35 | def hash_by(self) -> str: 36 | raise NotImplementedError('Should implement this method') 37 | -------------------------------------------------------------------------------- /protoactor/router/random_router.py: -------------------------------------------------------------------------------- 1 | import random 2 | from typing import List, Any 3 | 4 | from protoactor.actor import PID 5 | from protoactor.actor.actor_context import GlobalRootContext 6 | from protoactor.actor.props import Props 7 | from protoactor.router.router_config import GroupRouterConfig, PoolRouterConfig 8 | from protoactor.router.router_state import RouterState 9 | 10 | 11 | class RandomGroupRouterConfig(GroupRouterConfig): 12 | def __init__(self, routees: List[PID], seed: int = None): 13 | super().__init__() 14 | self._routees = routees 15 | self._seed = seed 16 | 17 | def create_router_state(self) -> RouterState: 18 | return RandomRouterState(self._seed) 19 | 20 | 21 | class RandomPoolRouterConfig(PoolRouterConfig): 22 | def __init__(self, pool_size: int, routee_props: Props, seed: int = None): 23 | super().__init__(pool_size, routee_props) 24 | self._seed = seed 25 | 26 | def create_router_state(self) -> RouterState: 27 | return RandomRouterState(self._seed) 28 | 29 | 30 | class RandomRouterState(RouterState): 31 | def __init__(self, seed: int = None): 32 | if seed is not None: 33 | random.seed(seed) 34 | self._routees = None 35 | 36 | def get_routees(self) -> List[PID]: 37 | return list(self._routees) 38 | 39 | def set_routees(self, routees: List[PID]) -> None: 40 | self._routees = routees 41 | 42 | async def route_message(self, message: Any) -> None: 43 | i = random.randint(0, len(self._routees) - 1) 44 
| pid = self._routees[i] 45 | await GlobalRootContext.send(pid, message) 46 | -------------------------------------------------------------------------------- /protoactor/router/round_robin_router.py: -------------------------------------------------------------------------------- 1 | from typing import List, Any, Iterable 2 | 3 | from protoactor.actor import PID 4 | from protoactor.actor.actor_context import GlobalRootContext 5 | from protoactor.actor.props import Props 6 | from protoactor.router.router_config import GroupRouterConfig, PoolRouterConfig 7 | from protoactor.router.router_state import RouterState 8 | 9 | 10 | class RoundRobinGroupRouterConfig(GroupRouterConfig): 11 | def __init__(self, routees: List[PID]): 12 | super().__init__() 13 | self._routees = routees 14 | 15 | def create_router_state(self) -> RouterState: 16 | return RoundRobinState() 17 | 18 | 19 | class RoundRobinPoolRouterConfig(PoolRouterConfig): 20 | def __init__(self, pool_size: int, routee_props: Props): 21 | super().__init__(pool_size, routee_props) 22 | 23 | def create_router_state(self) -> RouterState: 24 | return RoundRobinState() 25 | 26 | 27 | class RoundRobinState(RouterState): 28 | def __init__(self): 29 | self._routees = None 30 | self._current_index = 0 31 | 32 | def get_routees(self) -> List[PID]: 33 | return list(self._routees) 34 | 35 | def set_routees(self, routees: Iterable[PID]) -> None: 36 | self._routees = routees 37 | 38 | async def route_message(self, message: Any) -> None: 39 | i = self._current_index % len(self._routees) 40 | self._current_index += 1 41 | pid = self._routees[i] 42 | 43 | await GlobalRootContext.send(pid, message) -------------------------------------------------------------------------------- /protoactor/router/router.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, List 2 | 3 | from protoactor.actor import PID 4 | from protoactor.actor.props import Props 5 | from protoactor.router.broadcast_router import BroadcastGroupRouterConfig, BroadcastPoolRouterConfig 6 | from protoactor.router.consistent_hash_group_router import ConsistentHashGroupRouterConfig, \ 7 | ConsistentHashPoolRouterConfig 8 | from protoactor.router.hash import MD5Hasher 9 | from protoactor.router.random_router import RandomGroupRouterConfig, RandomPoolRouterConfig 10 | from protoactor.router.round_robin_router import RoundRobinGroupRouterConfig, RoundRobinPoolRouterConfig 11 | 12 | 13 | class Router: 14 | @staticmethod 15 | def new_broadcast_group(routees: List[PID]) -> Props: 16 | return BroadcastGroupRouterConfig(routees).props() 17 | 18 | @staticmethod 19 | def new_consistent_hash_group(routees: List[PID], 20 | hash_func: Callable[[str], int] = None, 21 | replica_count: int = None) -> Props: 22 | 23 | if hash_func is None and replica_count is None: 24 | return ConsistentHashGroupRouterConfig(MD5Hasher.hash, 100, routees).props() 25 | return ConsistentHashGroupRouterConfig(hash_func, replica_count, routees).props() 26 | 27 | @staticmethod 28 | def new_random_group(routees: List[PID], seed: int = None) -> Props: 29 | return RandomGroupRouterConfig(routees, seed).props() 30 | 31 | @staticmethod 32 | def new_round_robin_group(routees: List[PID]) -> Props: 33 | return RoundRobinGroupRouterConfig(routees).props() 34 | 35 | @staticmethod 36 | def new_broadcast_pool(props: Props, pool_size: int) -> Props: 37 | return BroadcastPoolRouterConfig(pool_size, props).props() 38 | 39 | @staticmethod 40 | def new_consistent_hash_pool(props: Props, 41 
| pool_size: int, 42 | hash_func: Callable[[str], int] = None, 43 | replica_count: int = 100) -> Props: 44 | 45 | if hash_func is None: 46 | return ConsistentHashPoolRouterConfig(pool_size, props, MD5Hasher.hash, replica_count).props() 47 | return ConsistentHashPoolRouterConfig(pool_size, props, hash_func, replica_count).props() 48 | 49 | @staticmethod 50 | def new_random_pool(props: Props, pool_size: int, seed: int = None) -> Props: 51 | return RandomPoolRouterConfig(pool_size, props, seed).props() 52 | 53 | @staticmethod 54 | def new_round_robin_pool(props: Props, pool_size: int) -> Props: 55 | return RoundRobinPoolRouterConfig(pool_size, props).props() 56 | -------------------------------------------------------------------------------- /protoactor/router/router_actor.py: -------------------------------------------------------------------------------- 1 | from threading import Event 2 | 3 | from protoactor.actor.actor import Actor 4 | from protoactor.actor.actor_context import AbstractContext 5 | from protoactor.actor.messages import Started 6 | from protoactor.actor.protos_pb2 import Terminated 7 | from protoactor.router.messages import AddRoutee, RemoveRoutee, BroadcastMessage, GetRoutees, Routees, \ 8 | RouterManagementMessage 9 | from protoactor.router.router_state import RouterState 10 | 11 | is_import = False 12 | if is_import: 13 | from protoactor.router.router_config import RouterConfig 14 | 15 | 16 | class RouterActor(Actor): 17 | def __init__(self, config: 'RouterConfig', router_state: RouterState, wg: Event): 18 | self._config = config 19 | self._router_state = router_state 20 | self._wg = wg 21 | 22 | async def receive(self, context: AbstractContext) -> None: 23 | msg = context.message 24 | if isinstance(msg, Started): 25 | await self.process_started_message(context) 26 | elif isinstance(msg, Terminated): 27 | await self.process_terminated_message(context) 28 | elif isinstance(msg, RouterManagementMessage): 29 | await self.process_router_management_message(context) 30 | else: 31 | await self.process_message(context) 32 | 33 | async def process_started_message(self, context): 34 | await self._config.on_started(context, self._router_state) 35 | self._wg.set() 36 | 37 | async def process_terminated_message(self, context): 38 | pass 39 | 40 | async def process_router_management_message(self, context): 41 | msg = context.message 42 | if isinstance(msg, AddRoutee): 43 | routees = self._router_state.get_routees() 44 | if msg.pid not in routees: 45 | await context.watch(msg.pid) 46 | routees.append(msg.pid) 47 | self._router_state.set_routees(routees) 48 | self._wg.set() 49 | elif isinstance(msg, RemoveRoutee): 50 | routees = self._router_state.get_routees() 51 | if msg.pid in routees: 52 | await context.unwatch(msg.pid) 53 | routees.remove(msg.pid) 54 | self._router_state.set_routees(routees) 55 | self._wg.set() 56 | elif isinstance(msg, BroadcastMessage): 57 | for routee in self._router_state.get_routees(): 58 | await context.request(routee, msg.message) 59 | self._wg.set() 60 | elif isinstance(msg, GetRoutees): 61 | self._wg.set() 62 | routees = self._router_state.get_routees() 63 | await context.respond(Routees(routees)) 64 | 65 | async def process_message(self, context): 66 | await self._router_state.route_message(context.message) 67 | self._wg.set() 68 | -------------------------------------------------------------------------------- /protoactor/router/router_config.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 
from abc import abstractmethod, ABC 3 | 4 | from protoactor.actor import PID, ProcessRegistry 5 | from protoactor.actor.actor_context import AbstractContext, ActorContext 6 | from protoactor.actor.exceptions import ProcessNameExistException 7 | from protoactor.actor.messages import Started 8 | from protoactor.actor.props import Props 9 | from protoactor.router.router_actor import RouterActor 10 | from protoactor.router.router_process import RouterProcess 11 | from protoactor.router.router_state import RouterState 12 | 13 | 14 | class RouterConfig: 15 | @abstractmethod 16 | async def on_started(self, context: AbstractContext, router: RouterState) -> None: 17 | pass 18 | 19 | @abstractmethod 20 | def create_router_state(self) -> RouterState: 21 | pass 22 | 23 | def props(self) -> Props: 24 | def spawn_router_process(name: str, props: Props, parent: PID) -> PID: 25 | wg = threading.Event() 26 | router_state = self.create_router_state() 27 | p = props.with_producer(lambda: RouterActor(self, router_state, wg)) 28 | 29 | ctx = ActorContext(p, parent) 30 | mailbox = props.mailbox_producer() 31 | dispatcher = props.dispatcher 32 | process = RouterProcess(router_state, mailbox, wg) 33 | pid, absent = ProcessRegistry().try_add(name, process) 34 | if not absent: 35 | raise ProcessNameExistException(name, pid) 36 | 37 | ctx.my_self = pid 38 | mailbox.register_handlers(ctx, dispatcher) 39 | mailbox.post_system_message(Started()) 40 | mailbox.start() 41 | wg.wait() 42 | 43 | return pid 44 | 45 | return Props().with_spawner(spawn_router_process) 46 | 47 | 48 | class GroupRouterConfig(RouterConfig, ABC): 49 | def __init__(self): 50 | self._routees = None 51 | 52 | async def on_started(self, context: AbstractContext, router: RouterState) -> None: 53 | for pid in self._routees: 54 | await context.watch(pid) 55 | router.set_routees(self._routees) 56 | 57 | 58 | class PoolRouterConfig(RouterConfig, ABC): 59 | def __init__(self, pool_size: int, routee_props: Props): 60 | self._pool_size = pool_size 61 | self._routee_props = routee_props 62 | 63 | async def on_started(self, context: AbstractContext, router: RouterState) -> None: 64 | routees = map(lambda x: context.spawn(self._routee_props), range(self._pool_size)) 65 | router.set_routees(list(routees)) 66 | -------------------------------------------------------------------------------- /protoactor/router/router_process.py: -------------------------------------------------------------------------------- 1 | from threading import Event 2 | 3 | from protoactor.actor import PID 4 | from protoactor.actor.process import ActorProcess 5 | from protoactor.mailbox.mailbox import AbstractMailbox 6 | from protoactor.router.router_state import RouterState 7 | 8 | 9 | class RouterProcess(ActorProcess): 10 | def __init__(self, state: RouterState, mailbox: AbstractMailbox, wg: Event): 11 | super().__init__(mailbox) 12 | self._state = state 13 | self._wg = wg 14 | 15 | async def send_user_message(self, pid: PID, message: object, sender: PID = None): 16 | self._wg.clear() 17 | await super(RouterProcess, self).send_user_message(pid, message) 18 | self._wg.wait() -------------------------------------------------------------------------------- /protoactor/router/router_state.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod 2 | from typing import Iterable, Any, List 3 | 4 | from protoactor.actor import PID 5 | 6 | 7 | class RouterState: 8 | @abstractmethod 9 | def get_routees(self) -> List[PID]: 10 | pass 11 | 
12 | @abstractmethod 13 | def set_routees(self, routees: Iterable[PID]) -> None: 14 | pass 15 | 16 | @abstractmethod 17 | async def route_message(self, message: Any) -> None: 18 | pass 19 | -------------------------------------------------------------------------------- /protoactor/schedulers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/schedulers/__init__.py -------------------------------------------------------------------------------- /protoactor/schedulers/simple_scheduler.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from abc import ABCMeta, abstractmethod 3 | from datetime import timedelta 4 | 5 | from protoactor.actor import PID 6 | from protoactor.actor.actor_context import AbstractSenderContext, RootContext 7 | from protoactor.actor.cancel_token import CancelToken 8 | 9 | 10 | class AbstractSimpleScheduler(metaclass=ABCMeta): 11 | @abstractmethod 12 | async def schedule_tell_once(self, delay: timedelta, target: PID, message: any) -> None: 13 | raise NotImplementedError("Should Implement this method") 14 | 15 | @abstractmethod 16 | async def schedule_tell_repeatedly(self, delay: timedelta, interval: timedelta, target: PID, message: any, 17 | cancellation_token: CancelToken) -> None: 18 | raise NotImplementedError("Should Implement this method") 19 | 20 | @abstractmethod 21 | async def schedule_request_once(self, delay: timedelta, sender: PID, target: PID, 22 | message: any) -> None: 23 | raise NotImplementedError("Should Implement this method") 24 | 25 | @abstractmethod 26 | async def schedule_request_repeatedly(self, delay: timedelta, interval: timedelta, sender: PID, target: PID, 27 | message: any, 28 | cancellation_token: CancelToken) -> None: 29 | raise NotImplementedError("Should Implement this method") 30 | 31 | 32 | class SimpleScheduler(AbstractSimpleScheduler): 33 | def __init__(self, context: AbstractSenderContext = RootContext()): 34 | self._context = context 35 | 36 | async def schedule_tell_once(self, delay: timedelta, target: PID, message: any) -> None: 37 | async def schedule(): 38 | await asyncio.sleep(delay.total_seconds()) 39 | await self._context.send(target, message) 40 | 41 | asyncio.create_task(schedule()) 42 | 43 | async def schedule_tell_repeatedly(self, delay: timedelta, interval: timedelta, target: PID, message: any, 44 | cancellation_token: CancelToken) -> None: 45 | async def schedule(): 46 | await cancellation_token.wait(delay.total_seconds()) 47 | while True: 48 | if cancellation_token.triggered: 49 | return 50 | await self._context.send(target, message) 51 | await cancellation_token.wait(interval.total_seconds()) 52 | 53 | asyncio.create_task(schedule()) 54 | 55 | async def schedule_request_once(self, delay: timedelta, sender: PID, target: PID, 56 | message: any) -> None: 57 | async def schedule(): 58 | await asyncio.sleep(delay.total_seconds()) 59 | await self._context.request(target, message, sender) 60 | 61 | asyncio.create_task(schedule()) 62 | 63 | async def schedule_request_repeatedly(self, delay: timedelta, interval: timedelta, sender: PID, target: PID, 64 | message: any, cancellation_token: CancelToken) -> None: 65 | async def schedule(): 66 | await cancellation_token.cancellable_wait([], timeout=delay.total_seconds()) 67 | while True: 68 | if cancellation_token.triggered: 69 | return 70 | await 
self._context.request(target, message, sender) 71 | await cancellation_token.cancellable_wait([], timeout=interval.total_seconds()) 72 | 73 | asyncio.create_task(schedule()) 74 | -------------------------------------------------------------------------------- /protoactor/tracing/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/tracing/__init__.py -------------------------------------------------------------------------------- /protoactor/tracing/opentracing/open_tracing_factory.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | import opentracing 4 | from jaeger_client import Span, Tracer 5 | 6 | from protoactor.actor.actor_context import AbstractContext, AbstractRootContext 7 | from protoactor.actor.props import Props 8 | from protoactor.tracing.opentracing.open_tracing_decorator import OpenTracingRootContextDecorator, \ 9 | OpenTracingActorContextDecorator 10 | from protoactor.tracing.opentracing.open_tracing_helper import OpenTracingHelper 11 | from protoactor.tracing.opentracing.open_tracing_middleware import open_tracing_sender_middleware 12 | 13 | 14 | class OpenTracingFactory: 15 | @staticmethod 16 | def get_props_with_open_tracing(props: Props, send_span_setup: Callable[[Span, any], None] = None, 17 | receive_span_setup: Callable[[Span, any], None] = None, 18 | tracer: Tracer = None) -> Props: 19 | def fn(ctx): 20 | return OpenTracingFactory.get_context_with_open_tracing(ctx, send_span_setup, receive_span_setup) 21 | 22 | new_props = props.with_context_decorator([fn]) 23 | return OpenTracingFactory.get_props_with_open_tracing_sender(new_props, tracer) 24 | 25 | @staticmethod 26 | def get_props_with_open_tracing_sender(props: Props, tracer: Tracer) -> Props: 27 | return props.with_sender_middleware([open_tracing_sender_middleware(tracer)]) 28 | 29 | @staticmethod 30 | def get_context_with_open_tracing(context: AbstractContext, send_span_setup: Callable[[Span, any], None] = None, 31 | receive_span_setup: Callable[[Span, any], None] = None, 32 | tracer: Tracer = None) -> OpenTracingActorContextDecorator: 33 | if send_span_setup is None: 34 | send_span_setup = OpenTracingHelper.default_setup_span 35 | 36 | if receive_span_setup is None: 37 | receive_span_setup = OpenTracingHelper.default_setup_span 38 | 39 | if tracer is None: 40 | tracer = opentracing.global_tracer() 41 | 42 | return OpenTracingActorContextDecorator(context, send_span_setup, receive_span_setup, tracer) 43 | 44 | @staticmethod 45 | def get_root_context_with_open_tracing(context: AbstractRootContext, 46 | send_span_setup: Callable[[Span, any], None] = None, 47 | tracer: Tracer = None) -> OpenTracingRootContextDecorator: 48 | if send_span_setup is None: 49 | send_span_setup = OpenTracingHelper.default_setup_span 50 | 51 | if tracer is None: 52 | tracer = opentracing.global_tracer() 53 | 54 | return OpenTracingRootContextDecorator(context, send_span_setup, tracer) 55 | -------------------------------------------------------------------------------- /protoactor/tracing/opentracing/open_tracing_helper.py: -------------------------------------------------------------------------------- 1 | import traceback 2 | from collections import Callable 3 | from typing import Optional 4 | 5 | from jaeger_client import Tracer, SpanContext, Span 6 | from opentracing.tags import ERROR 7 | 8 | 9 | class 
OpenTracingHelper: 10 | @staticmethod 11 | def build_started_scope(tracer: Tracer, parent_span: SpanContext, verb: str, message: any, 12 | span_setup: 'Callable[[Span, any], None]') -> None: 13 | message_type = type(message).__name__ 14 | scope = tracer.start_active_span(f'{verb} {message_type}', child_of=parent_span) 15 | scope.span.set_tag('proto.messagetype', message_type) 16 | 17 | if span_setup is not None: 18 | span_setup(scope.span, message) 19 | 20 | return scope 21 | 22 | @staticmethod 23 | def setup_span(exception: Exception, span: Span) -> None: 24 | if span is None: 25 | return 26 | 27 | span.set_tag(ERROR, True) 28 | span.log_kv({'exception': type(exception).__name__, 29 | 'message': str(exception), 30 | 'stackTrace': traceback.format_exception(etype=type(exception), 31 | value=exception, 32 | tb=exception.__traceback__)}) 33 | 34 | @staticmethod 35 | def get_parent_span(tracer: Tracer) -> Optional[SpanContext]: 36 | if tracer.active_span is not None: 37 | return tracer.active_span.context 38 | return None 39 | 40 | @staticmethod 41 | def default_setup_span(span: Span, message: any) -> None: 42 | pass 43 | -------------------------------------------------------------------------------- /protoactor/tracing/opentracing/open_tracing_middleware.py: -------------------------------------------------------------------------------- 1 | import opentracing 2 | from jaeger_client import Tracer 3 | from opentracing import Format 4 | 5 | from protoactor.actor import PID 6 | from protoactor.actor.actor_context import AbstractSenderContext 7 | from protoactor.actor.message_envelope import MessageEnvelope 8 | 9 | 10 | def open_tracing_sender_middleware(tracer: Tracer = None): 11 | def level_0(next): 12 | async def level_1(context: AbstractSenderContext, target: PID, envelope: MessageEnvelope): 13 | if tracer is None: 14 | inner_tracer = opentracing.global_tracer() 15 | else: 16 | inner_tracer = tracer 17 | span = inner_tracer.active_span 18 | if span is None: 19 | await next(context, target, envelope) 20 | else: 21 | dictionary = {} 22 | inner_tracer.inject(span.context, Format.TEXT_MAP, dictionary) 23 | envelope = envelope.with_headers(dictionary) 24 | await next(context, target, envelope) 25 | 26 | return level_1 27 | 28 | return level_0 29 | -------------------------------------------------------------------------------- /protoactor/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/utils/__init__.py -------------------------------------------------------------------------------- /protoactor/utils/async_timer.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import timedelta 3 | from threading import Thread 4 | 5 | 6 | class AsyncTimer(Thread): 7 | def __init__(self, interval: timedelta, function, args=None, kwargs=None): 8 | super().__init__() 9 | self.interval = interval 10 | self.function = function 11 | self.args = args if args is not None else [] 12 | self.kwargs = kwargs if kwargs is not None else {} 13 | self.loop = None 14 | self._task = None 15 | self._cancelled = False 16 | 17 | def run(self): 18 | self.loop = asyncio.new_event_loop() 19 | loop = self.loop 20 | asyncio.set_event_loop(loop) 21 | try: 22 | self._task = asyncio.ensure_future(self._job()) 23 | loop.run_until_complete(self._task) 24 | finally: 25 | loop.close() 26 | 27 | def cancel(self): 
28 | if self.loop is not None: 29 | self._cancelled = True 30 | 31 | async def _job(self): 32 | await asyncio.sleep(self.interval.total_seconds()) 33 | if not self._cancelled: 34 | await self.function(*self.args, **self.kwargs) 35 | -------------------------------------------------------------------------------- /protoactor/сluster/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protoactor/сluster/__init__.py -------------------------------------------------------------------------------- /protoactor/сluster/grain_call_options.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | 4 | class GrainCallOptions: 5 | def __init__(self): 6 | self.retry_count = 10 7 | self.retry_action = self.exponential_backoff 8 | 9 | @staticmethod 10 | async def exponential_backoff(i: int) -> None: 11 | i += 1 12 | await asyncio.sleep(i * i * 50) 13 | -------------------------------------------------------------------------------- /protoactor/сluster/hash_algorithms/fnv1a32.py: -------------------------------------------------------------------------------- 1 | class FNV1A32(): 2 | def __init__(self): 3 | self._fnv_prime = 0x01000193 4 | self._fnv_offset_basis = 0x811C9DC5 5 | self._hash = self._fnv_offset_basis 6 | self._uint32_max = 0x100000000 7 | 8 | def compute_hash(self, buffer: bytes) -> int: 9 | self._hash = self._fnv_offset_basis 10 | 11 | if buffer is None: 12 | raise ValueError('buffer is empty') 13 | 14 | for b in buffer: 15 | self._hash = self._hash ^ b 16 | self._hash = (self._hash * self._fnv_prime) % self._uint32_max 17 | 18 | return self._hash 19 | -------------------------------------------------------------------------------- /protoactor/сluster/member_status.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABCMeta 2 | from typing import List 3 | 4 | 5 | class AbstractMemberStatusValue(metaclass=ABCMeta): 6 | @abstractmethod 7 | def is_same(self, val: 'AbstractMemberStatusValue') -> bool: 8 | raise NotImplementedError("Should Implement this method") 9 | 10 | 11 | class AbstractMemberStatusValueSerializer(metaclass=ABCMeta): 12 | @abstractmethod 13 | def to_value_bytes(self, val: AbstractMemberStatusValue) -> bytes: 14 | raise NotImplementedError("Should Implement this method") 15 | 16 | @abstractmethod 17 | def from_value_bytes(self, val: bytes) -> AbstractMemberStatusValue: 18 | raise NotImplementedError("Should Implement this method") 19 | 20 | 21 | class MemberStatus: 22 | def __init__(self, member_id: str, host: str, port: int, kinds: List[str], alive: bool, 23 | status_value: AbstractMemberStatusValue): 24 | self._member_id = member_id 25 | if host is None: 26 | raise ValueError('host not set') 27 | self._host = host 28 | if kinds is None: 29 | raise ValueError('kinds not set') 30 | self._kinds = kinds 31 | self._port = port 32 | self._alive = alive 33 | self._status_value = status_value 34 | 35 | @property 36 | def address(self) -> str: 37 | return self._host + ':' + str(self._port) 38 | 39 | @property 40 | def member_id(self) -> str: 41 | return self._member_id 42 | 43 | @property 44 | def host(self) -> str: 45 | return self._host 46 | 47 | @property 48 | def port(self) -> int: 49 | return self._port 50 | 51 | @property 52 | def kinds(self) -> List[str]: 53 | return self._kinds 54 | 55 | @property 56 | def 
alive(self) -> bool: 57 | return self._alive 58 | 59 | @property 60 | def status_value(self) -> AbstractMemberStatusValue: 61 | return self._status_value 62 | 63 | 64 | class NullMemberStatusValueSerializer(AbstractMemberStatusValueSerializer): 65 | def to_value_bytes(self, val: AbstractMemberStatusValue) -> bytes: 66 | return None 67 | 68 | def from_value_bytes(self, val: bytes) -> AbstractMemberStatusValue: 69 | return None -------------------------------------------------------------------------------- /protoactor/сluster/member_status_events.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta 2 | from typing import List 3 | 4 | from protoactor.сluster.member_status import MemberStatus 5 | 6 | 7 | class ClusterTopologyEvent(): 8 | def __init__(self, statuses: List[MemberStatus]): 9 | if statuses is None: 10 | raise ValueError('statuses is empty') 11 | self._statuses = statuses 12 | 13 | @property 14 | def statuses(self) -> List[MemberStatus]: 15 | return self._statuses 16 | 17 | 18 | class AbstractMemberStatusEvent(metaclass=ABCMeta): 19 | def __init__(self, host: str, port: int, kinds: List[str]): 20 | if host is None: 21 | raise ValueError('host is none') 22 | self._host = host 23 | self._port = port 24 | 25 | if kinds is None: 26 | raise ValueError('kinds is none') 27 | self._kinds = kinds 28 | 29 | @property 30 | def address(self) -> str: 31 | return self._host + ":" + str(self._port) 32 | 33 | @property 34 | def host(self) -> str: 35 | return self._host 36 | 37 | @property 38 | def port(self) -> int: 39 | return self._port 40 | 41 | @property 42 | def kinds(self) -> List[str]: 43 | return self._kinds 44 | 45 | 46 | class MemberJoinedEvent(AbstractMemberStatusEvent): 47 | def __init__(self, host: str, port: int, kinds: List[str]): 48 | super().__init__(host, port, kinds) 49 | 50 | 51 | class MemberRejoinedEvent(AbstractMemberStatusEvent): 52 | def __init__(self, host: str, port: int, kinds: List[str]): 53 | super().__init__(host, port, kinds) 54 | 55 | 56 | class MemberLeftEvent(AbstractMemberStatusEvent): 57 | def __init__(self, host: str, port: int, kinds: List[str]): 58 | super().__init__(host, port, kinds) 59 | -------------------------------------------------------------------------------- /protoactor/сluster/member_strategy.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABCMeta 2 | from typing import List 3 | 4 | from protoactor.сluster.member_status import MemberStatus 5 | from protoactor.сluster.rendezvous import Rendezvous 6 | from protoactor.сluster.round_robin import RoundRobin 7 | 8 | 9 | class AbstractMemberStrategy(metaclass=ABCMeta): 10 | @abstractmethod 11 | def get_all_members(self) -> List[MemberStatus]: 12 | raise NotImplementedError("Should Implement this method") 13 | 14 | @abstractmethod 15 | def add_member(self, member: MemberStatus) -> None: 16 | raise NotImplementedError("Should Implement this method") 17 | 18 | @abstractmethod 19 | def update_member(self, member: MemberStatus) -> None: 20 | raise NotImplementedError("Should Implement this method") 21 | 22 | @abstractmethod 23 | def remove_member(self, member: MemberStatus) -> None: 24 | raise NotImplementedError("Should Implement this method") 25 | 26 | @abstractmethod 27 | def get_partition(self, key) -> str: 28 | raise NotImplementedError("Should Implement this method") 29 | 30 | @abstractmethod 31 | def get_activator(self) -> str: 32 | raise NotImplementedError("Should 
Implement this method") 33 | 34 | 35 | class SimpleMemberStrategy(AbstractMemberStrategy): 36 | def __init__(self): 37 | self._members = [] 38 | self._rdv = Rendezvous(self) 39 | self._rr = RoundRobin(self) 40 | 41 | def get_all_members(self) -> List[MemberStatus]: 42 | return self._members 43 | 44 | def add_member(self, member: MemberStatus) -> None: 45 | self._members.append(member) 46 | self._rdv.update_rdv() 47 | 48 | def update_member(self, member: MemberStatus) -> None: 49 | for i in range(len(self._members)): 50 | if self._members[i].address == member.address: 51 | self._members[i] = member 52 | 53 | def remove_member(self, member: MemberStatus) -> None: 54 | for i in range(len(self._members)): 55 | if self._members[i].address == member.address: 56 | del self._members[i] 57 | self._rdv.update_rdv() 58 | 59 | def get_partition(self, key) -> str: 60 | return self._rdv.get_node(key) 61 | 62 | def get_activator(self) -> str: 63 | return self._rr.get_node() 64 | 65 | -------------------------------------------------------------------------------- /protoactor/сluster/messages.py: -------------------------------------------------------------------------------- 1 | from protoactor.actor import PID 2 | 3 | 4 | class WatchPidRequest(): 5 | def __init__(self, pid: PID): 6 | self.pid = pid -------------------------------------------------------------------------------- /protoactor/сluster/pid_cache.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple 2 | 3 | from protoactor.actor.actor import Actor, AbstractContext, GlobalRootContext 4 | from protoactor.actor.event_stream import GlobalEventStream 5 | from protoactor.actor.messages import Started 6 | from protoactor.actor.props import Props 7 | from protoactor.actor.protos_pb2 import Terminated, PID 8 | from protoactor.actor.supervision import Supervision 9 | from protoactor.actor.utils import Singleton 10 | from protoactor.сluster.member_status_events import AbstractMemberStatusEvent, MemberLeftEvent, MemberRejoinedEvent 11 | from protoactor.сluster.messages import WatchPidRequest 12 | 13 | 14 | class PidCache(metaclass=Singleton): 15 | def __init__(self): 16 | self._watcher = None 17 | self._cluster_topology_evn_sub = None 18 | self._cache = {} 19 | self._reverse_cache = {} 20 | 21 | async def setup(self) -> None: 22 | props = Props.from_producer(lambda: PidCacheWatcher()) \ 23 | .with_guardian_supervisor_strategy(Supervision.always_restart_strategy) 24 | 25 | self._watcher = GlobalRootContext.spawn_named(props, 'PidCacheWatcher') 26 | self._cluster_topology_evn_sub = GlobalEventStream.subscribe(self.process_member_status_event, 27 | type(AbstractMemberStatusEvent)) 28 | 29 | async def stop(self) -> None: 30 | await GlobalRootContext.stop(self._watcher) 31 | GlobalEventStream.unsubscribe(self._cluster_topology_evn_sub.id) 32 | 33 | def process_member_status_event(self, evn: AbstractMemberStatusEvent) -> None: 34 | if isinstance(evn, MemberLeftEvent) or isinstance(evn, MemberRejoinedEvent): 35 | self.remove_cache_by_member_address(evn.address) 36 | 37 | def get_cache(self, name: str) -> Tuple[PID, bool]: 38 | if name in self._cache.keys(): 39 | return self._cache[name], True 40 | else: 41 | return None, False 42 | 43 | async def add_cache(self, name: str, pid: PID) -> bool: 44 | if name not in self._cache.keys(): 45 | key = pid.to_short_string() 46 | self._cache[name] = pid 47 | self._reverse_cache[key] = name 48 | 49 | await GlobalRootContext.send(self._watcher, WatchPidRequest(pid)) 
50 | return True 51 | return False 52 | 53 | def remove_cache_by_pid(self, pid: PID) -> None: 54 | key = pid.to_short_string() 55 | if key in self._reverse_cache: 56 | name = self._reverse_cache[key] 57 | del self._reverse_cache[key] 58 | del self._cache[name] 59 | 60 | def remove_cache_by_name(self, name: str) -> None: 61 | if name in self._cache: 62 | key = self._cache[name] 63 | del self._reverse_cache[key] 64 | del self._cache[name] 65 | 66 | def remove_cache_by_member_address(self, member_address: str) -> None: 67 | for name, pid in self._cache.items(): 68 | if pid.address == member_address: 69 | key = pid.to_short_string() 70 | del self._reverse_cache[key] 71 | del self._cache[name] 72 | 73 | 74 | class PidCacheWatcher(Actor): 75 | def __init__(self): 76 | self._logger = None 77 | 78 | async def receive(self, context: AbstractContext) -> None: 79 | msg = context.message 80 | if isinstance(msg, Started): 81 | # self._logger.log_debug('Started PidCacheWatcher') 82 | pass 83 | elif isinstance(msg, WatchPidRequest): 84 | await context.watch(msg.pid) 85 | elif isinstance(msg, Terminated): 86 | PidCache().remove_cache_by_pid(msg.who) 87 | -------------------------------------------------------------------------------- /protoactor/сluster/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package cluster; 3 | 4 | import "protoactor/actor/protos.proto"; 5 | 6 | message TakeOwnership { 7 | actor.PID pid = 1; 8 | string name = 2; 9 | } 10 | 11 | message GrainRequest { 12 | int32 method_index = 1; 13 | bytes message_data = 2; 14 | } 15 | 16 | message GrainResponse { 17 | bytes message_data = 1; 18 | } 19 | 20 | message GrainErrorResponse { 21 | string err = 1; 22 | } -------------------------------------------------------------------------------- /protoactor/сluster/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 
2 | # source: protoactor/сluster/protos.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /protoactor/сluster/providers/abstract_cluster_provider.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | from typing import List 3 | 4 | from protoactor.сluster.member_status import AbstractMemberStatusValue, AbstractMemberStatusValueSerializer 5 | 6 | 7 | class AbstractClusterProvider(metaclass=ABCMeta): 8 | @abstractmethod 9 | async def register_member_async(self, cluster_name: str, host: str, port: int, kinds: List[str], 10 | status_value: AbstractMemberStatusValue, 11 | serializer: AbstractMemberStatusValueSerializer) -> None: 12 | raise NotImplementedError("Should Implement this method") 13 | 14 | @abstractmethod 15 | async def monitor_member_status_changes(self) -> None: 16 | raise NotImplementedError("Should Implement this method") 17 | 18 | @abstractmethod 19 | async def update_member_status_value_async(self, status_value: AbstractMemberStatusValue) -> None: 20 | raise NotImplementedError("Should Implement this method") 21 | 22 | @abstractmethod 23 | async def deregister_member_async(self) -> None: 24 | raise NotImplementedError("Should Implement this method") 25 | 26 | @abstractmethod 27 | async def shutdown(self) -> None: 28 | raise NotImplementedError("Should Implement this method") 29 | -------------------------------------------------------------------------------- /protoactor/сluster/providers/consul/startconsul.bat: -------------------------------------------------------------------------------- 1 | consul agent -server -bootstrap -data-dir /tmp/consul -bind=127.0.0.1 -ui -client 0.0.0.0 -------------------------------------------------------------------------------- /protoactor/сluster/providers/single_remote_instance/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package singleremoteinstance; 3 | 4 | message GetKinds {} 5 | message GetKindsResponse { 6 | repeated string kinds=1; 7 | } 8 | -------------------------------------------------------------------------------- /protoactor/сluster/providers/single_remote_instance/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 2 | # source: protoactor/сluster/providers/single_remote_instance/protos.proto 3 | # plugin: grpclib.plugin.main 4 | -------------------------------------------------------------------------------- /protoactor/сluster/providers/single_remote_instance/protos_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: protoactor/сluster/providers/single_remote_instance/protos.proto 4 | 5 | import sys 6 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 7 | from google.protobuf import descriptor as _descriptor 8 | from google.protobuf import message as _message 9 | from google.protobuf import reflection as _reflection 10 | from google.protobuf import symbol_database as _symbol_database 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | 17 | 18 | DESCRIPTOR = _descriptor.FileDescriptor( 19 | name='protoactor/сluster/providers/single_remote_instance/protos.proto', 20 | package='singleremoteinstance', 21 | syntax='proto3', 22 | serialized_options=None, 23 | serialized_pb=_b('\nAprotoactor/\xd1\x81luster/providers/single_remote_instance/protos.proto\x12\x14singleremoteinstance\"\n\n\x08GetKinds\"!\n\x10GetKindsResponse\x12\r\n\x05kinds\x18\x01 \x03(\tb\x06proto3') 24 | ) 25 | 26 | 27 | 28 | 29 | _GETKINDS = _descriptor.Descriptor( 30 | name='GetKinds', 31 | full_name='singleremoteinstance.GetKinds', 32 | filename=None, 33 | file=DESCRIPTOR, 34 | containing_type=None, 35 | fields=[ 36 | ], 37 | extensions=[ 38 | ], 39 | nested_types=[], 40 | enum_types=[ 41 | ], 42 | serialized_options=None, 43 | is_extendable=False, 44 | syntax='proto3', 45 | extension_ranges=[], 46 | oneofs=[ 47 | ], 48 | serialized_start=91, 49 | serialized_end=101, 50 | ) 51 | 52 | 53 | _GETKINDSRESPONSE = _descriptor.Descriptor( 54 | name='GetKindsResponse', 55 | full_name='singleremoteinstance.GetKindsResponse', 56 | filename=None, 57 | file=DESCRIPTOR, 58 | containing_type=None, 59 | fields=[ 60 | _descriptor.FieldDescriptor( 61 | name='kinds', full_name='singleremoteinstance.GetKindsResponse.kinds', index=0, 62 | number=1, type=9, cpp_type=9, label=3, 63 | has_default_value=False, default_value=[], 64 | message_type=None, enum_type=None, containing_type=None, 65 | is_extension=False, extension_scope=None, 66 | serialized_options=None, file=DESCRIPTOR), 67 | ], 68 | extensions=[ 69 | ], 70 | nested_types=[], 71 | enum_types=[ 72 | ], 73 | serialized_options=None, 74 | is_extendable=False, 75 | syntax='proto3', 76 | extension_ranges=[], 77 | oneofs=[ 78 | ], 79 | serialized_start=103, 80 | serialized_end=136, 81 | ) 82 | 83 | DESCRIPTOR.message_types_by_name['GetKinds'] = _GETKINDS 84 | DESCRIPTOR.message_types_by_name['GetKindsResponse'] = _GETKINDSRESPONSE 85 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 86 | 87 | GetKinds = _reflection.GeneratedProtocolMessageType('GetKinds', (_message.Message,), { 88 | 'DESCRIPTOR' : _GETKINDS, 89 | '__module__' : 'protoactor.сluster.providers.single_remote_instance.protos_pb2' 90 | # @@protoc_insertion_point(class_scope:singleremoteinstance.GetKinds) 91 | }) 92 | _sym_db.RegisterMessage(GetKinds) 93 | 94 | GetKindsResponse = _reflection.GeneratedProtocolMessageType('GetKindsResponse', (_message.Message,), { 95 | 'DESCRIPTOR' : _GETKINDSRESPONSE, 96 | '__module__' : 'protoactor.сluster.providers.single_remote_instance.protos_pb2' 97 | # @@protoc_insertion_point(class_scope:singleremoteinstance.GetKindsResponse) 98 | }) 99 | _sym_db.RegisterMessage(GetKindsResponse) 100 | 101 | 102 | # @@protoc_insertion_point(module_scope) 103 | -------------------------------------------------------------------------------- /protoactor/сluster/rendezvous.py: -------------------------------------------------------------------------------- 1 | from protoactor.сluster.hash_algorithms.fnv1a32 import FNV1A32 2 | 3 | is_import = 
False 4 | if is_import: 5 | from protoactor.сluster.member_strategy import AbstractMemberStrategy 6 | 7 | 8 | class Rendezvous: 9 | def __init__(self, member_strategy: 'AbstractMemberStrategy'): 10 | self._member_strategy = member_strategy 11 | self._hash_algorithm = FNV1A32() 12 | self._member_hashes = None 13 | self.update_rdv() 14 | 15 | def get_node(self, key: str): 16 | members = self._member_strategy.get_all_members() 17 | if members is None or len(members) == 0: 18 | return '' 19 | 20 | if len(members) == 1: 21 | return members[0].address 22 | 23 | key_bytes = key.encode() 24 | max_score = 0 25 | max_node = None 26 | 27 | for i in range(len(members)): 28 | member = members[i] 29 | if member.alive: 30 | hash_bytes = self._member_hashes[i] 31 | score = self._rdv_hash(hash_bytes, key_bytes) 32 | if score > max_score: 33 | max_score = score 34 | max_node = member 35 | 36 | if max_node is None: 37 | return '' 38 | else: 39 | return max_node.address 40 | 41 | def update_rdv(self): 42 | self._member_hashes = [member.address.encode() for member in self._member_strategy.get_all_members()] 43 | 44 | def _rdv_hash(self, node: bytes, key: bytes) -> int: 45 | hash_bytes = key + node 46 | return self._hash_algorithm.compute_hash(hash_bytes) 47 | -------------------------------------------------------------------------------- /protoactor/сluster/round_robin.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 3 | is_import = False 4 | if is_import: 5 | from protoactor.сluster.member_strategy import AbstractMemberStrategy 6 | 7 | 8 | class RoundRobin: 9 | def __init__(self, member_strategy: 'AbstractMemberStrategy'): 10 | self._val = 0 11 | self._lock = threading.RLock() 12 | self._member_strategy = member_strategy 13 | 14 | def get_node(self) -> str: 15 | members = self._member_strategy.get_all_members() 16 | count = len(members) 17 | 18 | if count == 0: 19 | return '' 20 | if count == 1: 21 | return members[0].address 22 | with self._lock: 23 | self._val += 1 24 | 25 | return members[self._val % count].address 26 | -------------------------------------------------------------------------------- /protobuf/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/protobuf/__init__.py -------------------------------------------------------------------------------- /protobuf/proto_grain_generator/grain_gen.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import re 3 | 4 | import jinja2 5 | 6 | from protobuf.proto_grain_generator.proto import ProtoFile, ProtoService, ProtoMethod 7 | 8 | 9 | class GrainGen: 10 | 11 | def generate(self, path: str) -> str: 12 | proto_file = self.__get_proto_file(path) 13 | env = jinja2.Environment(loader=jinja2.PackageLoader('protobuf', 'templates')) 14 | env.globals['convert_to_snake_case'] = self.__convert_to_snake_case 15 | template = env.get_template('template.txt') 16 | return template.render(proto_file=proto_file) 17 | 18 | @staticmethod 19 | def __get_proto_file(path: str) -> ProtoFile: 20 | proto_file = ProtoFile() 21 | 22 | spec = importlib.util.spec_from_file_location(path, path) 23 | module = importlib.util.module_from_spec(spec) 24 | spec.loader.exec_module(module) 25 | 26 | for service_name in module.DESCRIPTOR.services_by_name.keys(): 27 | proto_service = ProtoService(service_name) 28 | service = 
module.DESCRIPTOR.services_by_name[service_name] 29 | for index, method in enumerate(service.methods): 30 | proto_service.methods.append(ProtoMethod(index, 31 | method.name, 32 | method.input_type.name, 33 | method.output_type.name)) 34 | proto_file.services.append(proto_service) 35 | return proto_file 36 | 37 | @staticmethod 38 | def __convert_to_snake_case(string: str) -> str: 39 | s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', string) 40 | return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() 41 | 42 | 43 | GrainGen = GrainGen() 44 | -------------------------------------------------------------------------------- /protobuf/proto_grain_generator/proto.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | 4 | class ProtoMessage: 5 | def __init__(self, name: str): 6 | self._name = name 7 | 8 | @property 9 | def name(self) -> str: 10 | return self._name 11 | 12 | 13 | class ProtoMethod: 14 | def __init__(self, index: int, name: str, input_name: str, output_name: str): 15 | self._index = index 16 | self._name = name 17 | self._input_name = input_name 18 | self._output_name = output_name 19 | 20 | @property 21 | def index(self) -> int: 22 | return self._index 23 | 24 | @property 25 | def name(self) -> str: 26 | return self._name 27 | 28 | @property 29 | def input_name(self) -> str: 30 | return self._input_name 31 | 32 | @property 33 | def output_name(self) -> str: 34 | return self._output_name 35 | 36 | 37 | class ProtoService: 38 | def __init__(self, name: str, methods: List[str] = None): 39 | if methods is None: 40 | methods = [] 41 | self._name = name 42 | self._methods = methods 43 | 44 | @property 45 | def name(self) -> str: 46 | return self._name 47 | 48 | @property 49 | def methods(self) -> List[ProtoMethod]: 50 | return self._methods 51 | 52 | 53 | class ProtoFile: 54 | def __init__(self, messages=None, services=None): 55 | if services is None: 56 | services = [] 57 | if messages is None: 58 | messages = [] 59 | self._messages = messages 60 | self._services = services 61 | 62 | @property 63 | def messages(self) -> List[ProtoMessage]: 64 | return self._messages 65 | 66 | @property 67 | def services(self) -> List[ProtoService]: 68 | return self._services 69 | -------------------------------------------------------------------------------- /protobuf/proto_grain_generator/proto_grain_generator.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | from protobuf.proto_grain_generator import grain_gen 5 | from protobuf.proto_grain_generator.grain_gen import GrainGen 6 | 7 | 8 | def main(argv): 9 | try: 10 | if len(argv) < 1: 11 | print('You need to specify a path to the proto file to use') 12 | else: 13 | input_file_path = get_input_file_path(argv) 14 | output_file_path = get_output_file_path(argv) 15 | grain = GrainGen.generate(input_file_path) 16 | 17 | with open(output_file_path, 'w', encoding="utf-8") as writer: 18 | writer.write(grain) 19 | except Exception as ex: 20 | print(str(ex)) 21 | 22 | 23 | def get_input_file_path(argv): 24 | if os.path.isabs(argv[0]): 25 | return argv[0] 26 | else: 27 | return os.path.join(os.getcwd(), argv[0]) 28 | 29 | 30 | def get_output_file_path(argv): 31 | if len(argv) == 1: 32 | if os.path.isabs(argv[0]): 33 | file = os.path.basename(argv[0]) 34 | file_name, file_extension = os.path.splitext(file) 35 | file_name = file_name.split("_")[0] 36 | return os.path.join(os.path.dirname(argv[0]), f'{file_name}.py') 37 | else: 38 | 
file_name, file_extension = os.path.splitext(argv[0]) 39 | file_name = file_name.split("_")[0] 40 | return os.path.join(os.getcwd(), f'{file_name}.py') 41 | else: 42 | if os.path.isabs(argv[1]): 43 | return argv[1] 44 | else: 45 | return os.path.join(os.getcwd(), argv[1]) 46 | 47 | 48 | if __name__ == "__main__": 49 | main(sys.argv[1:]) 50 | 51 | -------------------------------------------------------------------------------- /pylintrc: -------------------------------------------------------------------------------- 1 | [MESSAGES CONTROL] 2 | # 3 | # C0103 - Invalid name "%s" (should match %s) 4 | # C0111 - Missing docstring 5 | # E0102 - %s already defined line %s 6 | # Does not understand @property getters and setters 7 | # E0202 - An attribute inherited from %s hide this method 8 | # Does not understand @property getters and setters 9 | # E1101 - %s %r has no %r member 10 | # Does not understand @property getters and setters 11 | # R0801 - Similar lines in %s files 12 | # R0903 - Too few public methods (%s/%s) 13 | # R0904 - Too many public methods (%s/%s) 14 | # R0921 - Abstract class not referenced 15 | # W0141 - Used builtin function '%s' 16 | # W0142 - Used * or ** magic 17 | # W0613 - Unused argument %r 18 | # 19 | disable = C0103,C0111,E0102,E0202,E261,E1101,R0801,R0903,R0904,R0921,W0141,W0142,W0613 20 | -------------------------------------------------------------------------------- /requirements.dev.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | flake8 3 | pytest 4 | tox 5 | pytest-cov 6 | pytest-xdist 7 | mypy 8 | protobuf 9 | grpclib 10 | pytest-asyncio 11 | aiohttp 12 | jinja2 13 | jaeger-client 14 | grpcio-tools 15 | 16 | 17 | # Check tha the MANIFEST.in is correct 18 | check-manifest 19 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | protobuf 2 | grpclib 3 | aiohttp 4 | jinja2 5 | jaeger-client 6 | 7 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | application-import-names = protoactor,tests 3 | exclude = .git,.tox,*.env,*.venv 4 | 5 | [wheel] 6 | universal = 1 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from setuptools import setup 4 | 5 | 6 | def get_version(): 7 | init_py = open('protoactor/__init__.py').read() 8 | # TODO: make this work with both single and double quotes 9 | metadata = dict(re.findall("__([a-z]+)__ = \"([^\"]+)\"", init_py)) 10 | return metadata['version'] 11 | 12 | 13 | setup( 14 | name="ProtoActor Python", 15 | version=get_version(), 16 | license="Apache License 2.0", 17 | description="Protocol buffers & actors", 18 | long_description="", 19 | packages=["protoactor"], 20 | package_dir={"protoactor": "protoactor"}, 21 | zip_safe=False, 22 | include_package_data=True, 23 | platforms="any", 24 | classifiers=[ 25 | "Environment :: Web Environment", 26 | "Intended Audience :: Developers", 27 | "License :: OSI Approved :: BSD License", 28 | "Operating System :: OS Independent", 29 | "Programming Language :: Python", 30 | "Topic :: Internet :: WWW/HTTP :: Dynamic Content", 31 | "Topic :: Software Development :: Libraries :: Python Modules" 32 | ] 33 | ) 34 | 
-------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | -------------------------------------------------------------------------------- /tests/actor/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/tests/actor/__init__.py -------------------------------------------------------------------------------- /tests/actor/test_actor.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import signal 3 | from datetime import timedelta 4 | 5 | import pytest 6 | 7 | from protoactor.actor.actor_context import RootContext 8 | from protoactor.actor.props import Props 9 | 10 | context = RootContext() 11 | 12 | 13 | async def hello_function(context): 14 | message = context.message 15 | if isinstance(message, str): 16 | await context.respond("hey") 17 | 18 | 19 | async def empty_receive(context): 20 | pass 21 | 22 | 23 | @pytest.mark.asyncio 24 | async def test_request_actor_async(): 25 | props = Props.from_func(hello_function) 26 | pid = context.spawn(props) 27 | reply = await context.request_future(pid, "hello") 28 | 29 | assert reply == "hey" 30 | 31 | 32 | @pytest.mark.asyncio 33 | async def test_request_actor_async_should_raise_timeout_exception_when_timeout_is_reached(): 34 | with pytest.raises(TimeoutError) as excinfo: 35 | props = Props.from_func(empty_receive) 36 | pid = context.spawn(props) 37 | await context.request_future(pid, "", timedelta(seconds=1)) 38 | 39 | assert 'TimeoutError' in str(excinfo) 40 | 41 | @pytest.mark.asyncio 42 | async def test_request_actor_async_should_not_raise_timeout_exception_when_result_is_first(): 43 | props = Props.from_func(hello_function) 44 | pid = context.spawn(props) 45 | reply = await context.request_future(pid, "hello", timedelta(seconds=1)) 46 | 47 | assert reply == "hey" -------------------------------------------------------------------------------- /tests/actor/test_behavior.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from protoactor.actor.actor import Actor 4 | from protoactor.actor.actor_context import RootContext 5 | from protoactor.actor.behavior import Behavior 6 | from protoactor.actor.props import Props 7 | 8 | 9 | class PressSwitch: 10 | pass 11 | 12 | 13 | class Touch: 14 | pass 15 | 16 | 17 | class HitWithHammer: 18 | pass 19 | 20 | 21 | class LightBulb(Actor): 22 | def __init__(self): 23 | self._smashed = False 24 | self._behavior = Behavior() 25 | self._behavior.become(self.off) 26 | 27 | async def off(self, context): 28 | if isinstance(context.message, PressSwitch): 29 | await context.respond("Turning on") 30 | self._behavior.become(self.on) 31 | elif isinstance(context.message, Touch): 32 | await context.respond("Cold") 33 | 34 | async def on(self, context): 35 | if isinstance(context.message, PressSwitch): 36 | await context.respond("Turning off") 37 | self._behavior.become(self.off) 38 | elif isinstance(context.message, Touch): 39 | await context.respond("Hot!") 40 | 41 | async def receive(self, context): 42 | if isinstance(context.message, HitWithHammer): 43 | await context.respond("Smashed!") 44 | self._smashed = True 45 | elif isinstance(context.message, PressSwitch) and self._smashed: 46 | 
await context.respond("Broken") 47 | elif isinstance(context.message, Touch) and self._smashed: 48 | await context.respond("OW!") 49 | 50 | await self._behavior.receive_async(context) 51 | 52 | 53 | @pytest.mark.asyncio 54 | async def test_can_change_states(): 55 | test_actor_props = Props.from_producer(LightBulb) 56 | context = RootContext() 57 | actor = context.spawn(test_actor_props) 58 | assert await context.request_future(actor, PressSwitch()) == "Turning on" 59 | assert await context.request_future(actor, Touch()) == "Hot!" 60 | assert await context.request_future(actor, PressSwitch()) == "Turning off" 61 | assert await context.request_future(actor, Touch()) == "Cold" 62 | 63 | 64 | @pytest.mark.asyncio 65 | async def test_can_use_global_behaviour(): 66 | context = RootContext() 67 | test_actor_props = Props.from_producer(LightBulb) 68 | actor = context.spawn(test_actor_props) 69 | _ = await context.request_future(actor, PressSwitch()) 70 | assert await context.request_future(actor, HitWithHammer()) == "Smashed!" 71 | assert await context.request_future(actor, PressSwitch()) == "Broken" 72 | assert await context.request_future(actor, Touch()) == "OW!" 73 | 74 | 75 | @pytest.mark.asyncio 76 | async def test_pop_behavior_should_restore_pushed_behavior(): 77 | behavior = Behavior() 78 | 79 | async def func_1(ctx): 80 | if isinstance(ctx.message, str): 81 | async def func_2(ctx2): 82 | await ctx2.respond(42) 83 | behavior.unbecome_stacked() 84 | 85 | behavior.become_stacked(func_2) 86 | await ctx.respond(ctx.message) 87 | 88 | behavior.become(func_1) 89 | 90 | props = Props.from_func(behavior.receive_async) 91 | context = RootContext() 92 | pid = context.spawn(props) 93 | 94 | reply = await context.request_future(pid, "number") 95 | reply_after_push = await context.request_future(pid, None) 96 | reply_after_pop = await context.request_future(pid, "answertolifetheuniverseandeverything") 97 | 98 | assert reply + str(reply_after_push) + reply_after_pop == "number42answertolifetheuniverseandeverything" 99 | -------------------------------------------------------------------------------- /tests/actor/test_deps.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | 5 | def test_circular_dependencies(): 6 | """Verify that there are no circular dependencies""" 7 | # from protoactor.messages import AutoReceiveMessage 8 | # from protoactor.process_registry import ProcessRegistry 9 | # from protoactor.actor import Props 10 | -------------------------------------------------------------------------------- /tests/actor/test_event_stream.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from protoactor.actor.event_stream import EventStream 4 | from protoactor.mailbox.dispatcher import Dispatchers 5 | 6 | @pytest.mark.asyncio 7 | async def test_can_subscribe_to_specific_event_types(): 8 | received_events = [] 9 | 10 | async def fun(msg): 11 | received_events.append(msg) 12 | 13 | event_stream = EventStream() 14 | event_stream.subscribe(fun, str) 15 | await event_stream.publish('hello') 16 | 17 | assert received_events[0] == 'hello' 18 | 19 | @pytest.mark.asyncio 20 | async def test_can_subscribe_to_all_event_types(): 21 | received_events = [] 22 | 23 | async def fun(msg): 24 | received_events.append(msg) 25 | 26 | event_stream = EventStream() 27 | event_stream.subscribe(fun) 28 | 29 | await event_stream.publish('hello') 30 | assert received_events[0] 
== 'hello' 31 | 32 | await event_stream.publish(1) 33 | assert received_events[1] == 1 34 | 35 | await event_stream.publish(True) 36 | assert received_events[2] is True 37 | 38 | @pytest.mark.asyncio 39 | async def test_can_unsubscribe_from_events(): 40 | received_events = [] 41 | 42 | async def fun(msg): 43 | received_events.append(msg) 44 | 45 | event_stream = EventStream() 46 | subscription = event_stream.subscribe(fun, str) 47 | await event_stream.publish('first message') 48 | subscription.unsubscribe() 49 | await event_stream.publish('second message') 50 | 51 | assert len(received_events) == 1 52 | 53 | @pytest.mark.asyncio 54 | async def test_only_receive_subscribed_to_event_types(): 55 | received_events = [] 56 | 57 | async def fun(msg): 58 | received_events.append(msg) 59 | 60 | event_stream = EventStream() 61 | event_stream.subscribe(fun, int) 62 | await event_stream.publish('not an int') 63 | 64 | assert len(received_events) == 0 65 | 66 | @pytest.mark.asyncio 67 | async def test_can_subscribe_to_specific_event_types_async(): 68 | 69 | async def fun(msg): 70 | received = msg 71 | assert received == 'hello' 72 | 73 | event_stream = EventStream() 74 | event_stream.subscribe(fun, str, Dispatchers().default_dispatcher) 75 | await event_stream.publish('hello') -------------------------------------------------------------------------------- /tests/actor/test_local_context.py: -------------------------------------------------------------------------------- 1 | 2 | # import asyncio 3 | # from queue import Queue 4 | # 5 | # import pytest 6 | # 7 | # from protoactor.actor.actor_context import RootContext 8 | # from protoactor.actor.props import Props 9 | # 10 | # context = RootContext() 11 | # 12 | # 13 | # @pytest.mark.asyncio 14 | # async def test_reenter_after_can_do_action_for_task(): 15 | # queue = Queue() 16 | # 17 | # async def actor(ctx): 18 | # if ctx.message == 'hello1': 19 | # async def target(): 20 | # await asyncio.sleep(0.1) 21 | # queue.put('bar') 22 | # return 'hey1' 23 | # 24 | # task = asyncio.ensure_future(target()) 25 | # 26 | # async def action(): 27 | # queue.put('baz') 28 | # await ctx.respond(task.result()) 29 | # 30 | # ctx.reenter_after(task, action) 31 | # elif ctx.message == 'hello2': 32 | # queue.put('foo') 33 | # await ctx.respond('hey2') 34 | # 35 | # props = Props.from_func(actor) 36 | # pid = context.spawn(props) 37 | # 38 | # task1 = asyncio.ensure_future(context.request_future(pid, "hello1")) 39 | # task2 = asyncio.ensure_future(context.request_future(pid, "hello2")) 40 | # 41 | # reply1 = await task1 42 | # reply2 = await task2 43 | # 44 | # assert reply1 == 'hey1' 45 | # assert reply2 == 'hey2' 46 | # 47 | # assert 'foo' == queue.get() 48 | # assert 'bar' == queue.get() 49 | # assert 'baz' == queue.get() 50 | -------------------------------------------------------------------------------- /tests/actor/test_message_envelope.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from protoactor.actor import PID 4 | from protoactor.actor.message_envelope import MessageEnvelope 5 | from protoactor.actor.message_header import MessageHeader 6 | 7 | 8 | @pytest.fixture() 9 | def message_envelope(): 10 | message = "test" 11 | sender = PID() 12 | sender.address = "test" 13 | sender.id = "test" 14 | header = MessageHeader() 15 | return MessageEnvelope(message, sender, header) 16 | 17 | 18 | def test_wrap(message_envelope): 19 | envelope = MessageEnvelope.wrap(message_envelope.message) 20 | assert 
message_envelope.message == envelope.message 21 | 22 | 23 | def test_create_new_message_envelope_with_sender(message_envelope): 24 | sender = PID() 25 | sender.address = "test" 26 | sender.id = "test" 27 | envelope = message_envelope.with_sender(sender) 28 | 29 | assert message_envelope.message == envelope.message 30 | assert sender == envelope.sender 31 | assert message_envelope.header == envelope.header 32 | 33 | 34 | def test_create_new_message_envelope_with_message(message_envelope): 35 | message = "test message" 36 | envelope = message_envelope.with_message(message) 37 | 38 | assert message == envelope.message 39 | assert message_envelope.sender == envelope.sender 40 | assert message_envelope.header == envelope.header 41 | 42 | 43 | def test_create_new_message_envelope_with_header_based_on_key_value_pair_collection(message_envelope): 44 | collection = {"Test Key": "Test Value", "Test Key 1": "Test Value 1"} 45 | envelope = message_envelope.with_header(collection) 46 | assert envelope.header["Test Key"] == "Test Value" 47 | 48 | 49 | def test_create_new_message_envelope_with_header_based_on_message_header(message_envelope): 50 | key = "Test Key" 51 | value = "Test Value" 52 | message_header = MessageHeader({key: value}) 53 | envelope = message_envelope.with_header(message_header) 54 | assert envelope.header[key] == value 55 | 56 | 57 | def test_create_new_message_envelope_with_header_based_on_key_value_pair(message_envelope): 58 | key = "Test Key" 59 | value = "Test Value" 60 | envelope = message_envelope.with_header(key=key, value=value) 61 | assert envelope.header[key] == value 62 | 63 | 64 | def test_unwrap(message_envelope): 65 | message, sender, header = MessageEnvelope.unwrap(message_envelope) 66 | assert message == message_envelope.message 67 | assert sender == message_envelope.sender 68 | assert header == message_envelope.header 69 | 70 | 71 | def test_unwrap_header(message_envelope): 72 | assert 0 == len(message_envelope.header) 73 | 74 | 75 | def test_unwrap_message(message_envelope): 76 | message = MessageEnvelope.unwrap_message(message_envelope) 77 | assert message == message_envelope.message 78 | 79 | 80 | def test_unwrap_sender(message_envelope): 81 | sender = MessageEnvelope.unwrap_sender(message_envelope) 82 | assert sender == message_envelope.sender 83 | -------------------------------------------------------------------------------- /tests/actor/test_persistance.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from unittest.mock import Mock 3 | 4 | from protoactor.actor.persistence import InMemoryProviderState 5 | 6 | 7 | def test_InMemoryProviderState_get_snapshot(): 8 | loop = asyncio.new_event_loop() 9 | mem_ps = InMemoryProviderState() 10 | 11 | snapshot = loop.run_until_complete(mem_ps.get_snapshot('test_actor_name')) 12 | 13 | assert snapshot is None 14 | 15 | 16 | def test_InMemoryProviderState_get_events(): 17 | loop = asyncio.new_event_loop() 18 | mem_ps = InMemoryProviderState() 19 | callback_m = Mock() 20 | 21 | loop.run_until_complete(mem_ps.get_events('test_actor_name', 0, callback_m)) 22 | 23 | assert callback_m.called == False 24 | 25 | 26 | def test_InMemoryProviderState_persist_event(): 27 | loop = asyncio.new_event_loop() 28 | mem_ps = InMemoryProviderState() 29 | callback_m = Mock() 30 | obj = object() 31 | 32 | loop.run_until_complete(mem_ps.persist_event('test_actor_name', 0, obj)) 33 | loop.run_until_complete(mem_ps.get_events('test_actor_name', 0, callback_m)) 34 | 35 | 
callback_m.assert_called_once_with(obj) 36 | -------------------------------------------------------------------------------- /tests/actor/test_pid.py: -------------------------------------------------------------------------------- 1 | # import pytest 2 | # from unittest import mock 3 | # from protoactor import pid 4 | # 5 | # 6 | # @pytest.fixture 7 | # def mocked_pid(): 8 | # _process = mock.Mock() 9 | # _process.send_user_message = mock.MagicMock(return_value=None) 10 | # _process.send_system_message = mock.MagicMock(return_value=None) 11 | # _process.stop = mock.MagicMock(return_value=None) 12 | # return pid.PID("sample_address", "sample_id", _process) 13 | # 14 | # 15 | # def test_adress(mocked_pid): 16 | # assert mocked_pid.address == "sample_address" 17 | # 18 | # 19 | # def test_id(mocked_pid): 20 | # assert mocked_pid.id == "sample_id" 21 | # 22 | # 23 | # def test_process(mocked_pid): 24 | # _process = mock.Mock() 25 | # mocked_pid.process = _process 26 | # assert mocked_pid.process == _process 27 | # 28 | # 29 | # def test_tell(mocked_pid): 30 | # message = "test_message" 31 | # mocked_pid.tell(message) 32 | # mocked_pid.process.send_user_message.assert_called_once_with(mocked_pid, message) 33 | # 34 | # 35 | # def test_send_system_message(mocked_pid): 36 | # message = "test_message" 37 | # mocked_pid.send_system_message(message) 38 | # mocked_pid.process.send_system_message.assert_called_once_with(mocked_pid, message) 39 | # 40 | # 41 | # def test_stop(mocked_pid): 42 | # mocked_pid.stop() 43 | # mocked_pid.process.stop.assert_called_once_with() 44 | # 45 | # 46 | # def test_repr(mocked_pid): 47 | # return str(mocked_pid) == "{} / {}".format(mocked_pid.address, mocked_pid.id) 48 | -------------------------------------------------------------------------------- /tests/actor/test_process.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import pytest 4 | from unittest.mock import Mock 5 | from protoactor.actor.process import ActorProcess 6 | from protoactor.mailbox.mailbox import DefaultMailbox 7 | 8 | 9 | @pytest.fixture(scope='module', ) 10 | def process_data(): 11 | mailbox = DefaultMailbox(None, None, None) 12 | local_process = ActorProcess(mailbox) 13 | 14 | return { 15 | 'mailbox': mailbox, 16 | 'local_process': local_process, 17 | } 18 | 19 | 20 | def test_get_mailbox_property(process_data): 21 | mailbox = process_data['mailbox'] 22 | lp = process_data['local_process'] 23 | 24 | assert lp.mailbox == mailbox 25 | 26 | @pytest.mark.asyncio 27 | async def test_send_user_message(process_data): 28 | mailbox = process_data['mailbox'] 29 | lp = process_data['local_process'] 30 | 31 | mailbox.post_user_message = Mock() 32 | await lp.send_user_message(1, "message") 33 | mess = mailbox.post_user_message.call_args[0][0] 34 | 35 | assert mess == "message" -------------------------------------------------------------------------------- /tests/actor/test_process_registry.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | 5 | from protoactor.actor.process import DeadLettersProcess, ProcessRegistry, ActorProcess 6 | from protoactor.actor.utils import Singleton 7 | from protoactor.actor.protos_pb2 import PID 8 | from protoactor.mailbox.mailbox import DefaultMailbox 9 | 10 | 11 | @pytest.fixture 12 | def nohost(): 13 | if ProcessRegistry in Singleton._instances: 14 | del Singleton._instances[ProcessRegistry] 
15 | return ProcessRegistry() 16 | 17 | 18 | @pytest.fixture 19 | def mock_process(): 20 | return mock.Mock() 21 | 22 | 23 | def test_nonhost_address(nohost: ProcessRegistry): 24 | assert nohost.address == 'nonhost' 25 | 26 | 27 | def test_set_address(nohost: ProcessRegistry): 28 | nohost.address = 'new_host' 29 | assert nohost.address == 'new_host' 30 | 31 | 32 | def test_next_id(nohost: ProcessRegistry): 33 | assert nohost.next_id() == '1' 34 | 35 | 36 | def test_add(nohost: ProcessRegistry, mock_process): 37 | _pid, absent = ProcessRegistry().try_add('new_id', mock_process) 38 | assert _pid.address == ProcessRegistry().address 39 | assert _pid.id == 'new_id' 40 | 41 | 42 | def test_remove(nohost: ProcessRegistry, mock_process): 43 | _pid, absent = ProcessRegistry().try_add('new_id', mock_process) 44 | assert ProcessRegistry().get(_pid) == mock_process 45 | ProcessRegistry().remove(_pid) 46 | assert isinstance(ProcessRegistry().get(_pid), DeadLettersProcess) is True 47 | 48 | 49 | def test_get_deadletter(): 50 | _pid = mock.Mock() 51 | _pid.address = 'other_host' 52 | _pid.pid = '9999' 53 | assert isinstance(ProcessRegistry().get(_pid), DeadLettersProcess) is True 54 | 55 | 56 | def test_get_nonhost(nohost: ProcessRegistry, mock_process): 57 | _pid, absent = ProcessRegistry().try_add('new_id', mock_process) 58 | assert ProcessRegistry().get(_pid) == mock_process 59 | 60 | # def test_get_sameaddress(): 61 | # test_pid = PID(address='address', id='id') 62 | # lp = ActorProcess(DefaultMailbox()) 63 | # 64 | # pr = ProcessRegistry(lambda x: lp if x == test_pid else None) 65 | # pr.address = 'address' 66 | # 67 | # new_lp = pr.get(test_pid) 68 | # 69 | # assert isinstance(new_lp, DeadLettersProcess) is True 70 | # 71 | # 72 | # def test_get_not_sameaddress(): 73 | # test_pid = PID(address='another_address', id='id') 74 | # lp = ActorProcess(DefaultMailbox()) 75 | # 76 | # pr = ProcessRegistry(lambda x: lp if x == test_pid else None) 77 | # pr.address = 'address' 78 | # 79 | # new_lp = pr.get(test_pid) 80 | # 81 | # assert test_pid.aref == lp 82 | # 83 | # def test_get__local_actor_refs_not_has_id_DeadLettersProcess(): 84 | # test_pid = PID(address='address', id='id') 85 | # lp = ActorProcess(DefaultMailbox()) 86 | # 87 | # pr = ProcessRegistry(lambda x: None) 88 | # pr.address = 'address' 89 | # 90 | # new_lp = pr.get(test_pid) 91 | # 92 | # assert isinstance(new_lp, DeadLettersProcess) is True 93 | -------------------------------------------------------------------------------- /tests/actor/test_props.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from protoactor.actor.props import Props 4 | 5 | 6 | @pytest.mark.skip(reason="need to refactor this test") 7 | def test_props_default_init(): 8 | props = Props() 9 | 10 | assert props.producer is None 11 | # TODO: change these value with concrete default instances 12 | #assert props.mailbox_producer is None 13 | #assert props.supervisor_strategy is None 14 | assert props.dispatcher is None 15 | assert props.middleware == [] 16 | assert props.middleware_chain is None 17 | 18 | 19 | class PropsObj(): 20 | pass 21 | 22 | 23 | @pytest.mark.skip(reason="need to refactor this test") 24 | @pytest.mark.parametrize("field,method,value", [ 25 | ('producer', 'with_producer', PropsObj()), 26 | ('dispatcher', 'with_dispatcher', PropsObj()), 27 | ('mailbox_producer', 'with_mailbox', PropsObj()), 28 | ('supervisor_strategy', 'with_supervisor', PropsObj()), 29 | ]) 30 | def test_props_with(field, 
method, value): 31 | props = Props() 32 | 33 | with_method = getattr(props, method) 34 | new_props = with_method(value) 35 | 36 | results = [ 37 | ('producer', None), 38 | ('dispatcher', None), 39 | ('mailbox_producer', None), 40 | ('supervisor_strategy', None) 41 | ] 42 | 43 | for r in results: 44 | field_name = r[0] 45 | prop_value = getattr(new_props, field_name) 46 | if field_name == field: 47 | assert prop_value == value 48 | else: 49 | assert prop_value == r[1] 50 | -------------------------------------------------------------------------------- /tests/actor/test_restart_statistics.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from datetime import timedelta 3 | 4 | from protoactor.actor.restart_statistics import RestartStatistics 5 | 6 | 7 | def test_number_of_failures(): 8 | rs = RestartStatistics(10, datetime.datetime(2017, 2, 14, 0, 0, 0)) 9 | assert rs.number_of_failures(timedelta(days=1, seconds=1)) == 0 10 | 11 | 12 | def test_number_of_failures_greater_then_zero(): 13 | rs = RestartStatistics(10, datetime.datetime.now()) 14 | assert rs.number_of_failures(timedelta(seconds=1)) == 10 15 | -------------------------------------------------------------------------------- /tests/actor/test_utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import pytest 5 | 6 | from protoactor.actor.utils import Singleton, Stack 7 | 8 | 9 | @pytest.fixture 10 | def stack(): 11 | return Stack() 12 | 13 | 14 | def test_singleton(): 15 | class TestSingleton(metaclass=Singleton): 16 | def __init__(self): 17 | self.test = "" 18 | 19 | s1 = TestSingleton() 20 | s2 = TestSingleton() 21 | 22 | assert s1 is s2 23 | 24 | 25 | def test_singleton_for_different_classes(): 26 | class A(metaclass=Singleton): 27 | def __init__(self): 28 | self.a = "" 29 | 30 | class B(metaclass=Singleton): 31 | def __init__(self): 32 | self.b = "" 33 | 34 | a = A() 35 | a1 = A() 36 | b = B() 37 | b1 = B() 38 | 39 | assert a is a1 40 | assert b is b1 41 | assert not a is b1 42 | 43 | 44 | def test_stack_push_pop(stack): 45 | stack.push("Test data") 46 | assert stack.pop() == "Test data" 47 | 48 | 49 | def test_stack_push_peek_len(stack): 50 | stack.push("Test data") 51 | assert stack.peek() == "Test data" 52 | assert len(stack) == 1 53 | 54 | 55 | def test_stack_push_is_empty(stack): 56 | stack.push("Test data") 57 | assert stack.is_empty() is False -------------------------------------------------------------------------------- /tests/cluster/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/tests/cluster/__init__.py -------------------------------------------------------------------------------- /tests/cluster/hash_algorithms/test_fnv1a32.py: -------------------------------------------------------------------------------- 1 | from protoactor.cluster.hash_algorithms.fnv1a32 import FNV1A32 2 | 3 | 4 | def test_hash_function(): 5 | hash_algorithm = FNV1A32() 6 | assert hash_algorithm.compute_hash('Test'.encode()) == 805092869 7 | assert hash_algorithm.compute_hash('Test 2'.encode()) == 3918614647 -------------------------------------------------------------------------------- /tests/mailbox/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/tests/mailbox/__init__.py -------------------------------------------------------------------------------- /tests/mailbox/test_mailbox.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import pytest 4 | 5 | from protoactor.mailbox import mailbox 6 | 7 | 8 | def test_create_abstract_mailbox(): 9 | with pytest.raises(TypeError): 10 | _mailbox = mailbox.AbstractMailbox() 11 | -------------------------------------------------------------------------------- /tests/mailbox/test_mailbox_queue.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import pytest 4 | 5 | from protoactor.mailbox import queue 6 | 7 | 8 | @pytest.fixture 9 | def unbounded_queue(): 10 | return queue.UnboundedMailboxQueue() 11 | 12 | 13 | def test_unbounded_mailbox_queue_push_pop(unbounded_queue): 14 | unbounded_queue.push("1") 15 | assert unbounded_queue.pop() == "1" 16 | 17 | 18 | def test_unbounded_mailbox_queue_pop_empty(unbounded_queue): 19 | assert unbounded_queue.pop() is None 20 | 21 | 22 | def test_unbounded_mailbox_queue_has_message(unbounded_queue): 23 | unbounded_queue.push("1") 24 | assert unbounded_queue.has_messages() is True 25 | 26 | 27 | def test_unbounded_mailbox_queue_has_message_empty(unbounded_queue): 28 | assert unbounded_queue.has_messages() is False 29 | 30 | 31 | def test_create_abstract_queue(): 32 | with pytest.raises(TypeError): 33 | _queue = queue.AbstractQueue() 34 | -------------------------------------------------------------------------------- /tests/persistence/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/tests/persistence/__init__.py -------------------------------------------------------------------------------- /tests/persistence/snapshot_strategies/test_event_type_strategy.py: -------------------------------------------------------------------------------- 1 | from protoactor.persistence.messages import PersistedEvent 2 | from protoactor.persistence.snapshot_strategies.event_type_strategy import EventTypeStrategy 3 | 4 | 5 | def test_event_type_strategy_should_snapshot_according_to_the_event_type(): 6 | strategy = EventTypeStrategy(type(int())) 7 | assert strategy.should_take_snapshot(PersistedEvent(1, 0)) == True 8 | assert strategy.should_take_snapshot(PersistedEvent("not an int", 0)) == False -------------------------------------------------------------------------------- /tests/persistence/snapshot_strategies/test_interval_strategy.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from protoactor.persistence.messages import PersistedEvent 4 | from protoactor.persistence.snapshot_strategies.interval_strategy import IntervalStrategy 5 | 6 | 7 | @pytest.mark.parametrize("interval,expected", 8 | [(1, [1, 2, 3, 4, 5]), 9 | (2, [2, 4, 6, 8, 10]), 10 | (5, [5, 10, 15, 20, 25])]) 11 | def test_interval_strategy_should_snapshot_according_to_the_interval(interval, expected): 12 | strategy = IntervalStrategy(interval) 13 | for index in range(1, expected[-1]): 14 | if index in expected: 15 | assert strategy.should_take_snapshot(PersistedEvent(None, index)) == True 16 | else: 17 | assert 
strategy.should_take_snapshot(PersistedEvent(None, index)) == False 18 | -------------------------------------------------------------------------------- /tests/persistence/snapshot_strategies/test_time_strategy.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from protoactor.persistence.messages import PersistedEvent 4 | from protoactor.persistence.snapshot_strategies.time_strategy import TimeStrategy 5 | 6 | 7 | def test_time_strategy_should_snapshot_according_to_the_interval(): 8 | now = datetime.datetime.strptime('2000-01-01 12:00:00', '%Y-%m-%d %H:%M:%S') 9 | strategy = TimeStrategy(datetime.timedelta(seconds=10), lambda: now) 10 | assert strategy.should_take_snapshot(PersistedEvent(None, 0)) is False 11 | now = now + datetime.timedelta(seconds=5) 12 | assert strategy.should_take_snapshot(PersistedEvent(None, 0)) is False 13 | now = now + datetime.timedelta(seconds=5) 14 | assert strategy.should_take_snapshot(PersistedEvent(None, 0)) is True 15 | now = now + datetime.timedelta(seconds=5) 16 | assert strategy.should_take_snapshot(PersistedEvent(None, 0)) is False 17 | now = now + datetime.timedelta(seconds=5) 18 | assert strategy.should_take_snapshot(PersistedEvent(None, 0)) is True -------------------------------------------------------------------------------- /tests/protobuf/proto_grain_generator/messages/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package messages; 3 | 4 | message HelloRequest {} 5 | message HelloResponse { 6 | string Message=1; 7 | } 8 | 9 | service HelloGrain { 10 | rpc SayHello(HelloRequest) returns (HelloResponse) {} 11 | } -------------------------------------------------------------------------------- /tests/protobuf/proto_grain_generator/messages/protos_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the Protocol Buffers compiler. DO NOT EDIT! 
2 | # source: tests/protobuf/proto_grain_generator/messages/protos.proto 3 | # plugin: grpclib.plugin.main 4 | import abc 5 | import typing 6 | 7 | import grpclib.const 8 | import grpclib.client 9 | if typing.TYPE_CHECKING: 10 | import grpclib.server 11 | 12 | import tests.protobuf.proto_grain_generator.messages.protos_pb2 13 | 14 | 15 | class HelloGrainBase(abc.ABC): 16 | 17 | @abc.abstractmethod 18 | async def SayHello(self, stream: 'grpclib.server.Stream[tests.protobuf.proto_grain_generator.messages.protos_pb2.HelloRequest, tests.protobuf.proto_grain_generator.messages.protos_pb2.HelloResponse]') -> None: 19 | pass 20 | 21 | def __mapping__(self) -> typing.Dict[str, grpclib.const.Handler]: 22 | return { 23 | '/messages.HelloGrain/SayHello': grpclib.const.Handler( 24 | self.SayHello, 25 | grpclib.const.Cardinality.UNARY_UNARY, 26 | tests.protobuf.proto_grain_generator.messages.protos_pb2.HelloRequest, 27 | tests.protobuf.proto_grain_generator.messages.protos_pb2.HelloResponse, 28 | ), 29 | } 30 | 31 | 32 | class HelloGrainStub: 33 | 34 | def __init__(self, channel: grpclib.client.Channel) -> None: 35 | self.SayHello = grpclib.client.UnaryUnaryMethod( 36 | channel, 37 | '/messages.HelloGrain/SayHello', 38 | tests.protobuf.proto_grain_generator.messages.protos_pb2.HelloRequest, 39 | tests.protobuf.proto_grain_generator.messages.protos_pb2.HelloResponse, 40 | ) 41 | -------------------------------------------------------------------------------- /tests/protobuf/proto_grain_generator/test_proto_grain_generator.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import time 4 | 5 | 6 | # def test_generate_grain_from_template_with_full_path_for_imput_and_output_files(): 7 | # grain_generator_path = os.path.abspath( 8 | # "../../../protobuf/proto_grain_generator/proto_grain_generator.py") 9 | # 10 | # file_path = os.path.abspath("./messages/protos.py") 11 | # if os.path.exists(file_path): 12 | # os.remove(file_path) 13 | # 14 | # input_file_path = os.path.abspath("./messages/protos_pb2.py") 15 | # output_file_path = os.path.abspath("./messages/protos.py") 16 | # 17 | # process = subprocess.Popen(['python', grain_generator_path, input_file_path, output_file_path], 18 | # stdout=subprocess.PIPE, 19 | # stdin=subprocess.PIPE) 20 | # 21 | # time.sleep(1) 22 | # process.kill() 23 | # 24 | # assert os.path.exists(file_path) 25 | # 26 | # 27 | # def test_generate_grain_from_template_with_path_to_imput_files(): 28 | # grain_generator_path = os.path.abspath( 29 | # "../../../protobuf/proto_grain_generator/proto_grain_generator.py") 30 | # 31 | # file_path = os.path.abspath("./messages/protos.py") 32 | # if os.path.exists(file_path): 33 | # os.remove(file_path) 34 | # 35 | # input_file_path = os.path.abspath("./messages/protos_pb2.py") 36 | # 37 | # process = subprocess.Popen(['python', grain_generator_path, input_file_path], 38 | # stdout=subprocess.PIPE, 39 | # stdin=subprocess.PIPE) 40 | # 41 | # time.sleep(1) 42 | # process.kill() 43 | # 44 | # assert os.path.exists(file_path) 45 | # 46 | # 47 | # def test_generate_grain_from_template_with_short_path_for_imput_and_output_files(): 48 | # grain_generator_path = os.path.abspath( 49 | # "../../../protobuf/proto_grain_generator/proto_grain_generator.py") 50 | # 51 | # file_path = os.path.abspath("./messages/protos.py") 52 | # if os.path.exists(file_path): 53 | # os.remove(file_path) 54 | # 55 | # input_file_path = "./messages/protos_pb2.py" 56 | # output_file_path = 
"./messages/protos.py" 57 | # 58 | # process = subprocess.Popen(['python', grain_generator_path, input_file_path, output_file_path], 59 | # stdout=subprocess.PIPE, 60 | # stdin=subprocess.PIPE) 61 | # 62 | # time.sleep(1) 63 | # process.kill() 64 | # 65 | # assert os.path.exists(file_path) 66 | 67 | 68 | # def test_1(): 69 | # grain_generator_path = os.path.abspath( 70 | # "../../../protobuf/proto_grain_generator/proto_grain_generator.py") 71 | # 72 | # input_file_path = os.path.abspath("../../../examples/cluster_grain_hello_world/messages/protos_pb2.py") 73 | # 74 | # process = subprocess.Popen(['python', grain_generator_path, input_file_path], 75 | # stdout=subprocess.PIPE, 76 | # stdin=subprocess.PIPE) 77 | # 78 | # time.sleep(1) 79 | # process.kill() 80 | # 81 | # assert True 82 | 83 | 84 | -------------------------------------------------------------------------------- /tests/remote/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /tests/remote/messages/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/tests/remote/messages/__init__.py -------------------------------------------------------------------------------- /tests/remote/messages/protos.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package remote_test_messages; 3 | import "protoactor/actor/protos.proto"; 4 | 5 | message Start {} 6 | 7 | message StartRemote { 8 | actor.PID Sender = 1; 9 | } 10 | 11 | message Ping { 12 | string message=1; 13 | } 14 | 15 | message Pong { 16 | string message=1; 17 | } -------------------------------------------------------------------------------- /tests/remote/node/__init__.py: -------------------------------------------------------------------------------- 1 | for i in range(10): 2 | print(i) -------------------------------------------------------------------------------- /tests/remote/node/node.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import getopt 3 | import os 4 | import re 5 | import sys 6 | 7 | from protoactor.actor.actor import Actor 8 | 9 | path = os.path.dirname(os.path.abspath(__file__)) 10 | match = re.search('protoactor-python', path) 11 | new_path = path[:match.end()] 12 | sys.path.insert(0, new_path) 13 | 14 | from protoactor.actor.props import Props 15 | from protoactor.remote.remote import Remote 16 | from protoactor.remote.serialization import Serialization 17 | from tests.remote.messages.protos_pb2 import Ping, Pong, DESCRIPTOR 18 | from protoactor.actor.actor_context import RootContext 19 | 20 | 21 | class EchoActor(Actor): 22 | def __init__(self, host, port): 23 | self._host = host 24 | self._port = port 25 | 26 | async def receive(self, context): 27 | if isinstance(context.message, Ping): 28 | await context.respond(Pong(message="%s:%s %s" % (self._host, self._port, context.message.message))) 29 | 30 | 31 | async def start(argv): 32 | host = None 33 | port = None 34 | opts, args = getopt.getopt(argv, "hp", ["host=", "port="]) 35 | for opt, arg in opts: 36 | if opt == '--host': 37 | host = arg 38 | elif opt == '--port': 39 | port = arg 40 | 41 | Serialization().register_file_descriptor(DESCRIPTOR) 42 | 43 | context = RootContext() 44 | Remote().start(host, port) 45 | 
props = Props().from_producer(lambda: EchoActor(host, port)) 46 | Remote().register_known_kind('EchoActor', props) 47 | context.spawn_named(props, "EchoActorInstance") 48 | 49 | input() 50 | 51 | 52 | def main(argv): 53 | loop = asyncio.get_event_loop() 54 | loop.run_until_complete(start(argv)) 55 | loop.close() 56 | 57 | 58 | if __name__ == "__main__": 59 | main(sys.argv[1:]) 60 | -------------------------------------------------------------------------------- /tests/remote/remote_manager.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import time 4 | 5 | from protoactor.remote.remote import Remote, Serialization 6 | from tests.remote.messages.protos_pb2 import DESCRIPTOR 7 | 8 | 9 | class RemoteManager(): 10 | def __init__(self): 11 | Serialization().register_file_descriptor(DESCRIPTOR) 12 | self.__nodes = {} 13 | self.__default_node_address = '127.0.0.1:12000' 14 | 15 | self.provision_node('127.0.0.1', 12000) 16 | Remote().start("127.0.0.1", 12001) 17 | 18 | @property 19 | def default_node_address(self): 20 | return self.__default_node_address 21 | 22 | @property 23 | def nodes(self): 24 | return self.__nodes 25 | 26 | def provision_node(self, host='127.0.0.1', port=12000): 27 | node_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'node/node.py') 28 | process = subprocess.Popen(['python', str(node_path), '--host', str(host), '--port', str(port)], 29 | stdout=subprocess.PIPE, 30 | stdin=subprocess.PIPE) 31 | address = '%s:%s' % (host, port) 32 | self.__nodes[address] = process 33 | time.sleep(1) 34 | return address, process 35 | 36 | def dispose(self): 37 | for process in self.__nodes.values(): 38 | process.kill() 39 | process.wait() 40 | Remote().shutdown(False) -------------------------------------------------------------------------------- /tests/remote/test_serialization.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from protoactor.actor import PID 4 | from protoactor.remote.messages import JsonMessage 5 | from protoactor.remote.serialization import Serialization 6 | 7 | from tests.remote.messages.protos_pb2 import DESCRIPTOR 8 | 9 | 10 | @pytest.fixture(scope="session", autouse=True) 11 | def register_file_descriptor(): 12 | Serialization().register_file_descriptor(DESCRIPTOR) 13 | 14 | 15 | def test_can_serialize_and_deserialize_json_pid(): 16 | type_name = "actor.PID" 17 | json = JsonMessage(type_name, "{ \"address\":\"123\", \"id\":\"456\"}") 18 | data = Serialization().serialize(json, 1) 19 | deserialized = Serialization().deserialize(type_name, data, 1) 20 | assert deserialized.address == "123" 21 | assert deserialized.id == "456" 22 | 23 | 24 | def test_can_serialize_and_deserialize_json(): 25 | type_name = "remote_test_messages.Ping" 26 | json = JsonMessage(type_name, "{ \"message\":\"Hello\"}") 27 | data = Serialization().serialize(json, 1) 28 | deserialized = Serialization().deserialize(type_name, data, 1) 29 | assert deserialized.message == "Hello" 30 | 31 | 32 | def test_can_serialize_and_deserialize_protobuf(): 33 | type_name = "actor.PID" 34 | pid = PID(address='123', id='456') 35 | data = Serialization().serialize(pid, 0) 36 | deserialized = Serialization().deserialize(type_name, data, 0) 37 | assert deserialized.address == "123" 38 | assert deserialized.id == "456" 39 | -------------------------------------------------------------------------------- /tests/router/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/asynkron/protoactor-python/94bb4220bbef7a7cee50f6829fcf4d4362e487c6/tests/router/__init__.py -------------------------------------------------------------------------------- /tests/test_fixtures/mock_mailbox.py: -------------------------------------------------------------------------------- 1 | from protoactor.mailbox.mailbox import DefaultMailbox 2 | from protoactor.mailbox.queue import UnboundedMailboxQueue 3 | 4 | 5 | class MockMailbox(DefaultMailbox): 6 | def __init__(self): 7 | super().__init__(UnboundedMailboxQueue(), UnboundedMailboxQueue(), []) 8 | self.user_messages = [] 9 | self.system_messages = [] 10 | 11 | def post_user_message(self, msg): 12 | self.user_messages.append(msg) 13 | super().post_user_message(msg) 14 | 15 | def post_system_message(self, msg): 16 | self.system_messages.append(msg) 17 | super().post_system_message(msg) -------------------------------------------------------------------------------- /tests/test_fixtures/test_mailbox.py: -------------------------------------------------------------------------------- 1 | from protoactor.mailbox.mailbox import DefaultMailbox 2 | from protoactor.mailbox.queue import UnboundedMailboxQueue 3 | 4 | 5 | class MockMailbox(DefaultMailbox): 6 | def __init__(self): 7 | super().__init__(UnboundedMailboxQueue(), UnboundedMailboxQueue(), []) 8 | self.user_messages = [] 9 | self.system_messages = [] 10 | 11 | def post_user_message(self, msg): 12 | self.user_messages.append(msg) 13 | super().post_user_message(msg) 14 | 15 | def post_system_message(self, msg): 16 | self.system_messages.append(msg) 17 | super().post_system_message(msg) -------------------------------------------------------------------------------- /tests/test_utils/async_mock.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock 2 | 3 | 4 | class AsyncMock(MagicMock): 5 | async def __call__(self, *args, **kwargs): 6 | return super().__call__(*args, **kwargs) -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py3, docs, flake8 3 | 4 | [testenv] 5 | deps = 6 | mypy 7 | pytest 8 | pytest-cov 9 | pytest-xdist 10 | pytest-asyncio 11 | protobuf 12 | grpclib 13 | commands = 14 | pytest --capture=sys tests 15 | 16 | [testenv:py3] 17 | deps = 18 | {[testenv]deps} 19 | 20 | [testenv:docs] 21 | changedir = docs 22 | deps = 23 | sphinx 24 | commands = 25 | sphinx-build -b html -d {envtmpdir}/doctrees . {envtmpdir}/html 26 | 27 | [testenv:flake8] 28 | deps = 29 | flake8 30 | commands = 31 | flake8 --exclude=event_type_strategy.py,protos_pb2.py,protos_remote_pb2.py,protos_remote_pb2_grpc.py,protos_remote_grpc.py --ignore=I201,I100,E261,E302,E128,E501,W292,W291,W391 protoactor/ 32 | --------------------------------------------------------------------------------