├── images
│   └── header.png
├── requirements.txt
├── .travis.yml
├── echo_server.py
├── LICENSE
├── .gitignore
├── test_bench.py
├── README.md
└── bench.py
/images/header.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/healeycodes/websocket-benchmarker/HEAD/images/header.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/healeycodes/websocket-benchmarker/HEAD/requirements.txt
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
# https://github.com/healeycodes/websocket-benchmarker/

language: python
python:
  - "3.7-dev"

install:
  - pip install -r requirements.txt

script:
  - ls # sanity logging
  - python -m unittest --verbose

--------------------------------------------------------------------------------
/echo_server.py:
--------------------------------------------------------------------------------
import asyncio
import websockets

async def echo(websocket, path):
    async for message in websocket:
        await websocket.send(message)

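# serve the echo handler on ws://localhost:3000 (the default
# target of bench.py) and keep running until interrupted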
asyncio.get_event_loop().run_until_complete(
    websockets.serve(echo, 'localhost', 3000))
asyncio.get_event_loop().run_forever()
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2019 healeycodes

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Default log file
log.txt

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

--------------------------------------------------------------------------------
/test_bench.py:
--------------------------------------------------------------------------------
import os
import sys
import unittest
import subprocess
import warnings

# path to the Python interpreter running this test suite
python_env = sys.executable


def get_path(file):
    '''Finds the path to the given local file in a cross-platform manner.'''

    curr_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(curr_dir, file)


def ignore_resource_warning(func):
    '''Ignore any resource warnings produced by leaving echo_server running.'''

    def without_warn(self, *args, **kwargs):
        warnings.simplefilter("ignore", ResourceWarning)
        return func(self, *args, **kwargs)
    return without_warn


class EndToEnd(unittest.TestCase):
    def test_without_echo_server(self):
        '''The benchmark should error when no echo server is listening at the default `host`.'''

        result = subprocess.run([python_env, get_path('bench.py')],
                                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        assert(result.returncode == 1)

    @ignore_resource_warning
    def test_with_echo_server(self):
        '''The benchmark should run without errors.'''

        echo_server = subprocess.Popen([python_env, get_path('echo_server.py')],
                                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        result = subprocess.run([python_env, get_path('bench.py')],
                                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        echo_server.kill()
        assert(result.returncode == 0)

    @ignore_resource_warning
    def test_stat_report(self):
        '''Statistics of the benchmark should be printed to stdout.'''

        echo_server = subprocess.Popen([python_env, get_path('echo_server.py')],
                                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        result = subprocess.Popen([python_env, get_path('bench.py')],
                                  stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
        result.wait()
        output = str(result.stdout.read())
        echo_server.kill()
        assert('Min' in output)
        assert('Mean' in output)
        assert('Max' in output)


if __name__ == '__main__':
    unittest.main()

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
[![Build Status](https://travis-ci.org/healeycodes/websocket-benchmarker.svg?branch=master)](https://travis-ci.org/healeycodes/websocket-benchmarker)

## :radio: WebSocket Benchmarker :watch:

*Message throughput* is how fast a WebSocket server can parse and respond to a message. Some people consider this a good indicator of a framework/library/server's performance. This tool measures message throughput under load by simulating concurrent clients.


![Example program output](https://github.com/healeycodes/websocket-benchmarker/raw/master/images/header.png)


---

###### 2019.01.26

Now with 100% more bleeding edge :zap: [asyncio](https://docs.python.org/3/library/asyncio.html) goodness.

---


### Installation

Python 3.6.5+.

`pip install -r requirements.txt`


### Usage

This program expects the host to be an echo server and measures the time between sending a message and receiving the same message back from the host. It performs this for a number of client connections simultaneously and is designed to produce repeatable results.

`python bench.py` will launch the benchmark and print statistics to stdout. If the log file path doesn't point to an existing file, one will be created; otherwise, results are appended to the existing file.
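
For example, against the bundled echo server:

```
# terminal one: start the echo server on localhost:3000
python echo_server.py

# terminal two: run the benchmark with its defaults
python bench.py
```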

The raw results are in CSV format with each line representing a client's roundtrip times.

E.g., `0.1, 0.1, 0.1` for one client performing three roundtrips.
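
The log can also be post-processed directly. As a minimal sketch (assuming the default `./log.txt` location; this script isn't part of the repo):

```
# summarize a benchmark log: each line holds one client's
# comma-separated roundtrip times, in seconds
timings = []
with open('log.txt') as log:
    for line in log:
        if line.strip():
            timings.extend(float(t) for t in line.split(','))

print(f'Min: {min(timings) * 1000:.2f}ms')
print(f'Mean: {sum(timings) / len(timings) * 1000:.2f}ms')
print(f'Max: {max(timings) * 1000:.2f}ms')
```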


| Arg   | Description                             | Default          |
| ----- |:----------------------------------------|:-----------------|
| `--h` | Host address of WebSocket server        | `localhost:3000` |
| `--n` | Number of clients to create             | `1000`           |
| `--c` | Number of concurrent clients            | `64`             |
| `--r` | Roundtrips per client                   | `5`              |
| `--s` | Message size in characters              | `30`             |
| `--l` | Path to create or append to a log file  | `./log.txt`      |
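
For example, a lighter run against a non-default host (the host and numbers here are only illustrative):

```
python bench.py --h localhost:8080 --n 500 --c 32
```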


### Tests

Full end-to-end testing via unittest.

```
python -m unittest
...
----------------------------------------------------------------------
Ran 3 tests in 8.371s

OK
```


### License

MIT (c) 2019 healeycodes.

Inspiration taken from the unmaintained JavaScript project [websocket-benchmark](https://github.com/cargomedia/websocket-benchmark).

--------------------------------------------------------------------------------
/bench.py:
--------------------------------------------------------------------------------
import os
import time
import asyncio
import websockets
import argparse

parser = argparse.ArgumentParser(description='Benchmark a WebSocket server')
parser.add_argument('--h', dest='host',
                    help='Host address of WebSocket server',
                    default='localhost:3000')
parser.add_argument('--n', dest='clients',
                    help='Number of clients to create',
                    default=1000, type=int)
parser.add_argument('--c', dest='concurrency',
                    help='Number of concurrent clients',
                    default=64, type=int)
parser.add_argument('--r', dest='roundtrips',
                    help='Roundtrips per client',
                    default=5, type=int)
parser.add_argument('--s', dest='msg_size',
                    help='Message size in characters',
                    default=30, type=int)
parser.add_argument('--l', dest='log_path',
                    help='Path to create or append to a log file',
                    default=os.path.join('.', 'log.txt'))
args = parser.parse_args()

# get benchmark parameters
host = args.host
clients = args.clients
concurrency = args.concurrency
roundtrips = args.roundtrips
message = 'a' * args.msg_size

# open log file
log_file = open(args.log_path, 'a')
log_memory = list()

print(f'Benchmarking {host} with {clients} total clients. ' +
      f'{concurrency} clients concurrently. {roundtrips} roundtrips per client.\n')


async def client(state):
    '''A WebSocket client, which sends a message and expects an echo
    `roundtrip` number of times. This client will spawn a copy of itself afterwards,
    so that the requested concurrency-level is continuous.

    Parameters
    ----------
    state : Dictionary
        A Dictionary-like object with the key `clients` --
        the number of clients spawned thus far.

    Returns
    -------
    string
        A statement when the max number of clients have been spawned.'''
    if state['clients'] >= clients:
        return 'Reached max clients.'
    state['clients'] += 1
    timings = list()
    async with websockets.connect(f'ws://{host}') as websocket:
        for i in range(roundtrips):
            await websocket.send(message)
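            # the clock starts only after send() completes, so each
            # recorded value measures the wait for the server's echo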
            start = time.perf_counter()
            response = await websocket.recv()
            if response != message:
                raise ValueError('Message received differs from message sent')
            timings.append(time.perf_counter() - start)
        await websocket.close()
    log_file.write(','.join([str(t) for t in timings]) + '\n')
    log_memory.append(timings)
    await asyncio.ensure_future(client(state))


# create enough references to the client coroutine to satisfy args.concurrency
con_clients = [client] * concurrency

# pass them all a 'link' to the same state Dictionary
state = dict({'clients': 0})

# run them concurrently
main = asyncio.gather(*[i(state) for i in con_clients])
loop = asyncio.get_event_loop()
loop.run_until_complete(main)


def stats(timings):
    '''Prints stats based on the raw benchmark data.

    Parameters
    ----------
    timings : List(List(Float))
        A List of Lists containing message timings.

    Returns
    -------
    Dictionary
        Stats from the provided timings. Keys: `min`, `mean`, and `max`.'''
    timings_flat = [t for client_timings in timings for t in client_timings]
    min_timing = min(timings_flat) * 1000
    mean_timing = sum(timings_flat) / len(timings_flat) * 1000
    max_timing = max(timings_flat) * 1000
    print(f'Min: {min_timing:.2f}ms')
    print(f'Mean: {mean_timing:.2f}ms')
    print(f'Max: {max_timing:.2f}ms')
    return dict({'min': min_timing, 'mean': mean_timing, 'max': max_timing})


# the benchmark is finished; build stats from an in-memory copy of the log
stats(log_memory)

print(f'\nRaw results sent to {log_file.name}')

--------------------------------------------------------------------------------