├── .github └── workflows │ └── python-publish.yml ├── .gitignore ├── README.md ├── example ├── bus.py ├── flask_app.py ├── listener.py ├── requirements.txt └── rest.py ├── flask_kafka ├── __init__.py ├── consumer.py ├── consumer_legacy.py ├── flask_kafka.py └── producer.py ├── requirements.txt └── setup.py /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Set up Python 26 | uses: actions/setup-python@v3 27 | with: 28 | python-version: '3.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install build 33 | - name: Build package 34 | run: python -m build 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 37 | with: 38 | user: __token__ 39 | password: ${{ secrets.PYPI_API_TOKEN }} 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # VS Code 2 | .vscode 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | .idea 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Flask Kafka 2 | 3 | > :warning: **Breaking Changes**: Dropping kafka-python library in favour of **confluent-kafka** due it's support & documentation 4 | 5 | This is an easy to use utility to help Flask developers to implement microservices that interact with Kafka. This library has been inspired by two other similar libraries :- 6 | 7 | - [`Flask RabbitMQ`](https://github.com/pushyzheng/flask-rabbitmq) 8 | - [`Kakfaesque`](https://github.com/sankalpjonn/kafkaesque) 9 | 10 | After looking around the web and on Github, I was not able to find a lot of content on how to consume from a Kafka topic using the Kafka framework. From what I found, I was able to come up with this library by borrowing from the above libraries. They both had a little of what I wanted so I combined them to come up with this one. 11 | 12 | I hope you find this useful. 13 | 14 | ## Features 15 | 16 | - Doesn't block process 17 | - Configure by `config.py` 18 | - Support comsuming from topic by decorator 19 | 20 | ## Installation 21 | 22 | This project has been commited to Pypi, can be installed by pip: 23 | ```shell 24 | $ pip install flask-kafka 25 | ``` 26 | 27 | ## Simple example 28 | 29 | ```python 30 | from flask import Flask, request 31 | 32 | from flask_kafka import FlaskKafka 33 | app = Flask(__name__) 34 | app.config["KAFKA_CONFIG"] = {'bootstrap.servers': 'localhost:9092', 35 | 'group.id': 'foo', 36 | 'enable.auto.commit': 'false', 37 | 'auto.offset.reset': 'earliest'} 38 | 39 | bus = FlaskKafka() 40 | bus.init_app(app) 41 | 42 | # curl http://localhost:5004/publish/test-topic?key=foo&value=bar 43 | 44 | @app.route('/publish/', methods=["get"]) 45 | def publish(topic): 46 | qstr = request.args.to_dict() 47 | key = qstr['key'] 48 | value = qstr['value'] 49 | publisher = bus.get_producer() 50 | publisher.produce(topic, key=key, value=value) 51 | publisher.poll(1) 52 | return "Published to {} => {} : {}".format(topic, key, value) 53 | 54 | @bus.handle('test-topic') 55 | def test_topic_handler(consumer,msg): 56 | print("Consumed event from topic {topic}: key = {key:12} value = {value:12}".format( 57 | topic=msg.topic(), key=msg.key().decode('utf-8'), value=msg.value().decode('utf-8'))) 58 | 59 | 60 | # or 61 | # bus.add_topic_handler("test-topic", lambda consumer, msg: print(msg.value())) 62 | 63 | if __name__ == '__main__': 64 | bus.run() 65 | app.run(debug=True, port=5004, use_reloader=False) 66 | 67 | ``` 68 | 69 | ## Special Thanks 70 | 71 | - [cookieGeGe](https://github.com/cookieGeGe) - Contributed to new structure 72 | 73 | 74 | ## License 75 | 76 | ``` 77 | MIT License 78 | 79 | Copyright (c) 2019 Nimrod Kevin Maina 80 | 81 | Permission is hereby granted, free of charge, to any person obtaining a copy 82 | of this software and associated documentation files (the "Software"), to deal 83 | in the Software without restriction, including without limitation the rights 84 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 85 | copies of the Software, and to permit persons to whom the Software is 86 | furnished to do so, subject to the following conditions: 87 | 88 | The above copyright notice and this permission notice shall be included in all 89 | copies or substantial portions of the Software. 90 | 91 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 92 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 93 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 94 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 95 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 96 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 97 | SOFTWARE. 98 | ``` -------------------------------------------------------------------------------- /example/bus.py: -------------------------------------------------------------------------------- 1 | from flask_kafka import FlaskKafka 2 | 3 | bus = FlaskKafka() 4 | -------------------------------------------------------------------------------- /example/flask_app.py: -------------------------------------------------------------------------------- 1 | from flask import Flask 2 | from example.bus import bus 3 | from example.listener import register_listeners 4 | from rest import rest 5 | # from flask_kafka import FlaskKafka3 6 | 7 | app = Flask(__name__) 8 | app.config["KAFKA_CONFIG"] = {'bootstrap.servers': 'localhost:9092', 9 | 'group.id': 'foo', 10 | 'enable.auto.commit': 'false', 11 | 'auto.offset.reset': 'earliest'} 12 | 13 | 14 | # bus = FlaskKafka3() # Can be instantiated here or an external file 15 | bus.init_app(app) # 1. MUST be called before any handlers are registered (sets up 'default' consumer & producer) 16 | 17 | 18 | # @bus.handle('mpesa-reaper') # 2. MUST be called b4 bus.run() (registers handlers to consumers) 19 | # def test_topic_handler(consumer, msg): 20 | # print("Consumed event from topic {topic}: key = {key:12} value = {value:12}".format( 21 | # topic=msg.topic(), key=msg.key().decode('utf-8'), value=msg.value().decode('utf-8'))) 22 | 23 | 24 | if __name__ == '__main__': 25 | # Start consuming from the Kafka server 26 | print('running app...') 27 | 28 | # Register handlers from an external file 29 | register_listeners() # 2. MUST be called b4 bus.run() (registers handlers to consumers) 30 | 31 | bus.run() # 3. MUST be called LAST after consumers & handlers have been set up 32 | 33 | app.register_blueprint(rest) 34 | 35 | # Start Flask server 36 | app.run(port=5004, debug=True, use_reloader=False) 37 | -------------------------------------------------------------------------------- /example/listener.py: -------------------------------------------------------------------------------- 1 | # from flask import current_app 2 | from example.bus import bus 3 | 4 | # bus = current_app.extensions.get('flask_kafka') 5 | 6 | 7 | def register_listeners(): 8 | # Handle message received from a Kafka topic 9 | @bus.handle('mpesa-reaper') 10 | def test_topic_handler(consumer, msg): 11 | print("Consumed event from topic {topic}: key = {key:12} value = {value:12}".format( 12 | topic=msg.topic(), key=msg.key().decode('utf-8'), value=msg.value().decode('utf-8'))) 13 | -------------------------------------------------------------------------------- /example/requirements.txt: -------------------------------------------------------------------------------- 1 | flask 2 | flask_kafka -------------------------------------------------------------------------------- /example/rest.py: -------------------------------------------------------------------------------- 1 | from flask import Blueprint, request 2 | from example.bus import bus 3 | 4 | rest = Blueprint('rest', __name__) 5 | 6 | 7 | def acked(err, msg): 8 | if err is not None: 9 | print("Failed to deliver message: %s: %s" % (str(msg), str(err))) 10 | else: 11 | print("Message produced: %s" % (str(msg.value()))) 12 | 13 | 14 | @rest.route('/') 15 | def index(): 16 | return "This is a simple flask kafka example api" 17 | 18 | 19 | @rest.route('/publish/') 20 | def publish(topic): 21 | qstr = request.args.to_dict() 22 | key = qstr['key'] 23 | value = qstr['value'] 24 | publisher = bus.get_producer() 25 | publisher.produce(topic, key=key, value=value, callback=acked) 26 | publisher.poll(1) 27 | return "Published to {} => {} : {}".format(topic, key, value) -------------------------------------------------------------------------------- /flask_kafka/__init__.py: -------------------------------------------------------------------------------- 1 | name = "flask_kafka" 2 | # from .consumer import FlaskKafka 3 | # from .consumer2 import FlaskKafka2 4 | from .flask_kafka import FlaskKafka -------------------------------------------------------------------------------- /flask_kafka/consumer.py: -------------------------------------------------------------------------------- 1 | from confluent_kafka import Consumer, KafkaError 2 | import traceback 3 | from typing import Callable 4 | import atexit 5 | import logging 6 | import sys 7 | 8 | 9 | class KafkaConsumer(object): 10 | 11 | def __init__(self, logger, **config): 12 | self.handlers = {} 13 | self.is_running = True 14 | self.logger = logger 15 | self._consumer = Consumer(config) 16 | self.config = config 17 | 18 | def __getattr__(self, item: str): 19 | return getattr(self._consumer, item) 20 | 21 | def _add_handler(self, topic: str, handler: Callable): 22 | if self.handlers.get(topic) is None: 23 | self.handlers[topic] = [] 24 | self.handlers[topic].append(handler) 25 | 26 | def handle(self, topic): 27 | def decorator(f): 28 | self._add_handler(topic, f) 29 | return f 30 | 31 | return decorator 32 | 33 | def _run_handlers(self, msg): 34 | handlers = self.handlers.get(msg.topic(), []) 35 | for handler in handlers: 36 | try: 37 | if not callable(handler): 38 | continue 39 | handler(self, msg) 40 | except Exception as e: 41 | self.logger.error(traceback.format_exc()) 42 | 43 | def on_stop(self): 44 | self.is_running = False 45 | self.logger.info("closing consumer") 46 | self._consumer.close() 47 | self.logger.info("consumer closed") 48 | 49 | def subscribe(self): 50 | try: 51 | if not any(self.handlers.keys()): 52 | self.logger.warning("No handlers have been registered") 53 | return 54 | self._consumer.subscribe(topics=list(self.handlers.keys())) 55 | self.logger.info("Subscribing to topic(s) {}".format(list(self.handlers.keys()))) 56 | atexit.register(self.on_stop) 57 | 58 | while self.is_running: 59 | msg = self._consumer.poll(timeout=20.0) 60 | if msg is None: 61 | print("No messages received") 62 | continue 63 | 64 | if msg.error(): 65 | if msg.error().code() == KafkaError._PARTITION_EOF: 66 | # End of partition event 67 | self.logger.error('%% %s [%d] reached end at offset %d\n' % 68 | (msg.topic(), msg.partition(), msg.offset())) 69 | self._run_handlers(msg) 70 | finally: 71 | self.logger.info("closing consumer") 72 | self._consumer.close() 73 | -------------------------------------------------------------------------------- /flask_kafka/consumer_legacy.py: -------------------------------------------------------------------------------- 1 | from kafka import KafkaConsumer 2 | import threading 3 | import logging 4 | import sys 5 | 6 | 7 | class FlaskKafka: 8 | def __init__(self, interrupt_event, **kw): 9 | self.consumer = KafkaConsumer(**kw) 10 | self.handlers={} 11 | self.interrupt_event = interrupt_event 12 | logger = logging.getLogger('flask-kafka-consumer') 13 | ch = logging.StreamHandler(sys.stdout) 14 | ch.setLevel(logging.INFO) 15 | formatter = logging.Formatter('[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s') 16 | ch.setFormatter(formatter) 17 | logger.addHandler(ch) 18 | logger.setLevel(logging.INFO) 19 | self.logger = logger 20 | 21 | def _add_handler(self, topic, handler): 22 | if self.handlers.get(topic) is None: 23 | self.handlers[topic] = [] 24 | self.handlers[topic].append(handler) 25 | 26 | def handle(self, topic): 27 | def decorator(f): 28 | self._add_handler(topic, f) 29 | return f 30 | return decorator 31 | 32 | def _run_handlers(self, msg): 33 | try: 34 | handlers = self.handlers[msg.topic] 35 | for handler in handlers: 36 | handler(msg) 37 | self.consumer.commit() 38 | except Exception as e: 39 | self.logger.critical(str(e), exc_info=1) 40 | self.consumer.close() 41 | sys.exit("Exited due to exception") 42 | 43 | def signal_term_handler(self, signal, frame): 44 | self.logger.info("closing consumer") 45 | self.consumer.close() 46 | sys.exit(0) 47 | 48 | def _start(self): 49 | self.consumer.subscribe(topics=tuple(self.handlers.keys())) 50 | self.logger.info("starting consumer...registered signterm") 51 | 52 | for msg in self.consumer: 53 | self.logger.debug("TOPIC: {}, PAYLOAD: {}".format(msg.topic, msg.value)) 54 | self._run_handlers(msg) 55 | # stop the consumer 56 | if self.interrupt_event.is_set(): 57 | self.interrupted_process() 58 | self.interrupt_event.clear() 59 | 60 | def interrupted_process(self, *args): 61 | self.logger.info("closing consumer") 62 | self.consumer.close() 63 | sys.exit(0) 64 | 65 | def _run(self): 66 | self.logger.info(" * The flask Kafka application is consuming") 67 | t = threading.Thread(target=self._start) 68 | t.start() 69 | 70 | # run the consumer application 71 | def run(self): 72 | self._run() 73 | -------------------------------------------------------------------------------- /flask_kafka/flask_kafka.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Callable 2 | import atexit 3 | import os 4 | import platform 5 | import threading 6 | import logging 7 | import sys 8 | 9 | from flask_kafka.consumer import KafkaConsumer 10 | from flask_kafka.producer import KafkaProducer 11 | 12 | 13 | class FlaskKafka(object): 14 | 15 | def __init__(self, app=None): 16 | self.app = None 17 | self.logger = self.get_logger() 18 | self._consumers: Dict[str, KafkaConsumer] = {} 19 | self._producers: Dict[str, KafkaProducer] = {} 20 | if app is not None: 21 | self.init_app(app) 22 | 23 | def init_app(self, app): 24 | self.app = app 25 | if "flask_kafka" in self.app.extensions: 26 | raise RuntimeError( 27 | "A 'Kafka' instance has already been registered on this Flask app." 28 | " Import and use that instance instead." 29 | ) 30 | self.app.extensions["flask_kafka"] = self 31 | kafka_config = self.app.config.setdefault("KAFKA_CONFIG", {}) 32 | self.create_consumer("default", **kafka_config) 33 | self.create_producer("default", **kafka_config) 34 | kafka_binds = self.app.config.setdefault("KAFKA_BINDS", {}) 35 | for name, config in kafka_binds.items(): 36 | self.create_consumer(name, **config) 37 | self.create_producer(name, **config) 38 | app.app_context().push() 39 | 40 | def get_consumer(self, consumer_name: str = "default") -> KafkaConsumer: 41 | return self._consumers.get(consumer_name, None) 42 | 43 | def get_producer(self, producer_name: str = "default") -> KafkaProducer: 44 | return self._producers.get(producer_name, None) 45 | 46 | def add_consumer(self, consumer_name: str, consumer: KafkaConsumer): 47 | if consumer_name is None or consumer_name == "": 48 | raise Exception("Consumer name cannot be empty") 49 | if consumer_name in self._consumers.keys(): 50 | raise Exception("Duplicate consumer name") 51 | if not isinstance(consumer, KafkaConsumer): 52 | raise Exception(f"must be {KafkaConsumer.__class__}") 53 | self._consumers[consumer_name] = consumer 54 | 55 | def create_consumer(self, consumer_name: str, **config): 56 | self.add_consumer(consumer_name, KafkaConsumer(self.logger,**config)) 57 | 58 | def create_producer(self, producer_name: str, **config): 59 | self.add_producer(producer_name, KafkaProducer(**config)) 60 | 61 | def add_producer(self, producer_name: str, producer: KafkaProducer): 62 | if producer_name is None or producer_name == "": 63 | raise Exception("Producer name cannot be empty") 64 | if producer_name in self._producers.keys(): 65 | raise Exception("Duplicate Producer name") 66 | if not isinstance(producer, KafkaProducer): 67 | raise Exception(f"must be {KafkaProducer.__class__}") 68 | self._producers[producer_name] = producer 69 | 70 | def handle(self, topic: str, consumer: str = "default"): 71 | consumer_obj = self.get_consumer(consumer) 72 | if consumer_obj is None: 73 | raise Exception(f"name {consumer} Consumer not registered") 74 | return consumer_obj.handle(topic) 75 | 76 | def add_topic_handler(self, topic: str, callback: Callable, consumer: str = "default"): 77 | self.handle(topic, consumer)(callback) 78 | 79 | def _run(self): 80 | self.logger.info("Consumers found: {}".format(len(self._consumers.items()))) 81 | for name, consumer in self._consumers.items(): 82 | self.logger.info("Starting Consumer: {}".format(name)) 83 | t = threading.Thread(target=consumer.subscribe, name=name) 84 | t.setDaemon(True) 85 | t.start() 86 | 87 | def _start(self): 88 | self._run() 89 | 90 | def run(self, lock: bool = True): 91 | if not lock: 92 | self._start() 93 | return 94 | else: 95 | self._start_with_lock() 96 | 97 | def _start_with_lock(self): 98 | """ 99 | Start the kafka consumer with a lock 100 | :return: 101 | """ 102 | default_lock_file_path = os.path.join(os.getcwd(), "flask_kafka.lock") 103 | lock_file_path = self.app.config.setdefault("KAFKA_LOCK_FILE", default_lock_file_path) 104 | dir_path = os.path.dirname(lock_file_path) 105 | self.logger.info(dir_path) 106 | if not os.path.exists(dir_path): 107 | os.makedirs(dir_path) 108 | if platform.system() != 'Windows': 109 | fcntl = __import__("fcntl") 110 | f = open(lock_file_path, 'wb') 111 | try: 112 | fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) 113 | self._start() 114 | except: 115 | pass 116 | 117 | def unlock(): 118 | fcntl.flock(f, fcntl.LOCK_UN) 119 | f.close() 120 | 121 | atexit.register(unlock) 122 | else: 123 | msvcrt = __import__('msvcrt') 124 | f = open(lock_file_path, 'wb') 125 | try: 126 | msvcrt.locking(f.fileno(), msvcrt.LK_NBLCK, 1) 127 | self._start() 128 | except: 129 | pass 130 | 131 | def _unlock_file(): 132 | try: 133 | f.seek(0) 134 | msvcrt.locking(f.fileno(), msvcrt.LK_UNLCK, 1) 135 | except: 136 | pass 137 | 138 | atexit.register(_unlock_file) 139 | 140 | @staticmethod 141 | def get_logger(): 142 | logger = logging.getLogger('flask_kafka') 143 | ch = logging.StreamHandler(sys.stdout) 144 | ch.setLevel(logging.INFO) 145 | formatter = logging.Formatter('[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s') 146 | ch.setFormatter(formatter) 147 | logger.addHandler(ch) 148 | logger.setLevel(logging.INFO) 149 | return logger 150 | -------------------------------------------------------------------------------- /flask_kafka/producer.py: -------------------------------------------------------------------------------- 1 | from confluent_kafka import Producer 2 | 3 | 4 | class KafkaProducer(object): 5 | 6 | def __init__(self, **conf): 7 | self._producer = Producer(conf) 8 | 9 | def __getattr__(self, item): 10 | return getattr(self._producer, item) 11 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NimzyMaina/flask_kafka/6f90d37b780ecc360dd6d0ad91ed023dc2b2f56a/requirements.txt -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # please install python if it is not present in the system 2 | from setuptools import setup 3 | 4 | with open("README.md", "r") as fh: 5 | long_description = fh.read() 6 | 7 | setup( 8 | name='flask-kafka', 9 | version='0.1.0', 10 | packages=['flask_kafka'], 11 | install_requires=['kafka-python'], 12 | license='MIT', 13 | description='An easy to use kafka consumer that uses the confluent kafka library, it runs concurrently with your ' 14 | 'flask server', 15 | author='Nimrod Kevin Maina', 16 | author_email='nimzy.maina@gmail.com', 17 | keywords=['kafka', 'consumer', 'kafkaesque', 'flask', 'simple', 'consumer', 'flask style', 'decorator'], 18 | long_description=long_description, 19 | long_description_content_type="text/markdown", 20 | url="https://github.com/nimzymaina/flask_kafka", 21 | include_package_data=True, 22 | ) 23 | --------------------------------------------------------------------------------