├── .gitignore ├── Dockerfile ├── README.md ├── consumer.py ├── data.json ├── docker-compose.yml ├── kafka.ipynb ├── main.py ├── producer.py ├── product.json └── requirements.txt /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow
105 | __pypackages__/
106 | 
107 | # Celery stuff
108 | celerybeat-schedule
109 | celerybeat.pid
110 | 
111 | # SageMath parsed files
112 | *.sage.py
113 | 
114 | # Environments
115 | .env
116 | .venv
117 | env/
118 | venv/
119 | ENV/
120 | env.bak/
121 | venv.bak/
122 | 
123 | # Spyder project settings
124 | .spyderproject
125 | .spyproject
126 | 
127 | # Rope project settings
128 | .ropeproject
129 | 
130 | # mkdocs documentation
131 | /site
132 | 
133 | # mypy
134 | .mypy_cache/
135 | .dmypy.json
136 | dmypy.json
137 | 
138 | # Pyre type checker
139 | .pyre/
140 | 
141 | # pytype static type analyzer
142 | .pytype/
143 | 
144 | # Cython debug symbols
145 | cython_debug/
146 | 
147 | # PyCharm
148 | # The JetBrains-specific template is maintained in a separate JetBrains.gitignore that can
149 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
150 | # and can be added to the global gitignore or merged into this file. For a more nuclear
151 | # option (not recommended) you can uncomment the following to ignore the entire .idea folder.
152 | #.idea/
153 | 
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM tiangolo/uvicorn-gunicorn:python3.9-slim
2 | 
3 | # set the work directory
4 | WORKDIR /app
5 | 
6 | # install dependencies
7 | COPY requirements.txt .
8 | RUN pip install --no-cache-dir -r requirements.txt
9 | 
10 | # copy the project
11 | COPY . .
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kafka With Python
2 | 
3 | ## Setup
4 | 
5 | The [kafka.ipynb](https://github.com/bhimrazy/kafka-in-docker-and-python-fastapi/blob/main/kafka.ipynb) notebook can be used to get hands-on with kafka-python.
6 | 
7 | ```sh
8 | # Start the containers in detached mode
9 | $ docker-compose up -d
10 | 
11 | # Get a bash shell inside the kafka container
12 | $ docker exec -it kafka bash
13 | 
14 | # Create a topic named quickstart
15 | $ kafka-topics --bootstrap-server kafka:9092 --create --topic quickstart
16 | 
17 | # List topics
18 | $ kafka-topics --list --bootstrap-server kafka:9092
19 | 
20 | ```
21 | 
22 | ## Write messages to the topic
23 | 
24 | ```sh
25 | # Get a bash shell inside the kafka container
26 | $ docker exec -it kafka bash
27 | 
28 | # Produce some messages
29 | $ kafka-console-producer --bootstrap-server kafka:9092 --topic quickstart
30 | > this is my first kafka message
31 | > hello world!
32 | > this is my second kafka message.
33 | 
34 | # When you’ve finished, press Ctrl-D to return to your command prompt.
35 | 
36 | ```
37 | 
38 | ## Read messages from the topic
39 | 
40 | ```sh
41 | # Get a bash shell inside the kafka container
42 | $ docker exec -it kafka bash
43 | 
44 | # Consume all messages from the beginning of the topic
45 | $ kafka-console-consumer --bootstrap-server kafka:9092 --topic quickstart --from-beginning
46 | 
47 | # When you’ve finished, press Ctrl-C to return to your command prompt.
48 | 
49 | ```
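The same write/read round trip can also be done from Python with kafka-python instead of the console tools. A minimal sketch, assuming the broker is reachable on `localhost:9092` (as mapped in `docker-compose.yml`) and the `quickstart` topic already exists:

```python
from kafka import KafkaConsumer, KafkaProducer

# Produce a couple of plain-byte messages to the quickstart topic.
producer = KafkaProducer(bootstrap_servers=["localhost:9092"])
producer.send("quickstart", b"this is my first kafka message")
producer.send("quickstart", b"hello world!")
producer.flush()  # block until the sends are acknowledged

# Read everything from the beginning, but give up after 5 s of silence
# instead of blocking forever like the console consumer does.
consumer = KafkaConsumer(
    "quickstart",
    bootstrap_servers=["localhost:9092"],
    auto_offset_reset="earliest",
    consumer_timeout_ms=5000,
)
for message in consumer:
    print(message.value.decode("utf-8"))
```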
50 | 
51 | ## Stop the Kafka broker
52 | 
53 | ```sh
54 | # Stop and remove the containers
55 | $ docker-compose down
56 | ```
57 | 
58 | ## Author
59 | 
60 | - [Bhimraj Yadav (@bhimrazy)](https://github.com/bhimrazy)
61 | 
62 | ## 🧾 Resources
63 | 
64 | - [Apache Kafka® Quick Start](https://developer.confluent.io/quickstart/kafka-docker/)
65 | 
--------------------------------------------------------------------------------
/consumer.py:
--------------------------------------------------------------------------------
1 | import json
2 | 
3 | from kafka import KafkaConsumer
4 | 
5 | # channel
6 | topic = 'app'
7 | 
8 | # consumer; 'latest' means only messages produced after startup are read
9 | consumer = KafkaConsumer(topic, bootstrap_servers=[
10 |                          'localhost:9092'], auto_offset_reset='latest', value_deserializer=lambda x: json.loads(x.decode('utf-8')))
11 | 
12 | 
13 | def write_to_file(file, value):
14 |     """Append value to the 'data' list inside <file>.json."""
15 |     with open(f"{file}.json", "r+") as f:
16 |         data = json.load(f)
17 |         data['data'].append(value)
18 |         f.seek(0)
19 |         json.dump(data, f)
20 | 
21 | 
22 | for message in consumer:
23 |     print("Consuming.....")
24 |     print("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition,
25 |                                          message.offset, message.key, message.value))
26 |     if message.key == b'create_product':
27 |         write_to_file(file="product", value=message.value)
28 |         print("Product written to file successfully.")
29 | 
30 |     if message.key == b'create_data':
31 |         write_to_file(file="data", value=message.value)
32 |         print("Data written to file successfully.")
--------------------------------------------------------------------------------
/data.json:
--------------------------------------------------------------------------------
1 | {"data": [{"item_name": "Dummy", "item_price": 40000.0, "item_description": "this is description"}, {"item_name": "New", "item_price": 40000.0, "item_description": "this is description"}]}
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 | 
3 | services:
4 |   zookeeper:
5 |     image: confluentinc/cp-zookeeper:6.2.0
6 |     container_name: zookeeper
7 |     networks:
8 |       - broker-kafka
9 |     ports:
10 |       - "2181:2181"
11 |     environment:
12 |       ZOOKEEPER_CLIENT_PORT: 2181
13 |       ZOOKEEPER_TICK_TIME: 2000
14 | 
15 |   kafka:
16 |     image: confluentinc/cp-kafka:6.2.0
17 |     container_name: kafka
18 |     networks:
19 |       - broker-kafka
20 |     depends_on:
21 |       - zookeeper
22 |     ports:
23 |       - "9092:9092"
24 |     environment:
25 |       KAFKA_BROKER_ID: 1
26 |       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
27 |       # Clients inside the compose network connect via kafka:29092;
28 |       # clients on the host connect via localhost:9092.
29 |       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
30 |       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
31 |       KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
32 |       # A single broker cannot satisfy the default replication factor
33 |       # of 3 for the internal offsets topic.
34 |       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
35 |       # KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
36 |       # KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
37 |       # KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
38 |       # KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
39 |       # KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
40 | 
41 | networks:
42 |   broker-kafka:
43 |     driver: bridge
--------------------------------------------------------------------------------
/kafka.ipynb:
--------------------------------------------------------------------------------
1 | {
2 |  "cells": [
3 |   {
4 |    "cell_type": "code",
5 |    "execution_count": 1,
6 | 
"metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "# import libraries\n", 10 | "\n", 11 | "from time import sleep\n", 12 | "import json\n", 13 | "from kafka import KafkaProducer\n", 14 | "from kafka import KafkaConsumer" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 2, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "#channel\n", 24 | "topic='app'\n", 25 | "\n", 26 | "# producer\n", 27 | "producer = KafkaProducer(bootstrap_servers=['localhost:9092'],value_serializer=lambda x: json.dumps(x).encode('utf-8'))\n", 28 | "\n", 29 | "#consumer\n", 30 | "consumer = KafkaConsumer(topic, bootstrap_servers=['localhost:9092'],auto_offset_reset='earliest',value_deserializer=lambda x:json.loads(x.decode('utf-8')))\n" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 3, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "for e in range(5):\n", 40 | " data = {'number' : e}\n", 41 | " producer.send(topic, value=data)\n", 42 | " sleep(1)" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 4, 48 | "metadata": {}, 49 | "outputs": [ 50 | { 51 | "name": "stdout", 52 | "output_type": "stream", 53 | "text": [ 54 | "Message :{'number': 0}\n", 55 | "Message :{'number': 1}\n", 56 | "Message :{'number': 2}\n", 57 | "Message :{'number': 3}\n", 58 | "Message :{'number': 4}\n" 59 | ] 60 | }, 61 | { 62 | "ename": "KeyboardInterrupt", 63 | "evalue": "", 64 | "output_type": "error", 65 | "traceback": [ 66 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 67 | "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", 68 | "\u001b[1;32m/workspace/kafka-in-docker-and-python-fastapi/kafka.ipynb Cell 4'\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[39mfor\u001b[39;00m message \u001b[39min\u001b[39;00m consumer:\n\u001b[1;32m 2\u001b[0m message \u001b[39m=\u001b[39m message\u001b[39m.\u001b[39mvalue\n\u001b[1;32m 3\u001b[0m \u001b[39mprint\u001b[39m(\u001b[39m'\u001b[39m\u001b[39mMessage :\u001b[39m\u001b[39m{}\u001b[39;00m\u001b[39m'\u001b[39m\u001b[39m.\u001b[39mformat(message))\n", 69 | "File \u001b[0;32m/workspace/kafka-in-docker-and-python-fastapi/venv/lib/python3.8/site-packages/kafka/consumer/group.py:1193\u001b[0m, in \u001b[0;36mKafkaConsumer.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1191\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mnext_v1()\n\u001b[1;32m 1192\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[0;32m-> 1193\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mnext_v2()\n", 70 | "File \u001b[0;32m/workspace/kafka-in-docker-and-python-fastapi/venv/lib/python3.8/site-packages/kafka/consumer/group.py:1201\u001b[0m, in \u001b[0;36mKafkaConsumer.next_v2\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1199\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_iterator \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_message_generator_v2()\n\u001b[1;32m 1200\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m-> 1201\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mnext\u001b[39;49m(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_iterator)\n\u001b[1;32m 1202\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mStopIteration\u001b[39;00m:\n\u001b[1;32m 1203\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_iterator \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n", 71 | "File 
\u001b[0;32m/workspace/kafka-in-docker-and-python-fastapi/venv/lib/python3.8/site-packages/kafka/consumer/group.py:1116\u001b[0m, in \u001b[0;36mKafkaConsumer._message_generator_v2\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1114\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_message_generator_v2\u001b[39m(\u001b[39mself\u001b[39m):\n\u001b[1;32m 1115\u001b[0m timeout_ms \u001b[39m=\u001b[39m \u001b[39m1000\u001b[39m \u001b[39m*\u001b[39m (\u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_consumer_timeout \u001b[39m-\u001b[39m time\u001b[39m.\u001b[39mtime())\n\u001b[0;32m-> 1116\u001b[0m record_map \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mpoll(timeout_ms\u001b[39m=\u001b[39;49mtimeout_ms, update_offsets\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m)\n\u001b[1;32m 1117\u001b[0m \u001b[39mfor\u001b[39;00m tp, records \u001b[39min\u001b[39;00m six\u001b[39m.\u001b[39miteritems(record_map):\n\u001b[1;32m 1118\u001b[0m \u001b[39m# Generators are stateful, and it is possible that the tp / records\u001b[39;00m\n\u001b[1;32m 1119\u001b[0m \u001b[39m# here may become stale during iteration -- i.e., we seek to a\u001b[39;00m\n\u001b[1;32m 1120\u001b[0m \u001b[39m# different offset, pause consumption, or lose assignment.\u001b[39;00m\n\u001b[1;32m 1121\u001b[0m \u001b[39mfor\u001b[39;00m record \u001b[39min\u001b[39;00m records:\n\u001b[1;32m 1122\u001b[0m \u001b[39m# is_fetchable(tp) should handle assignment changes and offset\u001b[39;00m\n\u001b[1;32m 1123\u001b[0m \u001b[39m# resets; for all other changes (e.g., seeks) we'll rely on the\u001b[39;00m\n\u001b[1;32m 1124\u001b[0m \u001b[39m# outer function destroying the existing iterator/generator\u001b[39;00m\n\u001b[1;32m 1125\u001b[0m \u001b[39m# via self._iterator = None\u001b[39;00m\n", 72 | "File \u001b[0;32m/workspace/kafka-in-docker-and-python-fastapi/venv/lib/python3.8/site-packages/kafka/consumer/group.py:655\u001b[0m, in \u001b[0;36mKafkaConsumer.poll\u001b[0;34m(self, timeout_ms, max_records, update_offsets)\u001b[0m\n\u001b[1;32m 653\u001b[0m remaining \u001b[39m=\u001b[39m timeout_ms\n\u001b[1;32m 654\u001b[0m \u001b[39mwhile\u001b[39;00m \u001b[39mTrue\u001b[39;00m:\n\u001b[0;32m--> 655\u001b[0m records \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_poll_once(remaining, max_records, update_offsets\u001b[39m=\u001b[39;49mupdate_offsets)\n\u001b[1;32m 656\u001b[0m \u001b[39mif\u001b[39;00m records:\n\u001b[1;32m 657\u001b[0m \u001b[39mreturn\u001b[39;00m records\n", 73 | "File \u001b[0;32m/workspace/kafka-in-docker-and-python-fastapi/venv/lib/python3.8/site-packages/kafka/consumer/group.py:702\u001b[0m, in \u001b[0;36mKafkaConsumer._poll_once\u001b[0;34m(self, timeout_ms, max_records, update_offsets)\u001b[0m\n\u001b[1;32m 699\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_client\u001b[39m.\u001b[39mpoll(timeout_ms\u001b[39m=\u001b[39m\u001b[39m0\u001b[39m)\n\u001b[1;32m 701\u001b[0m timeout_ms \u001b[39m=\u001b[39m \u001b[39mmin\u001b[39m(timeout_ms, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_coordinator\u001b[39m.\u001b[39mtime_to_next_poll() \u001b[39m*\u001b[39m \u001b[39m1000\u001b[39m)\n\u001b[0;32m--> 702\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_client\u001b[39m.\u001b[39;49mpoll(timeout_ms\u001b[39m=\u001b[39;49mtimeout_ms)\n\u001b[1;32m 703\u001b[0m \u001b[39m# after the long poll, we should check whether the group needs to rebalance\u001b[39;00m\n\u001b[1;32m 704\u001b[0m \u001b[39m# prior to returning data so that the group can 
stabilize faster\u001b[39;00m\n\u001b[1;32m 705\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_coordinator\u001b[39m.\u001b[39mneed_rejoin():\n", 74 | "File \u001b[0;32m/workspace/kafka-in-docker-and-python-fastapi/venv/lib/python3.8/site-packages/kafka/client_async.py:602\u001b[0m, in \u001b[0;36mKafkaClient.poll\u001b[0;34m(self, timeout_ms, future)\u001b[0m\n\u001b[1;32m 599\u001b[0m timeout \u001b[39m=\u001b[39m \u001b[39mmin\u001b[39m(timeout, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mconfig[\u001b[39m'\u001b[39m\u001b[39mretry_backoff_ms\u001b[39m\u001b[39m'\u001b[39m])\n\u001b[1;32m 600\u001b[0m timeout \u001b[39m=\u001b[39m \u001b[39mmax\u001b[39m(\u001b[39m0\u001b[39m, timeout) \u001b[39m# avoid negative timeouts\u001b[39;00m\n\u001b[0;32m--> 602\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_poll(timeout \u001b[39m/\u001b[39;49m \u001b[39m1000\u001b[39;49m)\n\u001b[1;32m 604\u001b[0m \u001b[39m# called without the lock to avoid deadlock potential\u001b[39;00m\n\u001b[1;32m 605\u001b[0m \u001b[39m# if handlers need to acquire locks\u001b[39;00m\n\u001b[1;32m 606\u001b[0m responses\u001b[39m.\u001b[39mextend(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_fire_pending_completed_requests())\n", 75 | "File \u001b[0;32m/workspace/kafka-in-docker-and-python-fastapi/venv/lib/python3.8/site-packages/kafka/client_async.py:634\u001b[0m, in \u001b[0;36mKafkaClient._poll\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m 631\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_register_send_sockets()\n\u001b[1;32m 633\u001b[0m start_select \u001b[39m=\u001b[39m time\u001b[39m.\u001b[39mtime()\n\u001b[0;32m--> 634\u001b[0m ready \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_selector\u001b[39m.\u001b[39;49mselect(timeout)\n\u001b[1;32m 635\u001b[0m end_select \u001b[39m=\u001b[39m time\u001b[39m.\u001b[39mtime()\n\u001b[1;32m 636\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_sensors:\n", 76 | "File \u001b[0;32m~/.pyenv/versions/3.8.12/lib/python3.8/selectors.py:468\u001b[0m, in \u001b[0;36mEpollSelector.select\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m 466\u001b[0m ready \u001b[39m=\u001b[39m []\n\u001b[1;32m 467\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 468\u001b[0m fd_event_list \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_selector\u001b[39m.\u001b[39;49mpoll(timeout, max_ev)\n\u001b[1;32m 469\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mInterruptedError\u001b[39;00m:\n\u001b[1;32m 470\u001b[0m \u001b[39mreturn\u001b[39;00m ready\n", 77 | "\u001b[0;31mKeyboardInterrupt\u001b[0m: " 78 | ] 79 | } 80 | ], 81 | "source": [ 82 | "for message in consumer:\n", 83 | " message = message.value\n", 84 | " print('Message :{}'.format(message))" 85 | ] 86 | } 87 | ], 88 | "metadata": { 89 | "interpreter": { 90 | "hash": "9548dade6ec062757d6e7da46beadf02f50a02bb5f1690300b8b6588a0bf327e" 91 | }, 92 | "kernelspec": { 93 | "display_name": "Python 3.8.12 64-bit ('venv': venv)", 94 | "language": "python", 95 | "name": "python3" 96 | }, 97 | "language_info": { 98 | "codemirror_mode": { 99 | "name": "ipython", 100 | "version": 3 101 | }, 102 | "file_extension": ".py", 103 | "mimetype": "text/x-python", 104 | "name": "python", 105 | "nbconvert_exporter": "python", 106 | "pygments_lexer": "ipython3", 107 | "version": "3.8.12" 108 | }, 109 | "orig_nbformat": 4 110 | }, 111 | "nbformat": 4, 112 | "nbformat_minor": 2 113 | } 114 | 
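Note that the notebook’s last cell blocks indefinitely: iterating a `KafkaConsumer` polls forever, which is why the recorded run ends in a `KeyboardInterrupt`. If the loop should terminate on its own, kafka-python’s `consumer_timeout_ms` option stops the iterator after a quiet period. A small sketch, assuming the same `app` topic and local broker as in the notebook:

```python
import json
from kafka import KafkaConsumer

# Stop iterating after 10 s without a new message instead of blocking forever.
consumer = KafkaConsumer(
    'app',
    bootstrap_servers=['localhost:9092'],
    auto_offset_reset='earliest',
    consumer_timeout_ms=10000,
    value_deserializer=lambda x: json.loads(x.decode('utf-8')),
)

for message in consumer:
    print('Message :{}'.format(message.value))

print('No new messages for 10 s; consumer loop finished.')
```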
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | import json
2 | import datetime
3 | from typing import Optional
4 | from fastapi import BackgroundTasks, FastAPI
5 | from producer import publish
6 | 
7 | app = FastAPI()
8 | 
9 | 
10 | @app.get("/")
11 | def home():
12 |     return {"message": "Hello! Welcome to my app."}
13 | 
14 | 
15 | @app.post("/api/v1/product")
16 | async def create_product(name: str, price: float, background_tasks: BackgroundTasks, description: Optional[str] = None):
17 |     start_time = datetime.datetime.now()
18 |     data = {"item_name": name, "item_price": price,
19 |             "item_description": description}
20 |     # publish to Kafka in the background so the request returns immediately
21 |     background_tasks.add_task(publish, method="create_product", body=data)
22 |     end_time = datetime.datetime.now()
23 |     time_diff = end_time - start_time
24 |     execution_time = f'{round(time_diff.total_seconds() * 1000)} ms'
25 |     return {'message': 'Success', 'data': data, 'execution_time': execution_time}
26 | 
27 | 
28 | @app.get("/api/v1/product")
29 | async def get_products():
30 |     start_time = datetime.datetime.now()
31 |     with open("product.json") as f:
32 |         data = json.load(f)
33 |     end_time = datetime.datetime.now()
34 |     time_diff = end_time - start_time
35 |     execution_time = f'{round(time_diff.total_seconds() * 1000)} ms'
36 |     return {'message': 'Success', 'data': data, 'execution_time': execution_time}
37 | 
38 | 
39 | @app.post("/api/v1/data")
40 | async def create_data(name: str, price: float, background_tasks: BackgroundTasks, description: Optional[str] = None):
41 |     start_time = datetime.datetime.now()
42 |     data = {"item_name": name, "item_price": price,
43 |             "item_description": description}
44 |     background_tasks.add_task(publish, method="create_data", body=data)
45 |     end_time = datetime.datetime.now()
46 |     time_diff = end_time - start_time
47 |     execution_time = f'{round(time_diff.total_seconds() * 1000)} ms'
48 |     return {'message': 'Success', 'data': data, 'execution_time': execution_time}
--------------------------------------------------------------------------------
/producer.py:
--------------------------------------------------------------------------------
1 | import json
2 | 
3 | from kafka import KafkaProducer
4 | 
5 | # channel
6 | topic = 'app'
7 | 
8 | # producer
9 | producer = KafkaProducer(bootstrap_servers=[
10 |                          'localhost:9092'], value_serializer=lambda x: json.dumps(x).encode('utf-8'))
11 | 
12 | 
13 | def on_send_success(record_metadata):
14 |     print(record_metadata.topic)
15 |     print(record_metadata.partition)
16 |     print(record_metadata.offset)
17 | 
18 | 
19 | def on_send_error(excp):
20 |     # surface failures instead of swallowing them silently
21 |     print(f'Failed to publish message: {excp}')
22 | 
23 | 
24 | def publish(method: str, body: dict):
25 |     producer.send(topic, key=method.encode('UTF-8'), value=body).add_callback(
26 |         on_send_success).add_errback(on_send_error)
27 |     print(f'Topic :{topic} Key :{method} published.')
28 | 
29 |     # block until all async messages are sent
30 |     producer.flush()
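With the stack running (`docker-compose up -d`, `python consumer.py` in one terminal, and the FastAPI app served in another), the publish/consume flow can be exercised end to end through the API. A hedged sketch using only the standard library; the port 8000 and the sample field values are assumptions, not part of the repo:

```python
import json
import urllib.parse
import urllib.request

# main.py declares name/price/description as plain function parameters,
# so FastAPI reads them from the query string.
params = urllib.parse.urlencode({
    "name": "Dummy",
    "price": 40000.0,
    "description": "this is description",
})
req = urllib.request.Request(
    f"http://localhost:8000/api/v1/product?{params}", method="POST")
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))  # {'message': 'Success', 'data': {...}, ...}

# Shortly afterwards, consumer.py should have appended the item to
# product.json, which the GET endpoint reads back.
with urllib.request.urlopen("http://localhost:8000/api/v1/product") as resp:
    print(json.load(resp))
```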
{"item_name": "Redmi Note 8", "item_price": 16000.0, "item_description": "MI"}, {"item_name": "Hp Pavillion", "item_price": 100000.0, "item_description": "Hp"}, {"item_name": "Speakerhh", "item_price": 15555.0, "item_description": "This is speaker."}, {"item_name": "A modern Approach to AI", "item_price": 1000.0, "item_description": "Book"}]} -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | anyio==3.5.0 2 | asgiref==3.5.0 3 | click==8.0.3 4 | colorama==0.4.4 5 | fastapi==0.73.0 6 | h11==0.13.0 7 | httptools==0.3.0 8 | idna==3.3 9 | kafka-python==2.0.2 10 | pydantic==1.9.0 11 | python-dotenv==0.19.2 12 | PyYAML==6.0 13 | sniffio==1.2.0 14 | starlette==0.17.1 15 | typing_extensions==4.0.1 16 | uvicorn==0.17.0.post1 17 | watchgod==0.7 18 | websockets==10.1 19 | --------------------------------------------------------------------------------