├── inference_service
│   ├── protos
│   │   ├── __init__.py
│   │   ├── __pycache__
│   │   │   ├── __init__.cpython-312.pyc
│   │   │   ├── inference_pb2.cpython-312.pyc
│   │   │   └── inference_pb2_grpc.cpython-312.pyc
│   │   ├── inference.proto
│   │   ├── inference_pb2.py
│   │   └── inference_pb2_grpc.py
│   ├── requirements.txt
│   ├── __pycache__
│   │   ├── server.cpython-312.pyc
│   │   └── inference_pb2.cpython-312.pyc
│   └── server.py
├── streaming_simulator
│   ├── requirements.txt
│   ├── test_video.mp4
│   ├── __pycache__
│   │   ├── consumer.cpython-312.pyc
│   │   ├── inference_pb2.cpython-312.pyc
│   │   └── inference_pb2_grpc.cpython-312.pyc
│   ├── producer.py
│   └── consumer.py
├── producer.log
├── docker-compose.yml
├── server.log
├── Dockerfile
├── streamlit_app.py
├── README.md
├── consumer.log
└── imagenet_labels.json

--------------------------------------------------------------------------------
/inference_service/protos/__init__.py:
--------------------------------------------------------------------------------
1 | # This file makes the protos directory a Python package

--------------------------------------------------------------------------------
/streaming_simulator/requirements.txt:
--------------------------------------------------------------------------------
1 | kafka-python-ng
2 | opencv-python-headless
3 | grpcio
4 | grpcio-tools
5 | numpy

--------------------------------------------------------------------------------
/streaming_simulator/test_video.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/streaming_simulator/test_video.mp4

--------------------------------------------------------------------------------
/inference_service/requirements.txt:
--------------------------------------------------------------------------------
1 | grpcio
2 | grpcio-tools
3 | torch
4 | torchvision
5 | opencv-python-headless
6 | numpy
7 | Pillow
8 | requests

--------------------------------------------------------------------------------
/inference_service/__pycache__/server.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/inference_service/__pycache__/server.cpython-312.pyc

--------------------------------------------------------------------------------
/producer.log:
--------------------------------------------------------------------------------
1 | %6|1760084367.993|GETSUBSCRIPTIONS|Maxprogrammer007#producer-1| [thrd:main]: Telemetry client instance id changed from AAAAAAAAAAAAAAAAAAAAAA to jrLwWFSDS9GjMB3GdWTbFA
2 | 

--------------------------------------------------------------------------------
/streaming_simulator/__pycache__/consumer.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/streaming_simulator/__pycache__/consumer.cpython-312.pyc

--------------------------------------------------------------------------------
/inference_service/__pycache__/inference_pb2.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/inference_service/__pycache__/inference_pb2.cpython-312.pyc

--------------------------------------------------------------------------------
/inference_service/protos/__pycache__/__init__.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/inference_service/protos/__pycache__/__init__.cpython-312.pyc

--------------------------------------------------------------------------------
/streaming_simulator/__pycache__/inference_pb2.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/streaming_simulator/__pycache__/inference_pb2.cpython-312.pyc

--------------------------------------------------------------------------------
/inference_service/protos/__pycache__/inference_pb2.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/inference_service/protos/__pycache__/inference_pb2.cpython-312.pyc

--------------------------------------------------------------------------------
/streaming_simulator/__pycache__/inference_pb2_grpc.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/streaming_simulator/__pycache__/inference_pb2_grpc.cpython-312.pyc

--------------------------------------------------------------------------------
/inference_service/protos/__pycache__/inference_pb2_grpc.cpython-312.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/maxprogrammer007/ml-streaming-pipeline/main/inference_service/protos/__pycache__/inference_pb2_grpc.cpython-312.pyc

--------------------------------------------------------------------------------
/inference_service/protos/inference.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 | 
3 | package inference;
4 | 
5 | // The inference service definition.
6 | service InferenceService {
7 |   // A remote procedure call (RPC) named "Predict".
8 |   // It accepts an ImageRequest message and returns a PredictionResponse message.
9 |   rpc Predict(ImageRequest) returns (PredictionResponse);
10 | }
11 | 
12 | // The request message format.
13 | message ImageRequest {
14 |   // It contains a single field: the image data, represented as raw bytes.
15 |   // The "1" is the field number, which is a unique ID.
16 |   bytes image_data = 1;
17 | }
18 | 
19 | // The response message format.
20 | message PredictionResponse {
21 |   // It contains a single field: the predicted class label as a string.
22 |   string prediction = 1;
23 | }
--------------------------------------------------------------------------------
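
The contract above is everything a client needs to call the service. A minimal sketch of such a call, assuming the stubs have been generated into `inference_service/protos` (as checked into this repo), a server is listening on `localhost:50051`, and `sample.jpg` stands in for any encoded image:

```python
import grpc

from inference_service.protos import inference_pb2, inference_pb2_grpc

# Read raw image bytes; "sample.jpg" is a placeholder path.
with open("sample.jpg", "rb") as f:
    image_bytes = f.read()

channel = grpc.insecure_channel("localhost:50051")
stub = inference_pb2_grpc.InferenceServiceStub(channel)

# bytes in (ImageRequest.image_data), string out (PredictionResponse.prediction).
response = stub.Predict(inference_pb2.ImageRequest(image_data=image_bytes))
print(response.prediction)
```
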
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | # Specifies the version of the Docker Compose file format.
2 | version: '3.8'
3 | 
4 | # Defines the services (containers) that make up our application.
5 | services:
6 |   # 1. The Redpanda message broker service.
7 |   redpanda:
8 |     # Use a pre-built image for Redpanda.
9 |     image: docker.redpanda.com/redpandadata/redpanda:v22.2.2
10 |     command:
11 |       - redpanda
12 |       - start
13 |       - --smp 1
14 |       - --overprovisioned
15 |       - --node-id 0
16 |       - --kafka-addr PLAINTEXT://0.0.0.0:29092,OUTSIDE://0.0.0.0:9092
17 |       - --advertise-kafka-addr PLAINTEXT://redpanda:29092,OUTSIDE://localhost:9092
18 |     ports:
19 |       # Maps the container's Kafka port 9092 to our local machine's port 9092.
20 |       - "9092:9092"
21 |       # Maps the admin port.
22 |       - "9644:9644"
23 | 
24 |   # 2. Our custom gRPC inference service.
25 |   inference_service:
26 |     build:
27 |       # Tells Docker Compose to build the image from the Dockerfile in the current directory.
28 |       context: .
29 |       dockerfile: Dockerfile
30 |     ports:
31 |       # Maps the container's gRPC port 50051 to our local machine's port 50051.
32 |       - "50051:50051"
--------------------------------------------------------------------------------
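
The two `--kafka-addr` listeners determine how clients must address the broker: the internal listener is advertised as `redpanda:29092` (reachable only on the compose network) and the `OUTSIDE` listener as `localhost:9092` (published to the host). A sketch of how a client would pick between them — note the project's own producer and consumer actually point at Confluent Cloud instead of this local broker:

```python
from confluent_kafka import Producer

# From the host machine, connect via the OUTSIDE listener published on 9092.
host_client = Producer({"bootstrap.servers": "localhost:9092"})

# From another container on the same compose network, use the internal
# listener advertised as redpanda:29092.
in_network_client = Producer({"bootstrap.servers": "redpanda:29092"})
```
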
--------------------------------------------------------------------------------
/server.log:
--------------------------------------------------------------------------------
1 | WARNING: All log messages before absl::InitializeLog() is called are written to STDERR
2 | E0000 00:00:1760084337.939876    7556 add_port.cc:83] Failed to add port to server: No address added out of total 1 resolved for '[::]:50051'
3 | Traceback (most recent call last):
4 |   File "<frozen runpy>", line 198, in _run_module_as_main
5 |   File "<frozen runpy>", line 88, in _run_code
6 |   File "C:\Users\abhin\OneDrive\Documents\GitHub\ml-streaming-pipeline\inference_service\server.py", line 102, in <module>
7 |     serve()
8 |   File "C:\Users\abhin\OneDrive\Documents\GitHub\ml-streaming-pipeline\inference_service\server.py", line 96, in serve
9 |     server.add_insecure_port(f"[::]:{port}")
10 |   File "C:\Users\abhin\AppData\Local\Programs\Python\Python312\Lib\site-packages\grpc\_server.py", line 1481, in add_insecure_port
11 |     return _common.validate_port_binding_result(
12 |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
13 |   File "C:\Users\abhin\AppData\Local\Programs\Python\Python312\Lib\site-packages\grpc\_common.py", line 181, in validate_port_binding_result
14 |     raise RuntimeError(_ERROR_MESSAGE_PORT_BINDING_FAILED % address)
15 | RuntimeError: Failed to bind to address [::]:50051; set GRPC_VERBOSITY=debug environment variable to see detailed error message.
16 | Loading ResNet18 model...
17 | Model and labels loaded successfully.

--------------------------------------------------------------------------------
/inference_service/protos/inference_pb2.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by the protocol buffer compiler.  DO NOT EDIT!
3 | # NO CHECKED-IN PROTOBUF GENCODE
4 | # source: inference.proto
5 | # Protobuf Python Version: 6.31.1
6 | """Generated protocol buffer code."""
7 | from google.protobuf import descriptor as _descriptor
8 | from google.protobuf import descriptor_pool as _descriptor_pool
9 | from google.protobuf import runtime_version as _runtime_version
10 | from google.protobuf import symbol_database as _symbol_database
11 | from google.protobuf.internal import builder as _builder
12 | _runtime_version.ValidateProtobufRuntimeVersion(
13 |     _runtime_version.Domain.PUBLIC,
14 |     6,
15 |     31,
16 |     1,
17 |     '',
18 |     'inference.proto'
19 | )
20 | # @@protoc_insertion_point(imports)
21 | 
22 | _sym_db = _symbol_database.Default()
23 | 
24 | 
25 | 
26 | 
27 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0finference.proto\x12\tinference\"\"\n\x0cImageRequest\x12\x12\n\nimage_data\x18\x01 \x01(\x0c\"(\n\x12PredictionResponse\x12\x12\n\nprediction\x18\x01 \x01(\t2U\n\x10InferenceService\x12\x41\n\x07Predict\x12\x17.inference.ImageRequest\x1a\x1d.inference.PredictionResponseb\x06proto3')
28 | 
29 | _globals = globals()
30 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
31 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'inference_pb2', _globals)
32 | if not _descriptor._USE_C_DESCRIPTORS:
33 |   DESCRIPTOR._loaded_options = None
34 |   _globals['_IMAGEREQUEST']._serialized_start=30
35 |   _globals['_IMAGEREQUEST']._serialized_end=64
36 |   _globals['_PREDICTIONRESPONSE']._serialized_start=66
37 |   _globals['_PREDICTIONRESPONSE']._serialized_end=106
38 |   _globals['_INFERENCESERVICE']._serialized_start=108
39 |   _globals['_INFERENCESERVICE']._serialized_end=193
40 | # @@protoc_insertion_point(module_scope)
41 | 
--------------------------------------------------------------------------------
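
The generated module exposes `ImageRequest` and `PredictionResponse` as ordinary protobuf message classes. A small round-trip sketch (the byte string is a placeholder, not real image data):

```python
from inference_service.protos import inference_pb2

# Construct, serialize, and parse a request.
request = inference_pb2.ImageRequest(image_data=b"\xff\xd8\xff")  # fake JPEG prefix
wire_bytes = request.SerializeToString()

decoded = inference_pb2.ImageRequest.FromString(wire_bytes)
assert decoded.image_data == request.image_data
```
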
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # --- Stage 1: The "Builder" ---
2 | # This stage prepares all our dependencies and generated code.
3 | FROM python:3.9-slim as builder
4 | 
5 | WORKDIR /app
6 | 
7 | # First, install only the tool needed to generate gRPC code.
8 | RUN pip install grpcio-tools
9 | 
10 | # Copy only the files needed to generate the gRPC stubs.
11 | COPY inference_service/requirements.txt .
12 | COPY inference_service/protos/inference.proto ./protos/inference.proto
13 | 
14 | # Create an empty __init__.py so Python treats protos/ as a package.
15 | RUN touch protos/__init__.py
16 | 
17 | # Generate the gRPC Python code inside this stage. With these flags the
18 | # stubs (inference_pb2.py, inference_pb2_grpc.py) land in /app, not /app/protos.
19 | RUN python -m grpc_tools.protoc \
20 |     -I./protos \
21 |     --python_out=. \
22 |     --grpc_python_out=. \
23 |     ./protos/inference.proto
24 | 
25 | # Now, install all the application dependencies.
26 | RUN pip install --no-cache-dir -r requirements.txt
27 | 
28 | 
29 | # --- Stage 2: The Final Production Image ---
30 | # This stage builds the final, lightweight image that will actually run.
31 | FROM python:3.9-slim
32 | 
33 | WORKDIR /app
34 | 
35 | # python:3.9-slim does not ship with wget, so install it before fetching
36 | # grpc_health_probe, a small tool to check if our gRPC server is healthy.
37 | RUN apt-get update && apt-get install -y --no-install-recommends wget && \
38 |     rm -rf /var/lib/apt/lists/*
39 | RUN GRPC_HEALTH_PROBE_VERSION=v0.4.15 && \
40 |     wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
41 |     chmod +x /bin/grpc_health_probe
42 | 
43 | # Copy the pre-installed Python libraries from the "builder" stage.
44 | COPY --from=builder /usr/local/lib/python3.9/site-packages /usr/local/lib/python3.9/site-packages
45 | # Copy the generated gRPC stubs from the "builder" stage. They were emitted
46 | # into /app there, so place them next to server.py where it can import them.
47 | COPY --from=builder /app/inference_pb2.py /app/inference_pb2_grpc.py ./
48 | COPY --from=builder /app/protos ./protos
49 | 
50 | # Finally, copy our application's source code.
51 | COPY inference_service/server.py .
52 | 
53 | # Tell Docker that the container listens on port 50051.
54 | EXPOSE 50051
55 | 
56 | # Define a healthcheck to ensure the service is running properly.
57 | HEALTHCHECK --interval=15s --timeout=5s --start-period=10s --retries=3 \
58 |     CMD ["/bin/grpc_health_probe", "-addr=:50051"]
59 | 
60 | # The command that will be executed when the container starts.
61 | CMD ["python", "server.py"]
--------------------------------------------------------------------------------
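
One caveat on the `HEALTHCHECK`: `grpc_health_probe` only reports healthy if the server registers the standard gRPC health service, which `server.py` currently does not. A sketch of what that registration could look like inside `serve()` — it assumes the `grpcio-health-checking` package, which is not in `requirements.txt`, and reuses the `server` object created there:

```python
from grpc_health.v1 import health, health_pb2, health_pb2_grpc

# Register the standard health service so probes like grpc_health_probe
# can ask whether this server is serving.
health_servicer = health.HealthServicer()
health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)

# An empty service name sets the overall (server-wide) status.
health_servicer.set("", health_pb2.HealthCheckResponse.SERVING)
```
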
--------------------------------------------------------------------------------
/streaming_simulator/producer.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import time
3 | import os
4 | import socket
5 | from confluent_kafka import Producer
6 | 
7 | def delivery_report(err, msg):
8 |     """ Called once for each message produced to indicate delivery result. """
9 |     if err is not None:
10 |         print(f'Message delivery failed: {err}')
11 |     else:
12 |         # This can be very verbose, so it's commented out.
13 |         # print(f'Message delivered to {msg.topic()} [{msg.partition()}]')
14 |         pass
15 | 
16 | # --- Confluent Kafka Configuration ---
17 | # Fill these in with your own cluster details (see the Configuration
18 | # section of the README). Never commit real credentials.
19 | kafka_config = {
20 |     'bootstrap.servers': '<YOUR_BOOTSTRAP_SERVER>',
21 |     'sasl.mechanism': 'PLAIN',
22 |     'security.protocol': 'SASL_SSL',
23 |     'sasl.username': '<YOUR_API_KEY>',
24 |     'sasl.password': '<YOUR_API_SECRET>',
25 |     'client.id': socket.gethostname()
26 | }
27 | # --- END CONFIG ---
28 | 
29 | KAFKA_TOPIC = "video-frames"
30 | # Resolve the test video relative to this file so the script is portable.
31 | VIDEO_SOURCE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test_video.mp4")
32 | 
33 | def main():
34 |     print("Starting producer with Confluent client...")
35 |     producer = Producer(kafka_config)
36 | 
37 |     cap = cv2.VideoCapture(VIDEO_SOURCE)
38 |     if not cap.isOpened():
39 |         print(f"Error: Could not open video source at {VIDEO_SOURCE}")
40 |         return
41 | 
42 |     print(f"Reading video from '{VIDEO_SOURCE}' and sending frames to topic '{KAFKA_TOPIC}'...")
43 | 
44 |     frame_count = 0
45 |     try:
46 |         while True:
47 |             ret, frame = cap.read()
48 |             if not ret:
49 |                 print("Video finished. Restarting from the beginning...")
50 |                 cap.set(cv2.CAP_PROP_POS_FRAMES, 0)
51 |                 continue
52 | 
53 |             ret, buffer = cv2.imencode('.jpg', frame)
54 |             if not ret:
55 |                 print("Error: Failed to encode frame.")
56 |                 continue
57 | 
58 |             # Serve delivery callbacks from previous produce() calls.
59 |             producer.poll(0)
60 | 
61 |             producer.produce(KAFKA_TOPIC, buffer.tobytes(), callback=delivery_report)
62 | 
63 |             frame_count += 1
64 |             if frame_count % 100 == 0:
65 |                 print(f"{frame_count} frames sent...")
66 | 
67 |             # Pace the stream at roughly 30 frames per second.
68 |             time.sleep(1/30)
69 |     except KeyboardInterrupt:
70 |         # The loop never exits on its own (the video restarts), so Ctrl+C is
71 |         # the expected way to stop; flush pending messages before exiting.
72 |         print("Stopping producer...")
73 | 
74 |     print("Flushing messages...")
75 |     producer.flush()
76 |     print("Producer finished.")
77 | 
78 | if __name__ == "__main__":
79 |     main()
--------------------------------------------------------------------------------
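
confluent_kafka buffers messages client-side, so under sustained load `produce()` can raise `BufferError` once the local queue fills; the producer above does not handle that case. A common retry pattern, sketched as a drop-in for the `produce()` call inside `main()` (reusing the script's `producer`, `KAFKA_TOPIC`, `buffer`, and `delivery_report`):

```python
try:
    producer.produce(KAFKA_TOPIC, buffer.tobytes(), callback=delivery_report)
except BufferError:
    # Local queue is full: serve delivery callbacks to drain it, then retry once.
    producer.poll(1.0)
    producer.produce(KAFKA_TOPIC, buffer.tobytes(), callback=delivery_report)
```
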
--------------------------------------------------------------------------------
/streaming_simulator/consumer.py:
--------------------------------------------------------------------------------
1 | import grpc
2 | import time
3 | from confluent_kafka import Consumer, KafkaError
4 | import numpy as np
5 | 
6 | # Run this script as a module from the repo root
7 | # (python -m streaming_simulator.consumer) so that the inference_service
8 | # package is on the Python path.
9 | from inference_service.protos import inference_pb2
10 | from inference_service.protos import inference_pb2_grpc
11 | 
12 | KAFKA_TOPIC = "video-frames"
13 | GRPC_SERVER = "localhost:50051"
14 | RUN_DURATION_SECONDS = 30
15 | 
16 | # --- Confluent Kafka Configuration ---
17 | # Fill these in with your own cluster details (see the Configuration
18 | # section of the README). Never commit real credentials.
19 | kafka_config = {
20 |     'bootstrap.servers': '<YOUR_BOOTSTRAP_SERVER>',
21 |     'sasl.mechanism': 'PLAIN',
22 |     'security.protocol': 'SASL_SSL',
23 |     'sasl.username': '<YOUR_API_KEY>',
24 |     'sasl.password': '<YOUR_API_SECRET>',
25 |     'group.id': 'my-video-consumer-group',  # Consumer group ID
26 |     'auto.offset.reset': 'latest'  # Start from the latest message
27 | }
28 | # --- END CONFIG ---
29 | 
30 | def main():
31 |     print("Starting consumer with Confluent client...")
32 |     # Create Consumer instance
33 |     consumer = Consumer(kafka_config)
34 |     # Subscribe to the topic
35 |     consumer.subscribe([KAFKA_TOPIC])
36 | 
37 |     # Setup gRPC client
38 |     channel = grpc.insecure_channel(GRPC_SERVER)
39 |     stub = inference_pb2_grpc.InferenceServiceStub(channel)
40 | 
41 |     print(f"Consumer will run for {RUN_DURATION_SECONDS} seconds...")
42 |     latencies = []
43 |     frame_count = 0
44 |     start_time = time.time()
45 | 
46 |     try:
47 |         while time.time() - start_time < RUN_DURATION_SECONDS:
48 |             # The poll() method is the core of the consumer loop
49 |             msg = consumer.poll(timeout=1.0)
50 | 
51 |             if msg is None:
52 |                 continue
53 |             if msg.error():
54 |                 if msg.error().code() == KafkaError._PARTITION_EOF:
55 |                     # End of partition event
56 |                     print('Reached end of topic partition.')
57 |                 else:
58 |                     print(f'Error: {msg.error()}')
59 |                 continue
60 | 
61 |             # We have a valid message
62 |             frame_count += 1
63 |             inference_start_time = time.time()
64 | 
65 |             request = inference_pb2.ImageRequest(image_data=msg.value())
66 |             response = stub.Predict(request)
67 | 
68 |             latency_ms = (time.time() - inference_start_time) * 1000
69 |             latencies.append(latency_ms)
70 | 
71 |             print(f"Frame {frame_count:04d}: Prediction='{response.prediction}', Latency={latency_ms:.2f} ms")
72 | 
73 |     except KeyboardInterrupt:
74 |         print("Stopping consumer manually.")
75 |     finally:
76 |         # Close down consumer to commit final offsets.
77 |         consumer.close()
78 | 
79 |     # --- Print Final Performance Metrics ---
80 |     total_time = time.time() - start_time
81 |     if frame_count > 0:
82 |         avg_latency = np.mean(latencies)
83 |         throughput = frame_count / total_time
84 | 
85 |         # Plain ASCII here: on Windows the redirected log file uses cp1252,
86 |         # and printing emoji crashes with UnicodeEncodeError (see consumer.log).
87 |         print("\n--- Performance Summary ---")
88 |         print(f"Total frames processed: {frame_count}")
89 |         print(f"Total run time: {total_time:.2f} seconds")
90 |         print(f"Average latency per frame: {avg_latency:.2f} ms")
91 |         print(f"Average throughput: {throughput:.2f} FPS (Frames Per Second)")
92 |         print("---------------------------\n")
93 |     else:
94 |         print("No frames were processed.")
95 | 
96 | if __name__ == "__main__":
97 |     main()
--------------------------------------------------------------------------------
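
Note that this consumer relies on confluent_kafka's default background auto-commit, so an offset can be committed before the corresponding frame's inference finishes. For at-least-once semantics you could disable auto-commit and commit only after a successful `Predict` call — a sketch reusing the script's `kafka_config`, `KAFKA_TOPIC`, `stub`, and `inference_pb2`, not what `consumer.py` does today:

```python
from confluent_kafka import Consumer

config = {**kafka_config, "enable.auto.commit": False}
consumer = Consumer(config)
consumer.subscribe([KAFKA_TOPIC])

msg = consumer.poll(timeout=1.0)
if msg is not None and not msg.error():
    response = stub.Predict(inference_pb2.ImageRequest(image_data=msg.value()))
    # Commit this message's offset only after inference succeeded.
    consumer.commit(message=msg, asynchronous=False)
```
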
--------------------------------------------------------------------------------
/inference_service/protos/inference_pb2_grpc.py:
--------------------------------------------------------------------------------
1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
2 | """Client and server classes corresponding to protobuf-defined services."""
3 | import grpc
4 | import warnings
5 | 
6 | from . import inference_pb2 as inference__pb2
7 | 
8 | GRPC_GENERATED_VERSION = '1.75.1'
9 | GRPC_VERSION = grpc.__version__
10 | _version_not_supported = False
11 | 
12 | try:
13 |     from grpc._utilities import first_version_is_lower
14 |     _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
15 | except ImportError:
16 |     _version_not_supported = True
17 | 
18 | if _version_not_supported:
19 |     raise RuntimeError(
20 |         f'The grpc package installed is at version {GRPC_VERSION},'
21 |         + f' but the generated code in inference_pb2_grpc.py depends on'
22 |         + f' grpcio>={GRPC_GENERATED_VERSION}.'
23 |         + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
24 |         + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
25 |     )
26 | 
27 | 
28 | class InferenceServiceStub(object):
29 |     """The inference service definition.
30 |     """
31 | 
32 |     def __init__(self, channel):
33 |         """Constructor.
34 | 
35 |         Args:
36 |             channel: A grpc.Channel.
37 |         """
38 |         self.Predict = channel.unary_unary(
39 |                 '/inference.InferenceService/Predict',
40 |                 request_serializer=inference__pb2.ImageRequest.SerializeToString,
41 |                 response_deserializer=inference__pb2.PredictionResponse.FromString,
42 |                 _registered_method=True)
43 | 
44 | 
45 | class InferenceServiceServicer(object):
46 |     """The inference service definition.
47 |     """
48 | 
49 |     def Predict(self, request, context):
50 |         """A remote procedure call (RPC) named "Predict".
51 |         It accepts an ImageRequest message and returns a PredictionResponse message.
52 |         """
53 |         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
54 |         context.set_details('Method not implemented!')
55 |         raise NotImplementedError('Method not implemented!')
56 | 
57 | 
58 | def add_InferenceServiceServicer_to_server(servicer, server):
59 |     rpc_method_handlers = {
60 |             'Predict': grpc.unary_unary_rpc_method_handler(
61 |                     servicer.Predict,
62 |                     request_deserializer=inference__pb2.ImageRequest.FromString,
63 |                     response_serializer=inference__pb2.PredictionResponse.SerializeToString,
64 |             ),
65 |     }
66 |     generic_handler = grpc.method_handlers_generic_handler(
67 |             'inference.InferenceService', rpc_method_handlers)
68 |     server.add_generic_rpc_handlers((generic_handler,))
69 |     server.add_registered_method_handlers('inference.InferenceService', rpc_method_handlers)
70 | 
71 | 
72 | # This class is part of an EXPERIMENTAL API.
73 | class InferenceService(object):
74 |     """The inference service definition.
75 |     """
76 | 
77 |     @staticmethod
78 |     def Predict(request,
79 |             target,
80 |             options=(),
81 |             channel_credentials=None,
82 |             call_credentials=None,
83 |             insecure=False,
84 |             compression=None,
85 |             wait_for_ready=None,
86 |             timeout=None,
87 |             metadata=None):
88 |         return grpc.experimental.unary_unary(
89 |             request,
90 |             target,
91 |             '/inference.InferenceService/Predict',
92 |             inference__pb2.ImageRequest.SerializeToString,
93 |             inference__pb2.PredictionResponse.FromString,
94 |             options,
95 |             channel_credentials,
96 |             insecure,
97 |             call_credentials,
98 |             compression,
99 |             wait_for_ready,
100 |             timeout,
101 |             metadata,
102 |             _registered_method=True)
103 | 
--------------------------------------------------------------------------------
57 | """ 58 | try: 59 | # Step A: Decode the incoming image bytes 60 | np_arr = np.frombuffer(request.image_data, np.uint8) 61 | img = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) 62 | 63 | # Step B: Convert image format from OpenCV (BGR) to what PyTorch expects (RGB) 64 | img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) 65 | img_pil = Image.fromarray(img_rgb) 66 | 67 | # Step C: Preprocess the image using the defined pipeline 68 | input_tensor = preprocess(img_pil) 69 | input_batch = input_tensor.unsqueeze(0) # Create a batch of 1 70 | 71 | # Step D: Run inference 72 | with torch.no_grad(): # No need to calculate gradients 73 | output = model(input_batch) 74 | 75 | # Step E: Get the top prediction 76 | _, pred_idx = torch.max(output, 1) 77 | prediction_label = labels[pred_idx.item()] 78 | 79 | print(f"Prediction successful: {prediction_label}") 80 | return inference_pb2.PredictionResponse(prediction=prediction_label) 81 | 82 | except Exception as e: 83 | print(f"An error occurred during inference: {e}") 84 | context.set_code(grpc.StatusCode.INTERNAL) 85 | context.set_details(f"Internal server error: {e}") 86 | return inference_pb2.PredictionResponse() 87 | 88 | # --- 3. Start the Server --- 89 | 90 | def serve(): 91 | """Starts the gRPC server on port 50051.""" 92 | server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) 93 | inference_pb2_grpc.add_InferenceServiceServicer_to_server(InferenceServiceImpl(), server) 94 | 95 | port = "50051" 96 | server.add_insecure_port(f"[::]:{port}") 97 | print(f"🚀 Server starting on port {port}...") 98 | server.start() 99 | server.wait_for_termination() 100 | 101 | if __name__ == '__main__': 102 | serve() -------------------------------------------------------------------------------- /streamlit_app.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | import subprocess 3 | import sys 4 | import os 5 | import time 6 | 7 | # --- Page Configuration --- 8 | st.set_page_config( 9 | page_title="ML Streaming Pipeline Controller", 10 | page_icon="🚀", 11 | layout="wide" 12 | ) 13 | 14 | st.title("🚀 ML Streaming Pipeline Controller") 15 | st.caption("A Streamlit interface to manage the Kafka-gRPC inference pipeline.") 16 | 17 | # --- Session State Initialization --- 18 | if 'server_proc' not in st.session_state: 19 | st.session_state.server_proc = None 20 | if 'consumer_proc' not in st.session_state: 21 | st.session_state.consumer_proc = None 22 | if 'producer_proc' not in st.session_state: 23 | st.session_state.producer_proc = None 24 | # Store file handles for logs 25 | if 'log_files' not in st.session_state: 26 | st.session_state.log_files = {} 27 | 28 | # --- Helper Functions --- 29 | def start_process(command, process_key, log_file_name): 30 | """Starts a subprocess, redirecting its output to a log file.""" 31 | if st.session_state[process_key] is None or st.session_state[process_key].poll() is not None: 32 | # Open a log file in write mode 33 | log_file = open(log_file_name, "w") 34 | st.session_state.log_files[process_key] = log_file 35 | 36 | # Determine the command to run 37 | if ".py" in command: # Simple script 38 | run_command = [sys.executable, command] 39 | else: # Module 40 | run_command = [sys.executable, "-m"] + command.split() 41 | 42 | process = subprocess.Popen( 43 | run_command, 44 | stdout=log_file, 45 | stderr=subprocess.STDOUT 46 | ) 47 | st.session_state[process_key] = process 48 | st.info(f"Started {process_key.replace('_proc','')} with PID: {process.pid}") 49 | else: 50 | 
--------------------------------------------------------------------------------
/streamlit_app.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import subprocess
3 | import sys
4 | import os
5 | import time
6 | 
7 | # --- Page Configuration ---
8 | st.set_page_config(
9 |     page_title="ML Streaming Pipeline Controller",
10 |     page_icon="🚀",
11 |     layout="wide"
12 | )
13 | 
14 | st.title("🚀 ML Streaming Pipeline Controller")
15 | st.caption("A Streamlit interface to manage the Kafka-gRPC inference pipeline.")
16 | 
17 | # --- Session State Initialization ---
18 | if 'server_proc' not in st.session_state:
19 |     st.session_state.server_proc = None
20 | if 'consumer_proc' not in st.session_state:
21 |     st.session_state.consumer_proc = None
22 | if 'producer_proc' not in st.session_state:
23 |     st.session_state.producer_proc = None
24 | # Store file handles for logs
25 | if 'log_files' not in st.session_state:
26 |     st.session_state.log_files = {}
27 | 
28 | # --- Helper Functions ---
29 | def start_process(command, process_key, log_file_name):
30 |     """Starts a subprocess, redirecting its output to a log file."""
31 |     if st.session_state[process_key] is None or st.session_state[process_key].poll() is not None:
32 |         # Open the log file as UTF-8; without this, Windows defaults to
33 |         # cp1252 and the children's emoji output crashes (see consumer.log).
34 |         log_file = open(log_file_name, "w", encoding="utf-8")
35 |         st.session_state.log_files[process_key] = log_file
36 | 
37 |         # Determine the command to run
38 |         if command.endswith(".py"):  # Simple script
39 |             run_command = [sys.executable, command]
40 |         else:  # Module
41 |             run_command = [sys.executable, "-m"] + command.split()
42 | 
43 |         # Force the children to write UTF-8 as well, regardless of locale.
44 |         env = {**os.environ, "PYTHONIOENCODING": "utf-8"}
45 |         process = subprocess.Popen(
46 |             run_command,
47 |             stdout=log_file,
48 |             stderr=subprocess.STDOUT,
49 |             env=env
50 |         )
51 |         st.session_state[process_key] = process
52 |         st.info(f"Started {process_key.replace('_proc','')} with PID: {process.pid}")
53 |     else:
54 |         st.warning(f"{process_key.replace('_proc','')} is already running.")
55 | 
56 | def stop_process(process_key):
57 |     """Stops a subprocess and closes its log file."""
58 |     if st.session_state[process_key] is not None and st.session_state[process_key].poll() is None:
59 |         pid = st.session_state[process_key].pid
60 |         st.session_state[process_key].terminate()
61 |         try:
62 |             st.session_state[process_key].wait(timeout=5)
63 |             st.success(f"Stopped {process_key.replace('_proc','')} (PID: {pid}).")
64 |         except subprocess.TimeoutExpired:
65 |             st.session_state[process_key].kill()
66 |             st.warning(f"Force-killed {process_key.replace('_proc','')} (PID: {pid}).")
67 | 
68 |     st.session_state[process_key] = None
69 |     # Close the associated log file
70 |     if process_key in st.session_state.log_files:
71 |         st.session_state.log_files[process_key].close()
72 |         del st.session_state.log_files[process_key]
73 | 
74 | def read_log_file(log_file_name):
75 |     """Reads the content of a log file."""
76 |     if os.path.exists(log_file_name):
77 |         with open(log_file_name, "r", encoding="utf-8", errors="replace") as f:
78 |             return f.read()
79 |     return ""
80 | 
81 | # --- UI Layout ---
82 | col1, col2, col3 = st.columns(3)
83 | 
84 | with col1:
85 |     st.header("1. Inference Server")
86 |     if st.button("▶️ Start Server", key="start_server"):
87 |         start_process("inference_service.server", "server_proc", "server.log")
88 | 
89 | with col2:
90 |     st.header("2. Stream Consumer")
91 |     if st.button("▶️ Start Consumer", key="start_consumer"):
92 |         start_process("streaming_simulator.consumer", "consumer_proc", "consumer.log")
93 | 
94 | with col3:
95 |     st.header("3. Stream Producer")
96 |     if st.button("▶️ Start Producer", key="start_producer"):
97 |         producer_command = os.path.join("streaming_simulator", "producer.py")
98 |         start_process(producer_command, "producer_proc", "producer.log")
99 | 
100 | st.divider()
101 | 
102 | # --- Stop All Button ---
103 | if st.button("⏹️ Stop All Services", type="primary"):
104 |     stop_process("producer_proc")
105 |     stop_process("consumer_proc")
106 |     stop_process("server_proc")
107 | 
108 | st.divider()
109 | 
110 | # --- Log Display ---
111 | st.header("Process Logs")
112 | 
113 | log_col1, log_col2, log_col3 = st.columns(3)
114 | 
115 | with log_col1:
116 |     with st.expander("Inference Server Logs", expanded=True):
117 |         log_content = read_log_file("server.log")
118 |         st.text_area("Server Output", value=log_content, height=300, key="server_log_area", disabled=True)
119 | 
120 | with log_col2:
121 |     with st.expander("Consumer Logs", expanded=True):
122 |         log_content = read_log_file("consumer.log")
123 |         st.text_area("Consumer Output", value=log_content, height=300, key="consumer_log_area", disabled=True)
124 | 
125 | with log_col3:
126 |     with st.expander("Producer Logs", expanded=True):
127 |         log_content = read_log_file("producer.log")
128 |         st.text_area("Producer Output", value=log_content, height=300, key="producer_log_area", disabled=True)
129 | 
130 | # A small loop to force a re-run and update the logs by re-reading the files
131 | time.sleep(1)
132 | st.rerun()
--------------------------------------------------------------------------------
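
`read_log_file()` re-reads each log in full on every one-second rerun, which gets slow as the logs grow. A tail-only variant (a sketch keeping the same call shape, with `max_bytes` as an assumed cap) would bound the work per rerun:

```python
import os

def tail_log_file(log_file_name, max_bytes=16_384):
    """Return at most the last max_bytes of the log, decoded as UTF-8."""
    if not os.path.exists(log_file_name):
        return ""
    with open(log_file_name, "rb") as f:
        f.seek(0, os.SEEK_END)
        f.seek(max(0, f.tell() - max_bytes))
        return f.read().decode("utf-8", errors="replace")
```
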
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | # Real-Time ML Streaming Pipeline with Streamlit Controller
4 | 
5 | This project demonstrates a complete, end-to-end MLOps pipeline for real-time image classification. It ingests a video stream, performs inference using a PyTorch model served over a high-performance gRPC API, and provides a Streamlit-based web UI to control and monitor the entire process.
6 | 
7 | -----
8 | 
9 | ## 🏛️ Architecture
10 | 
11 | The pipeline consists of several decoupled components that communicate via a Kafka message broker. A Streamlit dashboard acts as a central control panel.
12 | 
13 | ```mermaid
14 | graph TD
15 |     subgraph "Control Layer"
16 |         F[🎈 Streamlit UI]
17 |     end
18 | 
19 |     subgraph "Data Plane"
20 |         A[Video Source] --> B(🐍 Producer);
21 |         B -- Video Frames --> C{🌊 Kafka / Confluent Cloud};
22 |         C -- Video Frames --> D(🐍 Consumer);
23 |         D -- gRPC Request --> E["🚀 gRPC Inference Server<br/>(🔥 PyTorch Model)"];
24 |         E -- Prediction --> D;
25 |     end
26 | 
27 |     F -- Manages --> B;
28 |     F -- Manages --> D;
29 |     F -- Manages --> E;
30 | ```
31 | 
32 | -----
33 | 
34 | ## ✨ Features
35 | 
36 | * **High-Performance Inference**: A PyTorch ResNet18 model is served over gRPC for low-latency predictions.
37 | * **Real-Time Data Streaming**: Apache Kafka (using Confluent Cloud) handles high-throughput, real-time data ingestion from a video source.
38 | * **Containerized Service**: The inference server is containerized with Docker for portability, scalability, and reproducible deployments.
39 | * **Interactive Control Panel**: A Streamlit dashboard allows for starting, stopping, and monitoring all pipeline components from a single web interface, eliminating the need to manage multiple terminals.
40 | * **Decoupled Components**: The producer, consumer, and server are independent, allowing them to be scaled or updated separately.
41 | 
42 | -----
43 | 
44 | ## 🛠️ Tech Stack
45 | 
46 | * **Python**: Core programming language.
47 | * **PyTorch**: For the image classification model.
48 | * **gRPC**: For the high-performance inference API.
49 | * **Apache Kafka (Confluent Cloud)**: For the real-time message broker.
50 | * **Streamlit**: For the interactive web-based control panel.
51 | * **Docker & Docker Compose**: For containerization and service orchestration.
52 | * **OpenCV**: For video and image processing.
53 | 
54 | -----
55 | 
56 | ## Prerequisites
57 | 
58 | Before you begin, ensure you have the following installed:
59 | 
60 | * Python 3.9+
61 | * `pip` (Python package installer)
62 | * Docker Desktop
63 | 
64 | -----
65 | 
66 | ## ⚙️ Setup & Installation
67 | 
68 | 1. **Clone the Repository**
69 |    ```bash
70 |    git clone https://github.com/maxprogrammer007/ml-streaming-pipeline.git
71 |    cd ml-streaming-pipeline
72 |    ```
73 | 2. **Create a Virtual Environment (Recommended)**
74 |    ```bash
75 |    python -m venv venv
76 |    source venv/bin/activate  # On Windows, use `venv\Scripts\activate`
77 |    ```
78 | 3. **Install Dependencies**
79 |    There is no root `requirements.txt`; each service lists its own dependencies. Install both sets, plus Streamlit itself (which neither file lists):
80 |    ```bash
81 |    pip install -r inference_service/requirements.txt
82 |    pip install -r streaming_simulator/requirements.txt
83 |    pip install streamlit
84 |    ```
85 | 
86 | -----
87 | 
88 | ## 🔧 Configuration
89 | 
90 | This project requires credentials to connect to a Kafka cluster (like Confluent Cloud).
91 | 
92 | 1. **Sign up** for a free Confluent Cloud account and create a cluster.
93 | 2. **Create an API key and secret**.
94 | 3. Open the `streaming_simulator/producer.py` and `streaming_simulator/consumer.py` files.
95 | 4. Update the `kafka_config` dictionary in both files with your **Bootstrap Server**, **API Key**, and **API Secret**.
96 | 
97 | -----
98 | 
99 | ## 🚀 Usage
100 | 
101 | You can run this pipeline in two ways:
102 | 
103 | ### Method 1: Using the Streamlit Controller (Recommended)
104 | 
105 | This method provides a user-friendly web interface to manage all components.
106 | 
107 | 1. **Generate gRPC Code**
108 |    If you haven't already, run this command from the root directory to generate the necessary gRPC Python files (the generated stubs are already checked in under `inference_service/protos/`, so this is only needed after changing `inference.proto`):
109 |    ```bash
110 |    python -m grpc_tools.protoc -I=inference_service/protos --python_out=. --grpc_python_out=. inference_service/protos/inference.proto
111 |    ```
112 | 2. **Launch the Streamlit App**
113 |    ```bash
114 |    streamlit run streamlit_app.py
115 |    ```
116 | 3. **Control the Pipeline**
117 |    * Your browser will open with the dashboard.
118 |    * Click "Start Server", then "Start Consumer", then "Start Producer".
119 |    * Monitor the logs in the text boxes for each component.
120 |    * Click "Stop All Services" when you're finished.
121 | 
122 | ### Method 2: Using Docker and Manual Scripts
123 | 
124 | This method runs the inference server in a Docker container and the streaming clients locally.
125 | 
126 | 1. **Start the Services**
127 |    This command will build the Docker image for the inference server and start it.
128 |    ```bash
129 |    docker-compose up --build -d
130 |    ```
131 | 2. **Run the Consumer**
132 |    Open a new terminal and run the consumer module:
133 |    ```bash
134 |    python -m streaming_simulator.consumer
135 |    ```
136 | 3. **Run the Producer**
137 |    Open a third terminal and run the producer script:
138 |    ```bash
139 |    python streaming_simulator/producer.py
140 |    ```
141 | 4. **Shut Down**
142 |    When you're finished, stop and remove the Docker containers:
143 |    ```bash
144 |    docker-compose down
145 |    ```
146 | 
147 | -----
148 | 
149 | ## 📂 Folder Structure
150 | 
151 | ```
152 | .
153 | ├── inference_service/      # gRPC server and model logic
154 | ├── streaming_simulator/    # Kafka producer and consumer scripts
155 | ├── streamlit_app.py        # The main Streamlit controller UI
156 | ├── Dockerfile              # Docker instructions for the inference service
157 | └── docker-compose.yml      # Docker Compose file for orchestration
158 | ```
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/consumer.log:
--------------------------------------------------------------------------------
1 | %6|1760084338.533|GETSUBSCRIPTIONS|rdkafka#consumer-1| [thrd:main]: Telemetry client instance id changed from AAAAAAAAAAAAAAAAAAAAAA to vGhewPqjQWO2/kVNu3ec+A
2 | Starting consumer with Confluent client...
3 | Consumer will run for 30 seconds...
4 | Frame 0001: Prediction='monitor', Latency=60.11 ms 5 | Frame 0002: Prediction='monitor', Latency=50.62 ms 6 | Frame 0003: Prediction='oscilloscope', Latency=49.52 ms 7 | Frame 0004: Prediction='oscilloscope', Latency=53.92 ms 8 | Frame 0005: Prediction='oscilloscope', Latency=71.69 ms 9 | Frame 0006: Prediction='oscilloscope', Latency=51.83 ms 10 | Frame 0007: Prediction='oscilloscope', Latency=50.59 ms 11 | Frame 0008: Prediction='oscilloscope', Latency=49.45 ms 12 | Frame 0009: Prediction='oscilloscope', Latency=50.31 ms 13 | Frame 0010: Prediction='oscilloscope', Latency=49.07 ms 14 | Frame 0011: Prediction='oscilloscope', Latency=51.85 ms 15 | Frame 0012: Prediction='oscilloscope', Latency=51.66 ms 16 | Frame 0013: Prediction='oscilloscope', Latency=48.99 ms 17 | Frame 0014: Prediction='oscilloscope', Latency=50.34 ms 18 | Frame 0015: Prediction='oscilloscope', Latency=46.82 ms 19 | Frame 0016: Prediction='oscilloscope', Latency=49.62 ms 20 | Frame 0017: Prediction='oscilloscope', Latency=50.05 ms 21 | Frame 0018: Prediction='oscilloscope', Latency=51.54 ms 22 | Frame 0019: Prediction='oscilloscope', Latency=49.97 ms 23 | Frame 0020: Prediction='oscilloscope', Latency=47.09 ms 24 | Frame 0021: Prediction='oscilloscope', Latency=47.80 ms 25 | Frame 0022: Prediction='oscilloscope', Latency=52.64 ms 26 | Frame 0023: Prediction='oscilloscope', Latency=46.54 ms 27 | Frame 0024: Prediction='oscilloscope', Latency=49.85 ms 28 | Frame 0025: Prediction='oscilloscope', Latency=49.54 ms 29 | Frame 0026: Prediction='oscilloscope', Latency=49.45 ms 30 | Frame 0027: Prediction='oscilloscope', Latency=49.53 ms 31 | Frame 0028: Prediction='oscilloscope', Latency=51.56 ms 32 | Frame 0029: Prediction='oscilloscope', Latency=50.04 ms 33 | Frame 0030: Prediction='oscilloscope', Latency=48.65 ms 34 | Frame 0031: Prediction='oscilloscope', Latency=52.11 ms 35 | Frame 0032: Prediction='oscilloscope', Latency=49.64 ms 36 | Frame 0033: Prediction='oscilloscope', Latency=50.05 ms 37 | Frame 0034: Prediction='oscilloscope', Latency=52.04 ms 38 | Frame 0035: Prediction='library', Latency=64.14 ms 39 | Frame 0036: Prediction='oscilloscope', Latency=49.07 ms 40 | Frame 0037: Prediction='oscilloscope', Latency=50.52 ms 41 | Frame 0038: Prediction='oscilloscope', Latency=51.54 ms 42 | Frame 0039: Prediction='oscilloscope', Latency=50.57 ms 43 | Frame 0040: Prediction='oscilloscope', Latency=50.25 ms 44 | Frame 0041: Prediction='oscilloscope', Latency=48.53 ms 45 | Frame 0042: Prediction='oscilloscope', Latency=50.71 ms 46 | Frame 0043: Prediction='oscilloscope', Latency=51.60 ms 47 | Frame 0044: Prediction='oscilloscope', Latency=49.89 ms 48 | Frame 0045: Prediction='oscilloscope', Latency=50.07 ms 49 | Frame 0046: Prediction='oscilloscope', Latency=48.53 ms 50 | Frame 0047: Prediction='oscilloscope', Latency=51.06 ms 51 | Frame 0048: Prediction='oscilloscope', Latency=56.09 ms 52 | Frame 0049: Prediction='oscilloscope', Latency=74.87 ms 53 | Frame 0050: Prediction='oscilloscope', Latency=49.76 ms 54 | Frame 0051: Prediction='oscilloscope', Latency=50.10 ms 55 | Frame 0052: Prediction='oscilloscope', Latency=47.53 ms 56 | Frame 0053: Prediction='oscilloscope', Latency=47.55 ms 57 | Frame 0054: Prediction='oscilloscope', Latency=49.85 ms 58 | Frame 0055: Prediction='oscilloscope', Latency=49.53 ms 59 | Frame 0056: Prediction='oscilloscope', Latency=66.65 ms 60 | Frame 0057: Prediction='oscilloscope', Latency=51.52 ms 61 | Frame 0058: Prediction='oscilloscope', Latency=51.21 ms 62 | Frame 0059: 
Prediction='oscilloscope', Latency=49.55 ms 63 | Frame 0060: Prediction='oscilloscope', Latency=52.16 ms 64 | Frame 0061: Prediction='oscilloscope', Latency=48.54 ms 65 | Frame 0062: Prediction='library', Latency=50.28 ms 66 | Frame 0063: Prediction='oscilloscope', Latency=78.24 ms 67 | Frame 0064: Prediction='oscilloscope', Latency=51.64 ms 68 | Frame 0065: Prediction='oscilloscope', Latency=53.52 ms 69 | Frame 0066: Prediction='oscilloscope', Latency=58.54 ms 70 | Frame 0067: Prediction='oscilloscope', Latency=50.65 ms 71 | Frame 0068: Prediction='oscilloscope', Latency=50.84 ms 72 | Frame 0069: Prediction='oscilloscope', Latency=57.15 ms 73 | Frame 0070: Prediction='library', Latency=53.58 ms 74 | Frame 0071: Prediction='library', Latency=51.29 ms 75 | Frame 0072: Prediction='oscilloscope', Latency=52.81 ms 76 | Frame 0073: Prediction='library', Latency=54.12 ms 77 | Frame 0074: Prediction='monitor', Latency=54.66 ms 78 | Frame 0075: Prediction='oscilloscope', Latency=55.30 ms 79 | Frame 0076: Prediction='oscilloscope', Latency=50.57 ms 80 | Frame 0077: Prediction='oscilloscope', Latency=51.57 ms 81 | Frame 0078: Prediction='oscilloscope', Latency=53.34 ms 82 | Frame 0079: Prediction='oscilloscope', Latency=62.55 ms 83 | Frame 0080: Prediction='library', Latency=52.73 ms 84 | Frame 0081: Prediction='oscilloscope', Latency=52.88 ms 85 | Frame 0082: Prediction='oscilloscope', Latency=51.62 ms 86 | Frame 0083: Prediction='oscilloscope', Latency=51.53 ms 87 | Frame 0084: Prediction='monitor', Latency=52.05 ms 88 | Frame 0085: Prediction='monitor', Latency=52.13 ms 89 | Frame 0086: Prediction='oscilloscope', Latency=52.62 ms 90 | Frame 0087: Prediction='oscilloscope', Latency=51.87 ms 91 | Frame 0088: Prediction='oscilloscope', Latency=49.98 ms 92 | Frame 0089: Prediction='library', Latency=50.04 ms 93 | Frame 0090: Prediction='library', Latency=51.33 ms 94 | Frame 0091: Prediction='oscilloscope', Latency=53.58 ms 95 | Frame 0092: Prediction='oscilloscope', Latency=55.80 ms 96 | Frame 0093: Prediction='oscilloscope', Latency=61.78 ms 97 | Frame 0094: Prediction='oscilloscope', Latency=52.05 ms 98 | Frame 0095: Prediction='oscilloscope', Latency=51.26 ms 99 | Frame 0096: Prediction='oscilloscope', Latency=54.17 ms 100 | Frame 0097: Prediction='monitor', Latency=48.53 ms 101 | Frame 0098: Prediction='oscilloscope', Latency=49.54 ms 102 | Frame 0099: Prediction='oscilloscope', Latency=49.04 ms 103 | Frame 0100: Prediction='oscilloscope', Latency=51.56 ms 104 | Frame 0101: Prediction='oscilloscope', Latency=47.53 ms 105 | Frame 0102: Prediction='oscilloscope', Latency=50.06 ms 106 | Frame 0103: Prediction='oscilloscope', Latency=55.66 ms 107 | Frame 0104: Prediction='oscilloscope', Latency=52.06 ms 108 | Frame 0105: Prediction='oscilloscope', Latency=50.54 ms 109 | Frame 0106: Prediction='oscilloscope', Latency=50.67 ms 110 | Frame 0107: Prediction='oscilloscope', Latency=51.84 ms 111 | Frame 0108: Prediction='oscilloscope', Latency=59.15 ms 112 | Frame 0109: Prediction='oscilloscope', Latency=57.62 ms 113 | Frame 0110: Prediction='oscilloscope', Latency=58.93 ms 114 | Frame 0111: Prediction='library', Latency=50.86 ms 115 | Frame 0112: Prediction='oscilloscope', Latency=48.44 ms 116 | Frame 0113: Prediction='library', Latency=49.34 ms 117 | Frame 0114: Prediction='library', Latency=79.07 ms 118 | Frame 0115: Prediction='oscilloscope', Latency=59.55 ms 119 | Frame 0116: Prediction='oscilloscope', Latency=59.65 ms 120 | Frame 0117: Prediction='oscilloscope', Latency=52.66 ms 121 | Frame 0118: 
Prediction='oscilloscope', Latency=59.17 ms 122 | Frame 0119: Prediction='oscilloscope', Latency=53.35 ms 123 | Frame 0120: Prediction='oscilloscope', Latency=50.39 ms 124 | Frame 0121: Prediction='oscilloscope', Latency=48.84 ms 125 | Frame 0122: Prediction='oscilloscope', Latency=49.04 ms 126 | Frame 0123: Prediction='oscilloscope', Latency=51.54 ms 127 | Frame 0124: Prediction='oscilloscope', Latency=50.04 ms 128 | Frame 0125: Prediction='oscilloscope', Latency=49.54 ms 129 | Frame 0126: Prediction='oscilloscope', Latency=51.53 ms 130 | Frame 0127: Prediction='oscilloscope', Latency=49.04 ms 131 | Frame 0128: Prediction='oscilloscope', Latency=49.36 ms 132 | Frame 0129: Prediction='oscilloscope', Latency=67.33 ms 133 | Frame 0130: Prediction='oscilloscope', Latency=53.20 ms 134 | Frame 0131: Prediction='oscilloscope', Latency=48.56 ms 135 | Frame 0132: Prediction='oscilloscope', Latency=50.04 ms 136 | Frame 0133: Prediction='oscilloscope', Latency=51.53 ms 137 | Frame 0134: Prediction='oscilloscope', Latency=49.54 ms 138 | Frame 0135: Prediction='oscilloscope', Latency=51.05 ms 139 | Frame 0136: Prediction='oscilloscope', Latency=50.25 ms 140 | Frame 0137: Prediction='oscilloscope', Latency=49.42 ms 141 | Frame 0138: Prediction='oscilloscope', Latency=48.53 ms 142 | Frame 0139: Prediction='oscilloscope', Latency=50.06 ms 143 | Frame 0140: Prediction='oscilloscope', Latency=50.58 ms 144 | Frame 0141: Prediction='oscilloscope', Latency=49.77 ms 145 | Frame 0142: Prediction='oscilloscope', Latency=56.67 ms 146 | Frame 0143: Prediction='oscilloscope', Latency=45.20 ms 147 | Frame 0144: Prediction='oscilloscope', Latency=43.26 msTraceback (most recent call last): 148 | File "", line 198, in _run_module_as_main 149 | File "", line 88, in _run_code 150 | File "C:\Users\abhin\OneDrive\Documents\GitHub\ml-streaming-pipeline\streaming_simulator\consumer.py", line 94, in 151 | main() 152 | File "C:\Users\abhin\OneDrive\Documents\GitHub\ml-streaming-pipeline\streaming_simulator\consumer.py", line 84, in main 153 | print("\n--- \U0001f4ca Performance Summary ---") 154 | File "C:\Users\abhin\AppData\Local\Programs\Python\Python312\Lib\encodings\cp1252.py", line 19, in encode 155 | return codecs.charmap_encode(input,self.errors,encoding_table)[0] 156 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 157 | UnicodeEncodeError: 'charmap' codec can't encode character '\U0001f4ca' in position 6: character maps to 158 | 159 | Frame 0145: Prediction='oscilloscope', Latency=47.81 ms 160 | Frame 0146: Prediction='oscilloscope', Latency=51.71 ms 161 | Frame 0147: Prediction='library', Latency=52.23 ms 162 | Frame 0148: Prediction='oscilloscope', Latency=57.51 ms 163 | Frame 0149: Prediction='library', Latency=58.14 ms 164 | Frame 0150: Prediction='monitor', Latency=38.62 ms 165 | Frame 0151: Prediction='oscilloscope', Latency=42.68 ms 166 | Frame 0152: Prediction='oscilloscope', Latency=40.07 ms 167 | Frame 0153: Prediction='oscilloscope', Latency=39.29 ms 168 | Frame 0154: Prediction='monitor', Latency=45.88 ms 169 | Frame 0155: Prediction='oscilloscope', Latency=45.42 ms 170 | Frame 0156: Prediction='oscilloscope', Latency=53.47 ms 171 | Frame 0157: Prediction='oscilloscope', Latency=52.68 ms 172 | Frame 0158: Prediction='oscilloscope', Latency=50.96 ms 173 | Frame 0159: Prediction='oscilloscope', Latency=43.84 ms 174 | Frame 0160: Prediction='oscilloscope', Latency=40.39 ms 175 | Frame 0161: Prediction='oscilloscope', Latency=38.04 ms 176 | Frame 0162: Prediction='oscilloscope', Latency=37.49 ms 177 
| Frame 0163: Prediction='oscilloscope', Latency=40.74 ms 178 | Frame 0164: Prediction='oscilloscope', Latency=39.66 ms 179 | Frame 0165: Prediction='oscilloscope', Latency=37.43 ms 180 | Frame 0166: Prediction='oscilloscope', Latency=40.34 ms 181 | Frame 0167: Prediction='oscilloscope', Latency=70.20 ms 182 | Frame 0168: Prediction='oscilloscope', Latency=50.77 ms 183 | Frame 0169: Prediction='oscilloscope', Latency=37.79 ms 184 | Frame 0170: Prediction='oscilloscope', Latency=38.54 ms 185 | Frame 0171: Prediction='oscilloscope', Latency=35.56 ms 186 | Frame 0172: Prediction='oscilloscope', Latency=37.45 ms 187 | Frame 0173: Prediction='oscilloscope', Latency=38.61 ms 188 | Frame 0174: Prediction='oscilloscope', Latency=39.19 ms 189 | Frame 0175: Prediction='oscilloscope', Latency=37.68 ms 190 | Frame 0176: Prediction='oscilloscope', Latency=63.17 ms 191 | Frame 0177: Prediction='oscilloscope', Latency=40.60 ms 192 | Frame 0178: Prediction='oscilloscope', Latency=54.09 ms 193 | Frame 0179: Prediction='oscilloscope', Latency=42.35 ms 194 | Frame 0180: Prediction='oscilloscope', Latency=42.67 ms 195 | Frame 0181: Prediction='oscilloscope', Latency=38.64 ms 196 | Frame 0182: Prediction='oscilloscope', Latency=41.05 ms 197 | -------------------------------------------------------------------------------- /imagenet_labels.json: -------------------------------------------------------------------------------- 1 | {"0": ["n01440764", "tench"], "1": ["n01443537", "goldfish"], "2": ["n01484850", "great_white_shark"], "3": ["n01491361", "tiger_shark"], "4": ["n01494475", "hammerhead"], "5": ["n01496331", "electric_ray"], "6": ["n01498041", "stingray"], "7": ["n01514668", "cock"], "8": ["n01514859", "hen"], "9": ["n01518878", "ostrich"], "10": ["n01530575", "brambling"], "11": ["n01531178", "goldfinch"], "12": ["n01532829", "house_finch"], "13": ["n01534433", "junco"], "14": ["n01537544", "indigo_bunting"], "15": ["n01558993", "robin"], "16": ["n01560419", "bulbul"], "17": ["n01580077", "jay"], "18": ["n01582220", "magpie"], "19": ["n01592084", "chickadee"], "20": ["n01601694", "water_ouzel"], "21": ["n01608432", "kite"], "22": ["n01614925", "bald_eagle"], "23": ["n01616318", "vulture"], "24": ["n01622779", "great_grey_owl"], "25": ["n01629819", "European_fire_salamander"], "26": ["n01630670", "common_newt"], "27": ["n01631663", "eft"], "28": ["n01632458", "spotted_salamander"], "29": ["n01632777", "axolotl"], "30": ["n01641577", "bullfrog"], "31": ["n01644373", "tree_frog"], "32": ["n01644900", "tailed_frog"], "33": ["n01664065", "loggerhead"], "34": ["n01665541", "leatherback_turtle"], "35": ["n01667114", "mud_turtle"], "36": ["n01667778", "terrapin"], "37": ["n01669191", "box_turtle"], "38": ["n01675722", "banded_gecko"], "39": ["n01677366", "common_iguana"], "40": ["n01682714", "American_chameleon"], "41": ["n01685808", "whiptail"], "42": ["n01687978", "agama"], "43": ["n01688243", "frilled_lizard"], "44": ["n01689811", "alligator_lizard"], "45": ["n01692333", "Gila_monster"], "46": ["n01693334", "green_lizard"], "47": ["n01694178", "African_chameleon"], "48": ["n01695060", "Komodo_dragon"], "49": ["n01697457", "African_crocodile"], "50": ["n01698640", "American_alligator"], "51": ["n01704323", "triceratops"], "52": ["n01728572", "thunder_snake"], "53": ["n01728920", "ringneck_snake"], "54": ["n01729322", "hognose_snake"], "55": ["n01729977", "green_snake"], "56": ["n01734418", "king_snake"], "57": ["n01735189", "garter_snake"], "58": ["n01737021", "water_snake"], "59": ["n01739381", 
"vine_snake"], "60": ["n01740131", "night_snake"], "61": ["n01742172", "boa_constrictor"], "62": ["n01744401", "rock_python"], "63": ["n01748264", "Indian_cobra"], "64": ["n01749939", "green_mamba"], "65": ["n01751748", "sea_snake"], "66": ["n01753488", "horned_viper"], "67": ["n01755581", "diamondback"], "68": ["n01756291", "sidewinder"], "69": ["n01768244", "trilobite"], "70": ["n01770081", "harvestman"], "71": ["n01770393", "scorpion"], "72": ["n01773157", "black_and_gold_garden_spider"], "73": ["n01773549", "barn_spider"], "74": ["n01773797", "garden_spider"], "75": ["n01774384", "black_widow"], "76": ["n01774750", "tarantula"], "77": ["n01775062", "wolf_spider"], "78": ["n01776313", "tick"], "79": ["n01784675", "centipede"], "80": ["n01795545", "black_grouse"], "81": ["n01796340", "ptarmigan"], "82": ["n01797886", "ruffed_grouse"], "83": ["n01798484", "prairie_chicken"], "84": ["n01806143", "peacock"], "85": ["n01806567", "quail"], "86": ["n01807496", "partridge"], "87": ["n01817953", "African_grey"], "88": ["n01818515", "macaw"], "89": ["n01819313", "sulphur-crested_cockatoo"], "90": ["n01820546", "lorikeet"], "91": ["n01824575", "coucal"], "92": ["n01828970", "bee_eater"], "93": ["n01829413", "hornbill"], "94": ["n01833805", "hummingbird"], "95": ["n01843065", "jacamar"], "96": ["n01843383", "toucan"], "97": ["n01847000", "drake"], "98": ["n01855032", "red-breasted_merganser"], "99": ["n01855672", "goose"], "100": ["n01860187", "black_swan"], "101": ["n01871265", "tusker"], "102": ["n01872401", "echidna"], "103": ["n01873310", "platypus"], "104": ["n01877812", "wallaby"], "105": ["n01882714", "koala"], "106": ["n01883070", "wombat"], "107": ["n01910747", "jellyfish"], "108": ["n01914609", "sea_anemone"], "109": ["n01917289", "brain_coral"], "110": ["n01924916", "flatworm"], "111": ["n01930112", "nematode"], "112": ["n01943899", "conch"], "113": ["n01944390", "snail"], "114": ["n01945685", "slug"], "115": ["n01950731", "sea_slug"], "116": ["n01955084", "chiton"], "117": ["n01968897", "chambered_nautilus"], "118": ["n01978287", "Dungeness_crab"], "119": ["n01978455", "rock_crab"], "120": ["n01980166", "fiddler_crab"], "121": ["n01981276", "king_crab"], "122": ["n01983481", "American_lobster"], "123": ["n01984695", "spiny_lobster"], "124": ["n01985128", "crayfish"], "125": ["n01986214", "hermit_crab"], "126": ["n01990800", "isopod"], "127": ["n02002556", "white_stork"], "128": ["n02002724", "black_stork"], "129": ["n02006656", "spoonbill"], "130": ["n02007558", "flamingo"], "131": ["n02009229", "little_blue_heron"], "132": ["n02009912", "American_egret"], "133": ["n02011460", "bittern"], "134": ["n02012849", "crane"], "135": ["n02013706", "limpkin"], "136": ["n02017213", "European_gallinule"], "137": ["n02018207", "American_coot"], "138": ["n02018795", "bustard"], "139": ["n02025239", "ruddy_turnstone"], "140": ["n02027492", "red-backed_sandpiper"], "141": ["n02028035", "redshank"], "142": ["n02033041", "dowitcher"], "143": ["n02037110", "oystercatcher"], "144": ["n02051845", "pelican"], "145": ["n02056570", "king_penguin"], "146": ["n02058221", "albatross"], "147": ["n02066245", "grey_whale"], "148": ["n02071294", "killer_whale"], "149": ["n02074367", "dugong"], "150": ["n02077923", "sea_lion"], "151": ["n02085620", "Chihuahua"], "152": ["n02085782", "Japanese_spaniel"], "153": ["n02085936", "Maltese_dog"], "154": ["n02086079", "Pekinese"], "155": ["n02086240", "Shih-Tzu"], "156": ["n02086646", "Blenheim_spaniel"], "157": ["n02086910", "papillon"], "158": ["n02087046", "toy_terrier"], 
"159": ["n02087394", "Rhodesian_ridgeback"], "160": ["n02088094", "Afghan_hound"], "161": ["n02088238", "basset"], "162": ["n02088364", "beagle"], "163": ["n02088466", "bloodhound"], "164": ["n02088632", "bluetick"], "165": ["n02089078", "black-and-tan_coonhound"], "166": ["n02089867", "Walker_hound"], "167": ["n02089973", "English_foxhound"], "168": ["n02090379", "redbone"], "169": ["n02090622", "borzoi"], "170": ["n02090721", "Irish_wolfhound"], "171": ["n02091032", "Italian_greyhound"], "172": ["n02091134", "whippet"], "173": ["n02091244", "Ibizan_hound"], "174": ["n02091467", "Norwegian_elkhound"], "175": ["n02091635", "otterhound"], "176": ["n02091831", "Saluki"], "177": ["n02092002", "Scottish_deerhound"], "178": ["n02092339", "Weimaraner"], "179": ["n02093256", "Staffordshire_bullterrier"], "180": ["n02093428", "American_Staffordshire_terrier"], "181": ["n02093647", "Bedlington_terrier"], "182": ["n02093754", "Border_terrier"], "183": ["n02093859", "Kerry_blue_terrier"], "184": ["n02093991", "Irish_terrier"], "185": ["n02094114", "Norfolk_terrier"], "186": ["n02094258", "Norwich_terrier"], "187": ["n02094433", "Yorkshire_terrier"], "188": ["n02095314", "wire-haired_fox_terrier"], "189": ["n02095570", "Lakeland_terrier"], "190": ["n02095889", "Sealyham_terrier"], "191": ["n02096051", "Airedale"], "192": ["n02096177", "cairn"], "193": ["n02096294", "Australian_terrier"], "194": ["n02096437", "Dandie_Dinmont"], "195": ["n02096585", "Boston_bull"], "196": ["n02097047", "miniature_schnauzer"], "197": ["n02097130", "giant_schnauzer"], "198": ["n02097209", "standard_schnauzer"], "199": ["n02097298", "Scotch_terrier"], "200": ["n02097474", "Tibetan_terrier"], "201": ["n02097658", "silky_terrier"], "202": ["n02098105", "soft-coated_wheaten_terrier"], "203": ["n02098286", "West_Highland_white_terrier"], "204": ["n02098413", "Lhasa"], "205": ["n02099267", "flat-coated_retriever"], "206": ["n02099429", "curly-coated_retriever"], "207": ["n02099601", "golden_retriever"], "208": ["n02099712", "Labrador_retriever"], "209": ["n02099849", "Chesapeake_Bay_retriever"], "210": ["n02100236", "German_short-haired_pointer"], "211": ["n02100583", "vizsla"], "212": ["n02100735", "English_setter"], "213": ["n02100877", "Irish_setter"], "214": ["n02101006", "Gordon_setter"], "215": ["n02101388", "Brittany_spaniel"], "216": ["n02101556", "clumber"], "217": ["n02102040", "English_springer"], "218": ["n02102177", "Welsh_springer_spaniel"], "219": ["n02102318", "cocker_spaniel"], "220": ["n02102480", "Sussex_spaniel"], "221": ["n02102973", "Irish_water_spaniel"], "222": ["n02104029", "kuvasz"], "223": ["n02104365", "schipperke"], "224": ["n02105056", "groenendael"], "225": ["n02105162", "malinois"], "226": ["n02105251", "briard"], "227": ["n02105412", "kelpie"], "228": ["n02105505", "komondor"], "229": ["n02105641", "Old_English_sheepdog"], "230": ["n02105855", "Shetland_sheepdog"], "231": ["n02106030", "collie"], "232": ["n02106166", "Border_collie"], "233": ["n02106382", "Bouvier_des_Flandres"], "234": ["n02106550", "Rottweiler"], "235": ["n02106662", "German_shepherd"], "236": ["n02107142", "Doberman"], "237": ["n02107312", "miniature_pinscher"], "238": ["n02107574", "Greater_Swiss_Mountain_dog"], "239": ["n02107683", "Bernese_mountain_dog"], "240": ["n02107908", "Appenzeller"], "241": ["n02108000", "EntleBucher"], "242": ["n02108089", "boxer"], "243": ["n02108422", "bull_mastiff"], "244": ["n02108551", "Tibetan_mastiff"], "245": ["n02108915", "French_bulldog"], "246": ["n02109047", "Great_Dane"], "247": 
["n02109525", "Saint_Bernard"], "248": ["n02109961", "Eskimo_dog"], "249": ["n02110063", "malamute"], "250": ["n02110185", "Siberian_husky"], "251": ["n02110341", "dalmatian"], "252": ["n02110627", "affenpinscher"], "253": ["n02110806", "basenji"], "254": ["n02110958", "pug"], "255": ["n02111129", "Leonberg"], "256": ["n02111277", "Newfoundland"], "257": ["n02111500", "Great_Pyrenees"], "258": ["n02111889", "Samoyed"], "259": ["n02112018", "Pomeranian"], "260": ["n02112137", "chow"], "261": ["n02112350", "keeshond"], "262": ["n02112706", "Brabancon_griffon"], "263": ["n02113023", "Pembroke"], "264": ["n02113186", "Cardigan"], "265": ["n02113624", "toy_poodle"], "266": ["n02113712", "miniature_poodle"], "267": ["n02113799", "standard_poodle"], "268": ["n02113978", "Mexican_hairless"], "269": ["n02114367", "timber_wolf"], "270": ["n02114548", "white_wolf"], "271": ["n02114712", "red_wolf"], "272": ["n02114855", "coyote"], "273": ["n02115641", "dingo"], "274": ["n02115913", "dhole"], "275": ["n02116738", "African_hunting_dog"], "276": ["n02117135", "hyena"], "277": ["n02119022", "red_fox"], "278": ["n02119789", "kit_fox"], "279": ["n02120079", "Arctic_fox"], "280": ["n02120505", "grey_fox"], "281": ["n02123045", "tabby"], "282": ["n02123159", "tiger_cat"], "283": ["n02123394", "Persian_cat"], "284": ["n02123597", "Siamese_cat"], "285": ["n02124075", "Egyptian_cat"], "286": ["n02125311", "cougar"], "287": ["n02127052", "lynx"], "288": ["n02128385", "leopard"], "289": ["n02128757", "snow_leopard"], "290": ["n02128925", "jaguar"], "291": ["n02129165", "lion"], "292": ["n02129604", "tiger"], "293": ["n02130308", "cheetah"], "294": ["n02132136", "brown_bear"], "295": ["n02133161", "American_black_bear"], "296": ["n02134084", "ice_bear"], "297": ["n02134418", "sloth_bear"], "298": ["n02137549", "mongoose"], "299": ["n02138441", "meerkat"], "300": ["n02165105", "tiger_beetle"], "301": ["n02165456", "ladybug"], "302": ["n02167151", "ground_beetle"], "303": ["n02168699", "long-horned_beetle"], "304": ["n02169497", "leaf_beetle"], "305": ["n02172182", "dung_beetle"], "306": ["n02174001", "rhinoceros_beetle"], "307": ["n02177972", "weevil"], "308": ["n02190166", "fly"], "309": ["n02206856", "bee"], "310": ["n02219486", "ant"], "311": ["n02226429", "grasshopper"], "312": ["n02229544", "cricket"], "313": ["n02231487", "walking_stick"], "314": ["n02233338", "cockroach"], "315": ["n02236044", "mantis"], "316": ["n02256656", "cicada"], "317": ["n02259212", "leafhopper"], "318": ["n02264363", "lacewing"], "319": ["n02268443", "dragonfly"], "320": ["n02268853", "damselfly"], "321": ["n02276258", "admiral"], "322": ["n02277742", "ringlet"], "323": ["n02279972", "monarch"], "324": ["n02280649", "cabbage_butterfly"], "325": ["n02281406", "sulphur_butterfly"], "326": ["n02281787", "lycaenid"], "327": ["n02317335", "starfish"], "328": ["n02319095", "sea_urchin"], "329": ["n02321529", "sea_cucumber"], "330": ["n02325366", "wood_rabbit"], "331": ["n02326432", "hare"], "332": ["n02328150", "Angora"], "333": ["n02342885", "hamster"], "334": ["n02346627", "porcupine"], "335": ["n02356798", "fox_squirrel"], "336": ["n02361337", "marmot"], "337": ["n02363005", "beaver"], "338": ["n02364673", "guinea_pig"], "339": ["n02389026", "sorrel"], "340": ["n02391049", "zebra"], "341": ["n02395406", "hog"], "342": ["n02396427", "wild_boar"], "343": ["n02397096", "warthog"], "344": ["n02398521", "hippopotamus"], "345": ["n02403003", "ox"], "346": ["n02408429", "water_buffalo"], "347": ["n02410509", "bison"], "348": ["n02412080", 
"ram"], "349": ["n02415577", "bighorn"], "350": ["n02417914", "ibex"], "351": ["n02422106", "hartebeest"], "352": ["n02422699", "impala"], "353": ["n02423022", "gazelle"], "354": ["n02437312", "Arabian_camel"], "355": ["n02437616", "llama"], "356": ["n02441942", "weasel"], "357": ["n02442845", "mink"], "358": ["n02443114", "polecat"], "359": ["n02443484", "black-footed_ferret"], "360": ["n02444819", "otter"], "361": ["n02445715", "skunk"], "362": ["n02447366", "badger"], "363": ["n02454379", "armadillo"], "364": ["n02457408", "three-toed_sloth"], "365": ["n02480495", "orangutan"], "366": ["n02480855", "gorilla"], "367": ["n02481823", "chimpanzee"], "368": ["n02483362", "gibbon"], "369": ["n02483708", "siamang"], "370": ["n02484975", "guenon"], "371": ["n02486261", "patas"], "372": ["n02486410", "baboon"], "373": ["n02487347", "macaque"], "374": ["n02488291", "langur"], "375": ["n02488702", "colobus"], "376": ["n02489166", "proboscis_monkey"], "377": ["n02490219", "marmoset"], "378": ["n02492035", "capuchin"], "379": ["n02492660", "howler_monkey"], "380": ["n02493509", "titi"], "381": ["n02493793", "spider_monkey"], "382": ["n02494079", "squirrel_monkey"], "383": ["n02497673", "Madagascar_cat"], "384": ["n02500267", "indri"], "385": ["n02504013", "Indian_elephant"], "386": ["n02504458", "African_elephant"], "387": ["n02509815", "lesser_panda"], "388": ["n02510455", "giant_panda"], "389": ["n02514041", "barracouta"], "390": ["n02526121", "eel"], "391": ["n02536864", "coho"], "392": ["n02606052", "rock_beauty"], "393": ["n02607072", "anemone_fish"], "394": ["n02640242", "sturgeon"], "395": ["n02641379", "gar"], "396": ["n02643566", "lionfish"], "397": ["n02655020", "puffer"], "398": ["n02666196", "abacus"], "399": ["n02667093", "abaya"], "400": ["n02669723", "academic_gown"], "401": ["n02672831", "accordion"], "402": ["n02676566", "acoustic_guitar"], "403": ["n02687172", "aircraft_carrier"], "404": ["n02690373", "airliner"], "405": ["n02692877", "airship"], "406": ["n02699494", "altar"], "407": ["n02701002", "ambulance"], "408": ["n02704792", "amphibian"], "409": ["n02708093", "analog_clock"], "410": ["n02727426", "apiary"], "411": ["n02730930", "apron"], "412": ["n02747177", "ashcan"], "413": ["n02749479", "assault_rifle"], "414": ["n02769748", "backpack"], "415": ["n02776631", "bakery"], "416": ["n02777292", "balance_beam"], "417": ["n02782093", "balloon"], "418": ["n02783161", "ballpoint"], "419": ["n02786058", "Band_Aid"], "420": ["n02787622", "banjo"], "421": ["n02788148", "bannister"], "422": ["n02790996", "barbell"], "423": ["n02791124", "barber_chair"], "424": ["n02791270", "barbershop"], "425": ["n02793495", "barn"], "426": ["n02794156", "barometer"], "427": ["n02795169", "barrel"], "428": ["n02797295", "barrow"], "429": ["n02799071", "baseball"], "430": ["n02802426", "basketball"], "431": ["n02804414", "bassinet"], "432": ["n02804610", "bassoon"], "433": ["n02807133", "bathing_cap"], "434": ["n02808304", "bath_towel"], "435": ["n02808440", "bathtub"], "436": ["n02814533", "beach_wagon"], "437": ["n02814860", "beacon"], "438": ["n02815834", "beaker"], "439": ["n02817516", "bearskin"], "440": ["n02823428", "beer_bottle"], "441": ["n02823750", "beer_glass"], "442": ["n02825657", "bell_cote"], "443": ["n02834397", "bib"], "444": ["n02835271", "bicycle-built-for-two"], "445": ["n02837789", "bikini"], "446": ["n02840245", "binder"], "447": ["n02841315", "binoculars"], "448": ["n02843684", "birdhouse"], "449": ["n02859443", "boathouse"], "450": ["n02860847", "bobsled"], "451": 
["n02865351", "bolo_tie"], "452": ["n02869837", "bonnet"], "453": ["n02870880", "bookcase"], "454": ["n02871525", "bookshop"], "455": ["n02877765", "bottlecap"], "456": ["n02879718", "bow"], "457": ["n02883205", "bow_tie"], "458": ["n02892201", "brass"], "459": ["n02892767", "brassiere"], "460": ["n02894605", "breakwater"], "461": ["n02895154", "breastplate"], "462": ["n02906734", "broom"], "463": ["n02909870", "bucket"], "464": ["n02910353", "buckle"], "465": ["n02916936", "bulletproof_vest"], "466": ["n02917067", "bullet_train"], "467": ["n02927161", "butcher_shop"], "468": ["n02930766", "cab"], "469": ["n02939185", "caldron"], "470": ["n02948072", "candle"], "471": ["n02950826", "cannon"], "472": ["n02951358", "canoe"], "473": ["n02951585", "can_opener"], "474": ["n02963159", "cardigan"], "475": ["n02965783", "car_mirror"], "476": ["n02966193", "carousel"], "477": ["n02966687", "carpenter's_kit"], "478": ["n02971356", "carton"], "479": ["n02974003", "car_wheel"], "480": ["n02977058", "cash_machine"], "481": ["n02978881", "cassette"], "482": ["n02979186", "cassette_player"], "483": ["n02980441", "castle"], "484": ["n02981792", "catamaran"], "485": ["n02988304", "CD_player"], "486": ["n02992211", "cello"], "487": ["n02992529", "cellular_telephone"], "488": ["n02999410", "chain"], "489": ["n03000134", "chainlink_fence"], "490": ["n03000247", "chain_mail"], "491": ["n03000684", "chain_saw"], "492": ["n03014705", "chest"], "493": ["n03016953", "chiffonier"], "494": ["n03017168", "chime"], "495": ["n03018349", "china_cabinet"], "496": ["n03026506", "Christmas_stocking"], "497": ["n03028079", "church"], "498": ["n03032252", "cinema"], "499": ["n03041632", "cleaver"], "500": ["n03042490", "cliff_dwelling"], "501": ["n03045698", "cloak"], "502": ["n03047690", "clog"], "503": ["n03062245", "cocktail_shaker"], "504": ["n03063599", "coffee_mug"], "505": ["n03063689", "coffeepot"], "506": ["n03065424", "coil"], "507": ["n03075370", "combination_lock"], "508": ["n03085013", "computer_keyboard"], "509": ["n03089624", "confectionery"], "510": ["n03095699", "container_ship"], "511": ["n03100240", "convertible"], "512": ["n03109150", "corkscrew"], "513": ["n03110669", "cornet"], "514": ["n03124043", "cowboy_boot"], "515": ["n03124170", "cowboy_hat"], "516": ["n03125729", "cradle"], "517": ["n03126707", "crane"], "518": ["n03127747", "crash_helmet"], "519": ["n03127925", "crate"], "520": ["n03131574", "crib"], "521": ["n03133878", "Crock_Pot"], "522": ["n03134739", "croquet_ball"], "523": ["n03141823", "crutch"], "524": ["n03146219", "cuirass"], "525": ["n03160309", "dam"], "526": ["n03179701", "desk"], "527": ["n03180011", "desktop_computer"], "528": ["n03187595", "dial_telephone"], "529": ["n03188531", "diaper"], "530": ["n03196217", "digital_clock"], "531": ["n03197337", "digital_watch"], "532": ["n03201208", "dining_table"], "533": ["n03207743", "dishrag"], "534": ["n03207941", "dishwasher"], "535": ["n03208938", "disk_brake"], "536": ["n03216828", "dock"], "537": ["n03218198", "dogsled"], "538": ["n03220513", "dome"], "539": ["n03223299", "doormat"], "540": ["n03240683", "drilling_platform"], "541": ["n03249569", "drum"], "542": ["n03250847", "drumstick"], "543": ["n03255030", "dumbbell"], "544": ["n03259280", "Dutch_oven"], "545": ["n03271574", "electric_fan"], "546": ["n03272010", "electric_guitar"], "547": ["n03272562", "electric_locomotive"], "548": ["n03290653", "entertainment_center"], "549": ["n03291819", "envelope"], "550": ["n03297495", "espresso_maker"], "551": ["n03314780", "face_powder"], 
"552": ["n03325584", "feather_boa"], "553": ["n03337140", "file"], "554": ["n03344393", "fireboat"], "555": ["n03345487", "fire_engine"], "556": ["n03347037", "fire_screen"], "557": ["n03355925", "flagpole"], "558": ["n03372029", "flute"], "559": ["n03376595", "folding_chair"], "560": ["n03379051", "football_helmet"], "561": ["n03384352", "forklift"], "562": ["n03388043", "fountain"], "563": ["n03388183", "fountain_pen"], "564": ["n03388549", "four-poster"], "565": ["n03393912", "freight_car"], "566": ["n03394916", "French_horn"], "567": ["n03400231", "frying_pan"], "568": ["n03404251", "fur_coat"], "569": ["n03417042", "garbage_truck"], "570": ["n03424325", "gasmask"], "571": ["n03425413", "gas_pump"], "572": ["n03443371", "goblet"], "573": ["n03444034", "go-kart"], "574": ["n03445777", "golf_ball"], "575": ["n03445924", "golfcart"], "576": ["n03447447", "gondola"], "577": ["n03447721", "gong"], "578": ["n03450230", "gown"], "579": ["n03452741", "grand_piano"], "580": ["n03457902", "greenhouse"], "581": ["n03459775", "grille"], "582": ["n03461385", "grocery_store"], "583": ["n03467068", "guillotine"], "584": ["n03476684", "hair_slide"], "585": ["n03476991", "hair_spray"], "586": ["n03478589", "half_track"], "587": ["n03481172", "hammer"], "588": ["n03482405", "hamper"], "589": ["n03483316", "hand_blower"], "590": ["n03485407", "hand-held_computer"], "591": ["n03485794", "handkerchief"], "592": ["n03492542", "hard_disc"], "593": ["n03494278", "harmonica"], "594": ["n03495258", "harp"], "595": ["n03496892", "harvester"], "596": ["n03498962", "hatchet"], "597": ["n03527444", "holster"], "598": ["n03529860", "home_theater"], "599": ["n03530642", "honeycomb"], "600": ["n03532672", "hook"], "601": ["n03534580", "hoopskirt"], "602": ["n03535780", "horizontal_bar"], "603": ["n03538406", "horse_cart"], "604": ["n03544143", "hourglass"], "605": ["n03584254", "iPod"], "606": ["n03584829", "iron"], "607": ["n03590841", "jack-o'-lantern"], "608": ["n03594734", "jean"], "609": ["n03594945", "jeep"], "610": ["n03595614", "jersey"], "611": ["n03598930", "jigsaw_puzzle"], "612": ["n03599486", "jinrikisha"], "613": ["n03602883", "joystick"], "614": ["n03617480", "kimono"], "615": ["n03623198", "knee_pad"], "616": ["n03627232", "knot"], "617": ["n03630383", "lab_coat"], "618": ["n03633091", "ladle"], "619": ["n03637318", "lampshade"], "620": ["n03642806", "laptop"], "621": ["n03649909", "lawn_mower"], "622": ["n03657121", "lens_cap"], "623": ["n03658185", "letter_opener"], "624": ["n03661043", "library"], "625": ["n03662601", "lifeboat"], "626": ["n03666591", "lighter"], "627": ["n03670208", "limousine"], "628": ["n03673027", "liner"], "629": ["n03676483", "lipstick"], "630": ["n03680355", "Loafer"], "631": ["n03690938", "lotion"], "632": ["n03691459", "loudspeaker"], "633": ["n03692522", "loupe"], "634": ["n03697007", "lumbermill"], "635": ["n03706229", "magnetic_compass"], "636": ["n03709823", "mailbag"], "637": ["n03710193", "mailbox"], "638": ["n03710637", "maillot"], "639": ["n03710721", "maillot"], "640": ["n03717622", "manhole_cover"], "641": ["n03720891", "maraca"], "642": ["n03721384", "marimba"], "643": ["n03724870", "mask"], "644": ["n03729826", "matchstick"], "645": ["n03733131", "maypole"], "646": ["n03733281", "maze"], "647": ["n03733805", "measuring_cup"], "648": ["n03742115", "medicine_chest"], "649": ["n03743016", "megalith"], "650": ["n03759954", "microphone"], "651": ["n03761084", "microwave"], "652": ["n03763968", "military_uniform"], "653": ["n03764736", "milk_can"], "654": 
["n03769881", "minibus"], "655": ["n03770439", "miniskirt"], "656": ["n03770679", "minivan"], "657": ["n03773504", "missile"], "658": ["n03775071", "mitten"], "659": ["n03775546", "mixing_bowl"], "660": ["n03776460", "mobile_home"], "661": ["n03777568", "Model_T"], "662": ["n03777754", "modem"], "663": ["n03781244", "monastery"], "664": ["n03782006", "monitor"], "665": ["n03785016", "moped"], "666": ["n03786901", "mortar"], "667": ["n03787032", "mortarboard"], "668": ["n03788195", "mosque"], "669": ["n03788365", "mosquito_net"], "670": ["n03791053", "motor_scooter"], "671": ["n03792782", "mountain_bike"], "672": ["n03792972", "mountain_tent"], "673": ["n03793489", "mouse"], "674": ["n03794056", "mousetrap"], "675": ["n03796401", "moving_van"], "676": ["n03803284", "muzzle"], "677": ["n03804744", "nail"], "678": ["n03814639", "neck_brace"], "679": ["n03814906", "necklace"], "680": ["n03825788", "nipple"], "681": ["n03832673", "notebook"], "682": ["n03837869", "obelisk"], "683": ["n03838899", "oboe"], "684": ["n03840681", "ocarina"], "685": ["n03841143", "odometer"], "686": ["n03843555", "oil_filter"], "687": ["n03854065", "organ"], "688": ["n03857828", "oscilloscope"], "689": ["n03866082", "overskirt"], "690": ["n03868242", "oxcart"], "691": ["n03868863", "oxygen_mask"], "692": ["n03871628", "packet"], "693": ["n03873416", "paddle"], "694": ["n03874293", "paddlewheel"], "695": ["n03874599", "padlock"], "696": ["n03876231", "paintbrush"], "697": ["n03877472", "pajama"], "698": ["n03877845", "palace"], "699": ["n03884397", "panpipe"], "700": ["n03887697", "paper_towel"], "701": ["n03888257", "parachute"], "702": ["n03888605", "parallel_bars"], "703": ["n03891251", "park_bench"], "704": ["n03891332", "parking_meter"], "705": ["n03895866", "passenger_car"], "706": ["n03899768", "patio"], "707": ["n03902125", "pay-phone"], "708": ["n03903868", "pedestal"], "709": ["n03908618", "pencil_box"], "710": ["n03908714", "pencil_sharpener"], "711": ["n03916031", "perfume"], "712": ["n03920288", "Petri_dish"], "713": ["n03924679", "photocopier"], "714": ["n03929660", "pick"], "715": ["n03929855", "pickelhaube"], "716": ["n03930313", "picket_fence"], "717": ["n03930630", "pickup"], "718": ["n03933933", "pier"], "719": ["n03935335", "piggy_bank"], "720": ["n03937543", "pill_bottle"], "721": ["n03938244", "pillow"], "722": ["n03942813", "ping-pong_ball"], "723": ["n03944341", "pinwheel"], "724": ["n03947888", "pirate"], "725": ["n03950228", "pitcher"], "726": ["n03954731", "plane"], "727": ["n03956157", "planetarium"], "728": ["n03958227", "plastic_bag"], "729": ["n03961711", "plate_rack"], "730": ["n03967562", "plow"], "731": ["n03970156", "plunger"], "732": ["n03976467", "Polaroid_camera"], "733": ["n03976657", "pole"], "734": ["n03977966", "police_van"], "735": ["n03980874", "poncho"], "736": ["n03982430", "pool_table"], "737": ["n03983396", "pop_bottle"], "738": ["n03991062", "pot"], "739": ["n03992509", "potter's_wheel"], "740": ["n03995372", "power_drill"], "741": ["n03998194", "prayer_rug"], "742": ["n04004767", "printer"], "743": ["n04005630", "prison"], "744": ["n04008634", "projectile"], "745": ["n04009552", "projector"], "746": ["n04019541", "puck"], "747": ["n04023962", "punching_bag"], "748": ["n04026417", "purse"], "749": ["n04033901", "quill"], "750": ["n04033995", "quilt"], "751": ["n04037443", "racer"], "752": ["n04039381", "racket"], "753": ["n04040759", "radiator"], "754": ["n04041544", "radio"], "755": ["n04044716", "radio_telescope"], "756": ["n04049303", "rain_barrel"], "757": 
["n04065272", "recreational_vehicle"], "758": ["n04067472", "reel"], "759": ["n04069434", "reflex_camera"], "760": ["n04070727", "refrigerator"], "761": ["n04074963", "remote_control"], "762": ["n04081281", "restaurant"], "763": ["n04086273", "revolver"], "764": ["n04090263", "rifle"], "765": ["n04099969", "rocking_chair"], "766": ["n04111531", "rotisserie"], "767": ["n04116512", "rubber_eraser"], "768": ["n04118538", "rugby_ball"], "769": ["n04118776", "rule"], "770": ["n04120489", "running_shoe"], "771": ["n04125021", "safe"], "772": ["n04127249", "safety_pin"], "773": ["n04131690", "saltshaker"], "774": ["n04133789", "sandal"], "775": ["n04136333", "sarong"], "776": ["n04141076", "sax"], "777": ["n04141327", "scabbard"], "778": ["n04141975", "scale"], "779": ["n04146614", "school_bus"], "780": ["n04147183", "schooner"], "781": ["n04149813", "scoreboard"], "782": ["n04152593", "screen"], "783": ["n04153751", "screw"], "784": ["n04154565", "screwdriver"], "785": ["n04162706", "seat_belt"], "786": ["n04179913", "sewing_machine"], "787": ["n04192698", "shield"], "788": ["n04200800", "shoe_shop"], "789": ["n04201297", "shoji"], "790": ["n04204238", "shopping_basket"], "791": ["n04204347", "shopping_cart"], "792": ["n04208210", "shovel"], "793": ["n04209133", "shower_cap"], "794": ["n04209239", "shower_curtain"], "795": ["n04228054", "ski"], "796": ["n04229816", "ski_mask"], "797": ["n04235860", "sleeping_bag"], "798": ["n04238763", "slide_rule"], "799": ["n04239074", "sliding_door"], "800": ["n04243546", "slot"], "801": ["n04251144", "snorkel"], "802": ["n04252077", "snowmobile"], "803": ["n04252225", "snowplow"], "804": ["n04254120", "soap_dispenser"], "805": ["n04254680", "soccer_ball"], "806": ["n04254777", "sock"], "807": ["n04258138", "solar_dish"], "808": ["n04259630", "sombrero"], "809": ["n04263257", "soup_bowl"], "810": ["n04264628", "space_bar"], "811": ["n04265275", "space_heater"], "812": ["n04266014", "space_shuttle"], "813": ["n04270147", "spatula"], "814": ["n04273569", "speedboat"], "815": ["n04275548", "spider_web"], "816": ["n04277352", "spindle"], "817": ["n04285008", "sports_car"], "818": ["n04286575", "spotlight"], "819": ["n04296562", "stage"], "820": ["n04310018", "steam_locomotive"], "821": ["n04311004", "steel_arch_bridge"], "822": ["n04311174", "steel_drum"], "823": ["n04317175", "stethoscope"], "824": ["n04325704", "stole"], "825": ["n04326547", "stone_wall"], "826": ["n04328186", "stopwatch"], "827": ["n04330267", "stove"], "828": ["n04332243", "strainer"], "829": ["n04335435", "streetcar"], "830": ["n04336792", "stretcher"], "831": ["n04344873", "studio_couch"], "832": ["n04346328", "stupa"], "833": ["n04347754", "submarine"], "834": ["n04350905", "suit"], "835": ["n04355338", "sundial"], "836": ["n04355933", "sunglass"], "837": ["n04356056", "sunglasses"], "838": ["n04357314", "sunscreen"], "839": ["n04366367", "suspension_bridge"], "840": ["n04367480", "swab"], "841": ["n04370456", "sweatshirt"], "842": ["n04371430", "swimming_trunks"], "843": ["n04371774", "swing"], "844": ["n04372370", "switch"], "845": ["n04376876", "syringe"], "846": ["n04380533", "table_lamp"], "847": ["n04389033", "tank"], "848": ["n04392985", "tape_player"], "849": ["n04398044", "teapot"], "850": ["n04399382", "teddy"], "851": ["n04404412", "television"], "852": ["n04409515", "tennis_ball"], "853": ["n04417672", "thatch"], "854": ["n04418357", "theater_curtain"], "855": ["n04423845", "thimble"], "856": ["n04428191", "thresher"], "857": ["n04429376", "throne"], "858": ["n04435653", 
"tile_roof"], "859": ["n04442312", "toaster"], "860": ["n04443257", "tobacco_shop"], "861": ["n04447861", "toilet_seat"], "862": ["n04456115", "torch"], "863": ["n04458633", "totem_pole"], "864": ["n04461696", "tow_truck"], "865": ["n04462240", "toyshop"], "866": ["n04465501", "tractor"], "867": ["n04467665", "trailer_truck"], "868": ["n04476259", "tray"], "869": ["n04479046", "trench_coat"], "870": ["n04482393", "tricycle"], "871": ["n04483307", "trimaran"], "872": ["n04485082", "tripod"], "873": ["n04486054", "triumphal_arch"], "874": ["n04487081", "trolleybus"], "875": ["n04487394", "trombone"], "876": ["n04493381", "tub"], "877": ["n04501370", "turnstile"], "878": ["n04505470", "typewriter_keyboard"], "879": ["n04507155", "umbrella"], "880": ["n04509417", "unicycle"], "881": ["n04515003", "upright"], "882": ["n04517823", "vacuum"], "883": ["n04522168", "vase"], "884": ["n04523525", "vault"], "885": ["n04525038", "velvet"], "886": ["n04525305", "vending_machine"], "887": ["n04532106", "vestment"], "888": ["n04532670", "viaduct"], "889": ["n04536866", "violin"], "890": ["n04540053", "volleyball"], "891": ["n04542943", "waffle_iron"], "892": ["n04548280", "wall_clock"], "893": ["n04548362", "wallet"], "894": ["n04550184", "wardrobe"], "895": ["n04552348", "warplane"], "896": ["n04553703", "washbasin"], "897": ["n04554684", "washer"], "898": ["n04557648", "water_bottle"], "899": ["n04560804", "water_jug"], "900": ["n04562935", "water_tower"], "901": ["n04579145", "whiskey_jug"], "902": ["n04579432", "whistle"], "903": ["n04584207", "wig"], "904": ["n04589890", "window_screen"], "905": ["n04590129", "window_shade"], "906": ["n04591157", "Windsor_tie"], "907": ["n04591713", "wine_bottle"], "908": ["n04592741", "wing"], "909": ["n04596742", "wok"], "910": ["n04597913", "wooden_spoon"], "911": ["n04599235", "wool"], "912": ["n04604644", "worm_fence"], "913": ["n04606251", "wreck"], "914": ["n04612504", "yawl"], "915": ["n04613696", "yurt"], "916": ["n06359193", "web_site"], "917": ["n06596364", "comic_book"], "918": ["n06785654", "crossword_puzzle"], "919": ["n06794110", "street_sign"], "920": ["n06874185", "traffic_light"], "921": ["n07248320", "book_jacket"], "922": ["n07565083", "menu"], "923": ["n07579787", "plate"], "924": ["n07583066", "guacamole"], "925": ["n07584110", "consomme"], "926": ["n07590611", "hot_pot"], "927": ["n07613480", "trifle"], "928": ["n07614500", "ice_cream"], "929": ["n07615774", "ice_lolly"], "930": ["n07684084", "French_loaf"], "931": ["n07693725", "bagel"], "932": ["n07695742", "pretzel"], "933": ["n07697313", "cheeseburger"], "934": ["n07697537", "hotdog"], "935": ["n07711569", "mashed_potato"], "936": ["n07714571", "head_cabbage"], "937": ["n07714990", "broccoli"], "938": ["n07715103", "cauliflower"], "939": ["n07716358", "zucchini"], "940": ["n07716906", "spaghetti_squash"], "941": ["n07717410", "acorn_squash"], "942": ["n07717556", "butternut_squash"], "943": ["n07718472", "cucumber"], "944": ["n07718747", "artichoke"], "945": ["n07720875", "bell_pepper"], "946": ["n07730033", "cardoon"], "947": ["n07734744", "mushroom"], "948": ["n07742313", "Granny_Smith"], "949": ["n07745940", "strawberry"], "950": ["n07747607", "orange"], "951": ["n07749582", "lemon"], "952": ["n07753113", "fig"], "953": ["n07753275", "pineapple"], "954": ["n07753592", "banana"], "955": ["n07754684", "jackfruit"], "956": ["n07760859", "custard_apple"], "957": ["n07768694", "pomegranate"], "958": ["n07802026", "hay"], "959": ["n07831146", "carbonara"], "960": ["n07836838", 
"chocolate_sauce"], "961": ["n07860988", "dough"], "962": ["n07871810", "meat_loaf"], "963": ["n07873807", "pizza"], "964": ["n07875152", "potpie"], "965": ["n07880968", "burrito"], "966": ["n07892512", "red_wine"], "967": ["n07920052", "espresso"], "968": ["n07930864", "cup"], "969": ["n07932039", "eggnog"], "970": ["n09193705", "alp"], "971": ["n09229709", "bubble"], "972": ["n09246464", "cliff"], "973": ["n09256479", "coral_reef"], "974": ["n09288635", "geyser"], "975": ["n09332890", "lakeside"], "976": ["n09399592", "promontory"], "977": ["n09421951", "sandbar"], "978": ["n09428293", "seashore"], "979": ["n09468604", "valley"], "980": ["n09472597", "volcano"], "981": ["n09835506", "ballplayer"], "982": ["n10148035", "groom"], "983": ["n10565667", "scuba_diver"], "984": ["n11879895", "rapeseed"], "985": ["n11939491", "daisy"], "986": ["n12057211", "yellow_lady's_slipper"], "987": ["n12144580", "corn"], "988": ["n12267677", "acorn"], "989": ["n12620546", "hip"], "990": ["n12768682", "buckeye"], "991": ["n12985857", "coral_fungus"], "992": ["n12998815", "agaric"], "993": ["n13037406", "gyromitra"], "994": ["n13040303", "stinkhorn"], "995": ["n13044778", "earthstar"], "996": ["n13052670", "hen-of-the-woods"], "997": ["n13054560", "bolete"], "998": ["n13133613", "ear"], "999": ["n15075141", "toilet_tissue"]} --------------------------------------------------------------------------------