├── .env.example ├── cache.py ├── requirements.txt ├── pyproject.toml ├── .gitignore ├── .github │   ├── FUNDING.yml │   └── workflows │       └── check-availability.yml ├── substore_mapping.py ├── config.py ├── README.md ├── check_products.py ├── utils.py ├── sentry_utils.py ├── notifier.py ├── api_client.py ├── common.py ├── database.py └── substore_list.py /.env.example: -------------------------------------------------------------------------------- 1 | TELEGRAM_BOT_TOKEN=telegram-token 2 | DATABASE_URL=postgresql://postgres:postgres@localhost:5432/postgres 3 | ADMIN_CHAT_ID=123456 4 | -------------------------------------------------------------------------------- /cache.py: -------------------------------------------------------------------------------- 1 | substore_cache = {} # substore_id -> product_status 2 | substore_pincode_map = {} # pincode -> substore_id 3 | pincode_cache = {} # pincode -> product_status 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | python-dotenv 2 | requests 3 | aiohttp 4 | cloudscraper 5 | python-telegram-bot[job-queue]>=20.0 6 | psutil 7 | asyncpg 8 | concurrent-log-handler==0.9.2 9 | sentry-sdk -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "amul-protein-notifier" 3 | version = "0.1.0" 4 | description = "Telegram bot that checks Amul protein product availability" 5 | readme = "README.md" 6 | requires-python = ">=3.12" 7 | dependencies = [ 8 | "aiohttp>=3.12.15", 9 | "asyncpg>=0.30.0", 10 | "cloudscraper>=1.2.71", 11 | "psutil>=7.0.0", 12 | "python-dotenv>=1.1.1", 13 | "python-telegram-bot[job-queue]>=20.0", 14 | "requests>=2.32.5", 15 | "sentry-sdk>=2.37.0", 16 | "concurrent-log-handler", 17 | ] 18 | 19 | [dependency-groups] 20 | dev = [ 21 | "ruff>=0.12.11", 22 | ] 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Exclude virtual environment, env file, logs, and Python cache files 2 | .venv/ 3 | .env 4 | *.log 5 | __pycache__/ 6 | *.pyc 7 | # Exclude local files that store user data 8 | users.json 9 | users.db 10 | # Exclude log files 11 | product_check.log* 12 | infojs_debug.log 13 | # Exclude backup and debug files 14 | BACKUP_check_products.py 15 | simple_checker.py 16 | backup-main - Copy.py 17 | backup-database.py 18 | users.db-shm 19 | users.db-wal 20 | amul_bot_backup.sql 21 | upload_to_github.py 22 | # Exclude local tooling, private data, and test files 23 | sqlite3.exe 24 | substore_list.py 25 | test_notification_logic.py 26 | test_users.db 27 | download_from_github.py 28 | .__product_check.lock 29 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., 
cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 12 | polar: # Replace with a single Polar username 13 | buy_me_a_coffee: e.bhikhari 14 | thanks_dev: # Replace with a single thanks.dev username 15 | custom: ['https://razorpay.me/@amulproteinnotifierbot'] 16 | -------------------------------------------------------------------------------- /.github/workflows/check-availability.yml: -------------------------------------------------------------------------------- 1 | name: Amul Protein Notifier 2 | 3 | on: 4 | schedule: 5 | - cron: "0 */6 * * *" 6 | push: 7 | branches: 8 | - main 9 | workflow_dispatch: 10 | 11 | jobs: 12 | check-products: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout repository 17 | uses: actions/checkout@v4 18 | 19 | - name: Set up Python 20 | uses: actions/setup-python@v5 21 | with: 22 | python-version: '3.12' 23 | 24 | - name: Install dependencies 25 | run: | 26 | python -m pip install --upgrade pip 27 | pip install -r requirements.txt 28 | 29 | - name: Set up environment variables 30 | run: | 31 | echo "TELEGRAM_BOT_TOKEN=${{ secrets.TELEGRAM_BOT_TOKEN }}" >> $GITHUB_ENV 32 | echo "GH_PAT=${{ secrets.GH_PAT }}" >> $GITHUB_ENV 33 | echo "PRIVATE_REPO=${{ secrets.PRIVATE_REPO }}" >> $GITHUB_ENV 34 | 35 | - name: Run Amul Protein Notifier (API-based) 36 | env: 37 | TELEGRAM_BOT_TOKEN: ${{ secrets.TELEGRAM_BOT_TOKEN }} 38 | GH_PAT: ${{ secrets.GH_PAT }} 39 | PRIVATE_REPO: ${{ secrets.PRIVATE_REPO }} 40 | run: | 41 | python check_products.py 42 | 43 | - name: Upload logs 44 | if: always() 45 | uses: actions/upload-artifact@v4 46 | with: 47 | name: product-check-logs 48 | path: | 49 | product_check.log 50 | -------------------------------------------------------------------------------- /substore_mapping.py: -------------------------------------------------------------------------------- 1 | import importlib.util # needed for spec_from_file_location / module_from_spec 2 | import json 3 | from config import SUBSTORE_LIST_FILE 4 | import logging 5 | 6 | logger = logging.getLogger(__name__) 7 | 8 | 9 | def load_substore_mapping(): 10 | spec = importlib.util.spec_from_file_location("substore_list", SUBSTORE_LIST_FILE) 11 | substore_list = importlib.util.module_from_spec(spec) 12 | spec.loader.exec_module(substore_list) 13 | 14 | for sub in substore_list.substore_info: 15 | # Handle _id: deduplicate whether list or comma-string 16 | if isinstance(sub["_id"], list): 17 | unique_ids = list(set(sub["_id"])) 18 | elif isinstance(sub["_id"], str) and "," in sub["_id"]: 19 | unique_ids = list( 20 | set(id_.strip() for id_ in sub["_id"].split(",") if id_.strip()) 21 | ) 22 | else: 23 | unique_ids = [sub["_id"]] if sub.get("_id") else [] 24 | 25 | if len(unique_ids) > 1: 26 | logger.warning( 27 | f"Multiple unique _ids for alias {sub.get('alias')}: {unique_ids}. Using first." 
28 | ) 29 | sub["_id"] = unique_ids[0] if unique_ids else "" 30 | 31 | # Ensure pincodes is a list and deduplicated 32 | pincodes = sub.get("pincodes", []) 33 | if isinstance(pincodes, str): 34 | pincodes = [p.strip() for p in pincodes.split(",") if p.strip()] 35 | sub["pincodes"] = list(set(pincodes)) # Dedup 36 | 37 | return substore_list.substore_info 38 | 39 | 40 | def save_substore_mapping(substore_info): 41 | with open(SUBSTORE_LIST_FILE, "w", encoding="utf-8") as f: 42 | f.write( 43 | "substore_info = " + json.dumps(substore_info, indent=4, ensure_ascii=False) 44 | ) 45 | -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from dotenv import load_dotenv 3 | 4 | # Load environment variables from .env file 5 | load_dotenv() 6 | 7 | # --- Database Configuration --- 8 | DATABASE_URL = os.getenv("DATABASE_URL") # PostgreSQL connection string (required) 9 | DATABASE_FILE = os.getenv( 10 | "DATABASE_FILE", "users.db" 11 | ) # SQLite file path (optional, for migration) 12 | # --- Secrets and Environment-Specific --- 13 | TELEGRAM_BOT_TOKEN = os.getenv("TELEGRAM_BOT_TOKEN") 14 | ADMIN_CHAT_ID = os.getenv("ADMIN_CHAT_ID") 15 | 16 | # --- Sentry Configuration --- 17 | SENTRY_DSN = os.getenv("SENTRY_DSN") 18 | SENTRY_ENVIRONMENT = os.getenv("SENTRY_ENVIRONMENT", "production") 19 | 20 | # --- Concurrency Settings --- 21 | SEMAPHORE_LIMIT = 1 22 | NOTIFICATION_CONCURRENCY_LIMIT = int( 23 | os.getenv("NOTIFICATION_CONCURRENCY_LIMIT", 30) 24 | ) # Default to 30 for Telegram limit 25 | MAX_RETRY = 1 26 | 27 | # --- File Paths --- 28 | LOG_FILE = "product_check.log" 29 | 30 | # --- API Configuration --- 31 | BASE_URL = "https://shop.amul.com" 32 | PROTEIN_URL = f"{BASE_URL}/en/browse/protein" 33 | API_URL = f"{BASE_URL}/api/1/entity/ms.products" 34 | PINCODE_URL = f"{BASE_URL}/entity/pincode" 35 | SETTINGS_URL = f"{BASE_URL}/entity/ms.settings/_/setPreferences" 36 | INFO_URL = f"{BASE_URL}/user/info.js" 37 | 38 | # Session management 39 | COOKIE_REFRESH_INTERVAL = 1200 40 | 41 | # API Headers 42 | API_HEADERS = { 43 | "accept": "application/json, text/plain, */*", 44 | "accept-language": "en-IN,en-GB;q=0.9,en-US;q=0.8,en;q=0.7,hi;q=0.6", 45 | "frontend": "1", 46 | "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36", 47 | "sec-ch-ua": '"Google Chrome";v="137", "Chromium";v="137", "Not/A)Brand";v="24"', 48 | "sec-ch-ua-mobile": "?0", 49 | "sec-ch-ua-platform": '"Linux"', 50 | "sec-fetch-dest": "empty", 51 | "sec-fetch-mode": "cors", 52 | "sec-fetch-site": "same-origin", 53 | "referer": BASE_URL, 54 | "x-requested-with": "XMLHttpRequest", 55 | "sec-gpc": "1", 56 | "priority": "u=1, i", 57 | "content-type": "application/json", 58 | } 59 | 60 | # --- Substore Mapping --- 61 | USE_SUBSTORE_CACHE = True 62 | FALLBACK_TO_PINCODE_CACHE = True 63 | SUBSTORE_LIST_FILE = "substore_list.py" 64 | 65 | # --- Rate Limiting Settings --- 66 | PRODUCT_API_DELAY_RANGE = (1.0, 2.0) 67 | GLOBAL_PRODUCT_API_RPS = 5 68 | 69 | # --- Logging and Monitoring --- 70 | MAX_FILE_SIZE = 1 * 1024 * 1024 # 1 MB 71 | MAX_OF_DAYS = 1 72 | 73 | # --- Execution Mode --- 74 | EXECUTION_MODE = "Concurrent" 75 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Amul Protein Notifier 2 | 3 | A modular, 
production-ready Telegram bot that checks Amul protein product availability and notifies users. 4 | 5 | ## Features 6 | 7 | - Checks product availability via Amul's API (no Selenium required) 8 | - Notifies users on Telegram when products are in stock 9 | - Caching and retry logic for reliability 10 | - Configurable log rotation and error handling 11 | - Modular codebase for easy maintenance 12 | 13 | ## Main Files 14 | 15 | - `check_products.py` — Entrypoint script 16 | - `product_checker.py` — Main orchestration logic 17 | - `api_client.py` — API/session logic 18 | - `notifier.py` — Telegram notification logic 19 | - `substore_mapping.py` — Persistent substore mapping 20 | - `cache.py` — In-memory cache dicts 21 | - `utils.py` — Utility functions (logging, masking, etc.) 22 | - `config.py` — All configuration (API, logging, cache, etc.) 23 | 24 | ## Requirements 25 | 26 | - Python 3.12+ 27 | - [uv](https://docs.astral.sh/uv/getting-started/installation/) - Fast Python package installer and resolver 28 | - A Telegram bot token: create your own bot with the official @BotFather bot on Telegram and store the token in `.env` 29 | 30 | ## Setup 31 | 32 | 1. Clone the repository 33 | 34 | 2. Create a `.env` file with your configuration. You can copy from `.env.example` as a template: 35 | ```bash 36 | cp .env.example .env 37 | ``` 38 | 39 | Required environment variables: 40 | - `TELEGRAM_BOT_TOKEN` - Your Telegram bot token from @BotFather 41 | - `ADMIN_CHAT_ID` - Your Telegram chat ID for admin notifications 42 | - `DATABASE_URL` - PostgreSQL connection string 43 | 44 | 45 | 3. Install uv (if not already installed): 46 | 47 | **macOS and Linux:** 48 | ```bash 49 | curl -LsSf https://astral.sh/uv/install.sh | sh 50 | ``` 51 | 52 | **Windows:** 53 | ```powershell 54 | powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex" 55 | ``` 56 | 57 | **Alternative methods:** 58 | - PyPI: `pipx install uv` or `pip install uv` 59 | - Homebrew: `brew install uv` 60 | - WinGet: `winget install --id=astral-sh.uv -e` 61 | 62 | For more installation options, see the [official uv installation guide](https://docs.astral.sh/uv/getting-started/installation/). 63 | 64 | 4. Install dependencies: 65 | 66 | ```bash 67 | uv sync 68 | ``` 69 | 70 | 5. (Optional) Install [DB Browser for SQLite](https://sqlitebrowser.org/) to inspect a local `users.db` file during migration 71 | 72 | 6. Run the application: 73 | 74 | **Start the Telegram bot:** 75 | ```bash 76 | uv run main.py 77 | ``` 78 | This starts the Telegram bot that users can interact with to subscribe to or unsubscribe from notifications. 79 | 80 | **Fetch product details and notify users:** 81 | ```bash 82 | uv run check_products.py 83 | ``` 84 | This script fetches product availability from the Amul website and sends notifications to subscribed users. 85 | 86 | ## Excluded from Public Repo 87 | 88 | - `users.json`, `users.db`, `substore_list.py`, `.env`, logs, and backup/debug files are excluded for privacy and security. 
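## Scheduling the Check (Optional)

`check_products.py` is designed to be run periodically; the bundled GitHub Actions workflow runs it every 6 hours. If you host the bot yourself, a minimal cron sketch looks like the following (the repository path is an assumption, adjust it to your clone, and make sure `uv` is on cron's PATH):

```bash
# Every 6 hours, mirroring the workflow schedule "0 */6 * * *"
0 */6 * * * cd /path/to/amul-protein-notifier && uv run check_products.py >> cron.log 2>&1
```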
89 | 90 | ## License 91 | 92 | Amul Paglu 93 | -------------------------------------------------------------------------------- /check_products.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | 4 | sys.path.append(os.path.dirname(os.path.abspath(__file__))) 5 | os.chdir(os.path.dirname(os.path.abspath(__file__))) 6 | from utils import setup_logging 7 | from common import is_already_running 8 | from sentry_utils import capture_cron_event 9 | import time 10 | import signal 11 | from product_checker import check_products_for_users 12 | import asyncio 13 | import config 14 | from database import Database 15 | 16 | 17 | async def main_async(): 18 | logger = setup_logging() 19 | logger.info("Starting API-based product check script") 20 | 21 | def handle_shutdown(signum, frame): 22 | logger.info("Received shutdown signal, exiting...") 23 | raise KeyboardInterrupt 24 | 25 | signal.signal(signal.SIGINT, handle_shutdown) 26 | signal.signal(signal.SIGTERM, handle_shutdown) 27 | db = None # created inside try; close only if it was initialized 28 | try: 29 | # Signal to Sentry that a cron run has started 30 | capture_cron_event("check_products", status="start") 31 | if is_already_running("check_products.py"): 32 | logger.error( 33 | "Another instance of check_products.py is already running. Exiting..." 34 | ) 35 | raise SystemExit(1) 36 | 37 | # Initialize database 38 | db = Database(config.DATABASE_URL) 39 | logger.info("Initializing database...") 40 | await db._init_db() # Explicitly call _init_db 41 | logger.info("Database initialized successfully") 42 | 43 | await check_products_for_users(db) 44 | 45 | except KeyboardInterrupt: 46 | logger.info("Main process interrupted, exiting cleanly...") 47 | if db: await db.close() 48 | capture_cron_event("check_products", status="interrupted") 49 | raise SystemExit(0) 50 | except Exception as e: 51 | logger.error(f"Unexpected error in main: {e}") 52 | # Send error event to Sentry for cron monitoring 53 | try: 54 | capture_cron_event("check_products", status="error", extra={"error": str(e)}) 55 | except Exception: 56 | pass 57 | if db: await db.close() 58 | raise SystemExit(1) 59 | 60 | 61 | def main(): 62 | start_time = time.time() 63 | if sys.platform == "win32": 64 | try: 65 | os.system("chcp 65001 >nul 2>&1") 66 | if hasattr(sys.stdout, "reconfigure"): 67 | sys.stdout.reconfigure(encoding="utf-8") 68 | if hasattr(sys.stderr, "reconfigure"): 69 | sys.stderr.reconfigure(encoding="utf-8") 70 | except Exception: 71 | pass 72 | 73 | logger = setup_logging() 74 | try: 75 | asyncio.run(main_async()) 76 | total_time = time.time() - start_time 77 | minutes, seconds = divmod(total_time, 60) 78 | logger.info( 79 | f"Total execution time: {int(minutes)} minutes {seconds:.2f} seconds" 80 | ) 81 | print(f"Total execution time: {int(minutes)} minutes {seconds:.2f} seconds") 82 | except Exception as e: 83 | logger.error(f"Fatal error: {e}") 84 | try: 85 | capture_cron_event("check_products", status="fatal", extra={"error": str(e)}) 86 | except Exception: 87 | pass 88 | raise SystemExit(1) 89 | 90 | 91 | if __name__ == "__main__": 92 | main() 93 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from logging.handlers import RotatingFileHandler 3 | try: 4 | from concurrent_log_handler import ConcurrentRotatingFileHandler 5 | _HAS_CONCURRENT_HANDLER = True 6 | except Exception: 7 | ConcurrentRotatingFileHandler = None 8 | 
_HAS_CONCURRENT_HANDLER = False 9 | from config import LOG_FILE, MAX_FILE_SIZE, MAX_OF_DAYS 10 | import time 11 | import random 12 | import hashlib 13 | import os 14 | 15 | 16 | def setup_logging(): 17 | # Remove all handlers associated with the root logger object (to avoid duplicate logs) 18 | for handler in logging.root.handlers[:]: 19 | logging.root.removeHandler(handler) 20 | 21 | # Prefer a process-safe rotating handler for multi-process writes 22 | if _HAS_CONCURRENT_HANDLER: 23 | handler = ConcurrentRotatingFileHandler( 24 | LOG_FILE, maxBytes=MAX_FILE_SIZE, backupCount=1, encoding="utf-8" 25 | ) 26 | else: 27 | # Fallback: use RotatingFileHandler (not safe across multiple processes) 28 | handler = RotatingFileHandler( 29 | LOG_FILE, maxBytes=MAX_FILE_SIZE, backupCount=1, encoding="utf-8" 30 | ) 31 | 32 | logging.basicConfig( 33 | level=logging.INFO, 34 | format="%(asctime)s - %(levelname)s - %(message)s", 35 | handlers=[ 36 | logging.StreamHandler(), 37 | handler, 38 | ], 39 | ) 40 | 41 | return logging.getLogger(__name__) 42 | 43 | 44 | def mask(value, visible=2): 45 | value = str(value) 46 | if len(value) <= visible * 2: 47 | return "*" * len(value) 48 | return value[:visible] + "*" * (len(value) - 2 * visible) + value[-visible:] 49 | 50 | 51 | def is_product_in_stock(product_data, substore_id): 52 | """ 53 | Robustly determine if a product is in stock: 54 | - available == 1 55 | - substore_id is in seller_substore_ids 56 | - Returns a tuple: (in_stock: bool, inventory_quantity: int) 57 | """ 58 | logger = logging.getLogger(__name__) 59 | product_name = product_data.get("name", "Unknown") 60 | product_alias = product_data.get("alias", "Unknown") 61 | 62 | # Validate available 63 | available_raw = product_data.get("available", "0") 64 | try: 65 | available = int(available_raw) if available_raw is not None else 0 66 | except (ValueError, TypeError): 67 | available = 0 68 | logger.warning( 69 | f"Invalid 'available' field '{available_raw}' for product '{product_name}' (alias: {product_alias})" 70 | ) 71 | 72 | # Validate seller_substore_ids 73 | seller_substore_ids = product_data.get("seller_substore_ids", []) 74 | if not isinstance(seller_substore_ids, list): 75 | logger.warning( 76 | f"Invalid 'seller_substore_ids' type {type(seller_substore_ids)} for product '{product_name}' (alias: {product_alias}), defaulting to []" 77 | ) 78 | seller_substore_ids = [] 79 | 80 | # Handle substore_id 81 | substore_ids = ( 82 | [id.strip() for id in substore_id.split(",") if id.strip()] 83 | if "," in substore_id 84 | else [substore_id] 85 | ) 86 | in_stock = available == 1 and any( 87 | sid in seller_substore_ids for sid in substore_ids 88 | ) 89 | 90 | # Validate inventory_quantity 91 | inventory_quantity_raw = product_data.get("inventory_quantity", "0") 92 | try: 93 | inventory_quantity = ( 94 | int(inventory_quantity_raw) if inventory_quantity_raw is not None else 0 95 | ) 96 | if inventory_quantity < 0: 97 | inventory_quantity = 0 98 | logger.warning( 99 | f"Negative inventory_quantity {inventory_quantity_raw} for product '{product_name}' (alias: {product_alias}), set to 0" 100 | ) 101 | except (ValueError, TypeError): 102 | inventory_quantity = 0 103 | logger.warning( 104 | f"Invalid inventory_quantity '{inventory_quantity_raw}' for product '{product_name}' (alias: {product_alias}), set to 0" 105 | ) 106 | 107 | return in_stock, inventory_quantity 108 | -------------------------------------------------------------------------------- /sentry_utils.py: 
-------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import logging 4 | import sentry_sdk 5 | from sentry_sdk.integrations.logging import LoggingIntegration 6 | from sentry_sdk.integrations.asyncio import AsyncioIntegration 7 | try: 8 | # handle AioHttp integration because it may not be available in all environments 9 | from sentry_sdk.integrations.aiohttp import AioHttpIntegration 10 | except Exception: 11 | AioHttpIntegration = None 12 | 13 | LOG = logging.getLogger(__name__) 14 | 15 | 16 | def _get_release(): 17 | try: 18 | sha = ( 19 | subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) 20 | .decode() 21 | .strip() 22 | ) 23 | return sha 24 | except Exception: 25 | return os.getenv("RELEASE", None) 26 | 27 | 28 | def before_send(event, hint): 29 | """Modify events before they are sent to Sentry. 30 | It trims overly large message bodies and attachments to avoid huge events. 31 | """ 32 | try: 33 | # Trim request data if present 34 | req = event.get("request") 35 | if req: 36 | for k in ("data", "body"): 37 | if req.get(k): 38 | v = req.get(k) 39 | if isinstance(v, str) and len(v) > 2000: 40 | req[k] = v[:2000] + "...[truncated]" 41 | except Exception: 42 | LOG.exception("before_send: trimming failed") 43 | return event 44 | 45 | 46 | def init_sentry(): 47 | dsn = os.getenv("SENTRY_DSN") 48 | if not dsn: 49 | LOG.info("SENTRY_DSN not set, skipping sentry init") 50 | return 51 | 52 | sentry_logging = LoggingIntegration( 53 | level=logging.INFO, event_level=logging.ERROR 54 | ) 55 | 56 | integrations = [sentry_logging, AsyncioIntegration()] 57 | if AioHttpIntegration is not None: 58 | integrations.append(AioHttpIntegration()) 59 | 60 | traces_sample_rate = float(os.getenv("SENTRY_TRACES_SAMPLE_RATE", "0.0")) 61 | sample_rate = float(os.getenv("SENTRY_SAMPLE_RATE", "1.0")) 62 | environment = os.getenv("SENTRY_ENVIRONMENT", "production") 63 | release = _get_release() 64 | 65 | sentry_sdk.init( 66 | dsn=dsn, 67 | integrations=integrations, 68 | traces_sample_rate=traces_sample_rate, 69 | sample_rate=sample_rate, 70 | environment=environment, 71 | release=release, 72 | before_send=before_send, 73 | send_default_pii=True, 74 | ) 75 | 76 | 77 | def set_user_context_from_update(update): 78 | try: 79 | if not update: 80 | return 81 | if hasattr(update, "effective_user") and update.effective_user: 82 | u = update.effective_user 83 | sentry_sdk.set_user({"id": str(u.id), "username": u.username or None}) 84 | except Exception: 85 | LOG.exception("Failed to set user context") 86 | 87 | 88 | def capture_update_exception(update, exc): 89 | try: 90 | set_user_context_from_update(update) 91 | sentry_sdk.capture_exception(exc) 92 | except Exception: 93 | LOG.exception("Failed to capture exception to Sentry") 94 | 95 | 96 | def capture_exception(exc, extra=None, tags=None): 97 | try: 98 | with sentry_sdk.push_scope() as scope: 99 | if tags: 100 | for k, v in tags.items(): 101 | scope.set_tag(k, v) 102 | if extra: 103 | for k, v in extra.items(): 104 | scope.set_extra(k, v) 105 | sentry_sdk.capture_exception(exc) 106 | except Exception: 107 | LOG.exception("Failed to capture exception to Sentry") 108 | 109 | 110 | def capture_cron_event(name, status="start", extra=None): 111 | """Capture a lightweight cron-run message (start/finish/error). 112 | 113 | This is used by cron-invoked scripts like `check_products.py` so we can 114 | monitor successful runs and failures. 
115 | """ 116 | try: 117 | message = f"cron:{name}:{status}" 118 | with sentry_sdk.push_scope() as scope: 119 | scope.set_tag("cron", name) 120 | scope.set_tag("cron_status", status) 121 | if extra: 122 | for k, v in extra.items(): 123 | scope.set_extra(k, v) 124 | sentry_sdk.capture_message(message) 125 | except Exception: 126 | LOG.exception("Failed to capture cron event to Sentry") 127 | 128 | 129 | def create_task_catching(coro): 130 | """Create asyncio Task and report exceptions to Sentry when they occur.""" 131 | import asyncio 132 | 133 | task = asyncio.create_task(coro) 134 | 135 | def _cb(t): 136 | try: 137 | exc = t.exception() 138 | if exc: 139 | capture_exception(exc) 140 | except asyncio.CancelledError: 141 | pass 142 | except Exception: 143 | LOG.exception("Error in task done callback") 144 | 145 | task.add_done_callback(_cb) 146 | return task 147 | -------------------------------------------------------------------------------- /notifier.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | 4 | sys.path.append(os.path.dirname(os.path.abspath(__file__))) 5 | import asyncio 6 | from utils import mask 7 | from common import get_product_info, create_product_url 8 | import logging 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | 13 | async def send_telegram_notification_for_user( 14 | app, chat_id, pincode, products_to_check, notify_products, max_retries=3 15 | ): 16 | # Return codes: True = success, False = temporary error (retry ok), None = permanent error (don't retry) 17 | try: 18 | chat_id = int(chat_id) # Ensure chat_id is an integer 19 | except ValueError: 20 | logger.error(f"Invalid chat_id format: {chat_id}") 21 | return None # Permanent error, don't retry 22 | 23 | logger.info( 24 | f"Attempting to send notification to chat_id {chat_id} for pincode {pincode}" 25 | ) 26 | logger.debug(f"Products to check: {products_to_check}") 27 | logger.debug(f"Notify products: {notify_products}") 28 | 29 | if not notify_products: 30 | logger.info(f"No products to notify for chat_id {chat_id}") 31 | return True # Return True as this is a valid case 32 | 33 | check_all_products = ( 34 | len(products_to_check) == 1 and products_to_check[0].strip().lower() == "any" 35 | ) 36 | 37 | in_stock_products = [ 38 | (name, status, quantity) 39 | for name, status, quantity in notify_products 40 | if status == "In Stock" 41 | ] 42 | if not in_stock_products: 43 | logger.info(f"All products Sold Out for chat_id {chat_id}, PINCODE {pincode}") 44 | return True # Return True as this is a valid case 45 | 46 | # Simplified message construction 47 | message = f"Available Amul Protein Products for PINCODE {pincode}:\n\n" 48 | relevant_products = ( 49 | in_stock_products 50 | if check_all_products 51 | else [ 52 | (name, status, quantity) 53 | for name, status, quantity in in_stock_products 54 | if name in products_to_check 55 | ] 56 | ) 57 | for name, _, quantity in relevant_products: 58 | short_name = get_product_info(name, "display_name") or name 59 | product_link = create_product_url(name) 60 | if product_link: 61 | message += f"- {short_name} \n(Quantity Left: {quantity}) | [Buy Now]({product_link})\n" 62 | else: 63 | message += f"- {short_name} \n(Quantity Left: {quantity})\n" 64 | 65 | if not check_all_products: 66 | message += "\nUse /unfollow to stop notifications for specific products." 
67 | 68 | logger.info( 69 | f"Sending notification to chat_id {chat_id}: {len(relevant_products)} products" 70 | ) 71 | 72 | # Add retry logic with timeouts 73 | for attempt in range(max_retries): 74 | try: 75 | async with asyncio.timeout(10): # 10 second timeout per attempt 76 | await app.bot.send_message( 77 | chat_id=chat_id, 78 | text=message, 79 | parse_mode="Markdown", 80 | disable_web_page_preview=True, 81 | ) 82 | logger.info(f"Successfully sent notification to chat_id {chat_id}") 83 | return True # Successfully sent 84 | except asyncio.TimeoutError: 85 | if attempt < max_retries - 1: 86 | logger.warning( 87 | f"Attempt {attempt + 1} timed out for chat_id {chat_id}, retrying..." 88 | ) 89 | await asyncio.sleep(1) # Small delay between retries 90 | else: 91 | logger.error( 92 | f"Timeout sending notification to chat_id {chat_id} after {max_retries} attempts" 93 | ) 94 | return False 95 | except ValueError as e: 96 | logger.error( 97 | f"Invalid chat_id format for {chat_id}: {str(e)}. Skipping notification." 98 | ) 99 | return False 100 | except Exception as e: 101 | error_msg = str(e) 102 | if any( 103 | x in error_msg.lower() 104 | for x in [ 105 | "chat not found", 106 | "bot was blocked", 107 | "forbidden", 108 | "bad request", 109 | ] 110 | ): 111 | logger.error(f"Permanent error for chat_id {chat_id}: {error_msg}") 112 | return None # Permanent error, don't retry 113 | 114 | logger.error( 115 | f"Temporary error sending notification to chat_id {chat_id}: {error_msg}" 116 | ) 117 | if attempt < max_retries - 1: 118 | logger.info( 119 | f"Will retry notification for chat_id {chat_id} ({attempt + 2}/{max_retries})" 120 | ) 121 | await asyncio.sleep(2) # Longer delay for unexpected errors 122 | continue 123 | return False # Temporary error after all retries failed 124 | -------------------------------------------------------------------------------- /api_client.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | 4 | sys.path.append(os.path.dirname(os.path.abspath(__file__))) 5 | 6 | import requests 7 | import aiohttp 8 | import time 9 | import random 10 | import hashlib 11 | import json 12 | import re 13 | import asyncio 14 | from urllib.parse import urlencode 15 | from config import ( 16 | API_HEADERS, 17 | BASE_URL, 18 | PINCODE_URL, 19 | SETTINGS_URL, 20 | INFO_URL, 21 | API_URL, 22 | PRODUCT_API_DELAY_RANGE, 23 | GLOBAL_PRODUCT_API_RPS, 24 | ) 25 | from utils import setup_logging 26 | 27 | logger = setup_logging() 28 | 29 | 30 | # --- Global Rate Limiter --- 31 | class AsyncRateLimiter: 32 | def __init__(self, rate_per_sec): 33 | self._interval = 1.0 / rate_per_sec 34 | self._lock = None 35 | self._last = 0.0 36 | 37 | async def wait(self): 38 | import asyncio 39 | 40 | if self._lock is None: 41 | self._lock = asyncio.Lock() 42 | async with self._lock: 43 | now = time.monotonic() 44 | wait_time = max(0, self._last + self._interval - now) 45 | if wait_time > 0: 46 | logger.info( 47 | f"[RATE LIMIT] Waiting {wait_time:.2f}s to respect global rate limit" 48 | ) 49 | await asyncio.sleep(wait_time) 50 | self._last = time.monotonic() 51 | 52 | 53 | product_api_rate_limiter = AsyncRateLimiter(GLOBAL_PRODUCT_API_RPS) 54 | 55 | 56 | def get_tid_and_substore(session, pincode): 57 | logger.info(f"[SESSION] Creating session and substore for pincode: {pincode}") 58 | headers = { 59 | "user-agent": API_HEADERS["user-agent"], 60 | "accept": "application/json, text/plain, */*", 61 | "referer": BASE_URL + "/en/browse/protein", 62 
| "origin": BASE_URL, 63 | "accept-language": "en-IN,en-GB;q=0.9,en-US;q=0.8,en;q=0.7,hi;q=0.6", 64 | "x-amul-b2c-access-key": "shop.amul.com", 65 | "Connection": "keep-alive", 66 | "sec-ch-ua": '"Not)A;Brand";v="8", "Chromium";v="138", "Google Chrome";v="138"', 67 | "sec-ch-ua-mobile": "?0", 68 | "sec-ch-ua-platform": '"Windows"', 69 | "sec-fetch-dest": "empty", 70 | "sec-fetch-mode": "cors", 71 | "sec-fetch-site": "same-origin", 72 | "base_url": "https://shop.amul.com/en/browse/protein", 73 | "frontend": "1", 74 | "priority": "u=1, i", 75 | "if-modified-since": "Tue, 01 Jul 2025 16:30:10 GMT", 76 | } 77 | browse_url = f"{BASE_URL}/en/browse/protein" 78 | logger.info(f"[SESSION] Visiting browse URL: {browse_url}") 79 | browse_resp = session.get(browse_url, headers=headers, timeout=10) 80 | logger.info(f"[SESSION] /en/browse/protein status: {browse_resp.status_code}") 81 | # logger.info(f"[SESSION] /en/browse/protein response (first 300 chars): {browse_resp.text[:300]}") 82 | pincode_params = { 83 | "limit": 50, 84 | "filters[0][field]": "pincode", 85 | "filters[0][value]": str(pincode), 86 | "filters[0][operator]": "regex", 87 | "cf_cache": "1h", 88 | } 89 | dummy_tid = "dummy" 90 | tid_header = calculate_tid_header(dummy_tid) 91 | pincode_headers = headers.copy() 92 | pincode_headers["referer"] = BASE_URL + "/" 93 | pincode_headers["tid"] = tid_header 94 | pincode_url = PINCODE_URL + "?" + urlencode(pincode_params) 95 | logger.info(f"[SESSION] Looking up substore for pincode: {pincode_url}") 96 | pincode_resp = session.get( 97 | PINCODE_URL, headers=pincode_headers, params=pincode_params, timeout=10 98 | ) 99 | logger.info(f"[SESSION] /entity/pincode status: {pincode_resp.status_code}") 100 | # logger.info(f"[SESSION] /entity/pincode response (first 300 chars): {pincode_resp.text[:300]}") 101 | pincode_data = pincode_resp.json() 102 | records = pincode_data.get("records", []) 103 | if not records: 104 | logger.error(f"[SESSION] No substore found for pincode {pincode}") 105 | raise Exception(f"No substore found for pincode {pincode}") 106 | substore = records[0]["substore"] 107 | substore_id = records[0]["_id"] 108 | # Store raw substore for preferences 109 | raw_substore = substore 110 | # Normalize substore to a dictionary for return 111 | if isinstance(substore, str): 112 | logger.warning( 113 | f"[SESSION] Substore is a string for pincode {pincode}: {substore}. Converting to dict for return." 114 | ) 115 | substore = { 116 | "alias": substore, 117 | "name": substore.title() or f"Unknown-{substore_id}", 118 | } 119 | elif not isinstance(substore, dict): 120 | logger.error( 121 | f"[SESSION] Unexpected substore type for pincode {pincode}: {type(substore)}. Converting to dict." 
122 | ) 123 | substore = { 124 | "alias": str(substore), 125 | "name": str(substore).title() or f"Unknown-{substore_id}", 126 | } 127 | pref_headers = headers.copy() 128 | pref_headers["content-type"] = "application/json" 129 | pref_headers["x-requested-with"] = "XMLHttpRequest" 130 | pref_headers["sec-fetch-mode"] = "cors" 131 | pref_headers["sec-fetch-site"] = "same-origin" 132 | pref_headers["tid"] = tid_header 133 | cookie_str = "; ".join([f"{k}={v}" for k, v in session.cookies.get_dict().items()]) 134 | if cookie_str: 135 | pref_headers["cookie"] = cookie_str 136 | pref_payload = {"data": {"store": raw_substore}} 137 | pref_url = SETTINGS_URL 138 | logger.info(f"[SESSION] Setting preferences for substore: {raw_substore}") 139 | pref_resp = session.put( 140 | pref_url, headers=pref_headers, data=json.dumps(pref_payload), timeout=10 141 | ) 142 | logger.info(f"[SESSION] setPreferences status: {pref_resp.status_code}") 143 | # logger.info(f"[SESSION] setPreferences response (first 300 chars): {pref_resp.text[:300]}") 144 | if pref_resp.status_code == 406: 145 | logger.error( 146 | f"[SESSION] 406 Not Acceptable for setPreferences with payload: {json.dumps(pref_payload)}" 147 | ) 148 | raise Exception(f"setPreferences failed with 406 for pincode {pincode}") 149 | info_url = f"{INFO_URL}?_v={int(time.time() * 1000)}" 150 | logger.info(f"[SESSION] Fetching info.js for session data: {info_url}") 151 | info_js = session.get(info_url, headers=headers, timeout=10) 152 | logger.info(f"[SESSION] /user/info.js status: {info_js.status_code}") 153 | logger.info( 154 | f"[SESSION] /user/info.js response (first 300 chars): {info_js.text[:300]}" 155 | ) 156 | tid_match = re.search(r"session\s*=\s*(\{.*\})", info_js.text, re.DOTALL) 157 | if not tid_match: 158 | logger.error( 159 | f"[SESSION] Could not extract session JSON from info.js for pincode {pincode}" 160 | ) 161 | raise Exception("Could not extract session JSON from info.js") 162 | session_data = json.loads(tid_match.group(1)) 163 | tid = session_data.get("tid") 164 | js_substore_id = session_data.get("substore_id") 165 | js_substore_obj = session_data.get("substore", {}) 166 | if js_substore_id: 167 | substore_id = js_substore_id 168 | elif js_substore_obj: 169 | substore_id = js_substore_obj.get("_id", substore_id) 170 | if not tid or not substore_id: 171 | logger.error( 172 | f"[SESSION] tid or substore_id not found in info.js JSON for pincode {pincode}" 173 | ) 174 | raise Exception("tid or substore_id not found in info.js JSON") 175 | logger.info(f"[SESSION] Session created: tid={tid}, substore_id={substore_id}") 176 | return tid, substore, substore_id, session.cookies.get_dict() 177 | 178 | 179 | def fetch_product_data_for_alias(session, tid, substore_id, alias): 180 | calc_tid = calculate_tid_header(tid) 181 | headers = { 182 | "user-agent": API_HEADERS["user-agent"], 183 | "accept": "application/json, text/plain, */*", 184 | "referer": BASE_URL + "/en/browse/protein", 185 | "origin": BASE_URL, 186 | "accept-language": "en-US,en;q=0.9", 187 | "tid": calc_tid, 188 | "base_url": f"{BASE_URL}/en/browse/protein", 189 | "frontend": "1", 190 | "priority": "u=1, i", 191 | "x-amul-b2c-access-key": "shop.amul.com", 192 | } 193 | query = { 194 | "fields[name]": 1, 195 | "fields[alias]": 1, 196 | "fields[available]": 1, 197 | "filters[0][field]": "alias", 198 | "filters[0][value]": alias, 199 | "filters[0][operator]": "eq", 200 | "filters[0][original]": 1, 201 | "limit": 1, 202 | "substore": substore_id, 203 | } 204 | product_url = API_URL + "?" 
+ urlencode(query, doseq=True) 205 | logger.info(f"[SESSION] Fetching product data for alias '{alias}': {product_url}") 206 | resp = session.get(API_URL, headers=headers, params=query, timeout=10) 207 | logger.info(f"[SESSION] Product API status for alias '{alias}': {resp.status_code}") 208 | # logger.info(f"[SESSION] Product API response for alias '{alias}' (first 300 chars): {resp.text[:300]}") 209 | try: 210 | return resp.json().get("data", []) 211 | except Exception as e: 212 | logger.error( 213 | f"[SESSION] Error parsing product API response for alias '{alias}': {str(e)}" 214 | ) 215 | return [] 216 | 217 | 218 | async def fetch_product_data_for_alias_async( 219 | session, tid, substore_id, alias, semaphore, cookies=None, max_retries=3 220 | ): 221 | calc_tid = calculate_tid_header(tid) 222 | headers = { 223 | "user-agent": API_HEADERS["user-agent"], 224 | "accept": "application/json, text/plain, */*", 225 | "referer": BASE_URL + "/en/browse/protein", 226 | "origin": BASE_URL, 227 | "accept-language": "en-US,en;q=0.9", 228 | "x-amul-b2c-access-key": "shop.amul.com", 229 | "tid": calc_tid, 230 | } 231 | if cookies: 232 | cookie_str = "; ".join([f"{k}={v}" for k, v in cookies.items()]) 233 | if cookie_str: 234 | headers["cookie"] = cookie_str 235 | query = {"q": json.dumps({"alias": alias}), "limit": 1} 236 | product_url = API_URL + "?" + urlencode(query) 237 | for attempt in range(1, max_retries + 1): 238 | async with semaphore: 239 | await product_api_rate_limiter.wait() 240 | await asyncio.sleep(random.uniform(*PRODUCT_API_DELAY_RANGE)) 241 | try: 242 | async with session.get( 243 | API_URL, headers=headers, params=query, timeout=10 244 | ) as resp: 245 | text = await resp.text() 246 | logger.info( 247 | f"[SESSION] Product API status for alias '{alias}': {resp.status}" 248 | ) 249 | # logger.info(f"[SESSION] Product API response for alias '{alias}' (first 300 chars): {text[:300]}") 250 | if resp.status == 401: 251 | logger.warning( 252 | f"401 Unauthorized for alias '{alias}', attempt {attempt}" 253 | ) 254 | if attempt < max_retries: 255 | logger.info( 256 | f"Retrying with fresh session for alias '{alias}'" 257 | ) 258 | # Return None to trigger session refresh in caller 259 | return None 260 | else: 261 | logger.error( 262 | f"Failed to fetch product data for alias '{alias}' after {max_retries} attempts: 401 Unauthorized" 263 | ) 264 | return [] 265 | if resp.status == 406: 266 | logger.warning( 267 | f"406 Not Acceptable for alias '{alias}', attempt {attempt}" 268 | ) 269 | await asyncio.sleep(2**attempt) 270 | continue 271 | if resp.status >= 500: 272 | logger.warning( 273 | f"Server error {resp.status} for alias '{alias}', attempt {attempt}" 274 | ) 275 | await asyncio.sleep(2**attempt) 276 | continue 277 | try: 278 | data = await resp.json() 279 | return data.get("data", []) 280 | except Exception as e: 281 | logger.error( 282 | f"[SESSION] Error parsing product API response for alias '{alias}': {str(e)}" 283 | ) 284 | return [] 285 | except Exception as e: 286 | logger.error( 287 | f"[SESSION] Network error for alias '{alias}', attempt {attempt}: {str(e)}" 288 | ) 289 | await asyncio.sleep(2**attempt) 290 | logger.error( 291 | f"[SESSION] Failed to fetch product data for alias '{alias}' after {max_retries} attempts." 
292 | ) 293 | return [] 294 | 295 | 296 | def calculate_tid_header(session_tid): 297 | store_id = "62fa94df8c13af2e242eba16" 298 | timestamp = str(int(time.time() * 1000)) 299 | rand = str(random.randint(0, 1000)) 300 | base = f"{store_id}:{timestamp}:{rand}:{session_tid}" 301 | hash_bytes = hashlib.sha256(base.encode("utf-8")).hexdigest() 302 | return f"{timestamp}:{rand}:{hash_bytes}" 303 | -------------------------------------------------------------------------------- /common.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import psutil 4 | from config import LOG_FILE, BASE_URL 5 | 6 | PRODUCT_DATA = { 7 | "Any": { 8 | "display_name": "❗ Any of the products from the list", 9 | "slug": None, 10 | "temp_id": None, 11 | "category": None, 12 | }, 13 | "Amul Kool Protein Milkshake | Chocolate, 180 mL | Pack of 8": { 14 | "display_name": "🍫 Chocolate Milkshake 180mL | Pack of 8", 15 | "slug": "amul-kool-protein-milkshake-or-chocolate-180-ml-or-pack-of-8", 16 | "temp_id": "akpmc8", 17 | "category": "Milkshakes & Shakes", 18 | }, 19 | "Amul Kool Protein Milkshake | Chocolate, 180 mL | Pack of 30": { 20 | "display_name": "🍫🍫 Chocolate Milkshake 180mL | Pack of 30", 21 | "slug": "amul-kool-protein-milkshake-or-chocolate-180-ml-or-pack-of-30", 22 | "temp_id": "akpmc30", 23 | "category": "Milkshakes & Shakes", 24 | }, 25 | "Amul Kool Protein Milkshake | Arabica Coffee, 180 mL | Pack of 8": { 26 | "display_name": "☕ Coffee Milkshake 180mL | Pack of 8", 27 | "slug": "amul-kool-protein-milkshake-or-arabica-coffee-180-ml-or-pack-of-8", 28 | "temp_id": "akpmac8", 29 | "category": "Milkshakes & Shakes", 30 | }, 31 | "Amul Kool Protein Milkshake | Arabica Coffee, 180 mL | Pack of 30": { 32 | "display_name": "☕☕ Coffee Milkshake 180mL | Pack of 30", 33 | "slug": "amul-kool-protein-milkshake-or-arabica-coffee-180-ml-or-pack-of-30", 34 | "temp_id": "akpmac30", 35 | "category": "Milkshakes & Shakes", 36 | }, 37 | "Amul Kool Protein Milkshake | Kesar, 180 mL | Pack of 8": { 38 | "display_name": "🌸 Kesar Milkshake 180mL | Pack of 8", 39 | "slug": "amul-kool-protein-milkshake-or-kesar-180-ml-or-pack-of-8", 40 | "temp_id": "akpmk8", 41 | "category": "Milkshakes & Shakes", 42 | }, 43 | "Amul Kool Protein Milkshake | Kesar, 180 mL | Pack of 30": { 44 | "display_name": "🌸🌸 Kesar Milkshake 180mL | Pack of 30", 45 | "slug": "amul-kool-protein-milkshake-or-kesar-180-ml-or-pack-of-30", 46 | "temp_id": "akpmk30", 47 | "category": "Milkshakes & Shakes", 48 | }, 49 | "Amul Kool Protein Milkshake | Vanilla, 180 mL | Pack of 8": { 50 | "display_name": "🍨 Vanilla Milkshake 180mL | Pack of 8", 51 | "slug": "amul-kool-protein-milkshake-or-vanilla-180-ml-or-pack-of-8", 52 | "temp_id": "akpmv8", 53 | "category": "Milkshakes & Shakes", 54 | }, 55 | "Amul Kool Protein Milkshake | Vanilla, 180 mL | Pack of 30": { 56 | "display_name": "🍨🍨 Vanilla Milkshake 180mL | Pack of 30", 57 | "slug": "amul-kool-protein-milkshake-or-vanilla-180-ml-or-pack-of-30", 58 | "temp_id": "akpmv30", 59 | "category": "Milkshakes & Shakes", 60 | }, 61 | "Amul High Protein Blueberry Shake, 200 mL | Pack of 8": { 62 | "display_name": "🫐 Blueberry Shake 200mL | Pack of 8", 63 | "slug": "amul-high-protein-blueberry-shake-200-ml-or-pack-of-8", 64 | "temp_id": "ahpbbs8", 65 | "category": "Milkshakes & Shakes", 66 | }, 67 | "Amul High Protein Blueberry Shake, 200 mL | Pack of 30": { 68 | "display_name": "🫐🫐 Blueberry Shake 200mL | Pack of 30", 69 | "slug": 
"amul-high-protein-blueberry-shake-200-ml-or-pack-of-30", 70 | "temp_id": "ahpbbs30", 71 | "category": "Milkshakes & Shakes", 72 | }, 73 | "Amul High Protein Plain Lassi, 200 mL | Pack of 30": { 74 | "display_name": "🥛🥛 Plain Lassi 200mL | Pack of 30", 75 | "slug": "amul-high-protein-plain-lassi-200-ml-or-pack-of-30", 76 | "temp_id": "ahppl30", 77 | "category": "Lassi & Buttermilk", 78 | }, 79 | "Amul High Protein Rose Lassi, 200 mL | Pack of 30": { 80 | "display_name": "🌹🌹 Rose Lassi 200mL | Pack of 30", 81 | "slug": "amul-high-protein-rose-lassi-200-ml-or-pack-of-30", 82 | "temp_id": "ahprl30", 83 | "category": "Lassi & Buttermilk", 84 | }, 85 | "Amul High Protein Buttermilk, 200 mL | Pack of 30": { 86 | "display_name": "🥛🥛 Buttermilk 200mL | Pack of 30", 87 | "slug": "amul-high-protein-buttermilk-200-ml-or-pack-of-30", 88 | "temp_id": "ahpbm20030", 89 | "category": "Lassi & Buttermilk", 90 | }, 91 | "Amul High Protein Milk, 250 mL | Pack of 8": { 92 | "display_name": "🥛 Milk 250mL | Pack of 8", 93 | "slug": "amul-high-protein-milk-250-ml-or-pack-of-8", 94 | "temp_id": "ahpm2508", 95 | "category": "Milk", 96 | }, 97 | "Amul High Protein Milk, 250 mL | Pack of 32": { 98 | "display_name": "🥛🥛 Milk 250mL | Pack of 32", 99 | "slug": "amul-high-protein-milk-250-ml-or-pack-of-32", 100 | "temp_id": "ahpm32", 101 | "category": "Milk", 102 | }, 103 | "Amul High Protein Paneer, 400 g | Pack of 24": { 104 | "display_name": "🧀🧀 Paneer 400g | Pack of 24", 105 | "slug": "amul-high-protein-paneer-400-g-or-pack-of-24", 106 | "temp_id": "ahppr40024", 107 | "category": "Paneer", 108 | }, 109 | "Amul High Protein Paneer, 400 g | Pack of 2": { 110 | "display_name": "🧀 Paneer 400g | Pack of 2", 111 | "slug": "amul-high-protein-paneer-400-g-or-pack-of-2", 112 | "temp_id": "ahppr4002", 113 | "category": "Paneer", 114 | }, 115 | "Amul Whey Protein Gift Pack, 32 g | Pack of 10 sachets": { 116 | "display_name": "💪 Whey Protein 32g | Pack of 10 sachets", 117 | "slug": "amul-whey-protein-gift-pack-32-g-or-pack-of-10-sachets", 118 | "temp_id": "awpgp10", 119 | "category": "Whey Protein (Sachets)", 120 | }, 121 | "Amul Whey Protein, 32 g | Pack of 30 Sachets": { 122 | "display_name": "💪💪 Whey Protein 32g | Pack of 30 Sachets", 123 | "slug": "amul-whey-protein-32-g-or-pack-of-30-sachets", 124 | "temp_id": "awp30", 125 | "category": "Whey Protein (Sachets)", 126 | }, 127 | "Amul Whey Protein Pack, 32 g | Pack of 60 Sachets": { 128 | "display_name": "💪💪💪 Whey Protein 32g | Pack of 60 Sachets", 129 | "slug": "amul-whey-protein-32-g-or-pack-of-60-sachets", 130 | "temp_id": "awp60", 131 | "category": "Whey Protein (Sachets)", 132 | }, 133 | "Amul Chocolate Whey Protein Gift Pack, 34 g | Pack of 10 sachets": { 134 | "display_name": "🍫 Chocolate Whey 34g | Pack of 10 sachets", 135 | "slug": "amul-chocolate-whey-protein-gift-pack-34-g-or-pack-of-10-sachets", 136 | "temp_id": "acwpgp10", 137 | "category": "Whey Protein (Sachets)", 138 | }, 139 | "Amul Chocolate Whey Protein, 34 g | Pack of 30 sachets": { 140 | "display_name": "🍫🍫 Chocolate Whey 34g | Pack of 30 sachets", 141 | "slug": "amul-chocolate-whey-protein-34-g-or-pack-of-30-sachets", 142 | "temp_id": "acwp30", 143 | "category": "Whey Protein (Sachets)", 144 | }, 145 | "Amul Chocolate Whey Protein, 34 g | Pack of 60 sachets": { 146 | "display_name": "🍫🍫🍫 Chocolate Whey 34g | Pack of 60 sachets", 147 | "slug": "amul-chocolate-whey-protein-34-g-or-pack-of-60-sachets", 148 | "temp_id": "acwp60", 149 | "category": "Whey Protein (Sachets)", 150 | }, 151 | } 152 | 153 | 154 | 
# Generate derived data structures from the single source of truth 155 | def generate_derived_structures(): 156 | """Generate all the derived data structures from PRODUCT_DATA""" 157 | 158 | # Products list (excluding "Any") 159 | products = [name for name in PRODUCT_DATA.keys() if name != "Any"] 160 | 161 | # Product name mapping 162 | product_name_map = { 163 | name: data["display_name"] for name, data in PRODUCT_DATA.items() 164 | } 165 | 166 | # Categorized products 167 | categorized_products = {} 168 | for name, data in PRODUCT_DATA.items(): 169 | if name == "Any" or data["category"] is None: 170 | continue 171 | category = data["category"] 172 | if category not in categorized_products: 173 | categorized_products[category] = [] 174 | categorized_products[category].append(name) 175 | 176 | # Product slug mapping 177 | product_alias_map = { 178 | name: data["slug"] 179 | for name, data in PRODUCT_DATA.items() 180 | if data["slug"] is not None 181 | } 182 | 183 | return { 184 | "products": products, 185 | "product_name_map": product_name_map, 186 | "categorized_products": categorized_products, 187 | "product_alias_map": product_alias_map, 188 | } 189 | 190 | 191 | # Generate all the derived structures 192 | derived = generate_derived_structures() 193 | 194 | # Expose the derived structures as module-level constants 195 | PRODUCTS = derived["products"] 196 | PRODUCT_NAME_MAP = derived["product_name_map"] 197 | CATEGORIZED_PRODUCTS = derived["categorized_products"] 198 | PRODUCT_ALIAS_MAP = derived["product_alias_map"] 199 | 200 | # Categories list 201 | CATEGORIES = list(CATEGORIZED_PRODUCTS.keys()) 202 | 203 | # Short to full mapping (reverse of product_name_map) 204 | SHORT_TO_FULL = {v: k for k, v in PRODUCT_NAME_MAP.items()} 205 | 206 | 207 | def get_product_info(identifier, return_field="display_name", search_by="name"): 208 | """ 209 | Get product information by various identifier types. 210 | 211 | Args: 212 | identifier: The identifier to search for 213 | return_field: Field to return - "name", "display_name", "slug", "temp_id", "category", or "all" 214 | search_by: Field to search by - "name", "slug", "temp_id", or "display_name" 215 | 216 | Returns: 217 | str/dict: Product information if found, None otherwise 218 | """ 219 | if search_by == "name": 220 | # Direct lookup by full product name 221 | if identifier in PRODUCT_DATA: 222 | data = PRODUCT_DATA[identifier] 223 | if return_field == "all": 224 | return {"name": identifier, **data} 225 | else: 226 | return data.get(return_field) 227 | 228 | else: 229 | # Use list comprehension to find the product by the specified identifier type 230 | matches = [ 231 | {"name": name, **data} 232 | for name, data in PRODUCT_DATA.items() 233 | if data.get(search_by) == identifier 234 | ] 235 | 236 | if matches: 237 | data = matches[0] 238 | if return_field == "all": 239 | return data 240 | else: 241 | return data.get(return_field) 242 | 243 | return None 244 | 245 | 246 | def create_product_markdown_link(product_name, base_url=None): 247 | """ 248 | Create a markdown link for a single product. 
249 | 250 | Args: 251 | product_name: The full product name to create a link for 252 | base_url: Base URL for the product link (defaults to config.BASE_URL) 253 | 254 | Returns: 255 | str: Markdown formatted link string, or just the display name if slug is not available 256 | """ 257 | display_name = get_product_info(product_name, "display_name") 258 | 259 | if not display_name: 260 | # If we can't find the product, return the original name 261 | return product_name 262 | 263 | product_url = create_product_url(product_name, base_url) 264 | 265 | if product_url: 266 | return f"[{display_name}]({product_url})" 267 | else: 268 | return display_name 269 | 270 | 271 | def create_product_list_markdown_links(product_names, base_url=None, separator="\n"): 272 | """ 273 | Create markdown links for a list of products. 274 | 275 | Args: 276 | product_names: List of product names to create links for 277 | base_url: Base URL for the product links (defaults to config.BASE_URL) 278 | separator: String to separate multiple product links (defaults to newline) 279 | 280 | Returns: 281 | str: Markdown formatted links string with each product on a new line 282 | """ 283 | if not product_names: 284 | return "" 285 | 286 | links = [] 287 | for product_name in product_names: 288 | link = create_product_markdown_link(product_name, base_url) 289 | if link: 290 | links.append(f"- {link}") 291 | 292 | return separator.join(links) 293 | 294 | 295 | def create_product_url(product_name, base_url=None): 296 | """ 297 | Create a product URL for a single product. 298 | 299 | Args: 300 | product_name: The full product name to create a URL for 301 | base_url: Base URL for the product link (defaults to config.BASE_URL) 302 | 303 | Returns: 304 | str: Full product URL if slug is available, None otherwise 305 | """ 306 | if base_url is None: 307 | base_url = BASE_URL 308 | 309 | slug = get_product_info(product_name, "slug") 310 | 311 | if slug: 312 | return f"{base_url}/en/product/{slug}" 313 | else: 314 | return None 315 | 316 | 317 | # Logging setup 318 | def setup_logging(): 319 | logging.basicConfig( 320 | level=logging.INFO, 321 | format="%(asctime)s - %(levelname)s - %(message)s", 322 | handlers=[ 323 | logging.StreamHandler(), 324 | logging.FileHandler(LOG_FILE), 325 | ], 326 | ) 327 | return logging.getLogger(__name__) 328 | 329 | 330 | # Helper functions 331 | def mask(value, visible=2): 332 | value = str(value) 333 | if len(value) <= visible * 2: 334 | return "*" * len(value) 335 | return value[:visible] + "*" * (len(value) - 2 * visible) + value[-visible:] 336 | 337 | 338 | def is_already_running(script_name): 339 | logger = logging.getLogger(__name__) 340 | logger.info("Checking for running instances of %s", script_name) 341 | current_pid = os.getpid() 342 | 343 | try: 344 | for proc in psutil.process_iter(["pid", "name", "cmdline"]): 345 | try: 346 | if ( 347 | (proc.info["name"] or "").lower().startswith("python") # python, python3, python.exe 348 | and proc.info["cmdline"] 349 | and script_name in " ".join(proc.info["cmdline"]).lower() 350 | and proc.info["pid"] != current_pid 351 | ): 352 | logger.info( 353 | "Found another running instance with PID %d", proc.info["pid"] 354 | ) 355 | return True 356 | except (psutil.AccessDenied, psutil.NoSuchProcess) as e: 357 | logger.warning( 358 | "Could not access process %d: %s", proc.info["pid"], str(e) 359 | ) 360 | continue 361 | except Exception as e: 362 | logger.error("Error checking running processes: %s", str(e)) 363 | return False 364 | 365 | logger.info("No other running instances found") 366 | return False 
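A minimal usage sketch for the product helpers above (illustrative only; the `temp_id` and resulting name come straight from `PRODUCT_DATA`):

```python
# Resolve a product by its short temp_id, then build its shop URL.
from common import get_product_info, create_product_url

name = get_product_info("ahppr4002", return_field="name", search_by="temp_id")
# -> "Amul High Protein Paneer, 400 g | Pack of 2"
print(create_product_url(name))
# -> https://shop.amul.com/en/product/amul-high-protein-paneer-400-g-or-pack-of-2
```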
367 | -------------------------------------------------------------------------------- /database.py: -------------------------------------------------------------------------------- 1 | import asyncpg 2 | import logging 3 | import asyncio 4 | from datetime import datetime, timedelta 5 | from config import DATABASE_URL 6 | import json # Added for potential loads 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 | class Database: 12 | def __init__(self, db_url): 13 | self.db_url = db_url 14 | self._pool = None 15 | 16 | async def _init_db(self): 17 | """Initialize the PostgreSQL connection pool and create tables.""" 18 | logging.info(f"Initializing PostgreSQL database with URL: {self.db_url}") 19 | try: 20 | self._pool = await asyncpg.create_pool( 21 | self.db_url, 22 | min_size=10, # Scaled for 5k users 23 | max_size=50, # High concurrency 24 | max_inactive_connection_lifetime=300, 25 | timeout=30, 26 | ) 27 | logging.info("Connection pool created successfully") 28 | async with self._pool.acquire() as conn: 29 | await self.create_tables(conn) 30 | logging.info("Database tables created successfully") 31 | except asyncpg.exceptions.PostgresError as e: 32 | logging.error(f"PostgreSQL error during initialization: {e}") 33 | raise 34 | except Exception as e: 35 | logging.error( 36 | f"Unexpected error during initialization: {type(e).__name__}: {e}" 37 | ) 38 | raise 39 | 40 | async def create_tables(self, conn): 41 | """Create necessary tables with proper constraints.""" 42 | try: 43 | await conn.execute(""" 44 | CREATE TABLE IF NOT EXISTS users ( 45 | chat_id BIGINT PRIMARY KEY, 46 | data JSONB NOT NULL, 47 | CONSTRAINT valid_user_data CHECK (jsonb_typeof(data) = 'object' AND data ? 'chat_id') 48 | ) 49 | """) 50 | await conn.execute(""" 51 | CREATE TABLE IF NOT EXISTS state_product_status ( 52 | state_alias TEXT, 53 | product_name TEXT, 54 | status TEXT NOT NULL CHECK (status IN ('In Stock', 'Sold Out')), 55 | inventory_quantity INTEGER NOT NULL CHECK (inventory_quantity >= 0), 56 | timestamp TEXT NOT NULL, 57 | PRIMARY KEY (state_alias, product_name) 58 | ) 59 | """) 60 | await conn.execute(""" 61 | CREATE TABLE IF NOT EXISTS state_product_history ( 62 | id BIGSERIAL PRIMARY KEY, 63 | state_alias TEXT NOT NULL, 64 | product_name TEXT NOT NULL, 65 | status TEXT NOT NULL CHECK (status IN ('In Stock', 'Sold Out')), 66 | inventory_quantity INTEGER NOT NULL CHECK (inventory_quantity >= 0), 67 | timestamp TEXT NOT NULL 68 | ) 69 | """) 70 | await conn.execute(""" 71 | CREATE INDEX IF NOT EXISTS idx_state_product_history 72 | ON state_product_history (state_alias, product_name, timestamp DESC) 73 | """) 74 | await conn.execute(""" 75 | CREATE INDEX IF NOT EXISTS idx_state_product_history_status 76 | ON state_product_history (status) 77 | """) 78 | await conn.execute(""" 79 | CREATE TABLE IF NOT EXISTS cleanup_history ( 80 | id BIGSERIAL PRIMARY KEY, 81 | last_cleanup_timestamp TEXT NOT NULL 82 | ) 83 | """) 84 | # Add GIN index for JSONB queries on users.data 85 | await conn.execute(""" 86 | CREATE INDEX IF NOT EXISTS idx_users_data_gin 87 | ON users USING GIN (data) 88 | """) 89 | logging.info("Database tables and indexes created successfully") 90 | except asyncpg.exceptions.PostgresError as e: 91 | logging.error(f"Error creating tables: {e}") 92 | raise 93 | 94 | async def get_last_cleanup_time(self): 95 | """Retrieve the timestamp of the last cleanup.""" 96 | try: 97 | async with self._pool.acquire() as conn: 98 | row = await conn.fetchrow(""" 99 | SELECT last_cleanup_timestamp FROM 
cleanup_history 100 | ORDER BY last_cleanup_timestamp DESC LIMIT 1 101 | """) 102 | if row: 103 | return datetime.fromisoformat(row["last_cleanup_timestamp"]) 104 | return None 105 | except asyncpg.exceptions.PostgresError as e: 106 | logging.error(f"Error getting last cleanup time: {e}") 107 | return None 108 | except ValueError as e: 109 | logging.error(f"Timestamp parse error in get_last_cleanup_time: {e}") 110 | return None 111 | 112 | async def record_cleanup_time(self): 113 | """Record the current timestamp as the last cleanup time.""" 114 | try: 115 | now_iso = datetime.now().isoformat() 116 | async with self._pool.acquire() as conn: 117 | async with conn.transaction(): 118 | await conn.execute( 119 | """ 120 | INSERT INTO cleanup_history (last_cleanup_timestamp) 121 | VALUES ($1) 122 | """, 123 | now_iso, 124 | ) 125 | logging.debug("Recorded cleanup timestamp") 126 | except asyncpg.exceptions.PostgresError as e: 127 | logging.error(f"Error recording cleanup time: {e}") 128 | 129 | async def cleanup_state_history(self, days=2): 130 | """Clean up state_product_history older than specified days.""" 131 | try: 132 | last_cleanup = await self.get_last_cleanup_time() 133 | now = datetime.now() 134 | if last_cleanup and (now - last_cleanup) < timedelta(days=2): 135 | logging.debug("Skipping cleanup: less than 2 days since last cleanup") 136 | return False 137 | cutoff_iso = (now - timedelta(days=days)).isoformat() 138 | async with self._pool.acquire() as conn: 139 | async with conn.transaction(): 140 | await conn.execute( 141 | """ 142 | DELETE FROM state_product_history 143 | WHERE timestamp < $1 144 | """, 145 | cutoff_iso, 146 | ) 147 | logging.info(f"Cleaned up state history older than {cutoff_iso}") 148 | await self.record_cleanup_time() 149 | return True 150 | except asyncpg.exceptions.PostgresError as e: 151 | logging.error(f"Error during cleanup: {e}") 152 | return False 153 | 154 | def _decode_jsonb(self, data): 155 | """Helper function to decode JSONB data from PostgreSQL.""" 156 | if isinstance(data, str): 157 | try: 158 | data = json.loads(data) 159 | except json.JSONDecodeError as e: 160 | logging.error(f"JSON decode error: {e}") 161 | return None 162 | 163 | if isinstance(data, dict): 164 | # Parse known JSONB fields 165 | if "products" in data and isinstance(data["products"], str): 166 | try: 167 | data["products"] = json.loads(data["products"]) 168 | except json.JSONDecodeError: 169 | pass # Keep as string if can't parse 170 | 171 | if "notification_preference" in data and isinstance( 172 | data["notification_preference"], str 173 | ): 174 | try: 175 | data["notification_preference"] = json.loads( 176 | data["notification_preference"] 177 | ) 178 | except json.JSONDecodeError: 179 | pass # Keep as string if can't parse 180 | 181 | if "last_notified" in data and isinstance(data["last_notified"], str): 182 | try: 183 | data["last_notified"] = json.loads(data["last_notified"]) 184 | except json.JSONDecodeError: 185 | data["last_notified"] = {} # Default to empty dict 186 | 187 | return data 188 | 189 | async def get_user(self, chat_id): 190 | """Retrieve user data by chat_id.""" 191 | try: 192 | chat_id = int(chat_id) # Ensure int for BIGINT 193 | logging.info(f"Fetching user for chat_id {chat_id} of type {type(chat_id)}") 194 | async with self._pool.acquire() as conn: 195 | row = await conn.fetchrow( 196 | """ 197 | SELECT data FROM users WHERE chat_id = $1 198 | """, 199 | chat_id, 200 | ) 201 | if not row: 202 | logging.warning(f"No row found for chat_id {chat_id}") 203 | 
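                    # A missing row simply means this chat never registered (or was
                    # deleted); get_user deliberately returns None for that case, the
                    # same sentinel used for malformed data and database errors below.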
return None 204 | 205 | data = self._decode_jsonb(row["data"]) 206 | if not isinstance(data, dict): 207 | logging.error( 208 | f"Invalid data type for chat_id {chat_id}: {type(data)}" 209 | ) 210 | return None 211 | return data 212 | except asyncpg.exceptions.PostgresError as e: 213 | logging.error(f"Error getting user {chat_id}: {e}") 214 | return None 215 | 216 | # Updated database.py with ::jsonb casts 217 | async def update_user(self, chat_id, user_data): 218 | """Update user data with full JSONB overwrite.""" 219 | try: 220 | chat_id = int(chat_id) # Ensure int for BIGINT 221 | user_json = json.dumps(user_data) # Serialize to str 222 | async with self._pool.acquire() as conn: 223 | async with conn.transaction(): 224 | await conn.execute( 225 | """ 226 | INSERT INTO users (chat_id, data) 227 | VALUES ($1, $2::jsonb) 228 | ON CONFLICT (chat_id) 229 | DO UPDATE SET data = EXCLUDED.data 230 | """, 231 | chat_id, 232 | user_json, 233 | ) 234 | logging.debug(f"Updated user {chat_id}") 235 | # Transaction is automatically committed here 236 | return True 237 | except asyncpg.exceptions.PostgresError as e: 238 | logging.error(f"Error updating user {chat_id}: {e}") 239 | return False 240 | 241 | async def update_user_partial(self, chat_id, path, value): 242 | """Perform partial JSONB update using jsonb_set.""" 243 | try: 244 | chat_id = int(chat_id) # Ensure int for BIGINT 245 | value_json = json.dumps(value) # Serialize to JSON str 246 | async with self._pool.acquire() as conn: 247 | async with conn.transaction(): 248 | await conn.execute( 249 | """ 250 | UPDATE users 251 | SET data = jsonb_set(data, $2, $3::jsonb) 252 | WHERE chat_id = $1 253 | """, 254 | chat_id, 255 | path, 256 | value_json, 257 | ) 258 | logging.debug(f"Partial update for user {chat_id} at path {path}") 259 | except asyncpg.exceptions.PostgresError as e: 260 | logging.error(f"Error partial updating user {chat_id}: {e}") 261 | raise 262 | 263 | async def delete_user(self, chat_id): 264 | """Delete user by chat_id.""" 265 | try: 266 | chat_id = int(chat_id) # Ensure int for BIGINT 267 | async with self._pool.acquire() as conn: 268 | async with conn.transaction(): 269 | await conn.execute( 270 | """ 271 | DELETE FROM users WHERE chat_id = $1 272 | """, 273 | chat_id, 274 | ) 275 | logging.info(f"Deleted user {chat_id}") 276 | except asyncpg.exceptions.PostgresError as e: 277 | logging.error(f"Error deleting user {chat_id}: {e}") 278 | raise 279 | 280 | async def get_all_users(self): 281 | """Retrieve all users for broadcasts or stats.""" 282 | try: 283 | async with self._pool.acquire() as conn: 284 | rows = await conn.fetch("SELECT data FROM users") 285 | users = [] 286 | for row in rows: 287 | data = row["data"] 288 | if isinstance(data, str): 289 | logging.warning( 290 | "Data is str in get_all_users, attempting json.loads" 291 | ) 292 | try: 293 | data = json.loads(data) 294 | except json.JSONDecodeError as e: 295 | logging.error(f"JSON decode error in get_all_users: {e}") 296 | continue 297 | if isinstance(data, dict): 298 | users.append(data) 299 | else: 300 | logging.error( 301 | f"Invalid data type in get_all_users: {type(data)}" 302 | ) 303 | if len(users) != len(rows): 304 | logging.warning( 305 | f"Filtered {len(rows) - len(users)} invalid user records" 306 | ) 307 | return users 308 | except asyncpg.exceptions.PostgresError as e: 309 | logging.error(f"Error getting all users: {e}") 310 | return [] 311 | 312 | async def record_state_change( 313 | self, state_alias, product_name, status, inventory_quantity 314 | ): 
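        # (Explanatory sketch added for clarity; not in the original source.)
        # record_state_change is the heart of restock detection: it reads the
        # previous row, upserts the current status, and appends to the history
        # table only on a meaningful transition. A caller pairs it with
        # is_restock_event roughly like this ("karnataka" is a substore alias
        # from substore_list.py; the product name is hypothetical):
        #
        #     prev = await db.record_state_change("karnataka", "Whey Protein", "In Stock", 12)
        #     if await db.is_restock_event("karnataka", "Whey Protein", "In Stock", prev):
        #         ...  # fan out notifications to subscribed users
        #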
315 | """Record state change and return previous state.""" 316 | now_iso = datetime.now().isoformat() # Str for TEXT 317 | try: 318 | async with self._pool.acquire() as conn: 319 | async with conn.transaction(): 320 | row = await conn.fetchrow( 321 | """ 322 | SELECT status, inventory_quantity, timestamp 323 | FROM state_product_status 324 | WHERE state_alias = $1 AND product_name = $2 325 | """, 326 | state_alias, 327 | product_name, 328 | ) 329 | previous_state = dict(row) if row else None 330 | await conn.execute( 331 | """ 332 | INSERT INTO state_product_status 333 | (state_alias, product_name, status, inventory_quantity, timestamp) 334 | VALUES ($1, $2, $3, $4, $5) 335 | ON CONFLICT (state_alias, product_name) 336 | DO UPDATE SET 337 | status = EXCLUDED.status, 338 | inventory_quantity = EXCLUDED.inventory_quantity, 339 | timestamp = EXCLUDED.timestamp 340 | """, 341 | state_alias, 342 | product_name, 343 | status, 344 | inventory_quantity, 345 | now_iso, 346 | ) 347 | state_changed = ( 348 | not previous_state 349 | or previous_state["status"] != status 350 | or ( 351 | previous_state["status"] == "In Stock" 352 | and previous_state["inventory_quantity"] == 0 353 | and inventory_quantity > 0 354 | ) 355 | ) 356 | 357 | if state_changed: 358 | await conn.execute( 359 | """ 360 | INSERT INTO state_product_history 361 | (state_alias, product_name, status, inventory_quantity, timestamp) 362 | VALUES ($1, $2, $3, $4, $5) 363 | """, 364 | state_alias, 365 | product_name, 366 | status, 367 | inventory_quantity, 368 | now_iso, 369 | ) 370 | logging.info( 371 | f"State transition: {state_alias} - {product_name} - {status} (quantity: {inventory_quantity}) [previous: {previous_state['status'] if previous_state else 'None'}]" 372 | ) 373 | else: 374 | logging.debug( 375 | f"No significant state change for {state_alias} - {product_name}: status unchanged" 376 | ) 377 | return previous_state 378 | except asyncpg.exceptions.PostgresError as e: 379 | logging.error( 380 | f"Error recording state change for {state_alias} - {product_name}: {e}" 381 | ) 382 | raise 383 | 384 | async def is_restock_event( 385 | self, state_alias, product_name, current_status, previous_state 386 | ): 387 | """Check if current state change is a restock.""" 388 | try: 389 | # Only consider In Stock status for restock events 390 | if current_status != "In Stock": 391 | logger.debug( 392 | f"is_restock_event: current_status for {product_name} is not In Stock ({current_status})" 393 | ) 394 | return False 395 | 396 | # Use previous_state (from state_product_status before update) to decide 397 | # previous_state is the row before we updated the current status 398 | logger.debug( 399 | f"is_restock_event: previous_state for {product_name} in {state_alias}: {previous_state}" 400 | ) 401 | 402 | # If we never saw this product before, consider it a restock (new product) 403 | if not previous_state: 404 | logger.info( 405 | f"is_restock_event: no previous state for {product_name} in {state_alias} - treating as restock" 406 | ) 407 | return True 408 | 409 | prev_status = ( 410 | previous_state.get("status") 411 | if isinstance(previous_state, dict) 412 | else None 413 | ) 414 | prev_qty = ( 415 | previous_state.get("inventory_quantity") 416 | if isinstance(previous_state, dict) 417 | else None 418 | ) 419 | 420 | # If previously not in stock, and now in stock => restock 421 | if prev_status != "In Stock": 422 | logger.info( 423 | f"is_restock_event: {product_name} in {state_alias} changed from '{prev_status}' to 'In Stock' - restock" 
424 | ) 425 | return True 426 | 427 | # Edge case: previous status was In Stock but quantity was 0 and now >0 428 | try: 429 | if ( 430 | prev_status == "In Stock" 431 | and isinstance(prev_qty, int) 432 | and prev_qty == 0 433 | ): 434 | logger.info( 435 | f"is_restock_event: {product_name} had In Stock with qty=0 previously, treating as restock when qty increases" 436 | ) 437 | return True 438 | except Exception: 439 | pass 440 | 441 | # Otherwise not a restock 442 | logger.debug( 443 | f"is_restock_event: {product_name} in {state_alias} is In Stock and was already In Stock previously - not a restock" 444 | ) 445 | return False 446 | 447 | except Exception as e: 448 | logger.error( 449 | f"Error checking restock event for {product_name} in {state_alias}: {e}" 450 | ) 451 | return False 452 | 453 | async def get_last_state_change(self, state_alias, product_name): 454 | """Get the last recorded state for a product.""" 455 | try: 456 | async with self._pool.acquire() as conn: 457 | row = await conn.fetchrow( 458 | """ 459 | SELECT status, inventory_quantity, timestamp 460 | FROM state_product_status 461 | WHERE state_alias = $1 AND product_name = $2 462 | """, 463 | state_alias, 464 | product_name, 465 | ) 466 | if row: 467 | row_dict = dict(row) 468 | row_dict["timestamp"] = datetime.fromisoformat( 469 | row_dict["timestamp"] 470 | ) 471 | return row_dict 472 | return None 473 | except asyncpg.exceptions.PostgresError as e: 474 | logging.error( 475 | f"Error getting last state change for {state_alias} - {product_name}: {e}" 476 | ) 477 | return None 478 | except ValueError as e: 479 | logging.error(f"Timestamp parse error in get_last_state_change: {e}") 480 | return None 481 | 482 | async def get_state_changes_since(self, state_alias, product_name, since_time): 483 | """Get state changes since a given time.""" 484 | try: 485 | async with self._pool.acquire() as conn: 486 | rows = await conn.fetch( 487 | """ 488 | SELECT status, timestamp FROM state_product_history 489 | WHERE state_alias = $1 AND product_name = $2 AND timestamp > $3 490 | ORDER BY timestamp ASC 491 | """, 492 | state_alias, 493 | product_name, 494 | since_time, 495 | ) 496 | changes = [] 497 | for row in rows: 498 | row_dict = dict(row) 499 | row_dict["timestamp"] = datetime.fromisoformat( 500 | row_dict["timestamp"] 501 | ) 502 | changes.append(row_dict) 503 | return changes 504 | except asyncpg.exceptions.PostgresError as e: 505 | logging.error( 506 | f"Error getting state changes for {state_alias} - {product_name}: {e}" 507 | ) 508 | return [] 509 | except ValueError as e: 510 | logging.error(f"Timestamp parse error in get_state_changes_since: {e}") 511 | return [] 512 | 513 | async def get_last_sold_out_before(self, state_alias, product_name, before_time): 514 | """Get the last 'Sold Out' state before a given time.""" 515 | try: 516 | async with self._pool.acquire() as conn: 517 | row = await conn.fetchrow( 518 | """ 519 | SELECT status, timestamp FROM state_product_history 520 | WHERE state_alias = $1 AND product_name = $2 AND status = 'Sold Out' AND timestamp < $3 521 | ORDER BY timestamp DESC LIMIT 1 522 | """, 523 | state_alias, 524 | product_name, 525 | before_time, 526 | ) 527 | if row: 528 | row_dict = dict(row) 529 | row_dict["timestamp"] = datetime.fromisoformat( 530 | row_dict["timestamp"] 531 | ) 532 | return row_dict 533 | return None 534 | except asyncpg.exceptions.PostgresError as e: 535 | logging.error(f"Error getting last sold out state: {e}") 536 | return None 537 | except ValueError as e: 538 | 
logging.error(f"Timestamp parse error in get_last_sold_out_before: {e}") 539 | return None 540 | 541 | async def close(self): 542 | """Close the database connection pool.""" 543 | try: 544 | if self._pool: 545 | await asyncio.wait_for(self._pool.close(), timeout=30) 546 | logging.info("Database connection pool closed") 547 | self._pool = None 548 | except asyncio.TimeoutError: 549 | logging.warning("Timeout closing pool; connections may linger") 550 | except asyncpg.exceptions.PostgresError as e: 551 | logging.error(f"Error closing database: {e}") 552 | raise 553 | -------------------------------------------------------------------------------- /product_checker.py: -------------------------------------------------------------------------------- 1 | from api_client import ( 2 | get_tid_and_substore, 3 | fetch_product_data_for_alias_async, 4 | product_api_rate_limiter, 5 | ) 6 | from substore_mapping import load_substore_mapping, save_substore_mapping 7 | from cache import substore_cache, substore_pincode_map, pincode_cache 8 | from utils import is_product_in_stock, mask 9 | from notifier import send_telegram_notification_for_user 10 | import asyncio 11 | import sys 12 | import os 13 | from config import ( 14 | TELEGRAM_BOT_TOKEN, 15 | SEMAPHORE_LIMIT, 16 | USE_SUBSTORE_CACHE, 17 | FALLBACK_TO_PINCODE_CACHE, 18 | NOTIFICATION_CONCURRENCY_LIMIT, 19 | ) 20 | import logging 21 | from datetime import datetime 22 | from telegram.ext import Application 23 | from common import PRODUCT_ALIAS_MAP, get_product_info 24 | import cloudscraper 25 | import aiohttp 26 | import json 27 | import sentry_sdk 28 | 29 | sys.path.append(os.path.dirname(os.path.abspath(__file__))) 30 | logger = logging.getLogger(__name__) 31 | 32 | # Global locks for user-level synchronization 33 | user_locks = {} # chat_id -> asyncio.Lock 34 | 35 | 36 | async def should_notify_user(user, product_name, status, state_alias, db, is_restock): 37 | """Determine if a notification should be sent based on user preference.""" 38 | chat_id = user.get("chat_id", "unknown") 39 | logger.info( 40 | f"Checking notification criteria for user {chat_id}, product '{product_name}', status '{status}'" 41 | ) 42 | 43 | try: 44 | if not isinstance(user, dict): 45 | logger.error(f"Invalid user data type for chat_id {chat_id}: {type(user)}") 46 | return False 47 | 48 | # Basic validation checks 49 | if status != "In Stock": 50 | logger.debug(f"Product {product_name} not in stock for user {chat_id}") 51 | return False 52 | 53 | if not user.get("active", False): 54 | logger.debug(f"User {chat_id} is not active") 55 | return False 56 | 57 | # Get and validate notification preference 58 | preference = user.get("notification_preference", "until_stop") 59 | last_notified = user.get("last_notified", {}) 60 | 61 | # Handle JSON string format of last_notified 62 | if isinstance(last_notified, str): 63 | try: 64 | last_notified = json.loads(last_notified) 65 | except json.JSONDecodeError: 66 | logger.warning( 67 | f"Invalid last_notified JSON for user {chat_id}, resetting to empty" 68 | ) 69 | last_notified = {} 70 | # DEBUG: Log current decision inputs 71 | logger.debug( 72 | f"Decision inputs for user={chat_id} product={product_name} preference={preference} " 73 | f"status={status} is_restock={is_restock} last_notified_keys={list(last_notified.keys())} active={user.get('active')} products={user.get('products')}" 74 | ) 75 | except Exception as e: 76 | logger.error( 77 | f"Error in initial notification check for user {chat_id}: {str(e)}" 78 | ) 79 | return False 
80 | 81 | if preference == "once_and_stop": 82 | # For all products tracking ("Any"), handle all available products 83 | check_all_products = ( 84 | len(user.get("products", [])) == 1 85 | and user.get("products", [""])[0].lower() == "any" 86 | ) 87 | 88 | # For new tracking (empty last_notified): 89 | # 1. For specific products: notify if product is in stock 90 | # 2. For "Any": notify for any in-stock product 91 | if not last_notified: 92 | logger.info( 93 | f"First-time check for user {user.get('chat_id')}, first notification for {product_name}" 94 | ) 95 | return True 96 | 97 | # For subsequent checks: 98 | # Only notify if we've never notified for this product before 99 | # This is the core behavior of once_and_stop - one notification per product, ever 100 | if product_name not in last_notified: 101 | logger.info(f"First notification for product {product_name}") 102 | return True 103 | logger.debug( 104 | f"Already notified for {product_name}, skipping (once_and_stop). last_notified contains: {list(last_notified.keys())}" 105 | ) 106 | return False 107 | 108 | elif preference == "once_per_restock": 109 | try: 110 | # Handle first-time tracking 111 | if not last_notified: 112 | logger.info( 113 | f"First-time check for user {chat_id}, product {product_name}" 114 | ) 115 | return True 116 | 117 | # Only notify on restock events 118 | if is_restock: 119 | logger.info(f"Restock detected for {product_name} user {chat_id}") 120 | if product_name in last_notified: 121 | try: 122 | last_time = datetime.fromisoformat(last_notified[product_name]) 123 | time_since_last = datetime.now() - last_time 124 | 125 | # Since we know this is a restock event: 126 | # 1. Product is currently In Stock 127 | # 2. Product was Out of Stock at some point since last In Stock (verified by is_restock) 128 | # Just have a minimal cooldown to prevent double notifications 129 | if time_since_last.total_seconds() < 60: # 1-minute cooldown 130 | logger.debug( 131 | f"Skipping notification for {product_name} - too soon since last notification (time_since_last={time_since_last.total_seconds():.1f}s)" 132 | ) 133 | return False 134 | except (ValueError, TypeError) as e: 135 | logger.warning( 136 | f"Invalid timestamp for {product_name}, user {chat_id}: {e}" 137 | ) 138 | # Continue with notification if timestamp is invalid 139 | 140 | logger.info( 141 | f"Notifying for restock of {product_name} for user {chat_id}" 142 | ) 143 | return True 144 | 145 | # Product is in stock but not a restock event 146 | logger.debug( 147 | f"Product {product_name} is in stock but not a restock event for user {chat_id}" 148 | ) 149 | return False 150 | 151 | except Exception as e: 152 | logger.error( 153 | f"Error in once_per_restock handler for user {chat_id}: {str(e)}" 154 | ) 155 | return False 156 | 157 | elif preference == "until_stop": 158 | # Always notify while in stock 159 | return True 160 | 161 | return False 162 | 163 | 164 | # In product_checker.py, update_user_notification_tracking 165 | async def update_user_notification_tracking(user, product_name, db): 166 | """Update last_notified timestamp for a product using partial update.""" 167 | if not isinstance(user, dict): 168 | logger.error( 169 | f"Invalid user data type for chat_id {user.get('chat_id', 'unknown')}: {type(user)}" 170 | ) 171 | return False 172 | 173 | try: 174 | chat_id = int(user["chat_id"]) 175 | except (ValueError, KeyError, TypeError): 176 | logger.error(f"Invalid chat_id in user data: {user.get('chat_id', 'unknown')}") 177 | return False 178 | 179 | try: 
180 |         preference = user.get("notification_preference", "until_stop")
181 |         now_iso = datetime.now().isoformat()
182 | 
183 |         if preference == "until_stop":
184 |             logger.debug(
185 |                 f"No tracking needed for until_stop preference - user {chat_id}"
186 |             )
187 |             return True
188 | 
189 |         # Get current last_notified data
190 |         last_notified = user.get("last_notified", {})
191 |         if isinstance(last_notified, str):
192 |             try:
193 |                 last_notified = json.loads(last_notified)
194 |             except json.JSONDecodeError:
195 |                 logger.warning(
196 |                     f"Invalid last_notified JSON for user {chat_id}, resetting"
197 |                 )
198 |                 last_notified = {}
199 | 
200 |         # Update the tracking based on preference
201 |         if preference in ["once_and_stop", "once_per_restock"]:
202 |             # Update in-memory structure first so callers see immediate change
203 |             last_notified[product_name] = now_iso
204 |             user["last_notified"] = last_notified
205 |             path = ["last_notified"]
206 |             await db.update_user_partial(chat_id, path, last_notified)  # dict is JSON-encoded inside update_user_partial; pre-dumping it here would store a double-encoded string
207 |             logger.debug(
208 |                 f"Updated {preference} notification tracking for user {chat_id} - {product_name}. last_notified now: {list(last_notified.keys())}"
209 |             )
210 |             return True
211 | 
212 |     except Exception as e:
213 |         logger.error(
214 |             f"Error updating notification tracking for user {chat_id}, product {product_name}: {str(e)}"
215 |         )
216 |         return False
217 | 
218 |     return True
219 | 
220 | 
221 | async def get_products_availability_api_only_async(
222 |     pincode, max_concurrent_products=SEMAPHORE_LIMIT
223 | ):
224 |     logger.info(f"Fetching availability for pincode: {pincode}")
225 |     # Add a breadcrumb so Sentry shows which pincode was being processed
226 |     sentry_sdk.add_breadcrumb(
227 |         category="product_api",
228 |         message=f"fetch_availability pincode={pincode}",
229 |         level="info",
230 |     )
231 |     try:
232 |         sync_session = cloudscraper.create_scraper()
233 |         tid, substore, substore_id, cookies = get_tid_and_substore(
234 |             sync_session, pincode
235 |         )
236 |         async with aiohttp.ClientSession(cookies=cookies) as session:
237 |             semaphore = asyncio.Semaphore(max_concurrent_products)
238 |             tasks = [
239 |                 (
240 |                     product_name,
241 |                     get_product_info(product_name, "slug"),
242 |                     fetch_product_data_for_alias_async(
243 |                         session,
244 |                         tid,
245 |                         substore_id,
246 |                         get_product_info(product_name, "slug"),
247 |                         semaphore,
248 |                         cookies=cookies,
249 |                     ),
250 |                 )
251 |                 for product_name in PRODUCT_ALIAS_MAP.keys()
252 |             ]
253 |             product_status = []
254 |             results = await asyncio.gather(
255 |                 *[task for _, _, task in tasks], return_exceptions=True
256 |             )
257 |             for (product_name, alias, _), data in zip(tasks, results):
258 |                 if isinstance(data, Exception):
259 |                     logger.error(f"Error fetching data for {product_name}: {data}")
260 |                     continue
261 |                 if data is None:
262 |                     logger.warning(
263 |                         f"Session expired for {product_name}. Refreshing session..."
264 | ) 265 | sync_session = cloudscraper.create_scraper() 266 | tid, substore, substore_id, cookies = get_tid_and_substore( 267 | sync_session, pincode 268 | ) 269 | async with aiohttp.ClientSession(cookies=cookies) as new_session: 270 | data = await fetch_product_data_for_alias_async( 271 | new_session, 272 | tid, 273 | substore_id, 274 | alias, 275 | semaphore, 276 | cookies=cookies, 277 | ) 278 | if data: 279 | in_stock, quantity = is_product_in_stock(data[0], substore_id) 280 | # Tag the product and substore for richer Sentry context 281 | sentry_sdk.set_tag("substore_id", str(substore_id)) 282 | sentry_sdk.set_tag("product_name", product_name) 283 | product_status.append( 284 | (product_name, "In Stock" if in_stock else "Sold Out", quantity) 285 | ) 286 | else: 287 | product_status.append((product_name, "Sold Out", 0)) 288 | return product_status, substore_id, substore 289 | except Exception as e: 290 | logger.error(f"Error in get_products_availability_api_only_async: {e}") 291 | sentry_sdk.capture_exception(e) 292 | return [], None, None 293 | 294 | 295 | async def check_product_availability_for_state(state_alias, sample_pincode, db): 296 | logger.info(f"Checking state {state_alias} with pincode: {sample_pincode}") 297 | sentry_sdk.add_breadcrumb( 298 | category="state_check", 299 | message=f"start_state_check state={state_alias} pincode={sample_pincode}", 300 | level="info", 301 | ) 302 | try: 303 | if USE_SUBSTORE_CACHE: 304 | cached_status = substore_cache.get(state_alias) 305 | if cached_status: 306 | logger.info(f"Cache hit for state {state_alias}") 307 | for product_name, status, inventory_quantity in cached_status: 308 | await db.record_state_change( 309 | state_alias, product_name, status, inventory_quantity 310 | ) 311 | return cached_status, {} 312 | ( 313 | product_status, 314 | substore_id, 315 | substore, 316 | ) = await get_products_availability_api_only_async(sample_pincode) 317 | restock_info = {} 318 | if product_status: 319 | for product_name, status, inventory_quantity in product_status: 320 | previous_state = await db.record_state_change( 321 | state_alias, product_name, status, inventory_quantity 322 | ) 323 | is_restock = await db.is_restock_event( 324 | state_alias, product_name, status, previous_state 325 | ) 326 | if is_restock: 327 | sentry_sdk.add_breadcrumb( 328 | category="restock", 329 | message=f"restock_detected state={state_alias} product={product_name}", 330 | level="info", 331 | ) 332 | restock_info[product_name] = is_restock 333 | if USE_SUBSTORE_CACHE: 334 | substore_cache[state_alias] = product_status 335 | return product_status, restock_info 336 | except Exception as e: 337 | logger.error(f"Error checking state {state_alias}: {e}") 338 | sentry_sdk.capture_exception(e) 339 | return [], {} 340 | 341 | 342 | async def validate_user_state(user, db): 343 | """ 344 | Validate that a user is still active and has a valid configuration. 
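    Checks, in order: a chat_id is present, the user's "active" flag is set,
    a pincode exists, and the product list is non-empty. The first failing
    check short-circuits to False; unexpected errors are logged and return
    False as well.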
345 | """ 346 | try: 347 | chat_id = user.get("chat_id") 348 | if not chat_id: 349 | logger.warning("User has no chat_id") 350 | return False 351 | 352 | # Check if user is marked as active 353 | if not user.get("active", False): 354 | logger.debug(f"User {chat_id} is not active") 355 | return False 356 | 357 | # Check for valid pincode 358 | pincode = user.get("pincode") 359 | if not pincode: 360 | logger.warning(f"User {chat_id} has no pincode") 361 | return False 362 | 363 | # Check for valid product preferences 364 | products = user.get("products", []) 365 | if not products: 366 | logger.warning(f"User {chat_id} has no product preferences") 367 | return False 368 | 369 | # All checks passed 370 | return True 371 | 372 | except Exception as e: 373 | logger.error(f"Error validating user state: {str(e)}") 374 | return False 375 | 376 | 377 | async def should_deactivate_user(chat_id, app): 378 | """ 379 | Determine if a user should be deactivated based on their chat state. 380 | """ 381 | try: 382 | async with asyncio.timeout(5): 383 | # Try to get chat member info 384 | chat = await app.bot.get_chat(chat_id) 385 | if not chat: 386 | logger.info(f"Chat {chat_id} not found") 387 | return True 388 | return False 389 | except Exception as e: 390 | error_msg = str(e).lower() 391 | # Deactivate if bot was blocked or chat not found 392 | if ( 393 | "blocked" in error_msg 394 | or "not found" in error_msg 395 | or "forbidden" in error_msg 396 | ): 397 | return True 398 | # For other errors, don't deactivate 399 | logger.error(f"Error checking chat state for {chat_id}: {str(e)}") 400 | return False 401 | 402 | 403 | async def check_products_for_users(db): 404 | logger.info("Starting product check for all users") 405 | try: 406 | await db.cleanup_state_history() 407 | users = await db.get_all_users() 408 | total_users = len(users) 409 | if not users: 410 | logger.warning("No users found in database") 411 | return 412 | 413 | # Log user statistics 414 | active_users = sum(1 for user in users if user.get("active", False)) 415 | configured_users = sum( 416 | 1 for user in users if user.get("pincode") and user.get("products") 417 | ) 418 | preference_stats = {"until_stop": 0, "once_and_stop": 0, "once_per_restock": 0} 419 | for user in users: 420 | pref = user.get("notification_preference", "until_stop") 421 | preference_stats[pref] = preference_stats.get(pref, 0) + 1 422 | 423 | logger.info(f"User Statistics:") 424 | logger.info(f"Total Users: {total_users}") 425 | logger.info(f"Active Users: {active_users}") 426 | logger.info(f"Configured Users: {configured_users}") 427 | logger.info(f"Notification Preferences: {preference_stats}") 428 | state_groups = {} 429 | pincode_to_state = {} 430 | unmapped_users = [] 431 | substore_info = load_substore_mapping() if USE_SUBSTORE_CACHE else [] 432 | for user in users: 433 | if not isinstance(user, dict): 434 | logger.error(f"Invalid user data type: {type(user)}") 435 | continue 436 | pincode = user.get("pincode") 437 | if not pincode: 438 | logger.warning(f"User {user.get('chat_id')} has no pincode") 439 | continue 440 | state_alias = pincode_to_state.get(pincode) 441 | if state_alias: 442 | state_groups[state_alias].append(user) 443 | continue 444 | for sub in substore_info: 445 | if str(pincode) in sub.get("pincodes", []): 446 | state_alias = sub["alias"] 447 | break 448 | if not state_alias and FALLBACK_TO_PINCODE_CACHE: 449 | state_alias = pincode_cache.get(pincode) 450 | if not state_alias: 451 | try: 452 | sync_session = cloudscraper.create_scraper() 453 
| _, substore, substore_id, _ = get_tid_and_substore( 454 | sync_session, pincode 455 | ) 456 | state_alias = ( 457 | substore.get("alias", f"unknown-{pincode}") 458 | if isinstance(substore, dict) 459 | else str(substore) 460 | ) 461 | # Use substore_id if available, fallback to _id from substore object 462 | new_id = substore_id or ( 463 | substore.get("_id", "") if isinstance(substore, dict) else "" 464 | ) 465 | 466 | # Check if an entry with this alias already exists 467 | existing_sub = next( 468 | (sub for sub in substore_info if sub["alias"] == state_alias), 469 | None, 470 | ) 471 | 472 | if existing_sub: 473 | need_update = False 474 | # Add the new pincode if not already there 475 | if str(pincode) not in existing_sub["pincodes"]: 476 | existing_sub["pincodes"].append(str(pincode)) 477 | need_update = True 478 | logger.info( 479 | f"Added pincode {pincode} to existing substore {state_alias}" 480 | ) 481 | 482 | # Update _id if it's empty and we have a new one 483 | if not existing_sub["_id"] and new_id: 484 | existing_sub["_id"] = new_id 485 | need_update = True 486 | logger.info( 487 | f"Updated empty _id for substore {state_alias} to {new_id}" 488 | ) 489 | 490 | if need_update: 491 | save_substore_mapping(substore_info) 492 | else: 493 | # Create new entry only if alias doesn't exist 494 | new_sub = { 495 | "alias": state_alias, 496 | "_id": new_id, 497 | "name": substore.get("name", state_alias.title()) 498 | if isinstance(substore, dict) 499 | else state_alias.title(), 500 | "pincodes": [str(pincode)], 501 | } 502 | substore_info.append(new_sub) 503 | logger.info( 504 | f"Created new substore entry for {state_alias} with pincode {pincode}" 505 | ) 506 | save_substore_mapping(substore_info) 507 | 508 | pincode_to_state[pincode] = state_alias 509 | except Exception as e: 510 | logger.error(f"Error mapping pincode {pincode}: {e}") 511 | unmapped_users.append(user) 512 | continue 513 | state_groups.setdefault(state_alias, []).append(user) 514 | 515 | states_to_check = list(state_groups.keys()) 516 | logger.info(f"Checking {len(states_to_check)} states") 517 | 518 | app = Application.builder().token(TELEGRAM_BOT_TOKEN).build() 519 | await app.initialize() 520 | try: 521 | state_tasks = [ 522 | check_product_availability_for_state( 523 | state, state_groups[state][0]["pincode"], db 524 | ) 525 | for state in states_to_check 526 | ] 527 | results = await asyncio.gather(*state_tasks, return_exceptions=True) 528 | 529 | notification_semaphore = asyncio.Semaphore(NOTIFICATION_CONCURRENCY_LIMIT) 530 | notification_tasks = [] 531 | user_notifications = {} # Track notifications per user: chat_id -> (products, state) 532 | 533 | # First pass: collect all notifications per user across all states 534 | for idx, state_alias in enumerate(states_to_check): 535 | if isinstance(results[idx], Exception): 536 | logger.error( 537 | f"Error processing state {state_alias}: {results[idx]}" 538 | ) 539 | continue 540 | product_status, restock_info = results[idx] 541 | if not product_status: 542 | logger.warning(f"No product status for state {state_alias}") 543 | continue 544 | for user in state_groups[state_alias]: 545 | if not isinstance(user, dict): 546 | logger.error(f"Invalid user data type for state {state_alias}") 547 | continue 548 | 549 | chat_id = user.get("chat_id") 550 | if not chat_id: 551 | logger.warning(f"User in state {state_alias} has no chat_id") 552 | continue 553 | 554 | try: 555 | chat_id = int( 556 | chat_id 557 | ) # Convert to int early to catch invalid format 558 | except 
ValueError: 559 | logger.error( 560 | f"Invalid chat_id format in state {state_alias}: {chat_id}" 561 | ) 562 | continue 563 | 564 | products_to_check = user.get("products", []) 565 | if not chat_id or not products_to_check: 566 | continue 567 | check_all_products = ( 568 | len(products_to_check) == 1 569 | and products_to_check[0].strip().lower() == "any" 570 | ) 571 | notify_products = [ 572 | (name, status, qty) 573 | for name, status, qty in product_status 574 | if (check_all_products or name in products_to_check) 575 | and await should_notify_user( 576 | user, 577 | name, 578 | status, 579 | state_alias, 580 | db, 581 | restock_info.get(name, False), 582 | ) 583 | ] 584 | if notify_products: 585 | products_notified = [name for name, _, _ in notify_products] 586 | if chat_id in user_notifications: 587 | # Merge with existing notifications 588 | existing_products = user_notifications[chat_id][0] 589 | existing_notify = user_notifications[chat_id][1] 590 | merged_products = list( 591 | set(existing_products + products_to_check) 592 | ) 593 | merged_notify = [ 594 | (n, s, q) 595 | for n, s, q in notify_products + existing_notify 596 | if (n, s, q) not in existing_notify 597 | ] 598 | user_notifications[chat_id] = ( 599 | merged_products, 600 | merged_notify, 601 | ) 602 | logger.debug(f"Merged notifications for user {chat_id}") 603 | else: 604 | user_notifications[chat_id] = ( 605 | products_to_check, 606 | notify_products, 607 | ) 608 | if chat_id not in user_locks: 609 | user_locks[chat_id] = asyncio.Lock() 610 | logger.info( 611 | f"Prepared notifications for user {chat_id}: {products_notified}" 612 | ) 613 | 614 | # Define the notification sending function outside the loop 615 | async def locked_send( 616 | chat_id, user, products_to_check, notify_products, products_notified 617 | ): 618 | try: 619 | async with user_locks[chat_id]: 620 | if not await validate_user_state(user, db): 621 | logger.info( 622 | f"User {chat_id} is no longer active or has invalid configuration" 623 | ) 624 | return None # Don't retry for invalid users 625 | async with notification_semaphore: 626 | logger.info( 627 | f"Starting notification process for user {chat_id}" 628 | ) 629 | # Add Sentry context for this send 630 | try: 631 | with sentry_sdk.push_scope() as scope: 632 | scope.set_tag("chat_id", str(chat_id)) 633 | # try to get state alias from user or notify_products 634 | state_alias = user.get("state_alias") if isinstance(user, dict) else None 635 | if not state_alias and notify_products: 636 | # notify_products contains tuples (name, status, qty) 637 | # products_to_check contains user's product filter 638 | scope.set_tag("state_alias", str(state_alias)) 639 | scope.set_extra( 640 | "products_to_notify", 641 | [p for p, _, _ in notify_products], 642 | ) 643 | sentry_sdk.add_breadcrumb( 644 | category="notification", 645 | message=f"sending_notification chat_id={chat_id} products={[p for p,_,_ in notify_products]}", 646 | level="info", 647 | ) 648 | result = await send_telegram_notification_for_user( 649 | app, 650 | chat_id, 651 | user.get("pincode"), 652 | products_to_check, 653 | notify_products, 654 | ) 655 | except Exception as e: 656 | # Ensure exceptions during Sentry push_scope don't break notification flow 657 | logger.error(f"Error adding Sentry scope for user {chat_id}: {e}") 658 | result = await send_telegram_notification_for_user( 659 | app, 660 | chat_id, 661 | user.get("pincode"), 662 | products_to_check, 663 | notify_products, 664 | ) 665 | if result is True: # Success 666 | try: 667 
| for product_name in products_notified: 668 | await update_user_notification_tracking( 669 | user, product_name, db 670 | ) 671 | logger.info( 672 | f"Successfully notified user {chat_id} for {len(products_notified)} products" 673 | ) 674 | return True 675 | except Exception as e: 676 | logger.error( 677 | f"Error updating notification tracking for user {chat_id}: {str(e)}" 678 | ) 679 | return True # Still return True as notification succeeded 680 | elif result is None: # Permanent error 681 | logger.warning( 682 | f"Permanent error for user {chat_id}, deactivating..." 683 | ) 684 | await db.update_user_partial(chat_id, ["active"], False) 685 | return None # Don't retry 686 | else: # Temporary error (False) 687 | logger.warning( 688 | f"Temporary error for user {chat_id}, may retry" 689 | ) 690 | except asyncio.CancelledError: 691 | logger.warning(f"Notification task cancelled for user {chat_id}") 692 | raise 693 | except Exception as e: 694 | logger.error( 695 | f"Unexpected error in notification task for user {chat_id}: {str(e)}" 696 | ) 697 | return False 698 | return True 699 | 700 | # After collecting all notifications, create tasks 701 | for chat_id, ( 702 | products_to_check, 703 | notify_products, 704 | ) in user_notifications.items(): 705 | products_notified = [name for name, _, _ in notify_products] 706 | logger.info( 707 | f"Creating notification task for user {chat_id} with {len(products_notified)} products" 708 | ) 709 | # Find the user object for this chat_id 710 | user = next( 711 | ( 712 | u 713 | for users in state_groups.values() 714 | for u in users 715 | if str(u.get("chat_id")) == str(chat_id) 716 | ), 717 | None, 718 | ) 719 | # Create and add the task with name for better tracking 720 | task = asyncio.create_task( 721 | locked_send( 722 | chat_id, 723 | user, 724 | products_to_check, 725 | notify_products, 726 | products_notified, 727 | ), 728 | name=f"notify_{chat_id}", 729 | ) 730 | notification_tasks.append(task) 731 | 732 | # Wait for all notification tasks to complete and handle any errors 733 | if notification_tasks: 734 | logger.info( 735 | f"Waiting for {len(notification_tasks)} notification tasks to complete..." 
736 | ) 737 | try: 738 | results = await asyncio.gather( 739 | *notification_tasks, return_exceptions=True 740 | ) 741 | success_count = 0 742 | permanent_error_count = 0 743 | temp_error_count = 0 744 | 745 | for i, result in enumerate(results): 746 | task_name = notification_tasks[i].get_name() 747 | if isinstance(result, Exception): 748 | logger.error( 749 | f"Notification task {task_name} failed with error: {str(result)}" 750 | ) 751 | temp_error_count += 1 752 | elif result is True: 753 | success_count += 1 754 | elif result is None: 755 | permanent_error_count += 1 756 | else: 757 | temp_error_count += 1 758 | 759 | logger.info( 760 | f"Completed notifications: {success_count} successful, " 761 | f"{permanent_error_count} permanent failures, " 762 | f"{temp_error_count} temporary failures out of {len(notification_tasks)} total" 763 | ) 764 | except Exception as e: 765 | logger.error(f"Error while gathering notification tasks: {str(e)}") 766 | else: 767 | logger.info("No notifications to send") 768 | logger.info("All notification tasks completed") 769 | 770 | for state_alias, users in state_groups.items(): 771 | for user in users: 772 | if not isinstance(user, dict): 773 | continue 774 | chat_id = user.get("chat_id") 775 | products_to_check = user.get("products", []) 776 | check_all_products = ( 777 | len(products_to_check) == 1 778 | and products_to_check[0].strip().lower() == "any" 779 | ) 780 | if user.get("notification_preference") == "once_and_stop": 781 | last_notified = user.get("last_notified", {}) 782 | if check_all_products: 783 | # For "Any", deactivate as soon as we've notified about ANY product 784 | if ( 785 | last_notified 786 | ): # If we've notified about at least one product 787 | if user.get( 788 | "active", True 789 | ): # Only send message if user is still active 790 | user["active"] = False 791 | await db.update_user(chat_id, user) 792 | await app.bot.send_message( 793 | chat_id=chat_id, 794 | text="Notifications stopped after first available product notification. Use /start to reactivate and get notifications for more products.", 795 | parse_mode="Markdown", 796 | ) 797 | else: 798 | # For specific products, deactivate only when we've notified about all requested products 799 | notified_all = all( 800 | p in last_notified for p in products_to_check 801 | ) 802 | if notified_all and user.get( 803 | "active", True 804 | ): # Only send message if user is still active 805 | user["active"] = False 806 | await db.update_user(chat_id, user) 807 | await app.bot.send_message( 808 | chat_id=chat_id, 809 | text="Notified for all tracked products. Notifications stopped. 
Use /start to reactivate.", 810 | parse_mode="Markdown", 811 | ) 812 | 813 | finally: 814 | await app.shutdown() 815 | logger.info("Telegram application shutdown completed") 816 | finally: 817 | await db.close() 818 | logger.info("Database connection closed") 819 | logger.info("Product check completed") 820 | -------------------------------------------------------------------------------- /substore_list.py: -------------------------------------------------------------------------------- 1 | substore_info = [ 2 | { 3 | "alias": "west-bengal", 4 | "_id": "6650600024e61363e088c526", 5 | "name": "West-Bengal", 6 | "pincodes": [ 7 | "700002", 8 | "700048", 9 | "700031", 10 | "700094", 11 | "700067", 12 | "713301", 13 | "700015", 14 | "700023", 15 | "700099", 16 | "700141", 17 | "700147", 18 | "700074", 19 | "743363", 20 | "721253", 21 | "712123", 22 | "734005", 23 | "700049", 24 | "700039", 25 | "700005", 26 | "700079", 27 | "736101", 28 | "700061", 29 | "700088", 30 | "741222", 31 | "721306", 32 | "700083", 33 | "700034", 34 | "700008", 35 | "713409", 36 | "700064", 37 | "713104", 38 | "743145", 39 | "700046", 40 | "700025", 41 | "700056", 42 | "734429", 43 | "711226", 44 | "741101", 45 | "721657", 46 | "742223", 47 | "700077", 48 | "711302", 49 | "700071", 50 | "711109", 51 | "712235", 52 | "700033", 53 | "700082", 54 | "700098", 55 | "743329", 56 | "736123", 57 | "700089", 58 | "713206", 59 | "713365", 60 | "742302", 61 | "742235", 62 | "721302", 63 | "700091", 64 | "700136", 65 | "711104", 66 | "734011", 67 | "700026", 68 | "700102", 69 | "711101", 70 | "700106", 71 | "700047", 72 | "732101", 73 | "713373", 74 | "711106", 75 | "700105", 76 | "700029", 77 | "735202", 78 | "700019", 79 | "741235", 80 | "700020", 81 | "700161", 82 | "741139", 83 | "700007", 84 | "713363", 85 | "736156", 86 | "741201", 87 | "700054", 88 | "713204", 89 | "734203", 90 | "742236", 91 | "700084", 92 | "713201", 93 | "700110", 94 | "722102", 95 | "700045", 96 | "700162", 97 | "721607", 98 | "711102", 99 | "700027", 100 | "735101", 101 | "700078", 102 | "700137", 103 | "700119", 104 | "700055", 105 | "711103", 106 | "741245", 107 | "712248", 108 | "743331", 109 | "700124", 110 | "713423", 111 | "721145", 112 | "700016", 113 | "700101", 114 | "711112", 115 | "700092", 116 | "700127", 117 | "712245", 118 | "700156", 119 | "743235", 120 | "700157", 121 | "700075", 122 | "711107", 123 | "721507", 124 | "700053", 125 | "700060", 126 | "700163", 127 | "743271", 128 | "700028", 129 | "700009", 130 | "700058", 131 | "713102", 132 | "713101", 133 | "712234", 134 | "700122", 135 | "700040", 136 | "700006", 137 | "700104", 138 | "700093", 139 | "713304", 140 | "700103", 141 | "711202", 142 | "712232", 143 | "712233", 144 | "700107", 145 | "713209", 146 | "700129", 147 | "721101", 148 | "734006", 149 | "741252", 150 | "700038", 151 | "700035", 152 | "743166", 153 | "722101", 154 | "700086", 155 | "700024", 156 | "700090", 157 | "713212", 158 | "712503", 159 | "700159", 160 | "734101", 161 | "700051", 162 | "700059", 163 | "700030", 164 | "733103", 165 | "700132", 166 | "700145", 167 | "700014", 168 | "711205", 169 | "700135" 170 | ] 171 | }, 172 | { 173 | "alias": "telangana", 174 | "_id": "66506004aa64743ceefbed25", 175 | "name": "Telangana", 176 | "pincodes": [ 177 | "500043", 178 | "500065", 179 | "501301", 180 | "500067", 181 | "500080", 182 | "509103", 183 | "500094", 184 | "506009", 185 | "500082", 186 | "500092", 187 | "500034", 188 | "500089", 189 | "500049", 190 | "500076", 191 | "500011", 192 | "502278", 193 | 
"504301", 194 | "500097", 195 | "500062", 196 | "501504", 197 | "500025", 198 | "501505", 199 | "500013", 200 | "500083", 201 | "500032", 202 | "500075", 203 | "500085", 204 | "508001", 205 | "500015", 206 | "500029", 207 | "500072", 208 | "500064", 209 | "500003", 210 | "508285", 211 | "507002", 212 | "500104", 213 | "509001", 214 | "502001", 215 | "500046", 216 | "507123", 217 | "500070", 218 | "502032", 219 | "500084", 220 | "500002", 221 | "500068", 222 | "500001", 223 | "506006", 224 | "506015", 225 | "500014", 226 | "500059", 227 | "500061", 228 | "500007", 229 | "500020", 230 | "503111", 231 | "504208", 232 | "508213", 233 | "502291", 234 | "505001", 235 | "500054", 236 | "501510", 237 | "506003", 238 | "500079", 239 | "500040", 240 | "500102", 241 | "508252", 242 | "500018", 243 | "500016", 244 | "500019", 245 | "500012", 246 | "500078", 247 | "506004", 248 | "501203", 249 | "508113", 250 | "505327", 251 | "500087", 252 | "500074", 253 | "500091", 254 | "500090", 255 | "500086", 256 | "509216", 257 | "500095", 258 | "500081", 259 | "500010", 260 | "500044", 261 | "504001", 262 | "500101", 263 | "500037", 264 | "502220", 265 | "500063", 266 | "500052", 267 | "500039", 268 | "500055", 269 | "500008", 270 | "500053", 271 | "509209", 272 | "500098", 273 | "500006", 274 | "500050", 275 | "500017", 276 | "500073", 277 | "503308", 278 | "500028", 279 | "500024", 280 | "500060", 281 | "500030", 282 | "500023", 283 | "500035", 284 | "500005", 285 | "500027", 286 | "500058", 287 | "500056", 288 | "500045", 289 | "500047", 290 | "507115", 291 | "503001", 292 | "502329", 293 | "500004", 294 | "502103", 295 | "505301", 296 | "502285", 297 | "506001", 298 | "500036", 299 | "506002", 300 | "500009", 301 | "505475", 302 | "500033", 303 | "505468", 304 | "500088", 305 | "500026", 306 | "501401", 307 | "500048" 308 | ] 309 | }, 310 | { 311 | "alias": "tamil-nadu-1", 312 | "_id": "66505ff578117873bb53b56a", 313 | "name": "Tamil-Nadu-1", 314 | "pincodes": [ 315 | "600018", 316 | "600077", 317 | "600033", 318 | "600071", 319 | "600069", 320 | "641114", 321 | "600095", 322 | "638002", 323 | "600041", 324 | "600036", 325 | "638003", 326 | "638001", 327 | "600119", 328 | "600014", 329 | "636704", 330 | "625706", 331 | "600017", 332 | "627001", 333 | "600122", 334 | "600086", 335 | "600102", 336 | "600126", 337 | "641045", 338 | "600064", 339 | "600052", 340 | "641021", 341 | "641001", 342 | "635109", 343 | "600078", 344 | "603110", 345 | "627811", 346 | "637003", 347 | "603203", 348 | "600003", 349 | "600016", 350 | "641007", 351 | "600053", 352 | "632007", 353 | "641041", 354 | "600028", 355 | "600005", 356 | "600020", 357 | "626001", 358 | "627011", 359 | "600094", 360 | "600127", 361 | "641015", 362 | "600072", 363 | "625001", 364 | "625009", 365 | "603109", 366 | "632001", 367 | "600115", 368 | "620001", 369 | "600048", 370 | "638301", 371 | "600081", 372 | "600031", 373 | "642003", 374 | "603102", 375 | "600044", 376 | "625107", 377 | "600004", 378 | "641014", 379 | "610001", 380 | "607002", 381 | "600061", 382 | "600106", 383 | "621112", 384 | "632002", 385 | "600021", 386 | "603204", 387 | "611001", 388 | "624601", 389 | "641038", 390 | "600092", 391 | "636102", 392 | "600117", 393 | "600087", 394 | "600010", 395 | "600116", 396 | "637211", 397 | "600085", 398 | "600082", 399 | "641028", 400 | "600050", 401 | "623534", 402 | "600006", 403 | "600011", 404 | "600100", 405 | "600093", 406 | "641005", 407 | "630561", 408 | "600027", 409 | "600076", 410 | "600032", 411 | "637301", 412 | "600039", 413 | 
"632014", 414 | "600034", 415 | "600023", 416 | "600101", 417 | "600068", 418 | "603001", 419 | "620102", 420 | "636001", 421 | "613004", 422 | "632406", 423 | "600074", 424 | "600097", 425 | "638112", 426 | "600089", 427 | "600112", 428 | "600058", 429 | "600007", 430 | "600131", 431 | "600040", 432 | "607006", 433 | "600037", 434 | "632602", 435 | "641023", 436 | "601204", 437 | "636806", 438 | "626117", 439 | "603108", 440 | "600029", 441 | "600079", 442 | "600001", 443 | "600073", 444 | "636705", 445 | "600130", 446 | "641044", 447 | "600042", 448 | "620003", 449 | "632104", 450 | "600012", 451 | "636302", 452 | "600088", 453 | "600062", 454 | "600024", 455 | "638054", 456 | "641108", 457 | "641004", 458 | "600090", 459 | "600002", 460 | "600109", 461 | "620012", 462 | "600035", 463 | "641006", 464 | "600113", 465 | "625007", 466 | "625006", 467 | "603209", 468 | "641112", 469 | "600015", 470 | "622001", 471 | "635001", 472 | "600091", 473 | "600075", 474 | "600026", 475 | "641035", 476 | "603103", 477 | "626126", 478 | "600107", 479 | "603202", 480 | "600096" 481 | ] 482 | }, 483 | { 484 | "alias": "karnataka", 485 | "_id": "66505ff0998183e1b1935c75", 486 | "name": "Karnataka", 487 | "pincodes": [ 488 | "560063", 489 | "573201", 490 | "560039", 491 | "584103", 492 | "575016", 493 | "560065", 494 | "560062", 495 | "560054", 496 | "572201", 497 | "577501", 498 | "560068", 499 | "560036", 500 | "560071", 501 | "560077", 502 | "560078", 503 | "583201", 504 | "575004", 505 | "560003", 506 | "560030", 507 | "562125", 508 | "570019", 509 | "581402", 510 | "576214", 511 | "583104", 512 | "560012", 513 | "575001", 514 | "570027", 515 | "560024", 516 | "570006", 517 | "570023", 518 | "560097", 519 | "580021", 520 | "560032", 521 | "560004", 522 | "562114", 523 | "574110", 524 | "560095", 525 | "590016", 526 | "580006", 527 | "560048", 528 | "580003", 529 | "574142", 530 | "577502", 531 | "560016", 532 | "560057", 533 | "575025", 534 | "560072", 535 | "560111", 536 | "560064", 537 | "560034", 538 | "560007", 539 | "560074", 540 | "572102", 541 | "586108", 542 | "560043", 543 | "560015", 544 | "562157", 545 | "584102", 546 | "571455", 547 | "560045", 548 | "560040", 549 | "560035", 550 | "560080", 551 | "575018", 552 | "562135", 553 | "580030", 554 | "560008", 555 | "575015", 556 | "560041", 557 | "563114", 558 | "560051", 559 | "562159", 560 | "575006", 561 | "560096", 562 | "560020", 563 | "580008", 564 | "572101", 565 | "560086", 566 | "562123", 567 | "560075", 568 | "582101", 569 | "560038", 570 | "560076", 571 | "560009", 572 | "575002", 573 | "560058", 574 | "560082", 575 | "581401", 576 | "570015", 577 | "581110", 578 | "560067", 579 | "580005", 580 | "585403", 581 | "560073", 582 | "560061", 583 | "560022", 584 | "581301", 585 | "560050", 586 | "560060", 587 | "591237", 588 | "560059", 589 | "560023", 590 | "573118", 591 | "560001", 592 | "560070", 593 | "560090", 594 | "560019", 595 | "560092", 596 | "574111", 597 | "560025", 598 | "573116", 599 | "576101", 600 | "560094", 601 | "570030", 602 | "560027", 603 | "570016", 604 | "560079", 605 | "590001", 606 | "560089", 607 | "560037", 608 | "580031", 609 | "560033", 610 | "577004", 611 | "590006", 612 | "560084", 613 | "560103", 614 | "560102", 615 | "560029", 616 | "574227", 617 | "570001", 618 | "572103", 619 | "560087", 620 | "572129", 621 | "560017", 622 | "560099", 623 | "585104", 624 | "585105", 625 | "560018", 626 | "560113", 627 | "560055", 628 | "560083", 629 | "562106", 630 | "576104", 631 | "560091", 632 | "560026", 633 | 
"570020", 634 | "560010", 635 | "590011", 636 | "560049", 637 | "560028", 638 | "560093", 639 | "576217", 640 | "560047", 641 | "577101", 642 | "577002", 643 | "560011", 644 | "560105", 645 | "560005", 646 | "560100", 647 | "571401", 648 | "577526", 649 | "570022", 650 | "585328", 651 | "562107", 652 | "560114", 653 | "575017", 654 | "574105", 655 | "560109", 656 | "560066", 657 | "571124", 658 | "560085", 659 | "570026", 660 | "577204", 661 | "560098", 662 | "560056", 663 | "590010", 664 | "583101", 665 | "560021", 666 | "575005", 667 | "576102", 668 | "570017" 669 | ] 670 | }, 671 | { 672 | "alias": "uttar-pradesh-e", 673 | "_id": "66505ff924e61363e088c414", 674 | "name": "Uttar-Pradesh-E", 675 | "pincodes": [ 676 | "208012", 677 | "226006", 678 | "208022", 679 | "209727", 680 | "262902", 681 | "212217", 682 | "243001", 683 | "226024", 684 | "284128", 685 | "226012", 686 | "262701", 687 | "225001", 688 | "224001", 689 | "224129", 690 | "208014", 691 | "211002", 692 | "211013", 693 | "231216", 694 | "211015", 695 | "226008", 696 | "221007", 697 | "207302", 698 | "283204", 699 | "230204", 700 | "222001", 701 | "208010", 702 | "276202", 703 | "226017", 704 | "247776", 705 | "244701", 706 | "211008", 707 | "209859", 708 | "226021", 709 | "284403", 710 | "226203", 711 | "208006", 712 | "221105", 713 | "209217", 714 | "206242", 715 | "231307", 716 | "284419", 717 | "210001", 718 | "244901", 719 | "229206", 720 | "233001", 721 | "211012", 722 | "208027", 723 | "226002", 724 | "209625", 725 | "229001", 726 | "283203", 727 | "226003", 728 | "285001", 729 | "209101", 730 | "276001", 731 | "208026", 732 | "231210", 733 | "208004", 734 | "284002", 735 | "242001", 736 | "211019", 737 | "208002", 738 | "226022", 739 | "244601", 740 | "211001", 741 | "226016", 742 | "284003", 743 | "208016", 744 | "212301", 745 | "226019", 746 | "211011", 747 | "261001", 748 | "226010", 749 | "208011", 750 | "226028", 751 | "226020", 752 | "244412", 753 | "221005", 754 | "243005", 755 | "221304", 756 | "226030", 757 | "221010", 758 | "221301", 759 | "224227", 760 | "206243", 761 | "208017", 762 | "241001", 763 | "226029", 764 | "208007", 765 | "226013", 766 | "221102", 767 | "243006", 768 | "206130", 769 | "226007", 770 | "241303", 771 | "221011", 772 | "226011", 773 | "208020", 774 | "221603", 775 | "209401", 776 | "208021", 777 | "221004", 778 | "221001", 779 | "261303", 780 | "209801", 781 | "243122", 782 | "206244", 783 | "224190", 784 | "208025", 785 | "228001", 786 | "226001", 787 | "262001", 788 | "226023", 789 | "211003", 790 | "226005", 791 | "226004", 792 | "211006", 793 | "211016", 794 | "209861", 795 | "225003", 796 | "208001", 797 | "221003", 798 | "226201", 799 | "208005", 800 | "284001", 801 | "208024", 802 | "211004", 803 | "207001", 804 | "232101", 805 | "221002", 806 | "230001", 807 | "209202" 808 | ] 809 | }, 810 | { 811 | "alias": "assam", 812 | "_id": "66505ffb6510ee3d5903fef8", 813 | "name": "Assam", 814 | "pincodes": [ 815 | "781032", 816 | "781014", 817 | "781022", 818 | "781003", 819 | "781012", 820 | "781039", 821 | "781301", 822 | "781027", 823 | "781335", 824 | "785007", 825 | "781038", 826 | "785001", 827 | "788005", 828 | "783335", 829 | "782002", 830 | "782003", 831 | "786125", 832 | "781006", 833 | "781005", 834 | "781034", 835 | "786602", 836 | "786005", 837 | "782103", 838 | "781035", 839 | "788010", 840 | "783324", 841 | "781007", 842 | "788009", 843 | "786183", 844 | "781029", 845 | "786171", 846 | "788710", 847 | "784521", 848 | "781171", 849 | "785004", 850 | "782402", 851 | 
"781028", 852 | "781001", 853 | "782001" 854 | ] 855 | }, 856 | { 857 | "alias": "pune-br", 858 | "_id": "66506004a7cddee1b8adb014", 859 | "name": "Pune-Br", 860 | "pincodes": [ 861 | "431122", 862 | "412101", 863 | "400706", 864 | "401303", 865 | "402201", 866 | "425507", 867 | "413515", 868 | "431517", 869 | "411041", 870 | "444601", 871 | "421312", 872 | "444705", 873 | "411033", 874 | "444501", 875 | "441912", 876 | "440027", 877 | "441904", 878 | "413517", 879 | "411042", 880 | "440026", 881 | "411040", 882 | "441110", 883 | "441111", 884 | "444001", 885 | "413501", 886 | "440036", 887 | "411069", 888 | "412105", 889 | "411026", 890 | "411017", 891 | "444506", 892 | "442705", 893 | "442907", 894 | "441203", 895 | "413307", 896 | "444605", 897 | "441601", 898 | "440003", 899 | "411037", 900 | "411075", 901 | "400708", 902 | "431604", 903 | "425309", 904 | "441204", 905 | "441905", 906 | "431002", 907 | "412308", 908 | "402109", 909 | "411027", 910 | "411047", 911 | "416303", 912 | "441123", 913 | "411068", 914 | "431605", 915 | "440023", 916 | "444004", 917 | "442905", 918 | "442401", 919 | "415539", 920 | "440002", 921 | "413606", 922 | "412307", 923 | "431005", 924 | "425002", 925 | "411015", 926 | "402107", 927 | "411016", 928 | "440017", 929 | "411009", 930 | "411006", 931 | "415409", 932 | "441614", 933 | "444803", 934 | "440025", 935 | "444602", 936 | "445001", 937 | "444604", 938 | "425001", 939 | "410507", 940 | "411035", 941 | "410401", 942 | "411045", 943 | "412409", 944 | "411002", 945 | "444606", 946 | "411048", 947 | "411032", 948 | "413527", 949 | "411052", 950 | "411018", 951 | "412208", 952 | "410209", 953 | "416436", 954 | "400709", 955 | "411011", 956 | "400710", 957 | "411023", 958 | "402302", 959 | "411014", 960 | "411057", 961 | "401305", 962 | "440030", 963 | "411060", 964 | "442901", 965 | "401201", 966 | "416302", 967 | "440005", 968 | "411039", 969 | "411044", 970 | "411062", 971 | "440013", 972 | "441108", 973 | "411020", 974 | "440010", 975 | "442001", 976 | "411036", 977 | "401208", 978 | "411019", 979 | "441122", 980 | "440015", 981 | "411030", 982 | "416406", 983 | "425405", 984 | "411028", 985 | "440008", 986 | "411051", 987 | "440014", 988 | "431136", 989 | "411046", 990 | "412201", 991 | "440018", 992 | "440009", 993 | "401209", 994 | "431003", 995 | "400094", 996 | "411061", 997 | "411043", 998 | "411038", 999 | "411001", 1000 | "411007", 1001 | "413216", 1002 | "416106", 1003 | "415110", 1004 | "442301", 1005 | "444505", 1006 | "444002", 1007 | "411021", 1008 | "400703", 1009 | "410208", 1010 | "444101", 1011 | "412207", 1012 | "441106", 1013 | "410218", 1014 | "441404", 1015 | "400614", 1016 | "400612", 1017 | "440001", 1018 | "440022", 1019 | "413133", 1020 | "416415", 1021 | "411004", 1022 | "440032", 1023 | "411013", 1024 | "400701", 1025 | "444403", 1026 | "442605", 1027 | "444005", 1028 | "442903", 1029 | "401606", 1030 | "411012", 1031 | "442701", 1032 | "440019", 1033 | "431606", 1034 | "410505", 1035 | "401304", 1036 | "441206", 1037 | "401202", 1038 | "410210", 1039 | "440037", 1040 | "411031", 1041 | "411058", 1042 | "425409", 1043 | "411024", 1044 | "416014", 1045 | "445304", 1046 | "431602", 1047 | "440024", 1048 | "442102", 1049 | "400705", 1050 | "412115", 1051 | "425201" 1052 | ] 1053 | }, 1054 | { 1055 | "alias": "up-ncr", 1056 | "_id": "66505ff8c8f2d6e221b9180c", 1057 | "name": "Up-Ncr", 1058 | "pincodes": [ 1059 | "201014", 1060 | "201102", 1061 | "201306", 1062 | "201007", 1063 | "251001", 1064 | "203201", 1065 | "201012", 1066 | 
"246747", 1067 | "202001", 1068 | "282010", 1069 | "244001", 1070 | "247001", 1071 | "201305", 1072 | "281502", 1073 | "201301", 1074 | "201307", 1075 | "202124", 1076 | "201318", 1077 | "201003", 1078 | "201009", 1079 | "201309", 1080 | "201314", 1081 | "201304", 1082 | "281001", 1083 | "282004", 1084 | "201010", 1085 | "201001", 1086 | "201005", 1087 | "281003", 1088 | "201013", 1089 | "201308", 1090 | "250002", 1091 | "201201", 1092 | "246725", 1093 | "205001", 1094 | "201206", 1095 | "282001", 1096 | "281121", 1097 | "201313", 1098 | "203131", 1099 | "201204", 1100 | "203001", 1101 | "251003", 1102 | "281004", 1103 | "202002", 1104 | "201011", 1105 | "281406", 1106 | "281403", 1107 | "250001", 1108 | "244221", 1109 | "245304", 1110 | "250619", 1111 | "282005", 1112 | "201017", 1113 | "201002", 1114 | "203207", 1115 | "250005", 1116 | "251203", 1117 | "201310", 1118 | "282003", 1119 | "202126", 1120 | "246701", 1121 | "244411", 1122 | "282002", 1123 | "282007", 1124 | "250110", 1125 | "201016", 1126 | "250004", 1127 | "201303", 1128 | "281122", 1129 | "245101", 1130 | "201316", 1131 | "281006" 1132 | ] 1133 | }, 1134 | { 1135 | "alias": "bihar", 1136 | "_id": "66505ff9af6a3c7411d2f55f", 1137 | "name": "Bihar", 1138 | "pincodes": [ 1139 | "800014", 1140 | "852127", 1141 | "800026", 1142 | "842002", 1143 | "854318", 1144 | "824234", 1145 | "847232", 1146 | "854106", 1147 | "847226", 1148 | "802156", 1149 | "800007", 1150 | "824231", 1151 | "801507", 1152 | "813209", 1153 | "821109", 1154 | "854105", 1155 | "854325", 1156 | "800001", 1157 | "847404", 1158 | "800008", 1159 | "821307", 1160 | "804453", 1161 | "800023", 1162 | "847203", 1163 | "841226", 1164 | "843302", 1165 | "848101", 1166 | "844101", 1167 | "853204", 1168 | "800010", 1169 | "813213", 1170 | "845101", 1171 | "842005", 1172 | "812002", 1173 | "804429", 1174 | "800006", 1175 | "800025", 1176 | "811201", 1177 | "851101", 1178 | "845303", 1179 | "800027", 1180 | "800011", 1181 | "821115", 1182 | "821101", 1183 | "804408", 1184 | "813210", 1185 | "847211", 1186 | "800020", 1187 | "803118", 1188 | "847212", 1189 | "846004", 1190 | "801111", 1191 | "811311", 1192 | "824101", 1193 | "846006", 1194 | "805130", 1195 | "803215", 1196 | "842004", 1197 | "801105", 1198 | "852131", 1199 | "800004", 1200 | "801104", 1201 | "841205", 1202 | "852201", 1203 | "812001", 1204 | "842001", 1205 | "845401", 1206 | "801103", 1207 | "803110", 1208 | "801106", 1209 | "800013", 1210 | "800018", 1211 | "846001", 1212 | "801505", 1213 | "854301", 1214 | "823001", 1215 | "813203", 1216 | "800003", 1217 | "823003", 1218 | "801503", 1219 | "847103", 1220 | "811211" 1221 | ] 1222 | }, 1223 | { 1224 | "alias": "madhya-pradesh", 1225 | "_id": "66505ff6d9346de216752cd7", 1226 | "name": "Madhya-Pradesh", 1227 | "pincodes": [ 1228 | "452012", 1229 | "462004", 1230 | "475661", 1231 | "452014", 1232 | "462010", 1233 | "483504", 1234 | "474009", 1235 | "474002", 1236 | "483501", 1237 | "465337", 1238 | "453552", 1239 | "462011", 1240 | "452008", 1241 | "464993", 1242 | "462037", 1243 | "466116", 1244 | "465441", 1245 | "474015", 1246 | "462039", 1247 | "452010", 1248 | "452016", 1249 | "466001", 1250 | "481661", 1251 | "452003", 1252 | "462016", 1253 | "483220", 1254 | "462002", 1255 | "452017", 1256 | "462041", 1257 | "473001", 1258 | "473551", 1259 | "452009", 1260 | "461001", 1261 | "456006", 1262 | "452020", 1263 | "484887", 1264 | "462021", 1265 | "450331", 1266 | "466114", 1267 | "458775", 1268 | "451001", 1269 | "457001", 1270 | "470661", 1271 | "470001", 
1272 | "482005", 1273 | "452001", 1274 | "450001", 1275 | "462022", 1276 | "465683", 1277 | "471606", 1278 | "455001", 1279 | "456001", 1280 | "457661", 1281 | "473990", 1282 | "461228", 1283 | "481441", 1284 | "462001", 1285 | "484551", 1286 | "456010", 1287 | "476001", 1288 | "454446", 1289 | "458118", 1290 | "462036", 1291 | "474020", 1292 | "474001", 1293 | "464551", 1294 | "480001", 1295 | "462020", 1296 | "462024", 1297 | "462043", 1298 | "453441", 1299 | "458664", 1300 | "456335", 1301 | "474006", 1302 | "465674", 1303 | "481776", 1304 | "482003", 1305 | "462003", 1306 | "485001", 1307 | "451111", 1308 | "474005", 1309 | "452005", 1310 | "461775", 1311 | "462023", 1312 | "475110", 1313 | "486001", 1314 | "482008", 1315 | "462027", 1316 | "462066", 1317 | "460001", 1318 | "462026", 1319 | "462032", 1320 | "462042", 1321 | "453556", 1322 | "484886", 1323 | "481222", 1324 | "480661", 1325 | "474011", 1326 | "454331", 1327 | "452006", 1328 | "482002", 1329 | "452018", 1330 | "482001", 1331 | "483775", 1332 | "462030", 1333 | "452002", 1334 | "474012", 1335 | "465333", 1336 | "456331", 1337 | "486886" 1338 | ] 1339 | }, 1340 | { 1341 | "alias": "kerala", 1342 | "_id": "66505ff2998183e1b1935ccd", 1343 | "name": "Kerala", 1344 | "pincodes": [ 1345 | "682316", 1346 | "671124", 1347 | "673592", 1348 | "680751", 1349 | "691532", 1350 | "683511", 1351 | "695601", 1352 | "680567", 1353 | "682511", 1354 | "695038", 1355 | "673611", 1356 | "680655", 1357 | "680007", 1358 | "680002", 1359 | "670308", 1360 | "678531", 1361 | "689542", 1362 | "695581", 1363 | "678510", 1364 | "682024", 1365 | "685589", 1366 | "673104", 1367 | "680618", 1368 | "678507", 1369 | "679340", 1370 | "683512", 1371 | "671316", 1372 | "670602", 1373 | "678005", 1374 | "673577", 1375 | "695009", 1376 | "695005", 1377 | "680302", 1378 | "682301", 1379 | "679321", 1380 | "683544", 1381 | "682011", 1382 | "686513", 1383 | "682304", 1384 | "670701", 1385 | "673017", 1386 | "683565", 1387 | "670011", 1388 | "673010", 1389 | "690106", 1390 | "673003", 1391 | "670561", 1392 | "695019", 1393 | "690525", 1394 | "682025", 1395 | "682042", 1396 | "670645", 1397 | "683101", 1398 | "689656", 1399 | "673596", 1400 | "670302", 1401 | "673302", 1402 | "678103", 1403 | "682310", 1404 | "682034", 1405 | "682001", 1406 | "695017", 1407 | "686008", 1408 | "680006", 1409 | "673616", 1410 | "691551", 1411 | "683572", 1412 | "686020", 1413 | "695316", 1414 | "670631", 1415 | "682021", 1416 | "680665", 1417 | "682026", 1418 | "682033", 1419 | "686518", 1420 | "691601", 1421 | "670674", 1422 | "680307", 1423 | "678506", 1424 | "682031", 1425 | "682019", 1426 | "683561", 1427 | "670595", 1428 | "673639", 1429 | "682028", 1430 | "689101", 1431 | "695010", 1432 | "695021", 1433 | "670107", 1434 | "683547", 1435 | "671531", 1436 | "682030", 1437 | "695003", 1438 | "682017", 1439 | "686585", 1440 | "679322", 1441 | "691578", 1442 | "695583", 1443 | "671314", 1444 | "676506", 1445 | "690519", 1446 | "673601", 1447 | "695033", 1448 | "689585", 1449 | "682501", 1450 | "695035", 1451 | "689124", 1452 | "682303", 1453 | "691002", 1454 | "676504", 1455 | "695028", 1456 | "678551", 1457 | "682020", 1458 | "676553", 1459 | "673016", 1460 | "671315", 1461 | "695615", 1462 | "695020", 1463 | "691001", 1464 | "688538", 1465 | "695015", 1466 | "690544", 1467 | "695543", 1468 | "683594", 1469 | "673571", 1470 | "680010", 1471 | "680566", 1472 | "670002", 1473 | "682023", 1474 | "695001", 1475 | "676552", 1476 | "678633", 1477 | "695024", 1478 | "682006", 1479 | 
"682012", 1480 | "680687", 1481 | "695008", 1482 | "671311", 1483 | "682037", 1484 | "686543", 1485 | "695011", 1486 | "682306" 1487 | ] 1488 | }, 1489 | { 1490 | "alias": "odisha", 1491 | "_id": "66505ffeaf6a3c7411d2f62c", 1492 | "name": "Odisha", 1493 | "pincodes": [ 1494 | "751009", 1495 | "753001", 1496 | "768017", 1497 | "769015", 1498 | "754211", 1499 | "770036", 1500 | "767002", 1501 | "754021", 1502 | "757001", 1503 | "753009", 1504 | "759117", 1505 | "759147", 1506 | "752050", 1507 | "751030", 1508 | "751019", 1509 | "768202", 1510 | "751016", 1511 | "754250", 1512 | "756001", 1513 | "769001", 1514 | "753004", 1515 | "759122", 1516 | "768028", 1517 | "751001", 1518 | "752001", 1519 | "751003", 1520 | "751002", 1521 | "769042", 1522 | "760008", 1523 | "755020", 1524 | "768001", 1525 | "751025", 1526 | "752062", 1527 | "753008", 1528 | "753015", 1529 | "751024", 1530 | "762101", 1531 | "751021", 1532 | "753014", 1533 | "767033", 1534 | "760001", 1535 | "760010", 1536 | "753007", 1537 | "751020", 1538 | "760002", 1539 | "768018", 1540 | "767040", 1541 | "751018", 1542 | "757107", 1543 | "759001", 1544 | "751006", 1545 | "766001", 1546 | "769008", 1547 | "758034", 1548 | "757043", 1549 | "756125", 1550 | "753003", 1551 | "751004", 1552 | "764001", 1553 | "769003", 1554 | "751013", 1555 | "768004", 1556 | "759145", 1557 | "757037", 1558 | "754103", 1559 | "751029", 1560 | "751012", 1561 | "754005", 1562 | "751007", 1563 | "756048", 1564 | "751015", 1565 | "754141", 1566 | "752104", 1567 | "766107" 1568 | ] 1569 | }, 1570 | { 1571 | "alias": "gujarat", 1572 | "_id": "66505ff06510ee3d5903fd42", 1573 | "name": "Gujarat", 1574 | "pincodes": [ 1575 | "396436", 1576 | "380050", 1577 | "383325", 1578 | "390013", 1579 | "382006", 1580 | "380009", 1581 | "388120", 1582 | "382443", 1583 | "382424", 1584 | "395001", 1585 | "382210", 1586 | "390007", 1587 | "380007", 1588 | "360003", 1589 | "395009", 1590 | "395010", 1591 | "364004", 1592 | "364001", 1593 | "388121", 1594 | "383255", 1595 | "380052", 1596 | "363642", 1597 | "360004", 1598 | "388540", 1599 | "382475", 1600 | "360007", 1601 | "380001", 1602 | "389001", 1603 | "380015", 1604 | "380014", 1605 | "380058", 1606 | "384355", 1607 | "380021", 1608 | "390024", 1609 | "390023", 1610 | "390012", 1611 | "380005", 1612 | "382415", 1613 | "361142", 1614 | "380060", 1615 | "362001", 1616 | "360005", 1617 | "390011", 1618 | "380013", 1619 | "365601", 1620 | "382003", 1621 | "390016", 1622 | "393010", 1623 | "390021", 1624 | "382016", 1625 | "380019", 1626 | "380061", 1627 | "380022", 1628 | "382330", 1629 | "384002", 1630 | "396001", 1631 | "363035", 1632 | "389350", 1633 | "382481", 1634 | "380054", 1635 | "382445", 1636 | "390001", 1637 | "394305", 1638 | "391760", 1639 | "385421", 1640 | "392001", 1641 | "380016", 1642 | "382426", 1643 | "363001", 1644 | "363030", 1645 | "394540", 1646 | "390008", 1647 | "361347", 1648 | "370201", 1649 | "380024", 1650 | "380003", 1651 | "382345", 1652 | "382715", 1653 | "384315", 1654 | "394210", 1655 | "382480", 1656 | "396424", 1657 | "395005", 1658 | "370421", 1659 | "361008", 1660 | "361005", 1661 | "361003", 1662 | "380063", 1663 | "380059", 1664 | "390004", 1665 | "380055", 1666 | "384001", 1667 | "396445", 1668 | "361004", 1669 | "395006", 1670 | "394107", 1671 | "394327", 1672 | "395008", 1673 | "364002", 1674 | "360575", 1675 | "389151", 1676 | "395017", 1677 | "388640", 1678 | "385001", 1679 | "396195", 1680 | "394510", 1681 | "382350", 1682 | "384440", 1683 | "382110", 1684 | "396125", 1685 | 
"395007", 1686 | "380004", 1687 | "390019", 1688 | "361006", 1689 | "382010", 1690 | "390006", 1691 | "382355", 1692 | "382427", 1693 | "395023", 1694 | "370110", 1695 | "380008", 1696 | "380027", 1697 | "390020", 1698 | "384170", 1699 | "388001", 1700 | "382028", 1701 | "391740", 1702 | "370001", 1703 | "384265", 1704 | "388325", 1705 | "382042", 1706 | "394520", 1707 | "394101", 1708 | "396590", 1709 | "396321", 1710 | "391410", 1711 | "382470", 1712 | "382421", 1713 | "380051", 1714 | "395004", 1715 | "380026", 1716 | "396191", 1717 | "382725", 1718 | "360001", 1719 | "382305", 1720 | "382007", 1721 | "390002" 1722 | ] 1723 | }, 1724 | { 1725 | "alias": "haryana", 1726 | "_id": "66505ff5af6a3c7411d2f4b2", 1727 | "name": "Haryana", 1728 | "pincodes": [ 1729 | "122098", 1730 | "136129", 1731 | "121005", 1732 | "133001", 1733 | "134109", 1734 | "121002", 1735 | "122505", 1736 | "133201", 1737 | "134116", 1738 | "125120", 1739 | "122003", 1740 | "123401", 1741 | "134113", 1742 | "123302", 1743 | "121013", 1744 | "132001", 1745 | "122050", 1746 | "127021", 1747 | "122052", 1748 | "125050", 1749 | "122004", 1750 | "136131", 1751 | "126116", 1752 | "122011", 1753 | "121008", 1754 | "125001", 1755 | "131027", 1756 | "125005", 1757 | "125006", 1758 | "122017", 1759 | "135003", 1760 | "127306", 1761 | "122015", 1762 | "125051", 1763 | "131023", 1764 | "132116", 1765 | "134112", 1766 | "121006", 1767 | "135001", 1768 | "136027", 1769 | "122022", 1770 | "122009", 1771 | "122010", 1772 | "134117", 1773 | "125055", 1774 | "134203", 1775 | "122101", 1776 | "131028", 1777 | "122006", 1778 | "132140", 1779 | "122005", 1780 | "126102", 1781 | "122018", 1782 | "121009", 1783 | "121003", 1784 | "121004", 1785 | "136118", 1786 | "122413", 1787 | "124103", 1788 | "133207", 1789 | "134003", 1790 | "122002", 1791 | "134114", 1792 | "122007", 1793 | "121010", 1794 | "124507", 1795 | "122103", 1796 | "132117", 1797 | "131001", 1798 | "123303", 1799 | "132101", 1800 | "123001", 1801 | "125033", 1802 | "132103", 1803 | "122102", 1804 | "121001", 1805 | "122016", 1806 | "133006", 1807 | "123106", 1808 | "125004", 1809 | "124001", 1810 | "122001", 1811 | "123501", 1812 | "124108", 1813 | "122105", 1814 | "125121", 1815 | "133203", 1816 | "136119", 1817 | "121007", 1818 | "136135" 1819 | ] 1820 | }, 1821 | { 1822 | "alias": "mumbai-br", 1823 | "_id": "66506000c8f2d6e221b9193a", 1824 | "name": "Mumbai-Br", 1825 | "pincodes": [ 1826 | "421308", 1827 | "421505", 1828 | "422003", 1829 | "400056", 1830 | "400010", 1831 | "400602", 1832 | "400060", 1833 | "400104", 1834 | "400061", 1835 | "400011", 1836 | "400055", 1837 | "400019", 1838 | "400014", 1839 | "400017", 1840 | "400004", 1841 | "422002", 1842 | "421605", 1843 | "410221", 1844 | "400059", 1845 | "400101", 1846 | "422012", 1847 | "400052", 1848 | "400078", 1849 | "400058", 1850 | "421004", 1851 | "400089", 1852 | "400068", 1853 | "400005", 1854 | "400054", 1855 | "401107", 1856 | "401301", 1857 | "400075", 1858 | "422013", 1859 | "400012", 1860 | "421001", 1861 | "400013", 1862 | "400016", 1863 | "421301", 1864 | "400093", 1865 | "422101", 1866 | "400066", 1867 | "400074", 1868 | "421005", 1869 | "421203", 1870 | "400053", 1871 | "400025", 1872 | "421202", 1873 | "400033", 1874 | "400008", 1875 | "400092", 1876 | "400606", 1877 | "400076", 1878 | "400069", 1879 | "421305", 1880 | "400015", 1881 | "421003", 1882 | "400018", 1883 | "401501", 1884 | "400037", 1885 | "401506", 1886 | "421503", 1887 | "401101", 1888 | "422009", 1889 | "400049", 1890 | "400031", 1891 | 
"400050", 1892 | "400106", 1893 | "410206", 1894 | "400103", 1895 | "422006", 1896 | "400603", 1897 | "422004", 1898 | "422011", 1899 | "400087", 1900 | "400088", 1901 | "400063", 1902 | "400057", 1903 | "400091", 1904 | "400077", 1905 | "400102", 1906 | "421501", 1907 | "400099", 1908 | "400022", 1909 | "400072", 1910 | "400608", 1911 | "400024", 1912 | "400097", 1913 | "400208", 1914 | "400604", 1915 | "400027", 1916 | "400071", 1917 | "400083", 1918 | "400009", 1919 | "401203", 1920 | "422206", 1921 | "400082", 1922 | "400051", 1923 | "400095", 1924 | "400030", 1925 | "400084", 1926 | "421103", 1927 | "400028", 1928 | "400707", 1929 | "400615", 1930 | "401504", 1931 | "421306", 1932 | "400702", 1933 | "400081", 1934 | "400096", 1935 | "400080", 1936 | "401105", 1937 | "400098", 1938 | "400070", 1939 | "422001", 1940 | "422005", 1941 | "400605", 1942 | "400001", 1943 | "400086", 1944 | "400026", 1945 | "400002", 1946 | "421302", 1947 | "400042", 1948 | "400067", 1949 | "400601", 1950 | "422010", 1951 | "400607", 1952 | "400064", 1953 | "410222", 1954 | "400079", 1955 | "401404", 1956 | "400065", 1957 | "400043", 1958 | "400610" 1959 | ] 1960 | }, 1961 | { 1962 | "alias": "delhi", 1963 | "_id": "66505ff5145c16635e6cc74d", 1964 | "name": "Delhi", 1965 | "pincodes": [ 1966 | "110055", 1967 | "110031", 1968 | "110067", 1969 | "110062", 1970 | "110008", 1971 | "110018", 1972 | "110025", 1973 | "110065", 1974 | "110033", 1975 | "110002", 1976 | "110092", 1977 | "110082", 1978 | "110017", 1979 | "110087", 1980 | "110059", 1981 | "110043", 1982 | "110047", 1983 | "110071", 1984 | "110028", 1985 | "110003", 1986 | "110088", 1987 | "110023", 1988 | "110019", 1989 | "110010", 1990 | "110070", 1991 | "110005", 1992 | "110053", 1993 | "110048", 1994 | "110083", 1995 | "110038", 1996 | "110089", 1997 | "110013", 1998 | "110042", 1999 | "110044", 2000 | "110012", 2001 | "110094", 2002 | "110057", 2003 | "110063", 2004 | "110084", 2005 | "110009", 2006 | "110016", 2007 | "110061", 2008 | "110086", 2009 | "110076", 2010 | "110054", 2011 | "110037", 2012 | "110021", 2013 | "110096", 2014 | "110060", 2015 | "110081", 2016 | "110022", 2017 | "110077", 2018 | "110014", 2019 | "110020", 2020 | "110036", 2021 | "110041", 2022 | "110075", 2023 | "110073", 2024 | "110035", 2025 | "110085", 2026 | "110066", 2027 | "110026", 2028 | "110001", 2029 | "110007", 2030 | "110015", 2031 | "110095", 2032 | "110091", 2033 | "110045", 2034 | "110030", 2035 | "110058", 2036 | "110046", 2037 | "110027", 2038 | "110024", 2039 | "110064", 2040 | "110040", 2041 | "110011", 2042 | "110034", 2043 | "110068", 2044 | "110052", 2045 | "110078", 2046 | "110049", 2047 | "110097", 2048 | "110080", 2049 | "110093", 2050 | "110051", 2051 | "110032", 2052 | "110006", 2053 | "110074", 2054 | "110029" 2055 | ] 2056 | }, 2057 | { 2058 | "alias": "chandigarh", 2059 | "_id": "66505ff1672747740fb388ec", 2060 | "name": "Chandigarh", 2061 | "pincodes": [ 2062 | "160018", 2063 | "160012", 2064 | "160036", 2065 | "160019", 2066 | "160022", 2067 | "160030", 2068 | "160023", 2069 | "160002", 2070 | "160031", 2071 | "160047", 2072 | "160020", 2073 | "160003", 2074 | "160014", 2075 | "160101" 2076 | ] 2077 | }, 2078 | { 2079 | "alias": "chhattisgarh", 2080 | "_id": "66506002998183e1b1935f41", 2081 | "name": "Chhattisgarh", 2082 | "pincodes": [ 2083 | "492007", 2084 | "495004", 2085 | "490023", 2086 | "492101", 2087 | "495001", 2088 | "493445", 2089 | "492013", 2090 | "490006", 2091 | "494001", 2092 | "492014", 2093 | "492006", 2094 | "491441", 2095 | 
"497339", 2096 | "495671", 2097 | "490001", 2098 | "493118", 2099 | "490020", 2100 | "493196", 2101 | "493551", 2102 | "491001", 2103 | "493778", 2104 | "491107", 2105 | "492099", 2106 | "490026", 2107 | "492004", 2108 | "494226", 2109 | "495450", 2110 | "492001", 2111 | "494334", 2112 | "490024", 2113 | "497335", 2114 | "497225", 2115 | "495684", 2116 | "494122", 2117 | "492015", 2118 | "495006", 2119 | "490042" 2120 | ] 2121 | }, 2122 | { 2123 | "alias": "jharkhand", 2124 | "_id": "66505ffb998183e1b1935dee", 2125 | "name": "Jharkhand", 2126 | "pincodes": [ 2127 | "831002", 2128 | "835103", 2129 | "815353", 2130 | "833201", 2131 | "829122", 2132 | "814112", 2133 | "834005", 2134 | "831004", 2135 | "828122", 2136 | "834007", 2137 | "828108", 2138 | "834001", 2139 | "828109", 2140 | "822116", 2141 | "834012", 2142 | "825301", 2143 | "832110", 2144 | "829150", 2145 | "831009", 2146 | "831017", 2147 | "835303", 2148 | "831005", 2149 | "831003", 2150 | "831001", 2151 | "814152", 2152 | "827001", 2153 | "828203", 2154 | "834003", 2155 | "825409", 2156 | "831013", 2157 | "834004", 2158 | "831007", 2159 | "835215", 2160 | "831011", 2161 | "828114", 2162 | "828104", 2163 | "834006", 2164 | "831020", 2165 | "814133", 2166 | "827006", 2167 | "825405", 2168 | "814101", 2169 | "834002", 2170 | "835217", 2171 | "828202" 2172 | ] 2173 | }, 2174 | { 2175 | "alias": "rajasthan", 2176 | "_id": "66505ff824e61363e088c3dd", 2177 | "name": "Rajasthan", 2178 | "pincodes": [ 2179 | "311001", 2180 | "303905", 2181 | "323001", 2182 | "314025", 2183 | "302039", 2184 | "303301", 2185 | "335704", 2186 | "302016", 2187 | "321203", 2188 | "312202", 2189 | "302020", 2190 | "342011", 2191 | "305601", 2192 | "302026", 2193 | "343027", 2194 | "302021", 2195 | "313002", 2196 | "313004", 2197 | "331403", 2198 | "332001", 2199 | "333001", 2200 | "335707", 2201 | "303007", 2202 | "344022", 2203 | "327001", 2204 | "334003", 2205 | "342027", 2206 | "313211", 2207 | "302002", 2208 | "307001", 2209 | "331023", 2210 | "314032", 2211 | "321001", 2212 | "322241", 2213 | "342006", 2214 | "303303", 2215 | "302017", 2216 | "302006", 2217 | "335001", 2218 | "324005", 2219 | "302001", 2220 | "305004", 2221 | "302018", 2222 | "322021", 2223 | "303006", 2224 | "301026", 2225 | "301001", 2226 | "312601", 2227 | "335524", 2228 | "305022", 2229 | "304001", 2230 | "306902", 2231 | "305404", 2232 | "324007", 2233 | "302031", 2234 | "302004", 2235 | "335513", 2236 | "302033", 2237 | "335523", 2238 | "305001", 2239 | "331302", 2240 | "302015", 2241 | "302037", 2242 | "334001", 2243 | "313202", 2244 | "342008", 2245 | "305901", 2246 | "324002", 2247 | "335803", 2248 | "342003", 2249 | "342005", 2250 | "324001", 2251 | "314001", 2252 | "342304", 2253 | "302012", 2254 | "302029", 2255 | "305801", 2256 | "301019", 2257 | "333031", 2258 | "302019", 2259 | "307026", 2260 | "335002", 2261 | "313001", 2262 | "342001", 2263 | "312001", 2264 | "306401" 2265 | ] 2266 | }, 2267 | { 2268 | "alias": "andhra-pradesh", 2269 | "_id": "66505ff378117873bb53b542", 2270 | "name": "Andhra-Pradesh", 2271 | "pincodes": [ 2272 | "522601", 2273 | "522501", 2274 | "530013", 2275 | "531173", 2276 | "515004", 2277 | "533105", 2278 | "531055", 2279 | "530041", 2280 | "521225", 2281 | "533106", 2282 | "533004", 2283 | "534211", 2284 | "522004", 2285 | "516360", 2286 | "524137", 2287 | "533308", 2288 | "534006", 2289 | "530017", 2290 | "530026", 2291 | "517507", 2292 | "530048", 2293 | "515002", 2294 | "522007", 2295 | "517583", 2296 | "530035", 2297 | "530043", 2298 | "518008", 
2299 | "522502", 2300 | "522503", 2301 | "520010", 2302 | "533001", 2303 | "530003", 2304 | "532001", 2305 | "522034", 2306 | "521137", 2307 | "517503", 2308 | "522237", 2309 | "518134", 2310 | "517501", 2311 | "524001", 2312 | "530045", 2313 | "533101", 2314 | "531163", 2315 | "530012", 2316 | "530029", 2317 | "518007", 2318 | "515671", 2319 | "520008", 2320 | "530002", 2321 | "534447", 2322 | "530011", 2323 | "533450", 2324 | "521301", 2325 | "521212", 2326 | "520007", 2327 | "532222", 2328 | "534002", 2329 | "517325", 2330 | "515110", 2331 | "532484", 2332 | "517619", 2333 | "535270", 2334 | "533437", 2335 | "515001", 2336 | "534216", 2337 | "518004" 2338 | ] 2339 | }, 2340 | { 2341 | "alias": "punjab", 2342 | "_id": "66505ff3998183e1b1935d0e", 2343 | "name": "Punjab", 2344 | "pincodes": [ 2345 | "147001", 2346 | "144003", 2347 | "152024", 2348 | "160062", 2349 | "144013", 2350 | "144008", 2351 | "145001", 2352 | "140124", 2353 | "143602", 2354 | "160059", 2355 | "140901", 2356 | "140306", 2357 | "140601", 2358 | "148101", 2359 | "141012", 2360 | "147002", 2361 | "147004", 2362 | "144517", 2363 | "151203", 2364 | "140603", 2365 | "144630", 2366 | "144001", 2367 | "141003", 2368 | "140307", 2369 | "144505", 2370 | "144024", 2371 | "151103", 2372 | "160055", 2373 | "143001", 2374 | "141001", 2375 | "141007", 2376 | "140406", 2377 | "142022", 2378 | "160071", 2379 | "144207", 2380 | "151302", 2381 | "143505", 2382 | "140405", 2383 | "141008", 2384 | "152123", 2385 | "141002", 2386 | "141401", 2387 | "140308", 2388 | "160104", 2389 | "141013", 2390 | "148023", 2391 | "147101", 2392 | "140001", 2393 | "144411", 2394 | "140413", 2395 | "148001", 2396 | "140401", 2397 | "144005", 2398 | "143002", 2399 | "144022", 2400 | "144602", 2401 | "148021", 2402 | "147003", 2403 | "148026", 2404 | "141015", 2405 | "140301", 2406 | "142027", 2407 | "144601", 2408 | "151202", 2409 | "140501", 2410 | "151001", 2411 | "147301", 2412 | "144401", 2413 | "160103", 2414 | "144514", 2415 | "147105", 2416 | "144002", 2417 | "146001", 2418 | "148022", 2419 | "143507", 2420 | "147201", 2421 | "140604", 2422 | "140507", 2423 | "140101" 2424 | ] 2425 | }, 2426 | { 2427 | "alias": "himachal-pradesh", 2428 | "_id": "66505ff26510ee3d5903fda9", 2429 | "name": "Himachal-Pradesh", 2430 | "pincodes": [ 2431 | "177204", 2432 | "175008", 2433 | "173205", 2434 | "171002", 2435 | "177005", 2436 | "174303", 2437 | "177043", 2438 | "171006", 2439 | "176304", 2440 | "177001", 2441 | "175129", 2442 | "175018", 2443 | "175005", 2444 | "171219", 2445 | "177203", 2446 | "176318", 2447 | "174319", 2448 | "174315", 2449 | "176057", 2450 | "176102", 2451 | "176061", 2452 | "173025", 2453 | "173212", 2454 | "176081", 2455 | "177033", 2456 | "177207", 2457 | "173229", 2458 | "177201", 2459 | "176001" 2460 | ] 2461 | }, 2462 | { 2463 | "alias": "uttrakhand", 2464 | "_id": "66505ff8a7cddee1b8adae9d", 2465 | "name": "Uttrakhand", 2466 | "pincodes": [ 2467 | "263145", 2468 | "248146", 2469 | "244713", 2470 | "248006", 2471 | "263653", 2472 | "248140", 2473 | "246001", 2474 | "249408", 2475 | "273001", 2476 | "248003", 2477 | "275101", 2478 | "273008", 2479 | "272001", 2480 | "244715", 2481 | "248001", 2482 | "247667", 2483 | "272161", 2484 | "273004", 2485 | "273005", 2486 | "249203", 2487 | "249201", 2488 | "248171", 2489 | "262405", 2490 | "273010", 2491 | "262501", 2492 | "263126", 2493 | "248198", 2494 | "263153", 2495 | "249405", 2496 | "249205", 2497 | "248007", 2498 | "246401", 2499 | "263601", 2500 | "248002", 2501 | "272207", 2502 | 
"273017", 2503 | "273007", 2504 | "262551", 2505 | "248121", 2506 | "248005", 2507 | "248008", 2508 | "263152", 2509 | "249407", 2510 | "248014", 2511 | "273003", 2512 | "263139", 2513 | "249403", 2514 | "249401", 2515 | "248195", 2516 | "273015", 2517 | "262524", 2518 | "263159", 2519 | "274001", 2520 | "273006", 2521 | "272189", 2522 | "274303" 2523 | ] 2524 | }, 2525 | { 2526 | "alias": "nashik-br", 2527 | "_id": "66506002c8f2d6e221b91988", 2528 | "name": "Nashik-Br", 2529 | "pincodes": [ 2530 | "413705", 2531 | "414001", 2532 | "413736", 2533 | "423401", 2534 | "422306", 2535 | "414111", 2536 | "422103", 2537 | "422605", 2538 | "422403", 2539 | "413709", 2540 | "414304", 2541 | "412210", 2542 | "414003" 2543 | ] 2544 | }, 2545 | { 2546 | "alias": "jandk", 2547 | "_id": "66505ff6f40e263cf5587fb5", 2548 | "name": "Jandk", 2549 | "pincodes": [ 2550 | "180005", 2551 | "181205", 2552 | "184102", 2553 | "182121", 2554 | "180015", 2555 | "184121", 2556 | "182301", 2557 | "185151", 2558 | "190009", 2559 | "184120", 2560 | "180019", 2561 | "182101", 2562 | "181133", 2563 | "190003", 2564 | "184101", 2565 | "181141", 2566 | "191113", 2567 | "191121", 2568 | "180011", 2569 | "180010", 2570 | "181221", 2571 | "191201", 2572 | "181121", 2573 | "190008", 2574 | "190023", 2575 | "193101", 2576 | "191111", 2577 | "180002", 2578 | "181131", 2579 | "180001", 2580 | "180003", 2581 | "190010", 2582 | "180020", 2583 | "180004", 2584 | "192101" 2585 | ] 2586 | }, 2587 | { 2588 | "alias": "nagaland", 2589 | "_id": "66505ffd24e61363e088c4a5", 2590 | "name": "Nagaland", 2591 | "pincodes": [ 2592 | "797115", 2593 | "797112", 2594 | "797103" 2595 | ] 2596 | }, 2597 | { 2598 | "alias": "aurangabad-br", 2599 | "_id": "66506002aa64743ceefbecf1", 2600 | "name": "Aurangabad-Br", 2601 | "pincodes": [ 2602 | "431804", 2603 | "431512", 2604 | "431001", 2605 | "443103", 2606 | "423701", 2607 | "431203", 2608 | "431513", 2609 | "431714", 2610 | "431401", 2611 | "431712", 2612 | "431601", 2613 | "424002" 2614 | ] 2615 | }, 2616 | { 2617 | "alias": "goa", 2618 | "_id": "66506005147d6c73c1110115", 2619 | "name": "Goa", 2620 | "pincodes": [ 2621 | "403507", 2622 | "416534", 2623 | "403706", 2624 | "403726", 2625 | "403505", 2626 | "403711", 2627 | "403707", 2628 | "403602", 2629 | "416701", 2630 | "403114", 2631 | "415722", 2632 | "403506", 2633 | "403601", 2634 | "415612", 2635 | "403002", 2636 | "403511", 2637 | "403110", 2638 | "415606", 2639 | "403802", 2640 | "403512", 2641 | "403708", 2642 | "403401", 2643 | "403001", 2644 | "415711", 2645 | "416603", 2646 | "403703" 2647 | ] 2648 | }, 2649 | { 2650 | "alias": "solapur-br", 2651 | "_id": "66506004145c16635e6cc914", 2652 | "name": "Solapur-Br", 2653 | "pincodes": [ 2654 | "413001", 2655 | "413512", 2656 | "415002", 2657 | "415001", 2658 | "413003", 2659 | "413401", 2660 | "416002", 2661 | "416003" 2662 | ] 2663 | }, 2664 | { 2665 | "alias": "pondicherry", 2666 | "_id": "66505ff312a50963f24870e8", 2667 | "name": "Pondicherry", 2668 | "pincodes": [ 2669 | "605013", 2670 | "605004", 2671 | "605010", 2672 | "605005", 2673 | "605014", 2674 | "605008", 2675 | "609609" 2676 | ] 2677 | }, 2678 | { 2679 | "alias": "manipur", 2680 | "_id": "66505ffbf40e263cf5588098", 2681 | "name": "Manipur", 2682 | "pincodes": [ 2683 | "795002", 2684 | "795005", 2685 | "795001" 2686 | ] 2687 | }, 2688 | { 2689 | "alias": "tripura", 2690 | "_id": "66505ffe78117873bb53b6ad", 2691 | "name": "Tripura", 2692 | "pincodes": [ 2693 | "799003", 2694 | "799001", 2695 | "799004", 2696 | "799007", 2697 | 
"799014", 2698 | "799005", 2699 | "799120" 2700 | ] 2701 | }, 2702 | { 2703 | "alias": "dadra-and-nagar-haveli", 2704 | "_id": "6650600062e3d963520d0bc3", 2705 | "name": "Dadra-And-Nagar-Haveli", 2706 | "pincodes": [ 2707 | "396230" 2708 | ] 2709 | }, 2710 | { 2711 | "alias": "meghalaya", 2712 | "_id": "66505ffd672747740fb389c7", 2713 | "name": "Meghalaya", 2714 | "pincodes": [ 2715 | "793007", 2716 | "793018", 2717 | "793001" 2718 | ] 2719 | }, 2720 | { 2721 | "alias": "arunachal-pradesh", 2722 | "_id": "66505ff978117873bb53b643", 2723 | "name": "Arunachal-Pradesh", 2724 | "pincodes": [ 2725 | "791112", 2726 | "791110" 2727 | ] 2728 | }, 2729 | { 2730 | "alias": "mizoram", 2731 | "_id": "66505ffd998183e1b1935e21", 2732 | "name": "Mizoram", 2733 | "pincodes": [ 2734 | "796005" 2735 | ] 2736 | }, 2737 | { 2738 | "alias": "sikkim", 2739 | "_id": "66505ffe91ab653d60a3df2d", 2740 | "name": "Sikkim", 2741 | "pincodes": [ 2742 | "737102" 2743 | ] 2744 | } 2745 | ] --------------------------------------------------------------------------------