├── .funcignore ├── .gitignore ├── .vscode ├── extensions.json ├── launch.json ├── settings.json └── tasks.json ├── README.md ├── __init__.py ├── app ├── __init__.py ├── bot.py ├── jobs │ ├── __init__.py │ ├── base.py │ └── check.py ├── reddit.py ├── settings.py ├── stores │ ├── __init__.py │ ├── app_store │ │ ├── __init__.py │ │ ├── appsliced.py │ │ └── helpers.py │ └── classes.py └── utils.py ├── cronjob_check ├── __init__.py └── function.json ├── host.json ├── local.settings.json ├── poetry.lock ├── pyproject.toml └── requirements.txt /.funcignore: -------------------------------------------------------------------------------- 1 | .git* 2 | 3 | local.settings.json 4 | test 5 | 6 | .idea 7 | .vscode 8 | 9 | .venv -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | tmp.py 2 | 3 | *.py[cod] 4 | 5 | .idea 6 | 7 | .venv 8 | .python_packages 9 | __pycache__ -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "ms-azuretools.vscode-azurefunctions", 4 | "ms-python.python" 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Attach to Python Functions", 6 | "type": "python", 7 | "request": "attach", 8 | "port": 9091, 9 | "preLaunchTask": "func: host start" 10 | } 11 | ] 12 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "azureFunctions.deploySubpath": ".", 3 | "azureFunctions.scmDoBuildDuringDeployment": true, 4 | "azureFunctions.pythonVenv": ".venv", 5 | "azureFunctions.projectLanguage": "Python", 6 | "azureFunctions.projectRuntime": "~4", 7 | "debug.internalConsoleOptions": "neverOpen" 8 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "type": "func", 6 | "command": "host start", 7 | "problemMatcher": "$func-python-watch", 8 | "isBackground": true, 9 | "dependsOn": "pip install (functions)" 10 | }, 11 | { 12 | "label": "pip install (functions)", 13 | "type": "shell", 14 | "osx": { 15 | "command": "${config:azureFunctions.pythonVenv}/bin/python -m pip install -r requirements.txt" 16 | }, 17 | "windows": { 18 | "command": "${config:azureFunctions.pythonVenv}/Scripts/python -m pip install -r requirements.txt" 19 | }, 20 | "linux": { 21 | "command": "${config:azureFunctions.pythonVenv}/bin/python -m pip install -r requirements.txt" 22 | }, 23 | "problemMatcher": [] 24 | } 25 | ] 26 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | app-info-bot 2 | ------------ 3 | 4 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__)))) 5 | -------------------------------------------------------------------------------- /app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fedecalendino/app-info-bot/3f37974de83b12a903dbecd85c1cb83e4245d53a/app/__init__.py -------------------------------------------------------------------------------- /app/bot.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from time import sleep 3 | from urllib import parse 4 | 5 | from praw.models import Submission 6 | 7 | from app.reddit import reddit 8 | from app.stores import SUPPORTED_STORES 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | 13 | def find_comment(submission: Submission): 14 | for comment in submission.comments: 15 | try: 16 | if comment.author_fullname == reddit.user.me().fullname: 17 | return comment 18 | except: # fix for deleted comments by mods. 19 | continue 20 | 21 | return None 22 | 23 | 24 | def analyze_submission(submission: Submission): 25 | if submission.is_self: 26 | logger.info(" * found invalid self-submission (%s)", submission.url) 27 | return 28 | 29 | url = parse.urlsplit(submission.url) 30 | 31 | if url.hostname not in SUPPORTED_STORES: 32 | logger.info(" * found invalid submission (%s)", submission.url) 33 | return 34 | 35 | comment = find_comment(submission) 36 | 37 | # if comment and comment.edited: 38 | # logger.info(" * skipped old submission (%s)", submission.url) 39 | # return 40 | 41 | logger.info(" * found valid submission (%s)", submission.url) 42 | 43 | scraper = SUPPORTED_STORES.get(url.hostname) 44 | info = scraper(url.geturl()) 45 | 46 | logger.info(" - fetched information for %s (%s)", info.title, info.store) 47 | 48 | if comment: 49 | comment.edit(body=str(info)) 50 | logger.info(" - updated comment (%s)", comment.permalink) 51 | else: 52 | comment = submission.reply(body=str(info)) 53 | logger.info(" - replied with comment (%s)", comment.permalink) 54 | 55 | sleep(1) 56 | 57 | 58 | def analyze_subreddit(subreddit: str) -> dict: 59 | logger.info("looking for submissions in /r/%s", subreddit) 60 | 61 | result = { 62 | "errors": [], 63 | } 64 | 65 | for submission in list(reddit.subreddit(subreddit).new(limit=3)): 66 | data = {"id": submission.id, "title": submission.title} 67 | 68 | try: 69 | analyze_submission(submission) 70 | except Exception as exc: 71 | result["errors"].append(data) 72 | logging.error(exc, exc_info=True) 73 | 74 | return result 75 | -------------------------------------------------------------------------------- /app/jobs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fedecalendino/app-info-bot/3f37974de83b12a903dbecd85c1cb83e4245d53a/app/jobs/__init__.py -------------------------------------------------------------------------------- /app/jobs/base.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from time import time, sleep 3 | 4 | from slackhooks.blocks.context import Context 5 | from slackhooks.blocks.element import MarkdownTextElement 6 | from slackhooks.blocks.section import Section 7 | from slackhooks.blocks.text import MarkdownText 8 | from slackhooks.client import Message 9 | 10 | from app import settings 11 | 12 | MAX_RETRIES = 5 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | class Job: 18 | def __init__(self, name): 19 | self.name = name 20 | 21 | def run(self): 22 | try: 23 | sleep(1) 24 | 25 | logger.info("Running job '%s'", self.name) 26 | 27 | start = time() 28 | retries_left, result = self.execute_with_retries() 29 | end = time() 30 | 31 | self.notify_success( 32 | result=result, 33 | runtime=round(end - start), 34 | retries_left=retries_left, 35 | ) 36 | except Exception as exc: 37 | logger.error("Error running job '%s': %s", self.name, str(exc)) 38 | self.notify_failure(exc) 39 | 40 | def execute(self): 41 | raise NotImplementedError() 42 | 43 | def execute_with_retries(self, retries: int = MAX_RETRIES): 44 | try: 45 | return retries, self.execute() 46 | except Exception as exc: 47 | if retries: 48 | return self.execute_with_retries(retries - 1) 49 | 50 | raise exc 51 | 52 | def notify_success(self, result: str, runtime: int, retries_left: int): 53 | if not settings.SLACK_WEBHOOK_URL: 54 | return 55 | 56 | message = Message( 57 | text=f"{self.name} > finished successfully.", 58 | blocks=[ 59 | Section( 60 | text=MarkdownText( 61 | text=f"*{self.name.upper()}* > finished successfully after {runtime} seconds. \n\n```{result}```", 62 | ), 63 | ), 64 | ], 65 | ) 66 | 67 | if retries_left < MAX_RETRIES: 68 | message.blocks.append( 69 | Context( 70 | elements=[ 71 | MarkdownTextElement(text=f"{retries_left} retries left"), 72 | ] 73 | ), 74 | ) 75 | 76 | message.send(webhook_url=settings.SLACK_WEBHOOK_URL) 77 | 78 | def notify_failure(self, exc: Exception): 79 | if not settings.SLACK_WEBHOOK_URL: 80 | return 81 | 82 | Message( 83 | text=f"{self.name} > finished unsuccessfully.", 84 | blocks=[ 85 | Section( 86 | text=MarkdownText( 87 | text=f"*{self.name.upper()}* > finished unsuccessfully. \n\n```{str(exc)}```", 88 | ), 89 | ), 90 | ], 91 | ).send( 92 | webhook_url=settings.SLACK_WEBHOOK_URL, 93 | ) 94 | -------------------------------------------------------------------------------- /app/jobs/check.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from app import bot 4 | from app import settings 5 | from .base import Job 6 | 7 | logger = logging.getLogger(__name__) 8 | 9 | 10 | class CheckJob(Job): 11 | def __init__(self): 12 | super().__init__("check") 13 | 14 | def execute(self): 15 | result = {} 16 | 17 | subreddits = settings.REDDIT_SUBREDDITS 18 | 19 | logger.info("analyzing: " + " ".join(subreddits)) 20 | 21 | for subreddit in subreddits: 22 | result[subreddit] = bot.analyze_subreddit(subreddit) 23 | 24 | return " ".join(list(map(lambda sub: f"\n * {sub}", subreddits))) 25 | -------------------------------------------------------------------------------- /app/reddit.py: -------------------------------------------------------------------------------- 1 | from praw import Reddit 2 | 3 | from app import settings 4 | 5 | reddit = Reddit( 6 | client_id=settings.REDDIT_CLIENTID, 7 | client_secret=settings.REDDIT_CLIENTSECRET, 8 | user_agent=settings.REDDIT_USERAGENT, 9 | username=settings.REDDIT_USERNAME, 10 | password=settings.REDDIT_PASSWORD, 11 | ) 12 | 13 | assert reddit.user.me().name == settings.REDDIT_USERNAME 14 | -------------------------------------------------------------------------------- /app/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | GITHUB_REPOSITORY = os.environ.get("GITHUB_REPOSITORY", "test/respository") 5 | 6 | REDDIT_DEV_USERNAME = os.environ.get("REDDIT_DEV_USERNAME", "test") 7 | 8 | REDDIT_USERNAME = os.environ.get("REDDIT_USERNAME") 9 | REDDIT_PASSWORD = os.environ.get("REDDIT_PASSWORD") 10 | 11 | REDDIT_CLIENTID = os.environ.get("REDDIT_CLIENTID") 12 | REDDIT_CLIENTSECRET = os.environ.get("REDDIT_CLIENTSECRET") 13 | REDDIT_USERAGENT = os.environ.get("REDDIT_USERAGENT") 14 | 15 | REDDIT_SUBREDDITS = os.environ.get("REDDIT_SUBREDDITS", "test,test2").lower().split(",") 16 | 17 | SLACK_WEBHOOK_URL = os.environ.get("SLACK_WEBHOOK_URL") 18 | 19 | URL_REDDIT_DEV = f"https://reddit.com/user/{REDDIT_DEV_USERNAME}" 20 | URL_GITHUB_REPOSITORY = f"https://github.com/{GITHUB_REPOSITORY}" 21 | -------------------------------------------------------------------------------- /app/stores/__init__.py: -------------------------------------------------------------------------------- 1 | from app.stores.app_store import AppStoreApplication 2 | 3 | 4 | SUPPORTED_STORES = { 5 | "apps.apple.com": AppStoreApplication, 6 | "itunes.apple.com": AppStoreApplication, 7 | } 8 | -------------------------------------------------------------------------------- /app/stores/app_store/__init__.py: -------------------------------------------------------------------------------- 1 | import html 2 | from json import loads 3 | from urllib import parse 4 | 5 | import requests 6 | from bs4 import BeautifulSoup 7 | 8 | from app import settings 9 | from app.stores.classes import ( 10 | Developer, 11 | Price, 12 | PrivacyCard, 13 | Rating, 14 | ) 15 | from .appsliced import AppSliced 16 | from .helpers import find_by_attr, find_all_by_attr 17 | 18 | TEMPLATE = """ 19 | ## [**{title}**]({url}) 20 | > by [{dev_name}]({dev_url}) 21 | 22 | {subtitle} 23 | 24 | ____ 25 | 26 | ### ℹ️ **App Info** 27 | **Category**: {category}. 28 | 29 | **Release**: {release}. 30 | 31 | **Last Update**: {last_update}. 32 | 33 | **Platforms**: {compatibility} 34 | 35 | **Rating**: {rating_value} ({rating_count}). 36 | 37 | **Size**: {size}. 38 | 39 | 40 | ### 💸 **Pricing (in USD)** 41 | **Current**: {price} 42 | 43 | **History**: {price_history} 44 | 45 | **IAPs**: {iaps} 46 | 47 | ### 🔒️ **Privacy** 48 | **Policy**: {privacy_policy} 49 | 50 | **Specification**: {privacy_cards} 51 | 52 | --- 53 | 54 | ^[dev]({dev}) ^| ^[github]({github}) 55 | 56 | """ 57 | 58 | 59 | class AppStoreApplication: 60 | store = "App Store" 61 | 62 | @staticmethod 63 | def us_store(url: str) -> str: 64 | url = parse.urlsplit(url) 65 | path = url.path 66 | 67 | if not path.startswith("/app"): 68 | path = url.path.split("/", 2)[-1] 69 | 70 | return f"https://{url.hostname}/{path}" 71 | 72 | def __init__(self, url: str, use_us_store: bool = True): 73 | if use_us_store: 74 | url = self.us_store(url) 75 | 76 | response = requests.get(url) 77 | 78 | self.soup = BeautifulSoup(response.content, features="html.parser") 79 | self.json = loads( 80 | html.unescape( 81 | self.soup.find( 82 | "script", 83 | attrs={"name": "schema:software-application"}, 84 | ).string 85 | ) 86 | ) 87 | 88 | try: 89 | self.app_sliced = AppSliced(app_id=response.url.split("/")[-1]) 90 | except: 91 | self.app_sliced = None 92 | 93 | @property 94 | def age(self) -> str: 95 | tag = self.soup.find("span", class_="badge badge--product-title") 96 | return tag.text.strip() if tag else None 97 | 98 | @property 99 | def category(self) -> str: 100 | tag = self.soup.find("dt", text="Category") 101 | 102 | if tag: 103 | tag = tag.parent.find("dd") 104 | 105 | if not tag: 106 | return None 107 | 108 | return tag.text.strip() 109 | 110 | @property 111 | def compatibility(self) -> str: 112 | tag = self.soup.find("dt", text="Compatibility") 113 | 114 | if tag: 115 | tag = tag.parent.find("dd") 116 | 117 | if not tag: 118 | return None 119 | 120 | items = [] 121 | 122 | for item in tag.find_all( 123 | "dl", class_="information-list__item__definition__item" 124 | ): 125 | items.append(item.text.strip().replace("\n \n", ": ")) 126 | 127 | return "; ".join(sorted(items)) if items else tag.text.strip() 128 | 129 | @property 130 | def description(self) -> list[str]: 131 | return self.json.get("description") 132 | 133 | @property 134 | def description_short(self) -> str: 135 | return self.soup.find("meta", attrs={"name": "twitter:description"})["content"] 136 | 137 | @property 138 | def developer(self) -> Developer: 139 | return Developer( 140 | name=self.json["author"]["name"], 141 | url=self.json["author"]["url"], 142 | ) 143 | 144 | @property 145 | def pre_order(self) -> bool: 146 | return ( 147 | self.soup.find( 148 | "li", class_="inline-list__item app-header__list__item--preorder" 149 | ) 150 | is not None 151 | ) 152 | 153 | @property 154 | def iaps(self) -> list[Price]: 155 | if self.pre_order: 156 | return [Price("Pre-Orders might not show IAPs properly", "⚠️")] 157 | 158 | tag = self.soup.find("dt", text="In-App Purchases") 159 | 160 | if not tag: 161 | return [] 162 | 163 | iaps = [] 164 | 165 | for li in tag.parent.find_all("li"): 166 | split = li.text.strip().split("\n") 167 | 168 | if len(split) == 2: 169 | iaps.append(Price(split[0], split[1])) 170 | elif len(split) == 1: 171 | iaps.append(Price(split[0], "")) 172 | 173 | return iaps 174 | 175 | @property 176 | def last_update(self) -> str: 177 | tag = find_by_attr(self.soup, "time", "data-test-we-datetime") 178 | return tag.text if tag else None 179 | 180 | @property 181 | def price(self) -> str: 182 | return self.soup.find("li", class_="app-header__list__item--price").text 183 | 184 | @property 185 | def price_history(self) -> list[str]: 186 | if self.app_sliced: 187 | return self.app_sliced.price_history 188 | else: 189 | return [] 190 | 191 | @property 192 | def privacy_cards(self) -> list[PrivacyCard]: 193 | cards = [] 194 | 195 | for tag in self.soup.find_all("div", class_="app-privacy__card"): 196 | spans = tag.find_all("span", class_="privacy-type__data-category-heading") 197 | 198 | cards.append( 199 | PrivacyCard( 200 | title=tag.find("h3", class_="privacy-type__heading").text, 201 | items=list(map(lambda span: span.text.strip(), spans)), 202 | ) 203 | ) 204 | 205 | return cards 206 | 207 | @property 208 | def privacy_policy(self) -> str: 209 | tag = self.soup.find( 210 | "a", 211 | attrs={ 212 | "data-metrics-click": '{"actionType":"navigate","targetType":"link","targetId":"LinkToPrivacyPolicy"}' 213 | }, 214 | ) 215 | return tag["href"] if tag else None 216 | 217 | @property 218 | def rating(self) -> Rating: 219 | tag = self.soup.find("div", class_="we-customer-ratings__averages") 220 | score = tag.text if tag else None 221 | 222 | tag = self.soup.find("p", class_="we-customer-ratings__count") 223 | count = tag.text.lower() if tag else None 224 | 225 | if not score or not count: 226 | return Rating("n/a", "not enough ratings") 227 | 228 | return Rating(score, count) 229 | 230 | @property 231 | def release(self) -> str: 232 | return self.json.get("datePublished") 233 | 234 | @property 235 | def size(self) -> str: 236 | tag = self.soup.find("dt", text="Size") 237 | if tag: 238 | tag = tag.parent.find("dd") 239 | 240 | return tag.text if tag else None 241 | 242 | @property 243 | def subtitle(self) -> str: 244 | tag = self.soup.find( 245 | "h2", class_="product-header__subtitle app-header__subtitle" 246 | ) 247 | return tag.text.strip() if tag else None 248 | 249 | @property 250 | def title(self) -> str: 251 | return self.json.get("name") 252 | 253 | @property 254 | def url(self) -> str: 255 | return self.soup.find("link", rel="canonical")["href"] 256 | 257 | def __str__(self) -> str: 258 | developer = self.developer 259 | iaps = self.iaps 260 | price_history = self.price_history[:5] 261 | privacy_cards = self.privacy_cards 262 | subtitle = self.subtitle 263 | 264 | # == Subtitle ===================================== 265 | if subtitle: 266 | subtitle_str = f"{subtitle}." 267 | else: 268 | subtitle_str = "" 269 | 270 | # == IAPs ========================================= 271 | if len(iaps) == 3: 272 | iap_count = "3+ " 273 | elif len(iaps) == 0: 274 | iap_count = "None " 275 | else: 276 | iap_count = f"{len(iaps)} " 277 | 278 | iap_list = [] 279 | 280 | for iap in iaps: 281 | if iap.price: 282 | iap_list.append(f"{iap.name}: {iap.price} ") 283 | else: 284 | iap_list.append(f"{iap.name}: Free ") 285 | 286 | iaps_str = "\n".join([iap_count] + [f" * {item} " for item in iap_list]) 287 | 288 | # == Price History ================================ 289 | if not price_history: 290 | price_history_str = "n/a" 291 | else: 292 | price_history_str = "\n".join( 293 | [" "] + [f" * {item} " for item in price_history] 294 | ) 295 | 296 | # == Privacy Cards ================================ 297 | if len(privacy_cards) == 1 and len(privacy_cards[0].items) == 0: 298 | privacy_cards_str = f"{privacy_cards[0].title} " 299 | else: 300 | privacy_cards_str = "\n".join( 301 | [" "] + [f" * {card} " for card in privacy_cards] 302 | ) 303 | 304 | return TEMPLATE.format( 305 | title=self.title, 306 | url=self.url, 307 | subtitle=subtitle_str, 308 | dev_name=developer.name, 309 | dev_url=developer.url, 310 | age=self.age, 311 | category=self.category, 312 | release=self.release, 313 | last_update=self.last_update, 314 | compatibility=self.compatibility, 315 | rating_value=self.rating.value, 316 | rating_count=self.rating.count, 317 | size=self.size, 318 | price=self.price, 319 | price_history=price_history_str, 320 | iaps=iaps_str, 321 | privacy_policy=self.privacy_policy, 322 | privacy_cards=privacy_cards_str, 323 | github=settings.URL_GITHUB_REPOSITORY, 324 | dev=settings.URL_REDDIT_DEV, 325 | ) 326 | -------------------------------------------------------------------------------- /app/stores/app_store/appsliced.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import os 3 | from bs4 import BeautifulSoup 4 | 5 | 6 | URL = "https://appsliced.co/apps?search={app_id}" 7 | 8 | EMAIL = os.getenv("APPSLICED_EMAIL") 9 | PASSWORD = os.getenv("APPSLICED_PASSWORD") 10 | 11 | 12 | class AppSliced: 13 | def __init__(self, app_id: str): 14 | with requests.Session() as session: 15 | url = "https://appsliced.co/f/f-login.php" 16 | 17 | headers = { 18 | "Content-Type": "application/x-www-form-urlencoded", 19 | "Origin": "https://appsliced.co", 20 | "Pragma": "no-cache", 21 | "Referer": "https://appsliced.co/login", 22 | } 23 | 24 | body = { 25 | "email": EMAIL, 26 | "password": PASSWORD, 27 | "rememberme": "1", 28 | "type": "1", 29 | } 30 | 31 | session.cookies.update( 32 | { 33 | "loc": "US", 34 | "refurl": f"%252Fapps%253Fsearch%253D{app_id}", 35 | "cstatus": "1", 36 | "UISstate": "3939313032", 37 | } 38 | ) 39 | 40 | response = session.post( 41 | url, 42 | headers=headers, 43 | data=body, 44 | allow_redirects=True, 45 | ) 46 | 47 | soup = BeautifulSoup(response.content, features="html.parser") 48 | 49 | div = soup.find("div", class_="10u title") 50 | a = div.find("a") 51 | 52 | response = requests.get(a.attrs["href"]) 53 | self.soup = BeautifulSoup(response.content, features="html.parser") 54 | 55 | @property 56 | def price_history(self) -> list[str]: 57 | ul = self.soup.find("ul", class_="price_changes") 58 | 59 | if not ul: 60 | return [] 61 | 62 | price_history = [] 63 | 64 | for li in ul.find_all("li"): 65 | string = str(li) 66 | 67 | icon = "" 68 | 69 | if "more_recent_expand" in string: 70 | continue 71 | 72 | if "more_recent_collapse" in string: 73 | continue 74 | 75 | if "history" in string: 76 | icon = "⏺️" 77 | elif "down" in string: 78 | icon = "⬇️" 79 | elif "up" in string: 80 | icon = "⬆️" 81 | 82 | price_history.append(f"{icon} {li.text.strip()}") 83 | 84 | return price_history 85 | -------------------------------------------------------------------------------- /app/stores/app_store/helpers.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | 3 | 4 | def find_all_by_attr(soup: BeautifulSoup, tag_name: str, attr_name: str): 5 | for tag in soup.find_all(tag_name): 6 | if attr_name in tag.attrs: 7 | yield tag 8 | 9 | 10 | def find_by_attr(soup: BeautifulSoup, tag_name: str, attr_name: str): 11 | try: 12 | return next(find_all_by_attr(soup, tag_name, attr_name)) 13 | except StopIteration: 14 | return None 15 | -------------------------------------------------------------------------------- /app/stores/classes.py: -------------------------------------------------------------------------------- 1 | from app.utils import fancy_join 2 | 3 | 4 | class Developer: 5 | def __init__(self, name: str, url: str): 6 | self.name = name 7 | self.url = url 8 | 9 | 10 | class Price: 11 | def __init__(self, name: str, price: str): 12 | self.name = name 13 | self.price = price 14 | 15 | 16 | class PrivacyCard: 17 | def __init__(self, title: str, items: list[str]): 18 | self.title = title 19 | self.items = items 20 | 21 | def __repr__(self): 22 | return f"{self.title}: {fancy_join(', ', self.items, ' & ')}." 23 | 24 | 25 | class Rating: 26 | def __init__(self, value: str, count: str): 27 | self.value = value 28 | self.count = count 29 | -------------------------------------------------------------------------------- /app/utils.py: -------------------------------------------------------------------------------- 1 | def fancy_join(sep: str, items: list[str], final: str = None) -> str: 2 | if not items: 3 | return "" 4 | 5 | if len(items) == 1: 6 | return items[0] 7 | 8 | if not final: 9 | final = sep 10 | 11 | *items, last = items 12 | return sep.join(items) + final + last 13 | -------------------------------------------------------------------------------- /cronjob_check/__init__.py: -------------------------------------------------------------------------------- 1 | import azure.functions as func 2 | 3 | from app.jobs.check import CheckJob 4 | 5 | 6 | def main(mytimer: func.TimerRequest) -> None: 7 | CheckJob().run() 8 | -------------------------------------------------------------------------------- /cronjob_check/function.json: -------------------------------------------------------------------------------- 1 | { 2 | "scriptFile": "__init__.py", 3 | "bindings": [ 4 | { 5 | "name": "mytimer", 6 | "type": "timerTrigger", 7 | "direction": "in", 8 | "schedule": "0 */10 * * * *", 9 | "description": "every ten minutes" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /host.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "functionTimeout": "00:07:30", 4 | "logging": { 5 | "applicationInsights": { 6 | "samplingSettings": { 7 | "isEnabled": true, 8 | "excludedTypes": "Request" 9 | } 10 | } 11 | }, 12 | "extensionBundle": { 13 | "id": "Microsoft.Azure.Functions.ExtensionBundle", 14 | "version": "[2.*, 3.0.0)" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /local.settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "IsEncrypted": false, 3 | "Values": { 4 | "AzureWebJobsStorage": "", 5 | "FUNCTIONS_WORKER_RUNTIME": "python" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "azure-functions" 3 | version = "1.11.2" 4 | description = "Azure Functions for Python" 5 | category = "main" 6 | optional = false 7 | python-versions = "*" 8 | 9 | [package.extras] 10 | dev = ["coverage", "flake8 (>=4.0.1,<4.1.0)", "mypy", "pytest", "pytest-cov", "requests (>=2.0.0,<3.0.0)"] 11 | 12 | [[package]] 13 | name = "beautifulsoup4" 14 | version = "4.11.1" 15 | description = "Screen-scraping library" 16 | category = "main" 17 | optional = false 18 | python-versions = ">=3.6.0" 19 | 20 | [package.dependencies] 21 | soupsieve = ">1.2" 22 | 23 | [package.extras] 24 | html5lib = ["html5lib"] 25 | lxml = ["lxml"] 26 | 27 | [[package]] 28 | name = "black" 29 | version = "22.6.0" 30 | description = "The uncompromising code formatter." 31 | category = "dev" 32 | optional = false 33 | python-versions = ">=3.6.2" 34 | 35 | [package.dependencies] 36 | click = ">=8.0.0" 37 | mypy-extensions = ">=0.4.3" 38 | pathspec = ">=0.9.0" 39 | platformdirs = ">=2" 40 | tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} 41 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} 42 | 43 | [package.extras] 44 | colorama = ["colorama (>=0.4.3)"] 45 | d = ["aiohttp (>=3.7.4)"] 46 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 47 | uvloop = ["uvloop (>=0.15.2)"] 48 | 49 | [[package]] 50 | name = "certifi" 51 | version = "2022.6.15" 52 | description = "Python package for providing Mozilla's CA Bundle." 53 | category = "main" 54 | optional = false 55 | python-versions = ">=3.6" 56 | 57 | [[package]] 58 | name = "charset-normalizer" 59 | version = "2.1.0" 60 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 61 | category = "main" 62 | optional = false 63 | python-versions = ">=3.6.0" 64 | 65 | [package.extras] 66 | unicode_backport = ["unicodedata2"] 67 | 68 | [[package]] 69 | name = "click" 70 | version = "8.1.3" 71 | description = "Composable command line interface toolkit" 72 | category = "dev" 73 | optional = false 74 | python-versions = ">=3.7" 75 | 76 | [package.dependencies] 77 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 78 | 79 | [[package]] 80 | name = "colorama" 81 | version = "0.4.5" 82 | description = "Cross-platform colored terminal text." 83 | category = "dev" 84 | optional = false 85 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 86 | 87 | [[package]] 88 | name = "ddt" 89 | version = "1.6.0" 90 | description = "Data-Driven/Decorated Tests" 91 | category = "dev" 92 | optional = false 93 | python-versions = "*" 94 | 95 | [[package]] 96 | name = "idna" 97 | version = "3.3" 98 | description = "Internationalized Domain Names in Applications (IDNA)" 99 | category = "main" 100 | optional = false 101 | python-versions = ">=3.5" 102 | 103 | [[package]] 104 | name = "mypy-extensions" 105 | version = "0.4.3" 106 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 107 | category = "dev" 108 | optional = false 109 | python-versions = "*" 110 | 111 | [[package]] 112 | name = "pathspec" 113 | version = "0.9.0" 114 | description = "Utility library for gitignore style pattern matching of file paths." 115 | category = "dev" 116 | optional = false 117 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 118 | 119 | [[package]] 120 | name = "platformdirs" 121 | version = "2.5.2" 122 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 123 | category = "dev" 124 | optional = false 125 | python-versions = ">=3.7" 126 | 127 | [package.extras] 128 | docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] 129 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 130 | 131 | [[package]] 132 | name = "praw" 133 | version = "7.6.0" 134 | description = "PRAW, an acronym for `Python Reddit API Wrapper`, is a python package that allows for simple access to Reddit's API." 135 | category = "main" 136 | optional = false 137 | python-versions = "~=3.7" 138 | 139 | [package.dependencies] 140 | prawcore = ">=2.1,<3" 141 | update-checker = ">=0.18" 142 | websocket-client = ">=0.54.0" 143 | 144 | [package.extras] 145 | ci = ["coveralls"] 146 | dev = ["betamax (>=0.8,<0.9)", "betamax-matchers (>=0.3.0,<0.5)", "packaging", "pre-commit", "pytest (>=2.7.3)", "requests (>=2.20.1,<3)", "sphinx", "sphinx-rtd-theme"] 147 | lint = ["pre-commit", "sphinx", "sphinx-rtd-theme"] 148 | readthedocs = ["sphinx", "sphinx-rtd-theme"] 149 | test = ["betamax (>=0.8,<0.9)", "betamax-matchers (>=0.3.0,<0.5)", "pytest (>=2.7.3)", "requests (>=2.20.1,<3)"] 150 | 151 | [[package]] 152 | name = "prawcore" 153 | version = "2.3.0" 154 | description = "Low-level communication layer for PRAW 4+." 155 | category = "main" 156 | optional = false 157 | python-versions = "~=3.6" 158 | 159 | [package.dependencies] 160 | requests = ">=2.6.0,<3.0" 161 | 162 | [package.extras] 163 | ci = ["coveralls"] 164 | dev = ["betamax (>=0.8,<0.9)", "betamax-matchers (>=0.4.0,<0.5)", "betamax-serializers (>=0.2.0,<0.3)", "black", "flake8", "flynt", "mock (>=0.8)", "pre-commit", "pydocstyle", "pytest", "testfixtures (>4.13.2,<7)"] 165 | lint = ["black", "flake8", "flynt", "pre-commit", "pydocstyle"] 166 | test = ["betamax (>=0.8,<0.9)", "betamax-matchers (>=0.4.0,<0.5)", "betamax-serializers (>=0.2.0,<0.3)", "mock (>=0.8)", "pytest", "testfixtures (>4.13.2,<7)"] 167 | 168 | [[package]] 169 | name = "requests" 170 | version = "2.28.1" 171 | description = "Python HTTP for Humans." 172 | category = "main" 173 | optional = false 174 | python-versions = ">=3.7, <4" 175 | 176 | [package.dependencies] 177 | certifi = ">=2017.4.17" 178 | charset-normalizer = ">=2,<3" 179 | idna = ">=2.5,<4" 180 | urllib3 = ">=1.21.1,<1.27" 181 | 182 | [package.extras] 183 | socks = ["PySocks (>=1.5.6,!=1.5.7)"] 184 | use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] 185 | 186 | [[package]] 187 | name = "slack-hooks" 188 | version = "0.4.1" 189 | description = "Slack client for webhooks with Block Kit builder." 190 | category = "main" 191 | optional = false 192 | python-versions = ">=3.8,<4.0" 193 | 194 | [package.dependencies] 195 | requests = ">=2.28.1,<3.0.0" 196 | 197 | [[package]] 198 | name = "soupsieve" 199 | version = "2.3.2.post1" 200 | description = "A modern CSS selector implementation for Beautiful Soup." 201 | category = "main" 202 | optional = false 203 | python-versions = ">=3.6" 204 | 205 | [[package]] 206 | name = "tomli" 207 | version = "2.0.1" 208 | description = "A lil' TOML parser" 209 | category = "dev" 210 | optional = false 211 | python-versions = ">=3.7" 212 | 213 | [[package]] 214 | name = "typing-extensions" 215 | version = "4.3.0" 216 | description = "Backported and Experimental Type Hints for Python 3.7+" 217 | category = "dev" 218 | optional = false 219 | python-versions = ">=3.7" 220 | 221 | [[package]] 222 | name = "update-checker" 223 | version = "0.18.0" 224 | description = "A python module that will check for package updates." 225 | category = "main" 226 | optional = false 227 | python-versions = "*" 228 | 229 | [package.dependencies] 230 | requests = ">=2.3.0" 231 | 232 | [package.extras] 233 | dev = ["black", "flake8", "pytest (>=2.7.3)"] 234 | lint = ["black", "flake8"] 235 | test = ["pytest (>=2.7.3)"] 236 | 237 | [[package]] 238 | name = "urllib3" 239 | version = "1.26.11" 240 | description = "HTTP library with thread-safe connection pooling, file post, and more." 241 | category = "main" 242 | optional = false 243 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" 244 | 245 | [package.extras] 246 | brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] 247 | secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] 248 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 249 | 250 | [[package]] 251 | name = "websocket-client" 252 | version = "1.3.3" 253 | description = "WebSocket client for Python with low level API options" 254 | category = "main" 255 | optional = false 256 | python-versions = ">=3.7" 257 | 258 | [package.extras] 259 | docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] 260 | optional = ["python-socks", "wsaccel"] 261 | test = ["websockets"] 262 | 263 | [metadata] 264 | lock-version = "1.1" 265 | python-versions = "^3.8" 266 | content-hash = "0149be7154451943f5b78ff13cb13095231a70fda6f077f89d4f5f403311ae87" 267 | 268 | [metadata.files] 269 | azure-functions = [ 270 | {file = "azure-functions-1.11.2.tar.gz", hash = "sha256:a4bca1b650810c1063140fef8d9d51fb1837f869e48fbf8869ff9eee642c42f4"}, 271 | {file = "azure_functions-1.11.2-py3-none-any.whl", hash = "sha256:db5730aad1b3eb6716790d593935abb5b49d2690bb446f2b4b83d91f3195f2f3"}, 272 | ] 273 | beautifulsoup4 = [ 274 | {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, 275 | {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, 276 | ] 277 | black = [ 278 | {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"}, 279 | {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"}, 280 | {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"}, 281 | {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"}, 282 | {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"}, 283 | {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"}, 284 | {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"}, 285 | {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"}, 286 | {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"}, 287 | {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"}, 288 | {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"}, 289 | {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"}, 290 | {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"}, 291 | {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"}, 292 | {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"}, 293 | {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"}, 294 | {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"}, 295 | {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"}, 296 | {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"}, 297 | {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"}, 298 | {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"}, 299 | {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, 300 | {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, 301 | ] 302 | certifi = [ 303 | {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, 304 | {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, 305 | ] 306 | charset-normalizer = [ 307 | {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"}, 308 | {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"}, 309 | ] 310 | click = [ 311 | {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, 312 | {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, 313 | ] 314 | colorama = [ 315 | {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, 316 | {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, 317 | ] 318 | ddt = [ 319 | {file = "ddt-1.6.0-py2.py3-none-any.whl", hash = "sha256:e3c93b961a108b4f4d5a6c7f2263513d928baf3bb5b32af8e1c804bfb041141d"}, 320 | {file = "ddt-1.6.0.tar.gz", hash = "sha256:f71b348731b8c78c3100bffbd951a769fbd439088d1fdbb3841eee019af80acd"}, 321 | ] 322 | idna = [ 323 | {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, 324 | {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, 325 | ] 326 | mypy-extensions = [ 327 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 328 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 329 | ] 330 | pathspec = [ 331 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 332 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 333 | ] 334 | platformdirs = [ 335 | {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, 336 | {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, 337 | ] 338 | praw = [ 339 | {file = "praw-7.6.0-py3-none-any.whl", hash = "sha256:0d8a75c8c9f78c0101f31653b68d23d0d9cee9a6ada7910f809610edcd905090"}, 340 | {file = "praw-7.6.0.tar.gz", hash = "sha256:3eeebe093f2d905b6c99e18aa8c8f64071b2e7a05e60b618166ee53397cdae72"}, 341 | ] 342 | prawcore = [ 343 | {file = "prawcore-2.3.0-py3-none-any.whl", hash = "sha256:48c17db447fa06a13ca3e722201f443031437708daa736c05a1df895fbcceff5"}, 344 | {file = "prawcore-2.3.0.tar.gz", hash = "sha256:daf1ccd4b7a80dc4e6567d48336d782e94a9a6dad83770fc2edf76dc9a84f56d"}, 345 | ] 346 | requests = [ 347 | {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, 348 | {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, 349 | ] 350 | slack-hooks = [ 351 | {file = "slack-hooks-0.4.1.tar.gz", hash = "sha256:c2207f07a04c1d10c4c6221576ab5ada73ca5acad86b5df07ef1d93a31c40f21"}, 352 | {file = "slack_hooks-0.4.1-py3-none-any.whl", hash = "sha256:dee4cb0d66d03c83867fb8d6f22376096471c9f26be02918e7ad4a5c86b2bf2a"}, 353 | ] 354 | soupsieve = [ 355 | {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, 356 | {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, 357 | ] 358 | tomli = [ 359 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 360 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 361 | ] 362 | typing-extensions = [ 363 | {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, 364 | {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, 365 | ] 366 | update-checker = [ 367 | {file = "update_checker-0.18.0-py3-none-any.whl", hash = "sha256:cbba64760a36fe2640d80d85306e8fe82b6816659190993b7bdabadee4d4bbfd"}, 368 | {file = "update_checker-0.18.0.tar.gz", hash = "sha256:6a2d45bb4ac585884a6b03f9eade9161cedd9e8111545141e9aa9058932acb13"}, 369 | ] 370 | urllib3 = [ 371 | {file = "urllib3-1.26.11-py2.py3-none-any.whl", hash = "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc"}, 372 | {file = "urllib3-1.26.11.tar.gz", hash = "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"}, 373 | ] 374 | websocket-client = [ 375 | {file = "websocket-client-1.3.3.tar.gz", hash = "sha256:d58c5f284d6a9bf8379dab423259fe8f85b70d5fa5d2916d5791a84594b122b1"}, 376 | {file = "websocket_client-1.3.3-py3-none-any.whl", hash = "sha256:5d55652dc1d0b3c734f044337d929aaf83f4f9138816ec680c1aefefb4dc4877"}, 377 | ] 378 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "app-info-bot" 3 | version = "1.0.0" 4 | description = "Reddit bot for posting information about apps." 5 | authors = ["Fede Calendino "] 6 | license = "MIT" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.8" 10 | azure-functions = "^1.11.2" 11 | beautifulsoup4 = "^4.11.1" 12 | praw = "^7.6.0" 13 | requests = "^2.28.1" 14 | slack-hooks = "^0.4.1" 15 | 16 | [tool.poetry.dev-dependencies] 17 | black = "^22.6.0" 18 | ddt = "^1.6.0" 19 | 20 | [build-system] 21 | requires = ["poetry-core>=1.0.0"] 22 | build-backend = "poetry.core.masonry.api" 23 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | azure-functions==1.11.2 ; python_version >= "3.8" and python_version < "4.0" 2 | beautifulsoup4==4.11.1 ; python_version >= "3.8" and python_version < "4.0" 3 | certifi==2022.6.15 ; python_version >= "3.8" and python_version < "4" 4 | charset-normalizer==2.1.0 ; python_version >= "3.8" and python_version < "4" 5 | idna==3.3 ; python_version >= "3.8" and python_version < "4" 6 | praw==7.6.0 ; python_version >= "3.8" and python_version < "4.0" 7 | prawcore==2.3.0 ; python_version >= "3.8" and python_version < "4.0" 8 | requests==2.28.1 ; python_version >= "3.8" and python_version < "4" 9 | slack-hooks==0.4.1 ; python_version >= "3.8" and python_version < "4.0" 10 | soupsieve==2.3.2.post1 ; python_version >= "3.8" and python_version < "4.0" 11 | update-checker==0.18.0 ; python_version >= "3.8" and python_version < "4.0" 12 | urllib3==1.26.11 ; python_version >= "3.8" and python_version < "4" 13 | websocket-client==1.3.3 ; python_version >= "3.8" and python_version < "4.0" 14 | --------------------------------------------------------------------------------