├── monitors
│   ├── __init__.py
│   ├── nittygrittystore.py
│   ├── zulassungsstelle.py
│   ├── pid_bruteforcer.py
│   ├── eleventeamsports.py
│   ├── prodirectsoccer_release.py
│   ├── courir-slow.py
│   ├── prodirectsoccer.py
│   ├── salomen.py
│   ├── bstn.py
│   ├── courir.py
│   ├── newbalance.py
│   ├── shopify_priceerror.py
│   ├── demandware_wishlist_morelist.py
│   ├── kickz.py
│   ├── shopify.py
│   ├── svd.py
│   ├── asos.py
│   ├── aboutyou.py
│   └── wethenew.py
├── logs
│   └── .gitignore
├── .env
├── .gitignore
├── requirements.txt
├── threadrunner.py
├── config.json
├── user_agent.py
├── Dockerfile
├── docs.py
├── loggerfactory.py
├── docker-compose-supplyfrance.yml
├── .github
│   └── workflows
│       └── release.yml
├── quicktask.py
├── LICENSE
├── docker-compose.yml
├── timeout.py
├── database.py
├── webhook.py
├── proxymanager.py
├── tls.py
├── README.md
└── run.py
/monitors/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/monitors/nittygrittystore.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/logs/.gitignore:
--------------------------------------------------------------------------------
1 | *
2 | !.gitignore
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
1 | GATEWAY=172.17.0.1
2 | IMAGEPROXY=IP_OF_IMAGEPROXY
3 | DB=MONGODB_CONNECTION_STRING
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode
2 | __pycache__
3 | dev
4 | env
5 | disabled
6 | monitor.log
7 | debug.py
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nabil-ak/monitor-service/HEAD/requirements.txt
--------------------------------------------------------------------------------
/threadrunner.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 |
3 | def run(function, **kwargs):
4 | Thread(target=function, kwargs=kwargs).start()
--------------------------------------------------------------------------------
/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "auths": {
3 | "https://index.docker.io/v1/": {
4 | "auth": "AUTH_DOCKER_HUB"
5 | }
6 | }
7 | }
--------------------------------------------------------------------------------
/user_agent.py:
--------------------------------------------------------------------------------
1 | CHROME_USERAGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36"
2 |
3 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # syntax=docker/dockerfile:1
2 | FROM python:3.10
3 |
4 | WORKDIR /app
5 |
6 | COPY requirements.txt ./requirements.txt
7 |
8 | RUN pip install --no-cache-dir --upgrade -r requirements.txt
9 |
10 | COPY ./ ./
11 |
12 | CMD ["python", "run.py"]
--------------------------------------------------------------------------------
/docs.py:
--------------------------------------------------------------------------------
1 | import requests as rq
2 | import os
3 |
4 | DOCSCLIENT = f"http://{os.environ['GATEWAY']}:4501/api/v2/safeFetch"
5 |
6 |
7 | def get(url, timeout=10, **kargs):
8 | """
9 | get request wrapper
10 | """
11 |
12 | res = rq.post(DOCSCLIENT, json={
13 | "url":url
14 | }, timeout=timeout, **kargs)
15 | res.raise_for_status()
16 | res = res.json()
17 |
18 | if res["Success"]:
19 | return res["Content"]
20 |
21 | return None
--------------------------------------------------------------------------------
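A minimal usage sketch of this wrapper, not part of the repository: it assumes the `GATEWAY` environment variable is set and the `safefetchapi` container from `docker-compose.yml` is reachable on port 4501; the URL is only an example.

```python
import docs

# Fetch a page through the safeFetch service; returns the page content, or None on failure
content = docs.get("https://www.example.com/product/123")
if content is not None:
    print(content[:200])
```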
/loggerfactory.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 |
4 | def create(name, level=logging.DEBUG):
5 | """Factory function to create a logger"""
6 |
7 | handler = logging.FileHandler("logs/"+name+".log", mode="w")
8 | handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(message)s'))
9 | handler.setLevel(level)
10 |
11 | logger = logging.getLogger(name)
12 | logger.setLevel(level)
13 | logger.addHandler(handler)
14 |
15 | return logger
16 |
17 |
--------------------------------------------------------------------------------
/docker-compose-supplyfrance.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | watchtower:
5 | image: index.docker.io/containrrr/watchtower
6 | restart: always
7 | volumes:
8 | - /var/run/docker.sock:/var/run/docker.sock
9 | - ./config.json:/config.json
10 | environment:
11 | - WATCHTOWER_CLEANUP=true
12 | - WATCHTOWER_INCLUDE_RESTARTING=true
13 | command: --interval 30
14 | keyword:
15 | image: index.docker.io/xwe10/monitor-service-keyword-pinger
16 | env_file:
17 | - .env
18 | monitor:
19 | image: index.docker.io/xwe10/monitor-service
20 | env_file:
21 | - .env
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release to Docker Hub
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 |
8 | jobs:
9 | release:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Login to Docker Hub
13 | uses: docker/login-action@v1
14 | with:
15 | username: ${{ secrets.DOCKER_HUB_USERNAME }}
16 | password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
17 |
18 | - name: Build and push Docker image
19 | id: docker_build
20 | uses: docker/build-push-action@v2
21 | with:
22 | push: true
23 | tags: ${{ secrets.DOCKER_HUB_USERNAME }}/${{ github.event.repository.name }}:latest
--------------------------------------------------------------------------------
/quicktask.py:
--------------------------------------------------------------------------------
1 | def adonis(site, link):
2 | return f"[Adonis](https://quicktask.adonisbots.com/quicktask/?site={site}&product={link})"
3 |
4 | def koi(site, link):
5 | return f"[Koi](http://localhost:58912/protocol/quicktask?site={site}&monitorInput={link})"
6 |
7 | def panaio(site, link):
8 | return f"[PanAIO](https://www.panaio.com/quicktask?site={site}&link={link})"
9 |
10 | def loscobot(site, link):
11 | return f"[Losco](https://www.loscobot.eu/dashboard/WAIT?site={site}&url={link}&size=random)"
12 |
13 | def cybersole(site=None, link=None):
14 | return f"[Cybersole](https://cybersole.io/dashboard/quicktask?input={link})"
15 |
16 | def thunder(site, link):
17 | return f"[Thunder](http://localhost:3928/qt?site={site}&url={link})"
18 |
19 | def storm(site, link):
20 | return f"[StormAIO](https://dashboard.storm-aio.com/dashboard?quicktask=true&store={site}&product={link})"
21 |
22 |
--------------------------------------------------------------------------------
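These helpers only build markdown links for the quicktask URL schemes of various bots. A small sketch, not part of the repository, of how they could be combined into an extra embed field for `webhook.send` (site and product URL are placeholders):

```python
import quicktask

site = "courir"
link = "https://www.courir.com/fr/p/example-123456.html"

# One "Quicktasks" field containing several bot links, separated like the existing "Links" field
qt_field = {
    "name": "Quicktasks",
    "value": " | ".join([
        quicktask.adonis(site, link),
        quicktask.koi(site, link),
        quicktask.thunder(site, link),
    ]),
    "inline": False,
}
```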
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c)
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | watchtower:
5 | image: index.docker.io/containrrr/watchtower
6 | restart: always
7 | volumes:
8 | - /var/run/docker.sock:/var/run/docker.sock
9 | - ./config.json:/config.json
10 | environment:
11 | - WATCHTOWER_CLEANUP=true
12 | - WATCHTOWER_INCLUDE_RESTARTING=true
13 | command: --interval 30
14 | monitor:
15 | image: index.docker.io/xwe10/monitor-service
16 | env_file:
17 | - .env
18 | keyword:
19 | image: index.docker.io/xwe10/monitor-service-keyword-pinger
20 | env_file:
21 | - .env
22 | toolbox:
23 | image: index.docker.io/xwe10/toolbox
24 | env_file:
25 | - .env
26 | imageproxy:
27 | image: index.docker.io/xwe10/image-proxy
28 | env_file:
29 | - .env
30 | ports:
31 | - "8080:8080"
32 | tls:
33 | image: index.docker.io/xwe10/tls-api
34 | ports:
35 | - "8082:8082"
36 | clearverify:
37 | image: index.docker.io/xwe10/clear-verify
38 | safefetchapi:
39 | image: index.docker.io/xwe10/safefetchapi
40 | ports:
41 | - "4501:4501"
--------------------------------------------------------------------------------
/timeout.py:
--------------------------------------------------------------------------------
1 | import time
2 | from copy import deepcopy
3 |
4 | TIMEOUT = 30
5 | PINGDELAY = 10
6 |
7 | class timeout:
8 | def __init__(self,timeout=TIMEOUT,pingdelay=PINGDELAY):
9 | self.pings = []
10 | self.timeout = timeout
11 | self.pingdelay = pingdelay
12 |
13 | def ping(self,product):
14 | """
15 |         Check whether the same product (same sizes) was already pinged within the last `pingdelay` seconds; if so, put the product on timeout for `timeout` seconds.
16 | """
17 | for ping in self.pings:
18 | if ping["product"] == product:
19 | if ping["timeout"] >= time.time():
20 | return False
21 | if ping["lastpingtimeout"] >= time.time():
22 | ping["timeout"] = time.time()+self.timeout
23 | return False
24 | ping["lastpingtimeout"] = time.time()+self.pingdelay
25 | return True
26 |
27 | self.pings.append({
28 | "product":deepcopy(product),
29 | "lastpingtimeout":time.time()+self.pingdelay,
30 | "timeout":-1
31 | })
32 | return True
33 |
--------------------------------------------------------------------------------
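A short sketch, not part of the repository, of how the de-duplication behaves: the first ping for a product is allowed, a second ping within `pingdelay` seconds puts the product on a `timeout`, and further pings are suppressed until that timeout expires. The values mirror the ones `eleventeamsports.py` passes in.

```python
from timeout import timeout

dedupe = timeout(timeout=120, pingdelay=20)
product = {"pid": "ABC123", "name": "Example Shoe"}

dedupe.ping(product)   # True  - first ping is allowed
dedupe.ping(product)   # False - second ping within 20s puts the product on a 120s timeout
dedupe.ping(product)   # False - still inside the timeout window
```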
/database.py:
--------------------------------------------------------------------------------
1 | from pymongo import MongoClient
2 | import traceback
3 | import time
4 | import os
5 |
6 | def getGroups():
7 | """
8 | Get all Cooking Groups from the Database
9 | """
10 | groups = []
11 | for group in client["groups"].find({},{'_id': False}):
12 | groups.append(group)
13 | return groups
14 |
15 | def getProxys():
16 | """
17 | Get all Proxys from the Database
18 | """
19 | proxys = {}
20 | for proxy in client["proxys"].find({},{'_id': False}):
21 | proxys[proxy["name"]] = proxy["proxys"]
22 | return proxys
23 |
24 |
25 | def getSettings():
26 | """
27 | Get the Settings from the Database
28 | """
29 | return client["settings"].find_one({},{'_id': False})
30 |
31 |
32 | def Connect():
33 | """
34 | Connect Database
35 | """
36 | global client
37 | try:
38 | client = MongoClient(os.environ['DB'])["monitorsolutions"]
39 | except Exception as e:
40 | print(f"[DATABASE] Exception found: {traceback.format_exc()}")
41 | time.sleep(10)
42 | Connect()
43 |
44 |
45 | #Connect Database when the Script is imported
46 | Connect()
47 |
48 |
49 |
--------------------------------------------------------------------------------
/webhook.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | import json
3 | import requests as rq
4 |
5 |
6 | def send(group,webhook,site,title,url,thumbnail,fields,logger):
7 | """
8 | Sends a Discord webhook notification to the specified webhook URL
9 | """
10 | fields.append({
11 | "name": "Links",
12 | "value": f"[StockX](https://stockx.com/search?s={title.replace(' ','+')}) | [GOAT](https://www.goat.com/search?query={title.replace(' ','+')}) | [Wethenew](https://wethenew.com/search?type=product&q={title.replace(' ','+')})",
13 | "inline": False
14 | })
15 |
16 | data = {
17 | "username": group["Name"],
18 | "avatar_url": group["Avatar_Url"],
19 | "embeds": [{
20 | "title": title,
21 | "url": url,
22 | "thumbnail": {"url": thumbnail},
23 | "fields": fields,
24 | "color": group['Colour'],
25 | "footer": {
26 | "text": f"{group['Name']} | {datetime.now().strftime('%d.%m.%Y %H:%M:%S')}",
27 | "icon_url": group["Avatar_Url"]
28 | },
29 | "author": {
30 | "name": site
31 | }
32 | }
33 | ]
34 | }
35 |
36 | result = rq.post(webhook, data=json.dumps(data), headers={"Content-Type": "application/json"})
37 |
38 | result.raise_for_status()
39 |
40 | logger.info(msg=f'[{site}] Successfully sent Discord notification to {group["Name"]} with product {title}')
41 | print(f'[{site}] Successfully sent Discord notification to {group["Name"]} with product {title}')
42 |
--------------------------------------------------------------------------------
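A sketch of a call to this helper, not part of the repository. The group document comes from the `groups` collection and must contain `Name`, `Avatar_Url`, `Colour` and a webhook URL per site; all values below are placeholders.

```python
import logging
import webhook

group = {
    "Name": "Example Group",
    "Avatar_Url": "https://example.com/logo.png",
    "Colour": 65280,
    "courir": "https://discord.com/api/webhooks/...",
}

webhook.send(
    group=group,
    webhook=group["courir"],
    site="courir",
    title="Example Shoe",
    url="https://www.courir.com/fr/p/example-123456.html",
    thumbnail="https://example.com/shoe.jpg",
    fields=[{"name": "Price", "value": "99.99€", "inline": True}],
    logger=logging.getLogger("example"),
)
```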
/proxymanager.py:
--------------------------------------------------------------------------------
1 | import database
2 | import traceback
3 | import time
4 | import random
5 | from multiprocessing import Lock
6 |
7 | PROXYS = {}
8 | EXCLUDE = ["packet"]
9 |
10 | class ProxyManager():
11 | @staticmethod
12 | def updateProxys():
13 | """
14 | Fetch newest proxys from the database
15 | """
16 | global PROXYS
17 | try:
18 | newProxys = database.getProxys()
19 | except Exception as e:
20 | print(f"[DATABASE] Exception found: {traceback.format_exc()}")
21 | time.sleep(10)
22 | database.Connect()
23 | return ProxyManager.updateProxys()
24 |
25 |
26 | if newProxys != PROXYS:
27 | PROXYS = newProxys
28 | return True
29 | else:
30 | return False
31 |
32 |
33 | def __init__(self, proxygroups=[]):
34 | self.proxygroups = proxygroups
35 | self.proxys = []
36 | self.lock = Lock()
37 |
38 | for group in PROXYS:
39 | if (self.proxygroups and group in self.proxygroups) or (not self.proxygroups and group not in EXCLUDE):
40 | self.proxys.append(PROXYS[group])
41 |
42 | self.proxys = sum(self.proxys, [])
43 | self.currentProxy = random.randint(0, len(self.proxys)-1) if self.proxys else 0
44 |
45 |
46 | def next(self):
47 | """
48 | Get the next Proxy
49 | """
50 | with self.lock:
51 | self.currentProxy = 0 if self.currentProxy >= (len(self.proxys) - 1) or not self.proxys else self.currentProxy + 1
52 |
53 | return {
54 | "http":f"http://{self.proxys[self.currentProxy]}",
55 | "https":f"http://{self.proxys[self.currentProxy]}"
56 | } if self.proxys else {}
--------------------------------------------------------------------------------
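A minimal sketch, not part of the repository, of how the monitors use this class: `updateProxys()` loads the proxy groups from MongoDB (so the `DB` environment variable must point at a reachable database), then each monitor rotates through its own `ProxyManager`; the group name is an example.

```python
import requests
from proxymanager import ProxyManager

ProxyManager.updateProxys()                        # fill PROXYS from the proxys collection
proxys = ProxyManager(proxygroups=["residential"])

# next() returns a requests-style proxies dict, or {} when no proxys are configured
response = requests.get("https://www.example.com", proxies=proxys.next(), timeout=10)
```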
/tls.py:
--------------------------------------------------------------------------------
1 | import requests as rq
2 | import os
3 |
4 | TLSCLIENT = f"http://{os.environ['GATEWAY']}:8082"
5 |
6 |
7 | def addParamsToHeader(url, headers, proxies):
8 | """
9 | Add the url and the proxy to the headers to let the tlsclient use them
10 | """
11 | if proxies != {}:
12 | headers["Poptls-Proxy"] = proxies["http"]
13 | headers["Poptls-Url"] = url
14 | return headers
15 |
16 | def parseCookies(res, url):
17 | """
18 | Parse the cookies from the headers into the cookiejar of the response
19 | """
20 | res.cookies.clear()
21 | sescookies = res.headers["session-cookies"].split('; ')
22 | for x in range(len(sescookies)):
23 | if sescookies[x] == "":
24 | continue
25 | domain = url.split('://')[1]
26 | if '/' in domain:
27 | domain = domain.split('/')[0]
28 | res.cookies.set(sescookies[x].split('=')[0], sescookies[x].split('=')[1], domain=domain)
29 | del res.headers["session-cookies"]
30 | return res
31 |
32 | def get(url, headers={}, proxies={}, timeout=10, **kargs):
33 | """
34 | get request wrapper
35 | """
36 | headers = addParamsToHeader(url=url, headers=headers, proxies=proxies)
37 |
38 | res = rq.get(TLSCLIENT, headers=headers, timeout=timeout, **kargs)
39 |
40 | res = parseCookies(res, url)
41 |
42 | return res
43 |
44 | def post(url, headers={}, proxies={}, timeout=10, **kargs):
45 | """
46 | post request wrapper
47 | """
48 | headers = addParamsToHeader(url=url, headers=headers, proxies=proxies)
49 |
50 | res = rq.post(TLSCLIENT, headers=headers, timeout=timeout, **kargs)
51 |
52 | res = parseCookies(res, url)
53 |
54 | return res
55 |
56 | def head(url, headers={}, proxies={}, timeout=10, **kargs):
57 | """
58 | head request wrapper
59 | """
60 | headers = addParamsToHeader(url=url, headers=headers, proxies=proxies)
61 |
62 | res = rq.head(TLSCLIENT, headers=headers, timeout=timeout, **kargs)
63 |
64 | res = parseCookies(res, url)
65 |
66 | return res
--------------------------------------------------------------------------------
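A usage sketch, not part of the repository, mirroring how the monitors call these wrappers: the request actually goes to the local `tls-api` container on port 8082 (see `docker-compose.yml` and the `GATEWAY` variable), which forwards it to the target taken from the `Poptls-Url` header. URL and headers are examples.

```python
import tls
from user_agent import CHROME_USERAGENT
from proxymanager import ProxyManager

proxys = ProxyManager()

res = tls.get(
    "https://www.example.com/collections/new",
    headers={"user-agent": CHROME_USERAGENT},
    proxies=proxys.next(),
)
res.raise_for_status()
print(res.status_code, len(res.text))
```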
/README.md:
--------------------------------------------------------------------------------
1 | # Monitor-Service
2 |
3 | A monitor service that watched over 100 pages for exclusive items, using keywords or product IDs.
4 | It helped members of cook groups make several thousand euros in profit from exclusive restocks of sneakers, collectibles,
5 | and other profitable items.
6 |
7 | ## Supported Sites
8 |
9 | - aboutyou
10 | - asos
11 | - bstn
12 | - courir
13 | - demandware
14 | - eleventeamsports
15 | - kickz
16 | - newbalance
17 | - nittygrittystore
18 | - pid_bruteforcer
19 | - prodirectsoccer_release
20 | - prodirectsoccer
21 | - salomen
22 | - shopify_priceerror
23 | - shopify
24 | - svd
25 | - wethenew
26 | - zulassungsstelle
27 | - many more....
28 |
29 | ## Installation
30 |
31 | Use the package manager [pip](https://pip.pypa.io/en/stable/) to install the required packages.
32 |
33 | ```bash
34 | pip install -r requirements.txt
35 | ```
36 | ## Settings
37 | - Set the ```DB``` environment variable to your **MongoDB** connection string:
38 | ```env
39 | DB=mongodb+srv://......
40 | ```
41 | ## Usage
42 | Create three collections in the **MongoDB** database: ```groups```, ```proxys``` and ```settings```:
43 | 1. ```groups``` - all the cook groups with their custom settings and a webhook URL for each site
44 | 2. ```proxys``` - proxy groups that can be assigned to specific sites to get around their IP restrictions
45 | 3. ```settings``` - all global settings, such as the monitored keywords and PIDs, the sites to run and their delays (example documents are sketched below)
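A minimal sketch of what documents in these collections could look like; the field names are taken from ```database.py```, ```webhook.py``` and ```run.py```, so treat the exact schema as an assumption:

```python
# groups - one document per cook group, with a webhook URL keyed by site name
{"Name": "Example Group", "Avatar_Url": "https://example.com/logo.png", "Colour": 65280,
 "courir": "https://discord.com/api/webhooks/..."}

# proxys - one document per proxy group
{"name": "residential", "proxys": ["user:pass@1.2.3.4:8000", "user:pass@5.6.7.8:8000"]}

# settings - a single document holding the per-monitor configuration
{"courir": {"proxys": ["residential"], "delay": 30, "pids": ["123456"]}}
```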
46 |
47 | Many sites are protected by bot protection that has to be bypassed.
48 | - You can use a ```tls proxy``` that imitates the TLS fingerprint of a real browser, for example to get past Cloudflare
49 | - Many ```proxys``` to avoid getting rate limited by the sites, plus the option to map specific proxy groups to sites when they ban a subnet
50 | - An ```image proxy``` to bypass the IP restrictions of the sites' CDNs when they ban Discord's IPs
51 |
52 | ## Docker
53 | 1. Build the Docker image
54 | ```
55 | docker build -t monitor-service .
56 | ```
57 |
58 | 2. Run it
59 | ```
60 | docker run -d --name monitor-service --env-file .env monitor-service
61 | ```
62 |
63 | You can also use ```docker compose``` to spin up multiple containers of the microservices you need.
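For example, from the repository root (this assumes the ```docker-compose.yml``` and ```.env``` shown above):

```bash
docker compose up -d
```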
64 |
65 | ## Example
66 |
67 |
68 |
69 | ## License
70 | [MIT](https://choosealicense.com/licenses/mit/)
--------------------------------------------------------------------------------
/monitors/zulassungsstelle.py:
--------------------------------------------------------------------------------
1 | from multiprocessing import Process
2 | from bs4 import BeautifulSoup
3 | from proxymanager import ProxyManager
4 | from user_agent import CHROME_USERAGENT
5 | import requests as rq
6 | import time
7 | import json
8 | import loggerfactory
9 | import traceback
10 | import urllib3
11 | import webhook
12 | import threadrunner
13 |
14 | SITE = __name__.split(".")[1]
15 |
16 | class zulassungsstelle(Process):
17 | def __init__(self, groups, settings):
18 | Process.__init__(self)
19 | self.groups = groups
20 | self.proxys = ProxyManager(settings["proxys"])
21 | self.delay = settings["delay"]
22 | self.firstScrape = True
23 | self.logger = loggerfactory.create(SITE)
24 | self.session = rq.Session()
25 |
26 | self.INSTOCK = []
27 |
28 | def discord_webhook(self, group, appointment):
29 | """
30 | Sends a Discord webhook notification to the specified webhook URL
31 | """
32 |
33 | fields = []
34 | fields.append({"name": "Status", "value": f"**Neuer Termin**", "inline": True})
35 |
36 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=appointment, url="https://tevis.ekom21.de/fra/select2?md=2", thumbnail="https://www.auto-data.net/images/f74/Volkswagen-Golf-VII-Variant.jpg", fields=fields, logger=self.logger)
37 |
38 |
39 | def gen_session(self):
40 | response = self.session.get("https://tevis.ekom21.de/fra/select2?md=2")
41 | response.raise_for_status()
42 |
43 | def scrape_site(self):
44 | appointments = []
45 |
46 | response = self.session.get("https://tevis.ekom21.de/fra/suggest?mdt=147&select_cnc=1&cnc-930=0&cnc-932=0&cnc-935=0&cnc-933=0&cnc-939=0&cnc-931=0&cnc-934=0&cnc-929=0&cnc-942=0&cnc-936=0&cnc-941=1&cnc-940=0&cnc-938=0&cnc-872=0&cnc-879=0&cnc-925=0")
47 | response.raise_for_status()
48 |
49 | output = BeautifulSoup(response.text, 'html.parser')
50 | app = output.find('div', {'id': 'sugg_accordion'}).find_all('h3')
51 |
52 | for appointment in app:
53 | appointments.append(appointment["title"])
54 |
55 | self.logger.info(msg=f'[{SITE}] Successfully scraped Appointments')
56 | return appointments
57 |
58 |
59 | def run(self):
60 |         """
61 |         Initiates the monitor
62 |         """
63 |         urllib3.disable_warnings()
64 |
65 | print(f'STARTING {SITE} MONITOR')
66 |
67 | while True:
68 | try:
69 | startTime = time.time()
70 |
71 | self.gen_session()
72 | appointments = self.scrape_site()
73 |
74 | for app in appointments:
75 | if app not in self.INSTOCK and not self.firstScrape:
76 | print(f"[{SITE}] {app} got restocked")
77 | self.logger.info(msg=f"[{SITE}] {app} got restocked")
78 | for group in self.groups:
79 | #Send Ping to each Group
80 | threadrunner.run(
81 | self.discord_webhook,
82 | group=group,
83 | appointment=app
84 | )
85 |
86 | self.INSTOCK = appointments
87 |
88 | # Allows changes to be notified
89 | self.firstScrape = False
90 |
91 | self.logger.info(msg=f'[{SITE}] Checked appointments in {time.time()-startTime} seconds')
92 | self.session.cookies.clear()
93 |
94 | time.sleep(self.delay)
95 |
96 | except Exception as e:
97 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
98 | self.logger.error(e)
99 | time.sleep(5)
--------------------------------------------------------------------------------
/monitors/pid_bruteforcer.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Event
2 | from timeout import timeout
3 | from proxymanager import ProxyManager
4 | from user_agent import CHROME_USERAGENT
5 | import time
6 | import loggerfactory
7 | import traceback
8 | import urllib3
9 | import tls
10 | import webhook
11 | import threadrunner
12 |
13 | SITE = __name__.split(".")[1]
14 |
15 | class pid_bruteforcer(Thread):
16 | def __init__(self, groups, settings):
17 | Thread.__init__(self)
18 | self.daemon = True
19 | self.groups = groups
20 | self.proxys = ProxyManager(settings["proxys"])
21 | self.delay = settings["delay"]
22 | self.site= settings["site"]
23 | self.startPid = settings["startPid"]
24 | self.firstScrape = True
25 | self.stop = Event()
26 | self.logger = loggerfactory.create(SITE)
27 |
28 | self.INSTOCK = []
29 |
30 | def discord_webhook(self, group, title, pid, url, thumbnail, price):
31 | """
32 | Sends a Discord webhook notification to the specified webhook URL
33 | """
34 |
35 | fields = []
36 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
37 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
38 | fields.append({"name": "Status", "value": f"**New Add**", "inline": True})
39 |
40 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
41 |
42 |
43 | def scrape_site(self, query):
44 | """
45 | Scrapes the specified eleventeamsports query site and adds items to array
46 | """
47 | items = []
48 |
49 | # Makes request to site
50 | html = tls.get(f"https://www.11teamsports.com/de-de/ds/?type=deep_search&q={query}&limit=10000&offset=0&sort=created+desc", headers={
51 | 'user-agent': CHROME_USERAGENT
52 | }, proxies=self.proxys.next())
53 | html.raise_for_status()
54 | products = html.json()["hits"]["hit"]
55 |
56 | # Stores particular details in array
57 | for product in products:
58 | product = product["fields"]
59 |
60 | #Only Ping shoes
61 | if "Schuhe" in product["category"]:
62 | product_item = {
63 | "name":product["title"],
64 | "pid":product["sku"],
65 | "price":str(product["price"])+" €",
66 | "image":product["media_file"],
67 | "url":product["deeplink"]
68 | }
69 | items.append(product_item)
70 |
71 |
72 | self.logger.info(msg=f'[{SITE}] Successfully scraped query {query}')
73 | return items
74 |
75 |
76 | def run(self):
77 |         """
78 |         Initiates the monitor
79 |         """
80 |         urllib3.disable_warnings()
81 |
82 | print(f'STARTING {SITE} MONITOR')
83 |
84 | while not self.stop.is_set():
85 | try:
86 | startTime = time.time()
87 |
88 | products = []
89 |
90 | # Makes request to site and stores products
91 | for query in self.querys:
92 |
93 | items = self.scrape_site(query)
94 |
95 | for product in items:
96 | if product["pid"] not in self.blacksku:
97 | # Check if Product is INSTOCK
98 | if product["pid"] not in self.INSTOCK and not self.firstScrape and self.timeout.ping(product):
99 | print(f"[{SITE}] {product['name']} got restocked")
100 | self.logger.info(msg=f"[{SITE}] {product['name']} got restocked")
101 | for group in self.groups:
102 | #Send Ping to each Group
103 | threadrunner.run(
104 | self.discord_webhook,
105 | group=group,
106 | title=product['name'],
107 | pid=product['pid'],
108 | url=product['url'],
109 | thumbnail=product['image'],
110 | price=product['price']
111 | )
112 | products.append(product["pid"])
113 |
114 | self.stop.wait(self.delay/len(self.querys))
115 |
116 | self.INSTOCK = products
117 |
118 | self.firstScrape = False
119 |
120 | self.logger.info(msg=f'[{SITE}] Checked all querys in {time.time()-startTime} seconds')
121 |
122 | except Exception as e:
123 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
124 | self.logger.error(e)
125 | self.stop.wait(4)
--------------------------------------------------------------------------------
/monitors/eleventeamsports.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Event
2 | from timeout import timeout
3 | from proxymanager import ProxyManager
4 | from user_agent import CHROME_USERAGENT
5 | from multiprocessing import Process
6 | import time
7 | import loggerfactory
8 | import traceback
9 | import urllib3
10 | import tls
11 | import webhook
12 | import threadrunner
13 |
14 | SITE = __name__.split(".")[1]
15 |
16 | class eleventeamsports(Process):
17 | def __init__(self, groups, settings):
18 | Process.__init__(self)
19 | self.groups = groups
20 | self.proxys = ProxyManager(settings["proxys"])
21 | self.delay = settings["delay"]
22 | self.querys= settings["query"]
23 | self.blacksku = settings["blacksku"]
24 | self.timeout = timeout(timeout=120, pingdelay=20)
25 | self.firstScrape = True
26 | self.logger = loggerfactory.create(SITE)
27 |
28 | self.INSTOCK = []
29 |
30 | def discord_webhook(self, group, title, pid, url, thumbnail, price):
31 | """
32 | Sends a Discord webhook notification to the specified webhook URL
33 | """
34 |
35 | fields = []
36 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
37 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
38 | fields.append({"name": "Status", "value": f"**New Add**", "inline": True})
39 |
40 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
41 |
42 |
43 | def scrape_site(self, query):
44 | """
45 | Scrapes the specified eleventeamsports query site and adds items to array
46 | """
47 | items = []
48 |
49 | # Makes request to site
50 | html = tls.get(f"https://www.11teamsports.com/de-de/ds/?type=deep_search&q={query}&limit=10000&offset=0&sort=created+desc", headers={
51 | 'user-agent': CHROME_USERAGENT
52 | }, proxies=self.proxys.next())
53 | html.raise_for_status()
54 | products = html.json()["hits"]["hit"]
55 | html.close()
56 | # Stores particular details in array
57 | for product in products:
58 | product = product["fields"]
59 |
60 | #Only Ping shoes
61 | if "Schuhe" in product["category"]:
62 | product_item = {
63 | "name":product["title"],
64 | "pid":product["sku"],
65 | "price":str(product["price"])+" €",
66 | "image":product["media_file"],
67 | "url":product["deeplink"]
68 | }
69 | items.append(product_item)
70 |
71 |
72 | self.logger.info(msg=f'[{SITE}] Successfully scraped query {query}')
73 | return items
74 |
75 |
76 | def run(self):
77 |         """
78 |         Initiates the monitor
79 |         """
80 |         urllib3.disable_warnings()
81 |
82 | print(f'STARTING {SITE} MONITOR')
83 |
84 | while True:
85 | try:
86 | startTime = time.time()
87 |
88 | products = []
89 |
90 | # Makes request to site and stores products
91 | for query in self.querys:
92 |
93 | items = self.scrape_site(query)
94 |
95 | for product in items:
96 | if product["pid"] not in self.blacksku:
97 | # Check if Product is INSTOCK
98 | if product["pid"] not in self.INSTOCK and not self.firstScrape and self.timeout.ping(product):
99 | print(f"[{SITE}] {product['name']} got restocked")
100 | self.logger.info(msg=f"[{SITE}] {product['name']} got restocked")
101 | for group in self.groups:
102 | #Send Ping to each Group
103 | threadrunner.run(
104 | self.discord_webhook,
105 | group=group,
106 | title=product['name'],
107 | pid=product['pid'],
108 | url=product['url'],
109 | thumbnail=product['image'],
110 | price=product['price']
111 | )
112 | products.append(product["pid"])
113 |
114 | time.sleep(self.delay/len(self.querys))
115 |
116 | self.INSTOCK = products
117 |
118 | self.firstScrape = False
119 |
120 | self.logger.info(msg=f'[{SITE}] Checked all querys in {time.time()-startTime} seconds')
121 |
122 | except Exception as e:
123 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
124 | self.logger.error(e)
125 | time.sleep(4)
--------------------------------------------------------------------------------
/monitors/prodirectsoccer_release.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Event
2 | from multiprocessing import Process
3 | from user_agent import CHROME_USERAGENT
4 | from datetime import datetime, timedelta
5 | from proxymanager import ProxyManager
6 | import requests as rq
7 | import time
8 | import json
9 | import loggerfactory
10 | import traceback
11 | import urllib3
12 | import webhook
13 | import threadrunner
14 |
15 | SITE = __name__.split(".")[1]
16 |
17 | LAUNCHTIMEDELTA = 946684800 #01.01.2000 00.00H
18 |
19 | class prodirectsoccer_release(Process):
20 | def __init__(self, groups, site, releasecategory, settings):
21 | Process.__init__(self)
22 | self.site = site
23 | self.releasecategory = releasecategory
24 | self.groups = groups
25 | self.proxys = ProxyManager(settings["proxys"])
26 | self.delay = settings["delay"]
27 | self.querys= settings["query"]
28 | self.blacksku = settings["blacksku"]
29 | self.firstScrape = True
30 | self.logger = loggerfactory.create(f"{self.site}_release")
31 |
32 | self.INSTOCK = []
33 |
34 | def discord_webhook(self, group, title, pid, url, thumbnail, price, launch):
35 | """
36 | Sends a Discord webhook notification to the specified webhook URL
37 | """
38 | fields = []
39 | fields.append({"name": "Price", "value": f"{price}£", "inline": True})
40 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
41 | fields.append({"name": "Status", "value": f"**Release**", "inline": True})
42 |         fields.append({"name": "Launch-Time", "value": f"{datetime.fromtimestamp(launch).strftime('%d.%m.%Y %H:%M:%S')}", "inline": True})
43 |
44 | webhook.send(group=group, webhook=group[SITE], site=f"{self.site}_release", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
45 |
46 |
47 | def scrape_release_site(self,query):
48 | """
49 | Scrapes the specified prodirectsoccer release query site and adds items to array
50 | """
51 | items = []
52 |
53 | url = f"https://query.published.live1.suggest.eu1.fredhopperservices.com/pro_direct/json?scope=//catalog01/en_GB/categories%3E%7B{self.releasecategory}%7D&search={query}&callback=jsonpResponse"
54 |
55 | # Makes request to site
56 | html = rq.get(url, headers={
57 | 'user-agent': CHROME_USERAGENT
58 | }, proxies=self.proxys.next())
59 | html.raise_for_status()
60 |
61 | products = json.loads(html.text[14:-1])["suggestionGroups"][1]["suggestions"]
62 | html.close()
63 |
64 | # Stores particular details in array
65 | for product in products:
66 | product_item = {
67 | "name":product["name"],
68 | "pid":product["quickref"],
69 | "price":product["currentprice"].replace("000",""),
70 | "image":product["_thumburl"],
71 | "url":product["producturl"],
72 | "launch":LAUNCHTIMEDELTA+(int(product["launchtimedelta"])*60)
73 | }
74 | items.append(product_item)
75 |
76 |
77 | self.logger.info(msg=f'[{self.site}_release] Successfully scraped releases for query {query}')
78 | return items
79 |
80 |
81 | def run(self):
82 |         """
83 |         Initiates the monitor
84 |         """
85 |         urllib3.disable_warnings()
86 |
87 | print(f'STARTING {self.site}_release MONITOR')
88 |
89 | while True:
90 | try:
91 | startTime = time.time()
92 |
93 | products = []
94 |
95 | for query in self.querys:
96 |
97 | # Make request to release-site and stores products
98 | items = self.scrape_release_site(query)
99 | for product in items:
100 | if product["pid"] not in self.blacksku and datetime.fromtimestamp(product['launch'])>(datetime.now()-timedelta(days=1)):
101 | # Check if Product is INSTOCK
102 | if product not in self.INSTOCK and not self.firstScrape:
103 | print(f"[{self.site}_release] {product['name']} got restocked")
104 | self.logger.info(msg=f"[{self.site}_release] {product['name']} got restocked")
105 | for group in self.groups:
106 | #Send Ping to each Group
107 | threadrunner.run(
108 | self.discord_webhook,
109 | group=group,
110 | title=product['name'],
111 | pid=product['pid'],
112 | url=product['url'],
113 | thumbnail=product['image'],
114 | price=product['price'],
115 | launch=product['launch']
116 | )
117 |
118 | products.append(product)
119 |
120 | self.INSTOCK = products
121 |
122 | self.firstScrape = False
123 |
124 | self.logger.info(msg=f'[{self.site}_release] Checked all querys in {time.time()-startTime} seconds')
125 | time.sleep(self.delay)
126 |
127 | except Exception as e:
128 | print(f"[{self.site}_release] Exception found: {traceback.format_exc()}")
129 | self.logger.error(e)
130 | time.sleep(5)
--------------------------------------------------------------------------------
/monitors/courir-slow.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Event
2 | from multiprocessing import Process
3 | from bs4 import BeautifulSoup
4 | from proxymanager import ProxyManager
5 | from user_agent import CHROME_USERAGENT
6 | import tls
7 | import time
8 | import os
9 | import loggerfactory
10 | import traceback
11 | import urllib3
12 | import webhook
13 | import threadrunner
14 |
15 | SITE = __name__.split(".")[1]
16 |
17 | class courir(Process):
18 | def __init__(self, groups, settings):
19 | Process.__init__(self)
20 | self.groups = groups
21 | self.proxys = ProxyManager(settings["proxys"])
22 | self.delay = settings["delay"]
23 | self.pids = settings["pids"]
24 | self.firstScrape = True
25 | self.logger = loggerfactory.create(SITE)
26 |
27 | self.INSTOCK = []
28 |
29 | def discord_webhook(self, group, title, pid, url, thumbnail, price):
30 | """
31 | Sends a Discord webhook notification to the specified webhook URL
32 | """
33 |
34 | fields = []
35 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
36 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
37 | fields.append({"name": "Status", "value": f"**New Add**", "inline": True})
38 |
39 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
40 |
41 |
42 | def scrape_site(self):
43 | """
44 | Scrapes the courir pids
45 | """
46 | items = []
47 |
48 | headers = {
49 | 'accept': '*/*',
50 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
51 | 'cache-control': 'no-cache',
52 | 'content-type': 'application/x-www-form-urlencoded',
53 | 'origin': 'https://www.courir.com',
54 | 'pragma': 'no-cache',
55 | 'referer': 'https://www.courir.com/fr/c/accessoires/casquette-bob/',
56 | 'sec-ch-ua': '"Google Chrome";v="113", "Chromium";v="113", "Not-A.Brand";v="24"',
57 | 'sec-ch-ua-mobile': '?0',
58 | 'sec-ch-ua-platform': '"Windows"',
59 | 'sec-fetch-dest': 'empty',
60 | 'sec-fetch-mode': 'cors',
61 | 'sec-fetch-site': 'same-origin',
62 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36',
63 | }
64 |
65 | data = {
66 | 'scid': '663359b5e78b3462df55ef4a79',
67 | }
68 |
69 | for x in range(len(self.pids)):
70 | data[f"pid{x}"]=self.pids[x]
71 |
72 | # Makes request to site
73 | html = tls.post(
74 | 'https://www.courir.com/on/demandware.store/Sites-Courir-FR-Site/fr_FR/CQRecomm-Start',
75 | headers=headers,
76 | data=data,
77 | )
78 | html.raise_for_status()
79 |
80 | output = BeautifulSoup(html.text, 'html.parser')
81 | html.close()
82 | products = output.find_all('div', {'class': 'product-recommendations__item js--product-recommendations__item js-product-tile'})
83 |
84 | # Stores particular details in array
85 | for product in products:
86 | link = product.find('a')
87 | product_item = {
88 | "name":link["title"],
89 | "pid":product["data-itemid"],
90 | "price":product.find('meta', {'itemprop': 'price'})["content"]+"€",
91 |                 # thumbnail goes through the external resize service instead of the raw CDN url
92 |                 "image":f"https://imageresize.24i.com/?w=300&url={product.find('img')['src']}",
93 | "url":link["href"]
94 | }
95 | items.append(product_item)
96 |
97 | self.logger.info(msg=f'[{SITE}] Successfully scraped {len(self.pids)} pids')
98 | return items
99 |
100 |
101 | def run(self):
102 |         """
103 |         Initiates the monitor
104 |         """
105 |         urllib3.disable_warnings()
106 |
107 | print(f'STARTING {SITE} MONITOR')
108 |
109 | while True:
110 | try:
111 | startTime = time.time()
112 |
113 | products = []
114 |
115 | # Makes request to query-site and stores products
116 | items = self.scrape_site()
117 | for product in items:
118 | # Check if Product is INSTOCK
119 | if product["pid"] not in self.INSTOCK and not self.firstScrape:
120 | print(f"[{SITE}] {product['name']} got restocked")
121 | self.logger.info(msg=f"[{SITE}] {product['name']} got restocked")
122 | for group in self.groups:
123 | #Send Ping to each Group
124 | threadrunner.run(
125 | self.discord_webhook,
126 | group=group,
127 | title=product['name'],
128 | pid=product['pid'],
129 | url=product['url'],
130 | thumbnail=product['image'],
131 | price=product['price']
132 | )
133 |
134 | products.append(product["pid"])
135 |
136 | self.INSTOCK = products
137 |
138 | # Allows changes to be notified
139 | self.firstScrape = False
140 |
141 | self.logger.info(msg=f'[{SITE}] Checked all pids in {time.time()-startTime} seconds')
142 |
143 | time.sleep(self.delay)
144 |
145 | except Exception as e:
146 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
147 | self.logger.error(e)
148 | time.sleep(5)
--------------------------------------------------------------------------------
/monitors/prodirectsoccer.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Event
2 | from multiprocessing import Process
3 | from bs4 import BeautifulSoup
4 | from proxymanager import ProxyManager
5 | from user_agent import CHROME_USERAGENT
6 | import time
7 | import json
8 | import loggerfactory
9 | import traceback
10 | import urllib3
11 | import webhook
12 | import tls
13 | import threadrunner
14 |
15 | SITE = __name__.split(".")[1]
16 |
17 | class prodirectsoccer(Process):
18 | def __init__(self, groups, settings):
19 | Process.__init__(self)
20 | self.groups = groups
21 | self.proxys = ProxyManager(settings["proxys"])
22 | self.delay = settings["delay"]
23 | self.querys= settings["query"]
24 | self.blacksku = settings["blacksku"]
25 | self.firstScrape = True
26 | self.logger = loggerfactory.create(SITE)
27 |
28 | self.INSTOCK = []
29 |
30 | def discord_webhook(self, group, title, pid, url, thumbnail, price):
31 | """
32 | Sends a Discord webhook notification to the specified webhook URL
33 | """
34 |
35 | fields = []
36 | fields.append({"name": "Price", "value": f"{price}£", "inline": True})
37 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
38 | fields.append({"name": "Status", "value": f"**New Add**", "inline": True})
39 |
40 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
41 |
42 |
43 | def scrape_site(self, query):
44 | """
45 | Scrapes the specified prodirectsoccer query site and adds items to array
46 | """
47 | items = []
48 |
49 | #Page Counter
50 | page = 1
51 |
52 | headers = {
53 | 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
54 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
55 | 'sec-ch-ua': '"Chromium";v="106", "Google Chrome";v="106", "Not;A=Brand";v="99"',
56 | 'sec-ch-ua-mobile': '?0',
57 | 'sec-ch-ua-platform': '"Windows"',
58 | 'sec-fetch-dest': 'document',
59 | 'sec-fetch-mode': 'navigate',
60 | 'sec-fetch-site': 'none',
61 | 'sec-fetch-user': '?1',
62 | 'upgrade-insecure-requests': '1',
63 | 'user-agent': CHROME_USERAGENT,
64 | }
65 |
66 | #Scrape all available Pages
67 | while True:
68 | # Makes request to site
69 | html = tls.get(f"https://www.prodirectsoccer.com/search/?qq={query}&pg={page}", headers=headers, proxies=self.proxys.next())
70 | html.raise_for_status()
71 |
72 | output = BeautifulSoup(html.text, 'html.parser')
73 | html.close()
74 | products = output.find_all('a', {'class': 'product-thumb__link'})
75 | if not products:
76 | break
77 |
78 | # Stores particular details in array
79 | for product in products:
80 | info = json.loads(product["data-gtmi"])
81 | product_item = {
82 | "name":info["name"],
83 | "pid":info["id"],
84 | "price":info["price"],
85 | "image":product.find('img')["data-src"],
86 | "url":product["href"]
87 | }
88 | items.append(product_item)
89 |
90 | page+=1
91 |
92 | self.logger.info(msg=f'[{SITE}] Successfully scraped Query {query}')
93 | return items
94 |
95 |
96 | def run(self):
97 |         """
98 |         Initiates the monitor
99 |         """
100 |         urllib3.disable_warnings()
101 |
102 | print(f'STARTING {SITE} MONITOR')
103 |
104 | while True:
105 | try:
106 | startTime = time.time()
107 |
108 | products = []
109 |
110 | for query in self.querys:
111 | # Makes request to query-site and stores products
112 | items = self.scrape_site(query)
113 | for product in items:
114 | if product["pid"] not in self.blacksku:
115 | # Check if Product is INSTOCK
116 | if product["pid"] not in self.INSTOCK and not self.firstScrape:
117 | print(f"[{SITE}] {product['name']} got restocked")
118 | self.logger.info(msg=f"[{SITE}] {product['name']} got restocked")
119 | for group in self.groups:
120 | #Send Ping to each Group
121 | threadrunner.run(
122 | self.discord_webhook,
123 | group=group,
124 | title=product['name'],
125 | pid=product['pid'],
126 | url=product['url'],
127 | thumbnail=product['image'],
128 | price=product['price']
129 | )
130 |
131 | products.append(product["pid"])
132 |
133 | self.INSTOCK = products
134 |
135 |
136 | # Allows changes to be notified
137 | self.firstScrape = False
138 |
139 | self.logger.info(msg=f'[{SITE}] Checked all querys in {time.time()-startTime} seconds')
140 |
141 | time.sleep(self.delay)
142 |
143 | except Exception as e:
144 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
145 | self.logger.error(e)
146 | time.sleep(5)
--------------------------------------------------------------------------------
/monitors/salomen.py:
--------------------------------------------------------------------------------
1 | from multiprocessing import Process
2 | from proxymanager import ProxyManager
3 | import time
4 | import loggerfactory
5 | import traceback
6 | import urllib3
7 | import webhook
8 | import requests as rq
9 | import threadrunner
10 |
11 | SITE = __name__.split(".")[1]
12 |
13 | class salomen(Process):
14 | def __init__(self, groups, settings):
15 | Process.__init__(self)
16 | self.groups = groups
17 | self.proxys = ProxyManager(settings["proxys"])
18 | self.delay = settings["delay"]
19 | self.querys= settings["query"]
20 | self.blacksku = settings["blacksku"]
21 | self.firstScrape = True
22 | self.logger = loggerfactory.create(SITE)
23 |
24 | self.INSTOCK = []
25 |
26 | def discord_webhook(self, group, title, pid, url, thumbnail, price):
27 | """
28 | Sends a Discord webhook notification to the specified webhook URL
29 | """
30 |
31 | fields = []
32 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
33 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
34 | fields.append({"name": "Status", "value": f"**New Add**", "inline": True})
35 |
36 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
37 |
38 |
39 | def scrape_site(self, query):
40 | """
41 | Scrapes the specified salomen query site and adds items to array
42 | """
43 | items = []
44 |
45 |
46 | headers = {
47 | 'Accept': '*/*',
48 | 'Accept-Language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
49 | 'Cache-Control': 'no-cache',
50 | 'Connection': 'keep-alive',
51 | 'Origin': 'https://www.salomon.com',
52 | 'Pragma': 'no-cache',
53 | 'Referer': 'https://www.salomon.com/',
54 | 'Sec-Fetch-Dest': 'empty',
55 | 'Sec-Fetch-Mode': 'cors',
56 | 'Sec-Fetch-Site': 'cross-site',
57 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36',
58 | 'content-type': 'application/x-www-form-urlencoded',
59 | 'sec-ch-ua': '"Google Chrome";v="113", "Chromium";v="113", "Not-A.Brand";v="24"',
60 | 'sec-ch-ua-mobile': '?0',
61 | 'sec-ch-ua-platform': '"Windows"',
62 | }
63 |
64 | data = '{"requests":[{"indexName":"prod_salomon_magento2_sln_fr_fr_products","params":"clickAnalytics=true&filters=NOT pcm_not_visible_by_reason_code: D2C&highlightPostTag=__/ais-highlight__&highlightPreTag=__ais-highlight__&hitsPerPage=50&maxValuesPerFacet=20&page=0&query='+query+'&tagFilters="}]}'
65 |
66 | html = rq.post(
67 | 'https://kq2xe2uch0-dsn.algolia.net/1/indexes/*/queries?x-algolia-agent=Algolia%20for%20JavaScript%20(4.17.0)%3B%20Browser%20(lite)%3B%20instantsearch.js%20(4.55.0)%3B%20JS%20Helper%20(3.13.0)&x-algolia-api-key=MWYxMWY1N2RkM2NlM2ZhZjA1MjkzYTdiMDA4Nzc0MDczMTg0ZGM2NzdjYjU2YTYxN2IyNWEwNGE5OTZhYWJmOHRhZ0ZpbHRlcnM9&x-algolia-application-id=KQ2XE2UCH0',
68 | headers=headers,
69 | data=data,
70 | )
71 | html.raise_for_status()
72 |
73 | products = html.json()["results"][0]["hits"]
74 |
75 | # Stores particular details in array
76 | for product in products:
77 | product_item = {
78 | "name":product["name"],
79 | "pid":product["sku"][0],
80 | "price":product["price"]["EUR"]["default_formated"],
81 | "image":product["image_url"],
82 | "url":product["url"]
83 | }
84 | items.append(product_item)
85 |
86 | self.logger.info(msg=f'[{SITE}] Successfully scraped Query {query}')
87 | return items
88 |
89 |
90 | def run(self):
91 |         """
92 |         Initiates the monitor
93 |         """
94 |         urllib3.disable_warnings()
95 |
96 | print(f'STARTING {SITE} MONITOR')
97 |
98 | while True:
99 | try:
100 | startTime = time.time()
101 |
102 | products = []
103 |
104 | for query in self.querys:
105 | # Makes request to query-site and stores products
106 | items = self.scrape_site(query)
107 | for product in items:
108 | if product["pid"] not in self.blacksku:
109 | # Check if Product is INSTOCK
110 | if product["pid"] not in self.INSTOCK and not self.firstScrape:
111 | print(f"[{SITE}] {product['name']} got restocked")
112 | self.logger.info(msg=f"[{SITE}] {product['name']} got restocked")
113 | for group in self.groups:
114 | #Send Ping to each Group
115 | threadrunner.run(
116 | self.discord_webhook,
117 | group=group,
118 | title=product['name'],
119 | pid=product['pid'],
120 | url=product['url'],
121 | thumbnail=product['image'],
122 | price=product['price']
123 | )
124 |
125 | products.append(product["pid"])
126 |
127 | self.INSTOCK = products
128 |
129 | # Allows changes to be notified
130 | self.firstScrape = False
131 |
132 | self.logger.info(msg=f'[{SITE}] Checked all querys in {time.time()-startTime} seconds')
133 |
134 | time.sleep(self.delay)
135 |
136 | except Exception as e:
137 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
138 | self.logger.error(e)
139 | time.sleep(5)
--------------------------------------------------------------------------------
/monitors/bstn.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Event
2 | from multiprocessing import Process
3 | from proxymanager import ProxyManager
4 | from user_agent import CHROME_USERAGENT
5 | from concurrent.futures import ThreadPoolExecutor
6 | import tls
7 | import time
8 | import loggerfactory
9 | import traceback
10 | import urllib3
11 | import os
12 | import webhook
13 | import threadrunner
14 |
15 | SITE = __name__.split(".")[1]
16 |
17 | class bstn(Process):
18 | def __init__(self, groups, settings):
19 | Process.__init__(self)
20 | self.groups = groups
21 | self.delay = settings["delay"]
22 | self.querys = settings["querys"]
23 | self.proxys = ProxyManager(settings["proxys"])
24 | self.blacksku = settings["blacksku"]
25 | self.firstScrape = True
26 | self.logger = loggerfactory.create(SITE)
27 |
28 | self.INSTOCK = {}
29 |
30 | def discord_webhook(self, group, title, pid, url, thumbnail, price):
31 | """
32 | Sends a Discord webhook notification to the specified webhook URL
33 | """
34 |
35 | fields = []
36 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
37 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
38 | fields.append({"name": "Status", "value": f"**New Add**", "inline": True})
39 |
40 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
41 |
42 |
43 | def scrape_site(self, query):
44 | """
45 | Scrapes the specified bstn query site and adds items to array
46 | """
47 | items = []
48 |
49 |         url = f"https://www.bstn.com/eu_de/rest/eu_de/V1/products-render-info?searchCriteria[pageSize]=10000&storeId=2&currencyCode=EUR&searchCriteria[currentPage]=1&searchCriteria[filter_groups][0][filters][0][field]=name&searchCriteria[filter_groups][0][filters][0][value]=%25{query}%25&searchCriteria[filter_groups][0][filters][0][condition_type]=like"
50 |
51 | headers = {
52 | 'authority': 'www.bstn.com',
53 | 'accept': 'application/json',
54 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
55 | 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"',
56 | 'sec-ch-ua-mobile': '?0',
57 | 'sec-ch-ua-platform': '"Windows"',
58 | 'sec-fetch-dest': 'document',
59 | 'sec-fetch-mode': 'navigate',
60 | 'sec-fetch-site': 'none',
61 | 'sec-fetch-user': '?1',
62 | 'upgrade-insecure-requests': '1',
63 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
64 | }
65 |
66 | # Makes request to site
67 | html = tls.get(url, headers=headers, proxies=self.proxys.next())
68 | html.raise_for_status()
69 | products = html.json()["items"]
70 | html.close()
71 | # Stores particular details in array
72 | for product in products:
73 | product_item = {
74 | "name":product["name"],
75 | "pid":product["id"],
76 | "price":str(product["price_info"]["final_price"])+" €",
77 | 'image': product["images"][0]["url"],
78 | "url":product["url"]
79 | }
80 | items.append(product_item)
81 |
82 |
83 | self.logger.info(msg=f'[{SITE}] Successfully scraped query {query}')
84 | return [query,items]
85 |
86 |
87 | def run(self):
88 |         """
89 |         Initiates the monitor
90 |         """
91 |         urllib3.disable_warnings()
92 |
93 | print(f'STARTING {SITE} MONITOR')
94 |
95 | for query in self.querys:
96 | self.INSTOCK[query] = []
97 |
98 | while True:
99 | try:
100 | startTime = time.time()
101 |
102 | # Makes request to each category
103 |                 with ThreadPoolExecutor(len(self.querys)) as executor:
104 | itemsSplited = [item for item in executor.map(self.scrape_site, self.querys)]
105 |
106 | for query, items in itemsSplited:
107 | products = []
108 |
109 | for product in items:
110 | if product["pid"] not in self.blacksku and any(query.lower() in product["name"].lower() for query in self.querys):
111 |
112 | # Check if Product is INSTOCK
113 | if not any([product["pid"] in query for query in self.INSTOCK.values()]) and not self.firstScrape:
114 | print(f"[{SITE}] {product['name']} got restocked")
115 | self.logger.info(msg=f"[{SITE}] {product['name']} got restocked")
116 | for group in self.groups:
117 | #Send Ping to each Group
118 | threadrunner.run(
119 | self.discord_webhook,
120 | group=group,
121 | title=product['name'],
122 | pid=product['pid'],
123 | url=product['url'],
124 | thumbnail=product['image'],
125 | price=product['price']
126 | )
127 | products.append(product["pid"])
128 |
129 | self.INSTOCK[query] = products
130 |
131 | # Allows changes to be notified
132 | self.firstScrape = False
133 |
134 | self.logger.info(msg=f'[{SITE}] Checked all querys in {time.time()-startTime} seconds')
135 | time.sleep(self.delay)
136 |
137 | except Exception as e:
138 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
139 | self.logger.error(e)
140 | time.sleep(3)
--------------------------------------------------------------------------------
/run.py:
--------------------------------------------------------------------------------
1 | import traceback
2 | import time
3 | import database
4 | import copy
5 |
6 | from monitors import aboutyou,shopify,wethenew,svd,prodirectsoccer,prodirectsoccer_release,eleventeamsports,asos,newbalance,shopify_priceerror,demandware_wishlist_morelist,bstn,courir,salomen
7 | from threading import Thread
8 | from proxymanager import ProxyManager
9 |
10 | cookgroups = database.getGroups()
11 | originalSettings = database.getSettings()
12 | ProxyManager.updateProxys()
13 |
14 | monitorPool = []
15 |
16 | def updateData():
17 | """
18 | Check settings, groups and proxys every 20 seconds for update
19 | """
20 | global originalSettings,cookgroups,proxys
21 | while True:
22 | try:
23 | newCookgroups = database.getGroups()
24 | newSettings = database.getSettings()
25 | except Exception as e:
26 | print(f"[DATABASE] Exception found: {traceback.format_exc()}")
27 | time.sleep(10)
28 |             database.Connect()
29 |             continue
30 | if originalSettings != newSettings or newCookgroups != cookgroups or ProxyManager.updateProxys():
31 | cookgroups = newCookgroups
32 | originalSettings = newSettings
33 | print("[UPDATER] Restart Monitors")
34 |
35 | #Restart every Monitor
36 | for mon in monitorPool:
37 | if isinstance(mon, Thread):
38 | mon.stop.set()
39 | else:
40 | mon.terminate()
41 |
42 | #Wait for each Monitor to stop
43 | for mon in monitorPool:
44 | if isinstance(mon, Thread) and mon.is_alive():
45 | mon.join()
46 |
47 | monitorPool.clear()
48 | #Start them with new Settings
49 | startMonitors()
50 |
51 | time.sleep(20)
52 |
53 |
54 | def filterGroups(sites):
55 | """
56 | Return groups that have a webhook of a specific site
57 | """
58 | filteredGroups = []
59 | for group in cookgroups:
60 | if any(site in group for site in sites):
61 | filteredGroups.append(group)
62 |
63 | return filteredGroups
64 |
65 | def startMonitors():
66 | """
67 | Start every Monitor in a Process
68 | """
69 |
70 | settings = copy.deepcopy(originalSettings)
71 |
72 | """
73 | #Create all Asos Monitors
74 | for region in settings["asos"]["regions"]:
75 | monitorPool.append(asos.asos(groups=filterGroups(["asos","asos_"+region[0]]),settings=settings["asos"],region=region[0],currency=region[1]))
76 |
77 | #Create all About You Monitors
78 | for store in settings["aboutyou"]["stores"]:
79 | monitorPool.append(aboutyou.aboutyou(groups=filterGroups(["aboutyou"]), settings=settings["aboutyou"], store=store[0], storeid=store[1]))
80 |
81 | #Create all Shopify Monitors
82 | shopifyGlobal = settings["shopify"]
83 | for s in shopifyGlobal["sites"]:
84 | if "keywords" in s:
85 | if s["keywords"]:
86 | s["keywords"] = s["keywords"]+shopifyGlobal["keywords"]
87 | else:
88 | s["keywords"] = shopifyGlobal["keywords"]
89 |
90 | if "tags" in s:
91 | if s["tags"]:
92 | s["tags"] = s["tags"]+shopifyGlobal["tags"]
93 | else:
94 | s["tags"] = shopifyGlobal["tags"]
95 |
96 | if "blacksku" in s and s["blacksku"]:
97 | s["blacksku"] = s["blacksku"]+shopifyGlobal["blacksku"]
98 | else:
99 | s["blacksku"] = shopifyGlobal["blacksku"]
100 |
101 | if "negativkeywords" in s and s["negativkeywords"]:
102 | s["negativkeywords"] = s["negativkeywords"]+shopifyGlobal["negativkeywords"]
103 | else:
104 | s["negativkeywords"] = shopifyGlobal["negativkeywords"]
105 |
106 | if "delay" not in s:
107 | s["delay"] = shopifyGlobal["delay"]
108 |
109 | s["proxys"] = shopifyGlobal["proxys"]
110 | monitorPool.append(shopify.shopify(groups=filterGroups([s["name"], "shopify"]),settings=s))
111 |
112 | #Create all Wethenew Monitor
113 | endpoints = ["products", "sell-nows", "consignment-slots"]
114 | for ep in endpoints:
115 | monitorPool.append(wethenew.wethenew(groups=filterGroups(["wethenew-"+ep]),endpoint=ep,settings=settings["wethenew"]))
116 |
117 | #Wethenew Price Error
118 | monitorPool.append(shopify_priceerror.shopify_priceerror(groups=filterGroups(["wethenew_priceerror"]),settings=settings["wethenew_priceerror"]))
119 |
120 | #Create SVD Monitor
121 | monitorPool.append(svd.svd(groups=filterGroups(["svd"]),settings=settings["svd"]))
122 |
123 | #Create bstn Monitor
124 | monitorPool.append(bstn.bstn(groups=filterGroups(["bstn"]),settings=settings["bstn"]))
125 |
126 | #Start all Demandware Wishlist MoreList Monitors
127 | for site in settings["demandware_wishlist_morelist"]:
128 | monitorPool.append(demandware_wishlist_morelist.demandware_wishlist_morelist(groups=filterGroups([site["name"]]), settings=site))
129 |
130 | #Create newbalance Monitor
131 | monitorPool.append(newbalance.newbalance(groups=filterGroups(["newbalance"]), settings=settings["newbalance"]))
132 |
133 | #Create prodirectsoccer Monitor
134 | monitorPool.append(prodirectsoccer.prodirectsoccer(groups=filterGroups(["prodirectsoccer"]),settings=settings["prodirectsoccer"]))
135 |
136 | #Create prodirectsoccer_release Monitors
137 | for p in settings["prodirectsoccer_release"]["sites"]:
138 | monitorPool.append(prodirectsoccer_release.prodirectsoccer_release(groups=filterGroups(["prodirectsoccer_release"]),site=p[0],releasecategory=p[1],settings=settings["prodirectsoccer_release"]))
139 |
140 | #Create eleventeamsports Monitor
141 | monitorPool.append(eleventeamsports.eleventeamsports(groups=filterGroups(["eleventeamsports"]),settings=settings["eleventeamsports"]))
142 |
143 | #Create courir Monitor
144 | monitorPool.append(courir.courir(groups=filterGroups(["courir"]), settings=settings["courir"]))
145 |
146 |     #Create salomen Monitor
147 | monitorPool.append(salomen.salomen(groups=filterGroups(["salomen"]),settings=settings["salomen"]))
148 |
149 | """
150 |
151 | endpoints = ["products", "sell-nows", "consignment-slots"]
152 | for ep in endpoints:
153 | monitorPool.append(wethenew.wethenew(groups=filterGroups(["wethenew-"+ep]),endpoint=ep,settings=settings["wethenew"]))
154 |
155 | #Start all Monitors
156 | for mon in monitorPool:
157 | mon.start()
158 |
159 | if __name__ == "__main__":
160 | #Start Monitors
161 | startMonitors()
162 |
163 |     #Check whether a group was added or updated and whether the settings were changed
164 | updateData()
--------------------------------------------------------------------------------
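A minimal sketch of the group shape that filterGroups and the monitors appear to assume (an editor's assumption inferred from the "site in group" checks and the group[SITE] lookups; the URLs are placeholders):

example_groups = [
    {"svd": "https://discord.com/api/webhooks/111/aaa", "shopify": "https://discord.com/api/webhooks/111/bbb"},
    {"wethenew-products": "https://discord.com/api/webhooks/222/ccc"},
]
# filterGroups(["svd"]) would return only the first group, because the membership
# test "site in group" checks the dict keys, and each monitor later reads its
# webhook URL via group[SITE].
--------------------------------------------------------------------------------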
/monitors/courir.py:
--------------------------------------------------------------------------------
1 | from multiprocessing import Process
2 | from proxymanager import ProxyManager
3 | from copy import deepcopy
4 | from concurrent.futures import ThreadPoolExecutor
5 | import docs
6 | from bs4 import BeautifulSoup
7 | import time
8 | import json
9 | import loggerfactory
10 | import traceback
11 | import urllib3
12 | import webhook
13 | import threadrunner
14 |
15 | SITE = __name__.split(".")[1]
16 |
17 | class courir(Process):
18 | def __init__(self, groups, settings):
19 | Process.__init__(self)
20 | self.groups = groups
21 | self.pids = settings["pids"]
22 | self.proxys = ProxyManager(settings["proxys"])
23 | self.delay = settings["delay"]
24 | self.firstScrape = True
25 | self.logger = loggerfactory.create(SITE)
26 |
27 | self.INSTOCK = []
28 |
29 | def discord_webhook(self, group, title, pid, url, thumbnail, price, sizes):
30 | """
31 | Sends a Discord webhook notification to the specified webhook URL
32 | """
33 |
34 | fields = []
35 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
36 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
37 | fields.append({"name": "Stock", "value": f"{str(len(sizes))}+", "inline": True})
38 |
39 | for _ in range((len(sizes)//7)+(1 if len(sizes)%7 != 0 else 0)):
40 | sizesString = ""
41 | for size in sizes[:7]:
42 | sizesString+=f"{size}\n"
43 |             fields.append({"name": "Size", "value": sizesString, "inline": True})
44 | sizes = sizes[7:]
45 |
46 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
47 |
48 |
49 | def scrape_site(self, pid):
50 | """
51 | Scrape the specific courir product
52 | """
53 |
54 | headers = {
55 | 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
56 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
57 | 'cache-control': 'max-age=0',
58 | 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"',
59 | 'sec-ch-ua-mobile': '?0',
60 | 'sec-ch-ua-platform': '"Windows"',
61 | 'sec-fetch-dest': 'document',
62 | 'sec-fetch-mode': 'navigate',
63 | 'sec-fetch-site': 'none',
64 | 'sec-fetch-user': '?1',
65 | 'upgrade-insecure-requests': '1',
66 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
67 | }
68 |
69 |
70 | #Fetch the Site
71 | text = docs.get(f"https://www.courir.com/on/demandware.store/Sites-Courir-FR-Site/fr_FR/Product-Variation?pid={pid}&Quantity=1&format=ajax&productlistid=undefined", headers=headers)
72 | if text:
73 | output = BeautifulSoup(text, 'html.parser')
74 |
75 | product = {
76 | 'title': output.find('span', {'class': 'product-brand js-product-brand'})["data-gtm"]+" "+
77 | output.find('span', {'class': 'product-name'}).text,
78 | 'image': "https://www.courir.com/dw/image/v2/BCCL_PRD"+json.loads(output.find('li', {'class': 'selectable'}).find('a')["data-lgimg"][:-2])["url"],
79 | 'pid': pid,
80 | 'variants': [element.find('a').text.replace("\n","") for element in output.find_all('li', {'class': 'selectable'})],
81 | "price": output.find('meta', {'itemprop': 'price'})["content"]+"€",
82 | "url": output.find('span', {'itemprop': 'url'}).text
83 | } if text and output.find('li', {'class': 'selectable'}) else {
84 | 'title': None,
85 | 'image': None,
86 | 'pid': pid,
87 | 'variants': [],
88 | "price":None,
89 | "url":None
90 | }
91 |
92 | self.logger.info(msg=f'[{SITE}] Successfully scraped {pid}')
93 | return product
94 |
95 | def remove(self, pid):
96 | """
97 | Remove all Products from INSTOCK with the same pid
98 | """
99 | for elem in self.INSTOCK:
100 | if pid == elem[2]:
101 | self.INSTOCK.remove(elem)
102 |
103 | def updated(self, product):
104 | """
105 | Check if the Variants got updated
106 | """
107 | for elem in self.INSTOCK:
108 | #Check if Product was not changed
109 | if product[2] == elem[2] and product[3] == elem[3]:
110 | return [False,False]
111 |
112 |             #Don't ping if no new size was added
113 |             if product[2] == elem[2] and len(product[3]) <= len(elem[3]):
114 |                 if all(size in elem[3] for size in product[3]):
115 |                     return [False,True]
116 | 
117 |         return [True,True]
118 |
119 |
120 | def comparitor(self,product):
121 | product_item = [product['title'], product['image'], product['pid'], product['variants']]
122 |
123 | if product['variants']:
124 | ping, updated = self.updated(product_item)
125 | if updated or self.firstScrape:
126 | # If product is available but not stored or product is stored but available sizes are changed - sends notification and stores
127 |
128 | # Remove old version of the product
129 | self.remove(product_item[2])
130 |
131 | self.INSTOCK.append(deepcopy(product_item))
132 | if ping and not self.firstScrape:
133 | print(f"[{SITE}] {product_item[0]} got restocked")
134 | self.logger.info(msg=f"[{SITE}] {product_item[0]} got restocked")
135 | for group in self.groups:
136 | #Send Ping to each Group
137 | threadrunner.run(
138 | self.discord_webhook,
139 | group=group,
140 | title=product["title"],
141 | pid=product['pid'],
142 | url=product['url'],
143 | thumbnail=product['image'],
144 | price=product['price'],
145 | sizes=product['variants'],
146 | )
147 | else:
148 | # Remove old version of the product
149 | self.remove(product_item[2])
150 |
151 |     def run(self):
152 |         """
153 |         Initiates the monitor
154 |         """
155 |         urllib3.disable_warnings()
156 |
157 | print(f'STARTING {SITE} MONITOR')
158 |
159 | while True:
160 | try:
161 | startTime = time.time()
162 | with ThreadPoolExecutor(len(self.pids)) as executor:
163 | items = [item for item in executor.map(self.scrape_site, self.pids)]
164 | # Makes request to the wishlist and stores products
165 |
166 | for product in items:
167 | self.comparitor(product)
168 |
169 | self.logger.info(msg=f'[{SITE}] Checked in {time.time()-startTime} seconds')
170 |
171 | self.firstScrape = False
172 |
173 | items.clear()
174 |
175 | # User set delay
176 | time.sleep(float(self.delay))
177 | except Exception as e:
178 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
179 | self.logger.error(e)
180 | time.sleep(3)
--------------------------------------------------------------------------------
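A minimal sketch of the size-chunking loop in discord_webhook above: sizes are packed into Discord embed fields of at most 7 entries each, so the loop runs ceil(len(sizes) / 7) times (the sizes below are made-up values):

sizes = ["36", "37", "38", "39", "40", "41", "42", "43"]              # 8 sizes
field_count = (len(sizes) // 7) + (1 if len(sizes) % 7 != 0 else 0)   # -> 2 fields
# The first iteration consumes "36".."42"; the slice sizes = sizes[7:] leaves ["43"]
# for the second field.
--------------------------------------------------------------------------------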
/monitors/newbalance.py:
--------------------------------------------------------------------------------
1 | from multiprocessing import Process
2 | from proxymanager import ProxyManager
3 | from copy import deepcopy
4 | from concurrent.futures import ThreadPoolExecutor
5 | import requests as rq
6 | import time
7 | import loggerfactory
8 | import traceback
9 | import urllib3
10 | import webhook
11 | import threadrunner
12 |
13 | SITE = __name__.split(".")[1]
14 |
15 | class newbalance(Process):
16 | def __init__(self, groups, settings):
17 | Process.__init__(self)
18 | self.groups = groups
19 | self.pids = settings["pids"]
20 | self.proxys = ProxyManager(settings["proxys"])
21 | self.delay = settings["delay"]
22 | self.firstScrape = True
23 | self.logger = loggerfactory.create(SITE)
24 |
25 | self.INSTOCK = []
26 |
27 | def discord_webhook(self, group, title, pid, url, thumbnail, price, sizes):
28 | """
29 | Sends a Discord webhook notification to the specified webhook URL
30 | """
31 |
32 | fields = []
33 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
34 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
35 | fields.append({"name": "Stock", "value": f"{str(len(sizes))}+", "inline": True})
36 |
37 | for _ in range((len(sizes)//7)+(1 if len(sizes)%7 != 0 else 0)):
38 | sizesString = ""
39 | for size in sizes[:7]:
40 | sizesString+=f"{size}\n"
41 |             fields.append({"name": "Size", "value": sizesString, "inline": True})
42 | sizes = sizes[7:]
43 |
44 | fields.append({"name": "Links",
45 | "value": f"[FR](https://www.newbalance.fr/{pid}.html) - [DE](https://www.newbalance.de/{pid}.html) "+
46 | f"- [NL](https://www.newbalance.nl/{pid}.html) - [AT](https://www.newbalance.at/{pid}.html) - [BE](https://www.newbalance.be/{pid}.html)", "inline": False})
47 |
48 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
49 |
50 |
51 | def scrape_site(self, pid):
52 | """
53 | Scrape the specific new balance product
54 | """
55 |
56 | headers = {
57 | 'authority': 'www.newbalance.de',
58 | 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
59 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
60 | 'cache-control': 'max-age=0',
61 | 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"',
62 | 'sec-ch-ua-mobile': '?0',
63 | 'sec-ch-ua-platform': '"Windows"',
64 | 'sec-fetch-dest': 'document',
65 | 'sec-fetch-mode': 'navigate',
66 | 'sec-fetch-site': 'none',
67 | 'sec-fetch-user': '?1',
68 | 'upgrade-insecure-requests': '1',
69 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
70 | }
71 |
72 |
73 | #Fetch the Site
74 | html = rq.get(f"https://www.newbalance.de/on/demandware.store/Sites-BANG-Site/de_DE/Wishlist-GetProduct?pid={pid}", headers=headers, proxies=self.proxys.next())
75 | html.raise_for_status()
76 | output = html.json()["product"]
77 | html.close()
78 | product = {
79 | 'title': output['brand']+" "+output['productName'],
80 | 'image': output['images']['productDetail'][0]['url'],
81 | 'pid': pid,
82 | 'pidforwebhook': output['id'],
83 | 'variants': output['variationAttributes'][1]["values"],
84 | "price":output["price"]["sales"]["formatted"],
85 | "url":f"https://www.newbalance.fr/{output['id']}.html"
86 | } if output["online"] and output['images']['productDetail'] else {
87 | 'title': output['brand']+" "+output['productName'],
88 | 'image': None,
89 | 'pid': pid,
90 | 'variants': [],
91 | "price":None,
92 | "url":f"https://www.newbalance.fr/{output['id']}.html"
93 | }
94 |
95 | self.logger.info(msg=f'[{SITE}] Successfully scraped {pid}')
96 | return product
97 |
98 | def remove(self, pid):
99 | """
100 | Remove all Products from INSTOCK with the same pid
101 | """
102 | for elem in self.INSTOCK:
103 | if pid == elem[2]:
104 | self.INSTOCK.remove(elem)
105 |
106 | def updated(self, product):
107 | """
108 | Check if the Variants got updated
109 | """
110 | for elem in self.INSTOCK:
111 | #Check if Product was not changed
112 | if product[2] == elem[2] and product[3] == elem[3]:
113 | return [False,False]
114 |
115 |             #Don't ping if no new size was added
116 |             if product[2] == elem[2] and len(product[3]) <= len(elem[3]):
117 |                 if all(size in elem[3] for size in product[3]):
118 |                     return [False,True]
119 | 
120 |         return [True,True]
121 |
122 |
123 | def comparitor(self,product):
124 | product_item = [product['title'], product['image'], product['pid']]
125 |
126 | # Collect all available sizes
127 | available_sizes = []
128 | for size in product['variants']:
129 | if size['selectable']:
130 | available_sizes.append(size['displayValue'])
131 |
132 |
133 | product_item.append(available_sizes)
134 |
135 | if available_sizes:
136 | ping, updated = self.updated(product_item)
137 | if updated or self.firstScrape:
138 | # If product is available but not stored or product is stored but available sizes are changed - sends notification and stores
139 |
140 | # Remove old version of the product
141 | self.remove(product_item[2])
142 |
143 | self.INSTOCK.append(deepcopy(product_item))
144 | if ping and not self.firstScrape:
145 | print(f"[{SITE}] {product_item[0]} got restocked")
146 | self.logger.info(msg=f"[{SITE}] {product_item[0]} got restocked")
147 | for group in self.groups:
148 | #Send Ping to each Group
149 | threadrunner.run(
150 | self.discord_webhook,
151 | group=group,
152 | title=product["title"],
153 | pid=product['pidforwebhook'],
154 | url=product['url'],
155 | thumbnail=product['image'],
156 | price=product['price'],
157 | sizes=available_sizes,
158 | )
159 | else:
160 | # Remove old version of the product
161 | self.remove(product_item[2])
162 |
163 |     def run(self):
164 |         """
165 |         Initiates the monitor
166 |         """
167 |         urllib3.disable_warnings()
168 |
169 | print(f'STARTING {SITE} MONITOR')
170 |
171 | while True:
172 | try:
173 | startTime = time.time()
174 | with ThreadPoolExecutor(len(self.pids)) as executor:
175 | items = [item for item in executor.map(self.scrape_site, self.pids)]
176 | # Makes request to the wishlist and stores products
177 |
178 | for product in items:
179 | self.comparitor(product)
180 |
181 | self.logger.info(msg=f'[{SITE}] Checked in {time.time()-startTime} seconds')
182 |
183 | self.firstScrape = False
184 |
185 | items.clear()
186 |
187 | # User set delay
188 | time.sleep(float(self.delay))
189 | except Exception as e:
190 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
191 | self.logger.error(e)
192 | time.sleep(3)
--------------------------------------------------------------------------------
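A minimal sketch of the [ping, updated] contract returned by updated() above (editor's illustration; INSTOCK entries are [title, image, pid, sizes] and the values are made-up):

INSTOCK = [["New Balance 550", "img-url", "BB550LWT", ["42", "43"]]]
# same pid, identical size list           -> [False, False]  (nothing changed)
# same pid, sizes shrank to ["42"]         -> [False, True]   (state updated, no ping)
# same pid, extra size ["42", "43", "44"]  -> [True, True]    (state updated and ping)
# pid not tracked in INSTOCK at all        -> [True, True]    (treated as a new add)
--------------------------------------------------------------------------------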
/monitors/shopify_priceerror.py:
--------------------------------------------------------------------------------
1 | from copy import deepcopy
2 | from multiprocessing import Process
3 | from timeout import timeout
4 | from concurrent.futures import ThreadPoolExecutor
5 | from proxymanager import ProxyManager
6 | from user_agent import CHROME_USERAGENT
7 | import quicktask as qt
8 | import random
9 | import time
10 | import json
11 | import loggerfactory
12 | import traceback
13 | import urllib3
14 | import webhook
15 | import threadrunner
16 | import tls
17 |
18 | SITE = __name__.split(".")[1]
19 |
20 | class shopify_priceerror(Process):
21 | def __init__(self, groups, settings):
22 | Process.__init__(self)
23 | self.groups = groups
24 | self.site = settings["name"]
25 | self.url = settings["url"]
26 | self.proxys = ProxyManager(settings["proxys"])
27 | self.delay = settings["delay"]
28 | self.percent = settings["percent"]
29 | self.firstScrape = True
30 | self.logger = loggerfactory.create(self.site)
31 |
32 | self.INSTOCK = []
33 | self.timeout = timeout()
34 |
35 | def discord_webhook(self, group, title, pid, url, thumbnail, sizes):
36 | """
37 | Sends a Discord webhook notification to the specified webhook URL
38 | """
39 |
40 | fields = []
41 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
42 | fields.append({"name": "Stock", "value": f"{str(len(sizes))}+", "inline": True})
43 | fields.append({"name": "Status", "value": f"**{round((1-self.percent)*100)}% price reduction**", "inline": True})
44 |
45 | for _ in range((len(sizes)//7)+(1 if len(sizes)%7 != 0 else 0)):
46 | sizesString = ""
47 | for size in sizes[:7]:
48 | sizesString+=f"{size['url']} | {size['title']} | {size['price']}€\n"
49 |             fields.append({"name": "ATC | Size | Price", "value": sizesString, "inline": True})
50 | sizes = sizes[7:]
51 |
52 | fields.append({"name": "Quicktasks", "value": f"{qt.cybersole(link=url)} - {qt.adonis(site='shopify', link=url)} - {qt.thunder(site='shopify', link=url)} - {qt.panaio(site='Shopify', link=url)}", "inline": False})
53 |
54 | webhook.send(group=group, webhook=group[self.site], site=f"{self.site}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
55 |
56 |
57 | def scrape_site(self, page):
58 | """
59 | Scrapes the specified Shopify site and adds items to array
60 | """
61 | items = []
62 |
63 | #Fetch the Shopify-Page
64 | html = tls.get(self.url + f'?page={page}&limit={random.randint(251,1000000)}', headers={"user-agent":CHROME_USERAGENT}, proxies=self.proxys.next())
65 | html.raise_for_status()
66 | output = json.loads(html.text)['products']
67 | html.close()
68 | # Stores particular details in array
69 | for product in output:
70 | product_item = {
71 | 'title': product['title'],
72 | 'image': product['images'][0]['src'] if product['images'] else "",
73 | 'handle': product['handle'],
74 | 'variants': product['variants'],
75 | 'tags':product['tags']
76 | }
77 | items.append(product_item)
78 |
79 | self.logger.info(msg=f'[{self.site}] Successfully scraped Page {page}')
80 | return items
81 |
82 | def remove(self, handle):
83 | """
84 | Remove all Products from INSTOCK with the same handle
85 | """
86 | for elem in self.INSTOCK:
87 | if handle == elem[2]:
88 | self.INSTOCK.remove(elem)
89 |
90 | def updated(self, product):
91 | """
92 | Check if the Variants got updated
93 |         @return [should ping, if updated]
94 | """
95 | for elem in self.INSTOCK:
96 | #Check if the price was changed by more than self.percent
97 | if product[2] == elem[2]:
98 | for size in product[3]:
99 | for sizeOLD in elem[3]:
100 | if size['id'] == sizeOLD['id'] and size['price'] != sizeOLD['price']:
101 | if (size['price']/sizeOLD['price'])<=self.percent:
102 |                                 return [True,True]
103 | else:
104 | return [False,True]
105 |
106 | if product[3] != elem[3]:
107 | return [False,True]
108 | else:
109 | return [False,False]
110 |
111 |         return [False,True]
112 |
113 |
114 | def comparitor(self,product):
115 | product_item = [product['title'], product['image'], product['handle']]
116 |
117 | # Collect all available sizes
118 | available_sizes = []
119 | for size in product['variants']:
120 | if size['available']: # Makes an ATC link from the variant ID
121 | available_sizes.append({
122 | 'id': size['id'],
123 | 'title': size['title'],
124 | 'url': '[ATC](' + self.url[:self.url.find('/', 10)] + '/cart/' + str(size['id']) + ':1)',
125 | 'price': float(size['price'])
126 | })
127 |
128 |
129 | product_item.append(available_sizes)
130 |
131 | if available_sizes:
132 | ping, updated = self.updated(product_item)
133 | if updated or self.firstScrape:
134 | # If product is available but not stored or product is stored but available sizes are changed - sends notification and stores
135 |
136 | # Remove old version of the product
137 | self.remove(product_item[2])
138 |
139 | self.INSTOCK.append(deepcopy(product_item))
140 | if ping and self.timeout.ping(product_item) and not self.firstScrape:
141 | print(f"[{self.site}] {product_item[0]} got a price error")
142 | self.logger.info(msg=f"[{self.site}] {product_item[0]} got a price error")
143 |
144 | for group in self.groups:
145 | #Send Ping to each Group
146 | threadrunner.run(
147 | self.discord_webhook,
148 | group=group,
149 | title=product["title"],
150 | pid=product['handle'],
151 | url=self.url.replace('.json', '/') + product['handle'],
152 | thumbnail=product['image'],
153 | sizes=available_sizes,
154 | )
155 | else:
156 | # Remove old version of the product
157 | self.remove(product_item[2])
158 |
159 |     def run(self):
160 |         """
161 |         Initiates the monitor
162 |         """
163 |         urllib3.disable_warnings()
164 |
165 | print(f'STARTING {self.site} MONITOR')
166 |
167 | maxpage = 20
168 |
169 | while True:
170 | try:
171 | startTime = time.time()
172 |
173 | # Makes request to the pages and stores products
174 | with ThreadPoolExecutor(maxpage) as executor:
175 | itemsSplited = [item for item in executor.map(self.scrape_site, range(1,maxpage))]
176 |
177 | items = sum(itemsSplited, [])
178 |
179 | for product in items:
180 | self.comparitor(product)
181 |
182 | self.logger.info(msg=f'[{self.site}] Checked in {time.time()-startTime} seconds')
183 |
184 |                 #If an empty page was found, set maxpage to just past the last non-empty page, otherwise scan 5 more pages next pass
185 |                 try:
186 |                     maxpage = itemsSplited.index([])+2
187 |                     self.firstScrape = False
188 |                 except ValueError:
189 | maxpage+=5
190 |
191 | items.clear()
192 | itemsSplited.clear()
193 |
194 | # User set delay
195 | time.sleep(float(self.delay))
196 |
197 | except Exception as e:
198 | print(f"[{self.site}] Exception found: {traceback.format_exc()}")
199 | self.logger.error(e)
200 | time.sleep(3)
--------------------------------------------------------------------------------
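A minimal sketch of the price-error condition in updated() above: a ping only fires when a variant's new price divided by its stored price is at or below self.percent (the numbers are made-up; the real value comes from settings["percent"]):

percent = 0.5                                  # ping on a 50% (or larger) price drop
old_price, new_price = 120.0, 49.95
fires = (new_price / old_price) <= percent     # True -> "50% price reduction" webhook
--------------------------------------------------------------------------------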
/monitors/demandware_wishlist_morelist.py:
--------------------------------------------------------------------------------
1 | from multiprocessing import Process
2 | from copy import deepcopy
3 | from proxymanager import ProxyManager
4 | import tls
5 | import time
6 | import loggerfactory
7 | import traceback
8 | import urllib3
9 | import webhook
10 | import threadrunner
11 | import quicktask as qt
12 |
13 | SITE = __name__.split(".")[1]
14 |
15 | class demandware_wishlist_morelist(Process):
16 | def __init__(self, groups, settings):
17 | Process.__init__(self)
18 | self.groups = groups
19 | self.site = settings["name"]
20 | self.domain = settings["domain"]
21 | self.url = settings["url"]
22 | self.proxys = ProxyManager(settings["proxys"])
23 | self.delay = settings["delay"]
24 | self.imageproxy = settings["imageproxy"]
25 | self.firstScrape = True
26 | self.logger = loggerfactory.create(self.site)
27 |
28 | self.INSTOCK = []
29 |
30 | def discord_webhook(self, group, title, pid, url, thumbnail, price, sizes):
31 | """
32 | Sends a Discord webhook notification to the specified webhook URL
33 | """
34 |
35 | fields = []
36 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
37 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
38 | fields.append({"name": "Stock", "value": f"{str(len(sizes))}+", "inline": True})
39 |
40 | for _ in range((len(sizes)//7)+(1 if len(sizes)%7 != 0 else 0)):
41 | sizesString = ""
42 | for size in sizes[:7]:
43 | sizesString+=f"{size}\n"
44 |             fields.append({"name": "Size", "value": sizesString, "inline": True})
45 | sizes = sizes[7:]
46 |
47 | fields.append({"name": "Links",
48 | "value": f"[FR](https://{self.site}.com/fr/p/{pid}.html) - [DE](https://{self.site}.com/de/p/{pid}.html) "+
49 | f"- [NL](https://{self.site}.com/nl/p/{pid}.html) - [BE](https://{self.site}.com/be/p/{pid}.html)", "inline": False})
50 |
51 | fields.append({"name": "Quicktasks", "value": f"{qt.adonis(site=self.site, link=pid)}", "inline": True})
52 |
53 | webhook.send(group=group, webhook=group[self.site], site=f"{self.site}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
54 |
55 |
56 | def scrape_site(self):
57 | """
58 |         Scrapes the specified wishlist and adds its items to an array
59 | """
60 | items = []
61 | page = 1
62 |
63 | headers = {
64 | 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
65 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
66 | 'cache-control': 'max-age=0',
67 | 'sec-ch-ua': '"Google Chrome";v="111", "Not(A:Brand";v="8", "Chromium";v="111"',
68 | 'sec-ch-ua-mobile': '?0',
69 | 'sec-ch-ua-platform': '"Windows"',
70 | 'sec-fetch-dest': 'document',
71 | 'sec-fetch-mode': 'navigate',
72 | 'sec-fetch-site': 'none',
73 | 'sec-fetch-user': '?1',
74 | 'upgrade-insecure-requests': '1',
75 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
76 | }
77 |
78 | while True:
79 | #Fetch the Site
80 | html = tls.get(self.url + f'&pageNumber={page}', headers=headers, proxies=self.proxys.next())
81 | html.raise_for_status()
82 | output = html.json()
83 | html.close()
84 | # Stores particular details in array
85 | for product in output["wishlist"]["items"]:
86 | product_item = {
87 | 'title': product['name'],
88 | 'image': product['imageObj']['wishlistSecondImage'][0]['url'],
89 | 'pid': product['pid'],
90 | 'variants': product['variationAttributes'][1]["values"],
91 | "price":product["price"]["sales"]["formatted"],
92 | "url":f"https://{self.domain}{product['productUrl']}"
93 | }
94 | items.append(product_item)
95 | if not output["wishlist"]["showMore"]:
96 | break
97 | page+=1
98 |
99 | self.logger.info(msg=f'[{self.site}] Successfully scraped {page} pages')
100 | return items
101 |
102 | def remove(self, pid):
103 | """
104 | Remove all Products from INSTOCK with the same pid
105 | """
106 | for elem in self.INSTOCK:
107 | if pid == elem[2]:
108 | self.INSTOCK.remove(elem)
109 |
110 | def updated(self, product):
111 | """
112 | Check if the Variants got updated
113 | """
114 | for elem in self.INSTOCK:
115 | #Check if Product was not changed
116 | if product[2] == elem[2] and product[3] == elem[3]:
117 | return [False,False]
118 |
119 |             #Don't ping if no new size was added
120 |             if product[2] == elem[2] and len(product[3]) <= len(elem[3]):
121 |                 if all(size in elem[3] for size in product[3]):
122 |                     return [False,True]
123 | 
124 |         return [True,True]
125 |
126 |
127 | def comparitor(self,product):
128 | product_item = [product['title'], product['image'], product['pid']]
129 |
130 | # Collect all available sizes
131 | available_sizes = []
132 | for size in product['variants']:
133 | if size['selectable'] and size['fitFinderSelectable'] and size['graySoldOutSizes']:
134 | available_sizes.append(size['title'])
135 |
136 |
137 | product_item.append(available_sizes)
138 |
139 | if available_sizes:
140 | ping, updated = self.updated(product_item)
141 | if updated or self.firstScrape:
142 | # If product is available but not stored or product is stored but available sizes are changed - sends notification and stores
143 |
144 | # Remove old version of the product
145 | self.remove(product_item[2])
146 |
147 | self.INSTOCK.append(deepcopy(product_item))
148 | if ping and not self.firstScrape:
149 | print(f"[{self.site}] {product_item[0]} got restocked")
150 | self.logger.info(msg=f"[{self.site}] {product_item[0]} got restocked")
151 | for group in self.groups:
152 | #Send Ping to each Group
153 | threadrunner.run(
154 | self.discord_webhook,
155 | group=group,
156 | title=product["title"],
157 | pid=product['pid'],
158 | url=product['url'],
159 |                             thumbnail=("https://imageresize.24i.com/?w=300&url="+product['image']) if self.imageproxy else product['image'],
160 | price=product['price'],
161 | sizes=available_sizes,
162 | )
163 | else:
164 | # Remove old version of the product
165 | self.remove(product_item[2])
166 |
167 |     def run(self):
168 |         """
169 |         Initiates the monitor
170 |         """
171 |         urllib3.disable_warnings()
172 |
173 | print(f'STARTING {self.site} MONITOR')
174 |
175 | while True:
176 | try:
177 | startTime = time.time()
178 |
179 | # Makes request to the wishlist and stores products
180 | items = self.scrape_site()
181 |
182 | for product in items:
183 | self.comparitor(product)
184 |
185 | self.logger.info(msg=f'[{self.site}] Checked in {time.time()-startTime} seconds')
186 |
187 | self.firstScrape = False
188 |
189 | items.clear()
190 |
191 | # User set delay
192 | time.sleep(float(self.delay))
193 |
194 | except Exception as e:
195 | print(f"[{self.site}] Exception found: {traceback.format_exc()}")
196 | self.logger.error(e)
197 | time.sleep(3)
--------------------------------------------------------------------------------
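A minimal sketch of the wishlist JSON shape that scrape_site above appears to rely on (an editor's assumption reconstructed from the keys it accesses; all values are placeholders):

response = {
    "wishlist": {
        "items": [{
            "name": "Example Runner",
            "pid": "EX1234",
            "imageObj": {"wishlistSecondImage": [{"url": "https://example.invalid/img.jpg"}]},
            "variationAttributes": [{"values": []}, {"values": []}],   # index 1 holds the sizes
            "price": {"sales": {"formatted": "119,99 €"}},
            "productUrl": "/fr/p/EX1234.html",
        }],
        "showMore": False,          # False ends the pageNumber loop
    }
}
sizes = response["wishlist"]["items"][0]["variationAttributes"][1]["values"]
--------------------------------------------------------------------------------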
/monitors/kickz.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Event
2 | from proxymanager import ProxyManager
3 | from bs4 import BeautifulSoup
4 | from concurrent.futures import ThreadPoolExecutor
5 | from user_agent import CHROME_USERAGENT
6 | from multiprocessing import Process
7 | import random
8 | import tls
9 | import time
10 | import webhook
11 | import loggerfactory
12 | import traceback
13 | import urllib3
14 | import threadrunner
15 |
16 | SITE = __name__.split(".")[1]
17 |
18 | class kickz(Process):
19 | def __init__(self, groups, region, regionname, settings):
20 | Process.__init__(self)
21 | self.region = region
22 | self.regionname = regionname
23 | self.groups = groups
24 | self.delay = settings["delay"]
25 | self.keywords= settings["keywords"]
26 | self.proxys = ProxyManager(settings["proxys"])
27 | self.blacksku = settings["blacksku"]
28 |         self.firstScrape = True
29 | self.logger = loggerfactory.create(f"{SITE}_{self.regionname}")
30 |
31 | self.INSTOCK = []
32 |
33 | def discord_webhook(self, group, title, pid, url, thumbnail, price, status, raffle_date):
34 | """
35 | Sends a Discord webhook notification to the specified webhook URL
36 | """
37 |
38 | fields = []
39 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
40 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
41 |
42 | if status == "RESTOCK":
43 | fields.append({"name": "Status", "value": f"**New Add**", "inline": True})
44 | else:
45 | fields.append({"name": "Status", "value": f"**Raffle**", "inline": True})
46 | fields.append({"name": "Ending", "value": f"{raffle_date.replace('Release: ','')}", "inline": True})
47 |
48 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}_{self.regionname}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
49 |
50 |
51 | def scrape_site(self, category):
52 | """
53 | Scrapes the specified kickz query site and adds items to array
54 | """
55 | items = []
56 |
57 | url = f"https://www.kickz.com/on/demandware.store/{self.region}/en/Search-ShowAjax?cgid={category}&srule=new-arrivals&start=0&sz={random.randint(2000,100000)}&prefv1=Sneakers&prefn1=categoriesAssignment&prefv2=nike|jordan|new%20balance&prefn2=brand"
58 |
59 | headers = {
60 | 'authority': 'www.kickz.com',
61 | 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
62 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
63 | 'cache-control': 'max-age=0',
64 | 'sec-ch-ua': '"Not_A Brand";v="99", "Google Chrome";v="109", "Chromium";v="109"',
65 | 'sec-ch-ua-mobile': '?0',
66 | 'sec-ch-ua-platform': '"Windows"',
67 | 'sec-fetch-dest': 'document',
68 | 'sec-fetch-mode': 'navigate',
69 | 'sec-fetch-site': 'none',
70 | 'sec-fetch-user': '?1',
71 | 'upgrade-insecure-requests': '1',
72 | 'user-agent': CHROME_USERAGENT,
73 | }
74 |
75 | # Makes request to site
76 | html = tls.get(url,
77 | headers=headers,
78 | proxies=self.proxys.next()
79 | )
80 | html.raise_for_status()
81 | output = BeautifulSoup(html.text, "html.parser")
82 | html.close()
83 | products = output.find_all("section", {"class": "b-product_tile"})
84 |
85 | # Stores particular details in array
86 | for product in products:
87 | button = product.find("a", {"class": "b-product_tile-link"})
88 | raffle_date = ""
89 |
90 | if product.find("div", {"class": "b-product_tile-release"}):
91 | status = "RAFFLE"
92 | raffle_date = product.find("div", {"class": "b-product_tile-release"}).text
93 | elif product.find("div", {"class": "b-raffle-tile_attr"}):
94 | status = "RAFFLE_OVER"
95 | else:
96 | status = "RESTOCK"
97 |
98 | product_item = {
99 | "name":button.text.replace("\n",""),
100 | "pid":button["data-pid"],
101 | "price":product.find("span", {"class": "b-price-item"}).text,
102 | "image": f"https://imageresize.24i.com/?w=300&url={product.find('img')['src']}",
103 | "url":"https://www.kickz.com"+button["href"],
104 | "status": status,
105 | "raffle_date":raffle_date
106 | }
107 | items.append(product_item)
108 |
109 |
110 | self.logger.info(msg=f'[{SITE}_{self.regionname}] Successfully scraped category {category}')
111 | return items
112 |
113 |     def run(self):
114 |         """
115 |         Initiates the monitor
116 |         """
117 |         urllib3.disable_warnings()
118 |
119 | print(f'STARTING {SITE}_{self.regionname} MONITOR')
120 |
121 |         #Initialise categories and instock items for each category
122 | # new_M_shoes = New Men(https://www.kickz.com/de/l/neu/m%C3%A4nner/schuhe/)
123 | # new_F_shoes = New Women(https://www.kickz.com/de/l/neu/frauen/schuhe/)
124 | # new_U_shoes = New Unisex(https://www.kickz.com/de/l/neu/unisex/schuhe/)
125 | # 3_M_46 = Men(https://www.kickz.com/de/l/schuhe/m%C3%A4nner/sneaker/)
126 | # 3_F_46 = Women(https://www.kickz.com/de/l/schuhe/frauen/sneaker/)
127 | # 3_K_42 = Kids(https://www.kickz.com/de/l/schuhe/kinder/schuhe-grade-school/)
128 | # Air_Jordan_1 = Jordan1(https://www.kickz.com/de/l/jordan/retros/air-jordan-1-retro/)
129 | # Air_Jordan_3 = Jordan3(https://www.kickz.com/de/l/jordan/retros/air-jordan-3-retro/)
130 | categorys = ["new_M_shoes","new_F_shoes","new_U_shoes","3_M_46","3_F_46","3_K_42","Air_Jordan_1","Air_Jordan_3"]
131 | while True:
132 | try:
133 | startTime = time.time()
134 |
135 | # Makes request to each category and stores products
136 |
137 | products = []
138 |
139 | with ThreadPoolExecutor(len(categorys)) as executor:
140 | itemsSplited = [item for item in executor.map(self.scrape_site, categorys)]
141 | items = sum(itemsSplited, [])
142 |
143 |
144 | for product in items:
145 | if product["pid"] not in self.blacksku:
146 | #Check for Keywords
147 | if self.keywords and not any(key.lower() in product["name"].lower() for key in self.keywords):
148 | continue
149 |
150 | save = {
151 | "pid":product["pid"],
152 | "status":product["status"]
153 | }
154 |
155 | # Check if Product is INSTOCK
156 | if save not in products:
157 | if save not in self.INSTOCK and save["status"] != "RAFFLE_OVER" and not self.firstScrape:
158 | print(f"[{SITE}_{self.regionname}] {product['name']} got restocked")
159 | self.logger.info(msg=f"[{SITE}_{self.regionname}] {product['name']} got restocked")
160 | for group in self.groups:
161 | #Send Ping to each Group
162 | threadrunner.run(
163 | self.discord_webhook,
164 | group=group,
165 | title=product['name'],
166 | pid=product['pid'],
167 | url=product['url'],
168 | thumbnail=product['image'],
169 | price=product['price'],
170 | status=product['status'],
171 | raffle_date=product['raffle_date']
172 | )
173 | products.append(save)
174 |
175 | self.INSTOCK = products
176 |
177 | # Allows changes to be notified
178 | self.firstScrape = False
179 |
180 |                 self.logger.info(msg=f'[{SITE}_{self.regionname}] Checked all categories in {time.time()-startTime} seconds')
181 |
182 | time.sleep(self.delay)
183 |
184 | except Exception as e:
185 | print(f"[{SITE}_{self.regionname}] Exception found: {traceback.format_exc()}")
186 | self.logger.error(e)
187 | time.sleep(4)
--------------------------------------------------------------------------------
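A minimal sketch of how a product tile is classified in scrape_site above (editor's illustration; the flags stand in for the markup checks and the sample markup is illustrative):

tile_has_release_div = True      # <div class="b-product_tile-release">Release: ...</div>
tile_has_raffle_attr = False     # <div class="b-raffle-tile_attr">...</div>
if tile_has_release_div:
    status = "RAFFLE"            # the release date text is kept for the webhook "Ending" field
elif tile_has_raffle_attr:
    status = "RAFFLE_OVER"       # filtered out later, never pinged
else:
    status = "RESTOCK"
--------------------------------------------------------------------------------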
/monitors/shopify.py:
--------------------------------------------------------------------------------
1 | from multiprocessing import Process
2 | from concurrent.futures import ThreadPoolExecutor
3 | from timeout import timeout
4 | from proxymanager import ProxyManager
5 | from user_agent import CHROME_USERAGENT
6 | from copy import deepcopy
7 | import quicktask as qt
8 | import random
9 | import requests as rq
10 | import time
11 | import json
12 | import loggerfactory
13 | import traceback
14 | import urllib3
15 | import webhook
16 | import threadrunner
17 |
18 | SITE = __name__.split(".")[1]
19 |
20 | class shopify(Process):
21 | def __init__(self, groups, settings):
22 | Process.__init__(self)
23 | self.groups = groups
24 | self.site = settings["name"]
25 | self.url = settings["url"]
26 | self.proxys = ProxyManager(settings["proxys"])
27 | self.delay = settings["delay"]
28 | self.keywords= settings["keywords"]
29 | self.negativkeywords = settings["negativkeywords"]
30 | self.tags = settings["tags"]
31 | self.blacksku = settings["blacksku"]
32 | self.firstScrape = True
33 | self.logger = loggerfactory.create(self.site)
34 |
35 | self.INSTOCK = []
36 | self.timeout = timeout()
37 |
38 | def discord_webhook(self, group, title, pid, url, thumbnail, price, sizes):
39 | """
40 | Sends a Discord webhook notification to the specified webhook URL
41 | """
42 |
43 | if self.site in group:
44 | if len(group[self.site]) == 0:
45 | return
46 | webhookurl = group[self.site]
47 | elif "shopify" in group:
48 | webhookurl = group["shopify"]
49 |
50 | fields = []
51 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
52 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
53 | fields.append({"name": "Stock", "value": f"{str(len(sizes))}+", "inline": True})
54 |
55 | for _ in range((len(sizes)//7)+(1 if len(sizes)%7 != 0 else 0)):
56 | sizesString = ""
57 | for size in sizes[:7]:
58 | sizesString+=f"{size['url']} | {size['title']}\n"
59 |             fields.append({"name": "ATC | Size", "value": sizesString, "inline": True})
60 | sizes = sizes[7:]
61 |
62 | fields.append({"name": "Quicktasks", "value": f"{qt.cybersole(link=url)} - {qt.adonis(site='shopify', link=url)} - {qt.thunder(site='shopify', link=url)} - {qt.panaio(site='Shopify', link=url)}", "inline": False})
63 |
64 | webhook.send(group=group, webhook=webhookurl, site=f"{self.site}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
65 |
66 |
67 | def scrape_site(self, page):
68 | """
69 | Scrapes the specified Shopify site and adds items to array
70 | """
71 | items = []
72 | #Fetch the Shopify-Page
73 | html = rq.get(self.url + f'?page={page}&limit={random.randint(251,1000000)}', headers={"user-agent":CHROME_USERAGENT}, proxies=self.proxys.next(), timeout=10)
74 | html.raise_for_status()
75 | output = json.loads(html.text)['products']
76 | html.close()
77 | # Stores particular details in array
78 | for product in output:
79 | product_item = {
80 | 'title': product['title'],
81 | 'image': product['images'][0]['src'] if product['images'] else "",
82 | 'handle': product['handle'],
83 | 'variants': product['variants'],
84 | 'tags':product['tags']
85 | }
86 | items.append(product_item)
87 |
88 | self.logger.info(msg=f'[{self.site}] Successfully scraped Page {page}')
89 | return items
90 |
91 | def remove(self, handle):
92 | """
93 | Remove all Products from INSTOCK with the same handle
94 | """
95 | for elem in self.INSTOCK:
96 | if handle == elem[2]:
97 | self.INSTOCK.remove(elem)
98 |
99 | def updated(self, product):
100 | """
101 | Check if the Variants got updated
102 | """
103 | for elem in self.INSTOCK:
104 | #Check if Product was not changed
105 | if product[2] == elem[2] and product[3] == elem[3]:
106 | return [False,False]
107 |
108 |             #Don't ping if no new size was added
109 |             if product[2] == elem[2] and len(product[3]) <= len(elem[3]):
110 |                 if all(size in elem[3] for size in product[3]):
111 |                     return [False,True]
112 | 
113 |         return [True,True]
114 |
115 |
116 | def comparitor(self,product):
117 | product_item = [product['title'], product['image'], product['handle']]
118 |
119 | # Collect all available sizes
120 | available_sizes = []
121 | for size in product['variants']:
122 | if size['available']: # Makes an ATC link from the variant ID
123 | available_sizes.append({'title': size['title'], 'url': '[ATC](' + self.url[:self.url.find('/', 10)] + '/cart/' + str(size['id']) + ':1)'})
124 |
125 |
126 | product_item.append(available_sizes)
127 |
128 | if available_sizes:
129 | ping, updated = self.updated(product_item)
130 | if updated or self.firstScrape:
131 | # If product is available but not stored or product is stored but available sizes are changed - sends notification and stores
132 |
133 | # Remove old version of the product
134 | self.remove(product_item[2])
135 |
136 | self.INSTOCK.append(deepcopy(product_item))
137 | if ping and self.timeout.ping(product_item) and not self.firstScrape:
138 | print(f"[{self.site}] {product_item[0]} got restocked")
139 | self.logger.info(msg=f"[{self.site}] {product_item[0]} got restocked")
140 | for group in self.groups:
141 | #Send Ping to each Group
142 | threadrunner.run(
143 | self.discord_webhook,
144 | group=group,
145 | title=product["title"],
146 | pid=product['handle'],
147 | url=self.url.replace('.json', '/') + product['handle'],
148 | thumbnail=product['image'],
149 | price=product['variants'][0]['price']+" €",
150 | sizes=available_sizes,
151 | )
152 | else:
153 | # Remove old version of the product
154 | self.remove(product_item[2])
155 |
156 |     def run(self):
157 |         """
158 |         Initiates the monitor
159 |         """
160 |         urllib3.disable_warnings()
161 |
162 | print(f'STARTING {self.site} MONITOR')
163 |
164 | maxpage = 20
165 |
166 | while True:
167 | try:
168 | startTime = time.time()
169 |
170 | # Makes request to the pages and stores products
171 | with ThreadPoolExecutor(maxpage) as executor:
172 | itemsSplited = [item for item in executor.map(self.scrape_site, range(1,maxpage))]
173 |
174 | items = sum(itemsSplited, [])
175 |
176 | for product in items:
177 | if product["handle"] not in self.blacksku and not any([key in product["handle"] for key in self.negativkeywords]):
178 | if len(self.keywords) == 0 and len(self.tags) == 0:
179 | # If no keywords and tags set, checks whether item status has changed
180 | self.comparitor(product)
181 |
182 | else:
183 | # For each keyword, checks whether particular item status has changed
184 | for key in self.keywords:
185 | if key.lower() in product['title'].lower():
186 | self.comparitor(product)
187 | break
188 |
189 | # For each tag, checks whether particular item status has changed
190 | for tag in self.tags:
191 | if tag in product['tags']:
192 | self.comparitor(product)
193 | break
194 |
195 | self.logger.info(msg=f'[{self.site}] Checked in {time.time()-startTime} seconds')
196 |
197 |                 #If an empty page was found, set maxpage to just past the last non-empty page, otherwise scan 5 more pages next pass
198 |                 try:
199 |                     maxpage = itemsSplited.index([])+2
200 |                     self.firstScrape = False
201 |                 except ValueError:
202 | maxpage+=5
203 |
204 | items.clear()
205 | itemsSplited.clear()
206 | # User set delay
207 | time.sleep(float(self.delay))
208 |
209 | except Exception as e:
210 | print(f"[{self.site}] Exception found: {traceback.format_exc()}")
211 | self.logger.error(e)
212 | time.sleep(3)
--------------------------------------------------------------------------------
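A minimal sketch of the add-to-cart permalink built in comparitor above, using a hypothetical store URL: the slice keeps the scheme and host of the products.json URL, and /cart/<variant_id>:1 is the Shopify cart permalink for one unit of that variant:

url = "https://example-store.com/products.json"      # hypothetical settings["url"]
origin = url[:url.find('/', 10)]                     # "https://example-store.com"
atc = origin + "/cart/" + str(1234567890) + ":1"     # wrapped as [ATC](...) in the embed field
--------------------------------------------------------------------------------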
/monitors/svd.py:
--------------------------------------------------------------------------------
1 | from threading import Thread, Event
2 | from multiprocessing import Process
3 | from proxymanager import ProxyManager
4 | from user_agent import CHROME_USERAGENT
5 | from concurrent.futures import ThreadPoolExecutor
6 | from timeout import timeout
7 | import tls
8 | import time
9 | import json
10 | import loggerfactory
11 | import traceback
12 | import urllib3
13 | import webhook
14 | import threadrunner
15 | import os
16 |
17 | SITE = __name__.split(".")[1]
18 |
19 | class svd(Process):
20 | def __init__(self, groups, settings):
21 | Process.__init__(self)
22 | self.groups = groups
23 | self.delay = settings["delay"]
24 | self.keywords= settings["keywords"]
25 | self.proxys = ProxyManager(settings["proxys"])
26 | self.blacksku = settings["blacksku"]
27 | self.firstScrape = True
28 | self.logger = loggerfactory.create(SITE)
29 | self.timeout = timeout(pingdelay=20,timeout=60)
30 |
31 | self.INSTOCK = {}
32 |
33 | def discord_webhook(self, group, title, sku, url, thumbnail, price):
34 | """
35 | Sends a Discord webhook notification to the specified webhook URL
36 | """
37 |
38 | fields = []
39 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
40 | fields.append({"name": "Sku", "value": f"{sku}", "inline": True})
41 | fields.append({"name": "Status", "value": f"**New Add**", "inline": True})
42 |
43 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
44 |
45 |
46 | def scrape_site(self, category):
47 | """
48 | Scrapes the specified svd query site and adds items to array
49 | """
50 | items = []
51 |
52 | url = f"https://www.sivasdescalzo.com/graphql?query=query%20categoryV2(%24id%3A%20Int!%2C%20%24pageSize%3A%20Int!%2C%20%24currentPage%3A%20Int!%2C%20%24filters%3A%20ProductAttributeFilterInput!%2C%20%24sort%3A%20ProductAttributeSortInput)%20%7B%0A%20%20category(id%3A%20%24id)%20%7B%0A%20%20%20%20name%0A%20%20%20%20__typename%0A%20%20%7D%0A%20%20products(pageSize%3A%20%24pageSize%2C%20currentPage%3A%20%24currentPage%2C%20filter%3A%20%24filters%2C%20sort%3A%20%24sort)%20%7B%0A%20%20%20%20items%20%7B%0A%20%20%20%20%20%20id%0A%20%20%20%20%20%20brand_name%0A%20%20%20%20%20%20name%0A%20%20%20%20%20%20sku%0A%20%20%20%20%20%20small_image%20%7B%0A%20%20%20%20%20%20%20%20url%0A%20%20%20%20%20%20%20%20__typename%0A%20%20%20%20%20%20%7D%0A%20%20%20%20%20%20url%0A%20%20%20%20%20%20original_price%0A%20%20%20%20%20%20final_price%0A%20%20%20%20%20%20percent_off%0A%20%20%20%20%20%20state%0A%20%20%20%20%20%20__typename%0A%20%20%20%20%7D%0A%20%20%20%20aggregations%20%7B%0A%20%20%20%20%20%20attribute_code%0A%20%20%20%20%20%20label%0A%20%20%20%20%20%20count%0A%20%20%20%20%20%20options%20%7B%0A%20%20%20%20%20%20%20%20label%0A%20%20%20%20%20%20%20%20value%0A%20%20%20%20%20%20%20%20count%0A%20%20%20%20%20%20%20%20__typename%0A%20%20%20%20%20%20%7D%0A%20%20%20%20%20%20__typename%0A%20%20%20%20%7D%0A%20%20%20%20page_info%20%7B%0A%20%20%20%20%20%20total_pages%0A%20%20%20%20%20%20__typename%0A%20%20%20%20%7D%0A%20%20%20%20total_count%0A%20%20%20%20__typename%0A%20%20%7D%0A%7D%0A&operationName=categoryV2&variables=%7B%22currentPage%22%3A1%2C%22id%22%3A4089%2C%22filters%22%3A%7B%22brand%22%3A%7B%22in%22%3A%5B%22Jordan%22%2C%22Nike%22%2C%22New%20Balance%22%5D%7D%2C%22category_id%22%3A%7B%22eq%22%3A%22{category}%22%7D%7D%2C%22pageSize%22%3A1000%2C%22sort%22%3A%7B%22sorting_date%22%3A%22DESC%22%7D%7D"
53 |
54 | headers = {
55 | 'authority': 'www.sivasdescalzo.com',
56 | 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
57 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
58 | 'cache-control': 'max-age=0',
59 | 'sec-ch-ua': "\"Chromium\";v=\"110\", \"Not A(Brand\";v=\"24\", \"Google Chrome\";v=\"110\"",
60 | 'sec-ch-ua-mobile': '?0',
61 | 'sec-ch-ua-platform': '"Windows"',
62 | 'sec-fetch-dest': 'document',
63 | 'sec-fetch-mode': 'navigate',
64 | 'sec-fetch-site': 'none',
65 | 'sec-fetch-user': '?1',
66 | 'upgrade-insecure-requests': '1',
67 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
68 | }
69 |
70 | # Makes request to site
71 | html = tls.get(url, headers=headers, proxies=self.proxys.next())
72 | html.raise_for_status()
73 | products = json.loads(html.text)['data']['products']['items']
74 | html.close()
75 | # Stores particular details in array
76 | for product in products:
77 | product_item = {
78 | "name":product["brand_name"]+" "+product["name"],
79 | "sku":product["sku"],
80 | "price":str(product["final_price"])+" €",
81 | 'image': f"{os.environ['IMAGEPROXY']}?url=https://media.sivasdescalzo.com/media/catalog/product/{product['small_image']['url']}?width=300&proxy=packet",
82 | "url":"https://www.sivasdescalzo.com"+product["url"] if "sivasdescalzo" not in product["url"] else product["url"],
83 | "state":product["state"]
84 | }
85 | items.append(product_item)
86 |
87 |
88 | self.logger.info(msg=f'[{SITE}] Successfully scraped category {category}')
89 | return [category,items]
90 |
91 |
92 |     def run(self):
93 |         """
94 |         Initiates the monitor
95 |         """
96 |         urllib3.disable_warnings()
97 |
98 | print(f'STARTING {SITE} MONITOR')
99 |
100 |         #Initialise categories and instock items for each category
101 | # 4089 = Sneakers (https://www.sivasdescalzo.com/en/footwear/sneakers)
102 | # 2900 = New Arrivals (https://www.sivasdescalzo.com/en/new-arrivals)
103 | # 2513(REMOVED) = Adidas Yeezy (https://www.sivasdescalzo.com/en/brands/adidas/yeezy)
104 | # 2479 = Adidas (https://www.sivasdescalzo.com/en/brands/adidas)
105 | # 3558 = Jordan Sneakers (https://www.sivasdescalzo.com/en/brands/jordan/sneakers)
106 | # 2552 = Jordan (https://www.sivasdescalzo.com/en/brands/jordan)
107 | # 3473 = Nike Sneakers(https://www.sivasdescalzo.com/en/brands/nike/sneakers)
108 | # 2572 = Nike (https://www.sivasdescalzo.com/en/brands/nike)
109 | # 33 = Footwear (https://www.sivasdescalzo.com/en/footwear)
110 | # 2569 = New Balance (https://www.sivasdescalzo.com/en/brands/new-balance)
111 | categorys = [4089,2900,2479,3558,2552,3473,2572,33,2569]
112 | for c in categorys:
113 | self.INSTOCK[c] = []
114 |
115 | while True:
116 | try:
117 | startTime = time.time()
118 |
119 | # Makes request to each category
120 | with ThreadPoolExecutor(len(categorys)) as executor:
121 | itemsSplited = [item for item in executor.map(self.scrape_site, categorys)]
122 |
123 | for c, items in itemsSplited:
124 | products = []
125 |
126 | for product in items:
127 | if product["sku"] not in self.blacksku and product["state"] not in ["Sold Out", "Raffle"] and len(product["sku"]) > 1:
128 | #Check for Keywords
129 | if self.keywords and not any(key.lower() in product["name"].lower() for key in self.keywords):
130 | continue
131 |
132 | # Check if Product is INSTOCK
133 | if not any([product["sku"] in cat for cat in self.INSTOCK.values()]) and not self.firstScrape and self.timeout.ping(product["sku"]):
134 | print(f"[{SITE}] {product['name']} got restocked")
135 | self.logger.info(msg=f"[{SITE}] {product['name']} got restocked")
136 | for group in self.groups:
137 | #Send Ping to each Group
138 | threadrunner.run(
139 | self.discord_webhook,
140 | group=group,
141 | title=product['name'],
142 | sku=product['sku'],
143 | url=product['url'],
144 | thumbnail=product['image'],
145 | price=product['price']
146 | )
147 | products.append(product["sku"])
148 |
149 | self.INSTOCK[c] = products
150 |
151 | # Allows changes to be notified
152 | self.firstScrape = False
153 |
154 |                 self.logger.info(msg=f'[{SITE}] Checked all categories in {time.time()-startTime} seconds')
155 | time.sleep(self.delay)
156 |
157 | except Exception as e:
158 | print(f"[{SITE}] Exception found: {traceback.format_exc()}")
159 | self.logger.error(e)
160 | time.sleep(3)
--------------------------------------------------------------------------------
/monitors/asos.py:
--------------------------------------------------------------------------------
1 | from timeout import timeout
2 | from proxymanager import ProxyManager
3 | from user_agent import CHROME_USERAGENT
4 | from copy import deepcopy
5 | from multiprocessing import Process
6 | import random
7 | import requests as rq
8 | import quicktask as qt
9 | import time
10 | import loggerfactory
11 | import traceback
12 | import urllib3
13 | import os
14 | import webhook
15 | import threadrunner
16 |
17 | SITE = __name__.split(".")[1]
18 |
19 | class asos(Process):
20 | def __init__(self, groups, settings, region, currency):
21 | Process.__init__(self)
22 | self.INSTOCK = []
23 | self.groups = groups
24 | self.region = region
25 | self.currency = currency
26 | self.pids = settings["skus"]
27 | self.proxys = ProxyManager(settings["proxys"])
28 | self.delay = settings["delay"]
29 | self.timeout = timeout()
30 | self.firstScrape = True
31 | self.logger = loggerfactory.create(f"{SITE}_{self.region}")
32 |
33 | def discord_webhook(self, group, pid, region, title, url, thumbnail, price, variants):
34 | """
35 | Sends a Discord webhook notification to the specified webhook URL
36 | """
37 |
38 | if f"{SITE}_{self.region}" in group:
39 | webhookurl = group[f"{SITE}_{self.region}"]
40 | elif "asos" in group:
41 | webhookurl = group["asos"]
42 |
43 | fields = []
44 | fields.append({"name": "Price", "value": f"{price}", "inline": True})
45 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
46 | fields.append({"name": "Region", "value": f"{region}", "inline": True})
47 |
48 | sizesSTR = "\n"
49 | sizesPIDs = ""
50 | statusSTR = ""
51 | for variant in variants:
52 | sizesSTR+=str(variant["brandSize"])+"\n"
53 | sizesPIDs+=str(variant["id"])+"\n"
54 | statusSTR+=f"{'**HIGH**' if not variant['isLowInStock'] else 'LOW'}\n"
55 | fields.append({"name": "Sizes", "value": f"{sizesSTR}", "inline": True})
56 | fields.append({"name": "Pids", "value": f"{sizesPIDs}", "inline": True})
57 | fields.append({"name": "Status", "value": f"{statusSTR}", "inline": True})
58 |
59 | fields.append({"name": "Links",
60 | "value": f"[NL](https://www.asos.com/nl/nabil/prd/{pid}) - [DE](https://www.asos.com/de/nabil/prd/{pid}) "+
61 | f"- [FR](https://www.asos.com/fr/nabil/prd/{pid}) - [IT](https://www.asos.it/p/nabil/nabil-{pid}) - [GB](https://www.asos.com/gb/nabil/prd/{pid}) "+
62 | f"- [ES](https://www.asos.com/es/nabil/prd/{pid}) - [PT](https://www.asos.com/pt/nabil/prd/{pid})", "inline": False})
63 |
64 | fields.append({"name": "Quicktasks", "value": f"{qt.adonis(site='asos', link=pid)} - {qt.koi(site='ASOS', link=pid)} - {qt.storm(site='asos', link=pid)} - {qt.panaio(site='Asos', link=pid)} - {qt.thunder(site='Asos', link=pid)}", "inline": True})
65 |
66 | webhook.send(group=group, webhook=webhookurl, site=f"{SITE}_{self.region}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
67 |
68 |
69 | def getTitle(self, pid):
70 | """
71 | Get the title of a product that belongs to a specific pid
72 | """
73 | for product in self.pids:
74 | if pid == product["sku"]:
75 | return product["title"]
76 |
77 | def scrapeSizes(self, pid):
78 | """
79 | Scrapes the specified Asos site and scrapes the sizes
80 | """
81 | variants = []
82 |
83 | html = rq.get(f"https://api.asos.com/product/catalogue/v3/products/{pid}?store={self.region}&cache={random.randint(10000,999999999)}", proxies=self.proxys.next(), headers={"user-agent":CHROME_USERAGENT})
84 | html.raise_for_status()
85 | product = html.json()
86 | html.close()
87 |
88 | for variant in product["variants"]:
89 | if variant["isInStock"]:
90 | variants.append(variant)
91 |
92 | return variants
93 |
94 | def scrape_site(self, url):
95 | """
96 | Scrapes the specified Asos site and adds items to array
97 | """
98 | items = []
99 |
100 | html = rq.get(url, proxies=self.proxys.next(), headers={"user-agent":CHROME_USERAGENT})
101 | html.raise_for_status()
102 | products = html.json()
103 | html.close()
104 | for product in products:
105 | product_item = {
106 | 'title': self.getTitle(str(product['productId'])),
107 | 'image': f"{os.environ['IMAGEPROXY']}?url=https://images.asos-media.com/products/nabil/{product['productId']}-2&proxy={','.join(self.proxys.proxygroups)}",
108 | 'id': str(product['productId']),
109 | 'variants': product['variants']}
110 | items.append(product_item)
111 |
112 | self.logger.info(msg=f'[{SITE}_{self.region}] Successfully scraped all pids')
113 | return items
114 |
115 | def remove(self, id):
116 | """
117 | Remove all Products from INSTOCK with the same id
118 | """
119 |         for elem in self.INSTOCK[:]:  # iterate over a copy so removals don't skip items
120 | if id == elem[2]:
121 | self.INSTOCK.remove(elem)
122 |
123 | def checkUpdated(self, product):
124 | """
125 | Check if the Variants got updated
126 | """
127 | for elem in self.INSTOCK:
128 |             # Check if the product has not changed
129 | if product[2] == elem[2] and product[3] == elem[3]:
130 | return [False,False]
131 |
132 |             # Don't ping if no new size was added
133 | if product[2] == elem[2] and len(product[3]) <= len(elem[3]):
134 | if all(size in elem[3] for size in product[3]):
135 | return [False,True]
136 |
137 |         return [True,True]
138 |
139 |
140 | def comparitor(self, product):
141 | product_item = [product['title'], product['image'], product['id']]
142 |
143 | # Collect all available sizes
144 | available_sizes = []
145 |
146 | for size in product['variants']:
147 | if size['isInStock']: # Check if size is instock
148 | available_sizes.append(size)
149 |
150 | product_item.append(available_sizes) # Appends in field
151 |
152 | if available_sizes:
153 | ping, updated = self.checkUpdated(product_item)
154 | if updated or self.firstScrape:
155 | # If product is available but not stored or product is stored but available sizes are changed - sends notification and stores
156 |
157 | # Remove old version of the product
158 | self.remove(product_item[2])
159 |
160 | self.INSTOCK.append(deepcopy(product_item))
161 | if ping and self.timeout.ping(product_item) and not self.firstScrape:
162 | print(f"[{SITE}_{self.region}] {product_item[0]} got restocked")
163 | self.logger.info(msg=f"[{SITE}_{self.region}] {product_item[0]} got restocked")
164 |
165 | variants = self.scrapeSizes(product['id'])
166 | if variants:
167 | for group in self.groups:
168 | #Send Ping to each Group
169 | threadrunner.run(
170 | self.discord_webhook,
171 | group=group,
172 | pid=product['id'],
173 | region=self.region,
174 | title=product['title'],
175 | url=f"https://www.asos.com/{self.region}/nabil/prd/{product['id']}",
176 | thumbnail=product['image'],
177 | price=str(product['variants'][0]['price']['current']['text']),
178 | variants=variants
179 | )
180 | else:
181 | # Remove old version of the product
182 | self.remove(product_item[2])
183 |
184 | def run(self):
185 |         """
186 |         Initiates the monitor
187 |         """
188 |         urllib3.disable_warnings()
189 |
190 | print(f'STARTING {SITE}_{self.region} MONITOR')
191 |
192 |
193 | while True:
194 | try:
195 | startTime = time.time()
196 |                 url = f"https://www.asos.com/api/product/catalogue/v3/stockprice?productIds={(''.join([pid['sku']+',' for pid in self.pids]))[:-1]}&store={self.region}&currency={self.currency}&keyStoreDataversion=dup0qtf-35&cache={random.randint(10000,999999999)}"
197 |
198 |
199 | # Makes request to site and stores products
200 | items = self.scrape_site(url)
201 | for product in items:
202 | self.comparitor(product)
203 |
204 | # Allows changes to be notified
205 | self.firstScrape = False
206 |
207 | self.logger.info(msg=f'[{SITE}_{self.region}] Checked in {time.time()-startTime} seconds')
208 |
209 | items.clear()
210 | # User set delay
211 | time.sleep(float(self.delay))
212 |
213 |
214 | except Exception as e:
215 | print(f"[{SITE}_{self.region}] Exception found: {traceback.format_exc()}")
216 | self.logger.error(e)
217 |                 time.sleep(3)
--------------------------------------------------------------------------------
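The stockprice URL assembled in asos.run() packs the SKU list, store, and currency into one long f-string. A sketch of the same query built with urllib.parse.urlencode, which keeps the parameters readable; the endpoint, parameter names, and keyStoreDataversion value are taken from the monitor above, while build_stockprice_url itself is a hypothetical helper:

```python
from random import randint
from urllib.parse import urlencode

def build_stockprice_url(pids, store, currency):
    # Hypothetical helper: rebuilds the query string used in asos.run()
    # with urlencode instead of a single long f-string.
    params = {
        "productIds": ",".join(pid["sku"] for pid in pids),
        "store": store,
        "currency": currency,
        "keyStoreDataversion": "dup0qtf-35",   # value reused from the monitor
        "cache": randint(10000, 999999999),    # cache-buster, as in the monitor
    }
    # safe="," keeps the SKU separators literal instead of %2C
    return ("https://www.asos.com/api/product/catalogue/v3/stockprice?"
            + urlencode(params, safe=","))

# Example:
# build_stockprice_url([{"sku": "200000001"}, {"sku": "200000002"}], "DE", "EUR")
```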
/monitors/aboutyou.py:
--------------------------------------------------------------------------------
1 | from copy import deepcopy
2 | from multiprocessing import Process
3 | from timeout import timeout
4 | from proxymanager import ProxyManager
5 | from user_agent import CHROME_USERAGENT
6 | import quicktask as qt
7 | import random
8 | import time
9 | import json
10 | import loggerfactory
11 | import traceback
12 | import urllib3
13 | import tls
14 | import webhook
15 | import threadrunner
16 |
17 | SITE = __name__.split(".")[1]
18 |
19 | class aboutyou(Process):
20 | def __init__(self, groups, settings, store, storeid):
21 | Process.__init__(self)
22 | self.INSTOCK = []
23 | self.groups = groups
24 | self.proxys = ProxyManager(settings["proxys"])
25 | self.delay = settings["delay"]
26 | self.keywords = settings["keywords"]
27 | self.blacksku = settings["blacksku"]
28 | self.whitesku = settings["whitesku"]
29 | self.store = store
30 | self.storeid = storeid
31 | self.timeout = timeout()
32 | self.firstScrape = True
33 | self.logger = loggerfactory.create(f"{SITE}_{self.store}")
34 |
35 | def discord_webhook(self, group, pid, title, url, thumbnail, price, sizes, stock):
36 | """
37 | Sends a Discord webhook notification to the specified webhook URL
38 | """
39 |
40 | fields = []
41 | fields.append({"name": "Price", "value": f"{price} €", "inline": True})
42 | fields.append({"name": "Pid", "value": f"{pid}", "inline": True})
43 | fields.append({"name": "Region", "value": f"{self.store}", "inline": True})
44 |
45 | sizesSTR = "\n"
46 | stockSTR = ""
47 | for size in sizes:
48 | sizesSTR+=f"{size}\n"
49 | stockSTR+=f"{stock[size]}\n"
50 | fields.append({"name": "Sizes", "value": f"{sizesSTR}", "inline": True})
51 | fields.append({"name": "Stock", "value": f"{stockSTR}", "inline": True})
52 |
53 |
54 | fields.append({
55 | "name": "Links",
56 | "value": f"[CH](https://www.aboutyou.ch/p/nabil/nabil-{pid}) - [CZ](https://www.aboutyou.cz/p/nabil/nabil-{pid}) - [DE](https://www.aboutyou.de/p/nabil/nabil-{pid}) - [FR](https://www.aboutyou.fr/p/nabil/nabil-{pid}) - [IT](https://www.aboutyou.it/p/nabil/nabil-{pid}) - [PL](https://www.aboutyou.pl/p/nabil/nabil-{pid}) - [SK](https://www.aboutyou.sk/p/nabil/nabil-{pid}) - [ES](https://www.aboutyou.es/p/nabil/nabil-{pid}) - [NL](https://www.aboutyou.nl/p/nabil/nabil-{pid}) - [BE](https://www.aboutyou.nl/p/nabil/nabil-{pid})",
57 | "inline": False})
58 |
59 | fields.append({"name": "Quicktasks", "value": f"{qt.adonis(site='AboutYou', link=pid)} - {qt.koi(site='AboutYou', link=pid)} - {qt.loscobot(site='AboutYou', link=pid)} - {qt.panaio(site='AboutYou', link=pid)}", "inline": True})
60 |
61 | webhook.send(group=group, webhook=group[SITE], site=f"{SITE}_{self.store}", title=title, url=url, thumbnail=thumbnail, fields=fields, logger=self.logger)
62 |
63 |
64 | def scrape_site(self):
65 | """
66 | Scrapes the specified About You site and adds items to array
67 | """
68 |
69 | """
70 | Brands:
71 |         53709 = Nike Sportswear
72 |         61263 = Jordan
73 |         290 = Adidas Originals
74 |         170334 = Adidas Sportswear
75 | 
76 |         Categories:
77 | 20727 = Women Sneakers
78 | 21014 = Men Sneakers
79 | 20207,20215 = Men and Women Shoes
80 | 190025 = Boys GS
81 | 189974 = Boys PS
82 | 189879 = Girls GS
83 | 189823 = Girls PS
84 | """
85 | url = f"https://api-cloud.aboutyou.de/v1/products?with=attributes:key(brand|name),variants,variants.attributes:key(vendorSize)&filters[category]=20727,21014,20207,20215,190025,189974,189879,189823&filters[brand]=61263,53709,290,170334,165,55136&filters[excludedFromBrandPage]=false&sortDir=desc&sortScore=brand_scores&sortChannel=web_default&page=1&perPage={random.randint(2000, 50000)}&forceNonLegacySuffix=true&shopId={self.storeid}"
86 |
87 | items = []
88 |
89 | html = tls.get(url, proxies=self.proxys.next(), headers={"user-agent":CHROME_USERAGENT})
90 | html.raise_for_status()
91 | output = json.loads(html.text)['entities']
92 | html.close()
93 |
94 | for product in output:
95 | product_item = {
96 | 'title': product['attributes']['brand']['values']['label']+" "+product['attributes']['name']['values']['label'],
97 | 'image': "https://cdn.aboutstatic.com/file/"+product['images'][0]['hash'] if "images" in product['images'][0]['hash'] else "https://cdn.aboutstatic.com/file/images/"+product['images'][0]['hash'],
98 | 'id': product['id'],
99 | 'variants': product['variants']}
100 | items.append(product_item)
101 |
102 |
103 |         self.logger.info(msg=f'[{SITE}_{self.store}] Successfully scraped all categories')
104 | return items
105 |
106 | def remove(self, id):
107 | """
108 | Remove all Products from INSTOCK with the same id
109 | """
110 |         for elem in self.INSTOCK[:]:  # iterate over a copy so removals don't skip items
111 | if id == elem[2]:
112 | self.INSTOCK.remove(elem)
113 |
114 | def checkUpdated(self, product):
115 | """
116 | Check if the Variants got updated
117 | """
118 | for elem in self.INSTOCK:
119 |             # Check if the product has not changed
120 | if product[2] == elem[2] and product[3] == elem[3]:
121 | return [False,False]
122 |
123 |             # Don't ping if no new size was added
124 | if product[2] == elem[2] and len(product[3]) <= len(elem[3]):
125 | if all(size in elem[3] for size in product[3]):
126 | return [False,True]
127 |
128 |         return [True,True]
129 |
130 |
131 | def comparitor(self, product):
132 | product_item = [product['title'], product['image'], product['id']]
133 |
134 | # Collect all available sizes
135 | available_sizes = []
136 | # Stock of every Size
137 | stocks = {}
138 | for size in product['variants']:
139 | if size['stock']['quantity'] > 0 or size['stock']['isSellableWithoutStock']: # Check if size is instock
140 | available_sizes.append(size['attributes']['vendorSize']['values']['label'])
141 | stocks[size['attributes']['vendorSize']['values']['label']] = size['stock']['quantity']
142 |
143 | product_item.append(available_sizes)
144 |
145 | if available_sizes:
146 | ping, updated = self.checkUpdated(product_item)
147 | if updated or self.firstScrape:
148 | # If product is available but not stored or product is stored but available sizes are changed - sends notification and stores
149 |
150 | # Remove old version of the product
151 | self.remove(product_item[2])
152 |
153 | self.INSTOCK.append(deepcopy(product_item))
154 | if ping and self.timeout.ping(product_item) and not self.firstScrape:
155 | print(f"[{SITE}_{self.store}] {product_item[0]} got restocked")
156 | self.logger.info(msg=f"[{SITE}_{self.store}] {product_item[0]} got restocked")
157 | for group in self.groups:
158 | #Send Ping to each Group
159 | threadrunner.run(
160 | self.discord_webhook,
161 | group=group,
162 | pid=product['id'],
163 | title=product['title'],
164 | url=f"https://www.aboutyou.{self.store}/p/nabil/nabil-{product['id']}",
165 | thumbnail=product['image'],
166 | price=str(product['variants'][0]['price']['withTax']/100),
167 | sizes=available_sizes,
168 | stock=stocks,
169 | )
170 | else:
171 | # Remove old version of the product
172 | self.remove(product_item[2])
173 |
174 | def run(self):
175 |         """
176 |         Initiates the monitor
177 |         """
178 |         urllib3.disable_warnings()
179 |
180 | print(f'STARTING {SITE} {self.store} MONITOR')
181 |
182 | while True:
183 | try:
184 | startTime = time.time()
185 |
186 | # Makes request to site and stores products
187 | items = self.scrape_site()
188 | for product in items:
189 | if int(product['id']) not in self.blacksku:
190 | if len(self.keywords) == 0 or int(product['id']) in self.whitesku:
191 | # If no keywords set or sku is whitelisted, checks whether item status has changed
192 | self.comparitor(product)
193 |
194 | else:
195 | # For each keyword, checks whether particular item status has changed
196 | for key in self.keywords:
197 | if key.lower() in product['title'].lower():
198 | self.comparitor(product)
199 |
200 | # Allows changes to be notified
201 | self.firstScrape = False
202 |
203 | self.logger.info(msg=f'[{SITE}_{self.store}] Checked in {time.time()-startTime} seconds')
204 |
205 | items.clear()
206 |
207 | # User set delay
208 | time.sleep(float(self.delay))
209 |
210 |
211 | except Exception as e:
212 | print(f"[{SITE}_{self.store}] Exception found: {traceback.format_exc()}")
213 | self.logger.error(e)
214 | time.sleep(3)
--------------------------------------------------------------------------------
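aboutyou.run() applies three filters before a product reaches comparitor(): blacklisted SKUs are skipped, everything passes when no keywords are configured or the SKU is whitelisted, and otherwise the title must contain one of the keywords. The same decision as a standalone predicate (a sketch; should_monitor is a hypothetical helper, the field and setting names match the monitor above):

```python
def should_monitor(product, blacksku, whitesku, keywords):
    # Mirrors the filter in aboutyou.run(): True when the product should be
    # passed to comparitor(). Illustrative helper, not part of the monitor.
    pid = int(product["id"])
    if pid in blacksku:
        return False
    if not keywords or pid in whitesku:
        # No keywords configured, or SKU explicitly whitelisted
        return True
    title = product["title"].lower()
    return any(key.lower() in title for key in keywords)

# Example:
# should_monitor({"id": "123", "title": "Nike Air Max"}, [], [], ["air max"])  # -> True
```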
/monitors/wethenew.py:
--------------------------------------------------------------------------------
1 | from copy import deepcopy
2 | from multiprocessing import Process
3 | from timeout import timeout
4 | from proxymanager import ProxyManager
5 | from user_agent import CHROME_USERAGENT
6 | import threadrunner
7 | import tls
8 | import time
9 | import loggerfactory
10 | import traceback
11 | import urllib3
12 | import webhook
13 |
14 | SITE = __name__.split(".")[1]
15 |
16 | class wethenew(Process):
17 |     def __init__(self, groups, endpoint, settings):
18 | Process.__init__(self)
19 | self.groups = groups
20 | self.endpoint = endpoint
21 | self.blacksku = settings["blacksku"]
22 | self.delay = settings["delay"]
23 |         self.keywords = settings["keywords"]
24 | self.auth = settings["auth"]
25 | self.proxys = ProxyManager(settings["proxys"])
26 | self.INSTOCK = []
27 | self.timeout = timeout()
28 | self.authPointer = -1
29 | self.firstScrape = True
30 | self.logger = loggerfactory.create(f"{SITE}_{self.endpoint}")
31 |
32 | self.sizesKey = {
33 | "products":"wantedSizes",
34 | "sell-nows":"sellNows",
35 | "consignment-slots":"sizes"
36 | }
37 |
38 | def getAuth(self):
39 | """
40 | Get a new Auth token
41 | """
42 | self.authPointer = 0 if self.authPointer == len(self.auth)-1 else self.authPointer+1
43 | return self.auth[self.authPointer]
44 |
45 | def discord_webhook(self, group, pid, title, thumbnail, sizes):
46 | """
47 | Sends a Discord webhook notification to the specified webhook URL
48 | """
49 |
50 | fields = []
51 | if self.endpoint == "sell-nows":
52 | s = ""
53 | prices = ""
54 | links = "\n"
55 | for size in sizes:
56 | s+=f"`{size['size']}`\n"
57 | prices+=f"`{size['price']}€`\n"
58 | links+=f"[Sell Now](https://sell.wethenew.com/instant-sales/{size['id']})\n"
59 | fields.append({"name": "Sizes", "value": s, "inline": True})
60 | fields.append({"name": "Prices", "value": prices, "inline": True})
61 | fields.append({"name": "Accept", "value": links, "inline": True})
62 | else:
63 | s = ""
64 | status = ""
65 | for size in sizes:
66 | s+=size+"\n"
67 | status+="🟡 WTB\n"
68 | fields.append({"name": "Pid", "value": f"{pid}", "inline": False})
69 | fields.append({"name": "Sizes", "value": f"{s}", "inline": True})
70 |
71 | webhook.send(group=group, webhook=group["wethenew-"+self.endpoint], site=f"{SITE}_{self.endpoint}", title=title, url=f"https://sell.wethenew.com/{'consignment' if self.endpoint == 'consignment-slots' else 'listing'}/product/"+pid, thumbnail=thumbnail, fields=fields, logger=self.logger)
72 |
73 |
74 | def scrape_site(self):
75 | """
76 | Scrapes Wethenew site and adds items to array
77 | """
78 |
79 | items = []
80 | output = []
81 | firstProduct = 0
82 | skip = 0
83 |
84 |
85 | #Get all Products from the Site
86 | while True:
87 | headers = {
88 | 'authority': 'api-sell.wethenew.com',
89 | 'accept': 'application/json, text/plain, */*',
90 | 'accept-language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
91 | 'authorization': f'Bearer {self.getAuth()}',
92 | 'cache-control': 'no-cache',
93 | 'feature-policy': "microphone 'none'; geolocation 'none'; camera 'none'; payment 'none'; battery 'none'; gyroscope 'none'; accelerometer 'none';",
94 | 'origin': 'https://sell.wethenew.com',
95 | 'pragma': 'no-cache',
96 | 'referer': 'https://sell.wethenew.com/',
97 | 'sec-ch-ua': '"Not?A_Brand";v="8", "Chromium";v="108", "Google Chrome";v="108"',
98 | 'sec-ch-ua-mobile': '?0',
99 | 'sec-ch-ua-platform': '"Windows"',
100 | 'sec-fetch-dest': 'empty',
101 | 'sec-fetch-mode': 'cors',
102 | 'sec-fetch-site': 'same-site',
103 | 'user-agent': CHROME_USERAGENT,
104 | 'x-xss-protection': '1;mode=block',
105 | }
106 |
107 | url = f"https://api-sell.wethenew.com/{self.endpoint}?skip={skip}&take=100&onlyWanted=true"
108 | self.logger.info(msg=f'[{SITE}_{self.endpoint}] Scrape {url}')
109 | response = tls.get(url, proxies=self.proxys.next(), headers=headers)
110 | response.raise_for_status()
111 |
112 | r = response.json()
113 | response.close()
114 | for product in r["results"]:
115 | output.append(product)
116 |
117 | if self.endpoint == "products":
118 | if skip == 0:
119 | firstProduct = output[0]["id"]
120 | else:
121 |                     if any(product["id"] == firstProduct for product in r["results"]):  # stop once the first product reappears in a later page
122 | break
123 | else:
124 | if r["pagination"]["totalPages"] <= r["pagination"]["page"]:
125 | break
126 | skip+=100
127 |
128 | # Stores particular details in array
129 | for product in output:
130 | product_item = {
131 | 'title': product['brand'] + " " + product['name'],
132 | 'image': product['image'],
133 | 'pid': str(product['id']),
134 | 'variants': product[self.sizesKey[self.endpoint]]
135 | }
136 | items.append(product_item)
137 |
138 | self.logger.info(msg=f'[{SITE}_{self.endpoint}] Successfully scraped site')
139 | return items
140 |
141 | def remove(self, pid):
142 | """
143 | Remove all Products from INSTOCK with the same pid
144 | """
145 |         for elem in self.INSTOCK[:]:  # iterate over a copy so removals don't skip items
146 | if pid == elem[2]:
147 | self.INSTOCK.remove(elem)
148 |
149 | def updated(self, product):
150 | """
151 | Check if the Variants got updated
152 | """
153 | for elem in self.INSTOCK:
154 |             # Check if the product has not changed
155 | if product[2] == elem[2] and product[3] == elem[3]:
156 | return [False,False]
157 |
158 |             # Don't ping if no new size was added
159 | if product[2] == elem[2] and len(product[3]) <= len(elem[3]):
160 | if all(size in elem[3] for size in product[3]):
161 | return [False,True]
162 |
163 |         return [True,True]
164 |
165 | def removeduplicate(self,items):
166 | """
167 | Remove duplicates
168 | """
169 | newItems = []
170 | pids = []
171 | for item in items:
172 | if item["pid"] not in pids:
173 | newItems.append(item)
174 | pids.append(item["pid"])
175 |
176 | return newItems
177 |
178 | def comparitor(self, product):
179 | product_item = [product['title'], product['image'], product['pid'], product['variants']]
180 |
181 | if product['variants']:
182 | ping, updated = self.updated(product_item)
183 | if updated or self.firstScrape:
184 | # If product is available but not stored or product is stored but available sizes are changed - sends notification and stores
185 |
186 | # Remove old version of the product
187 | self.remove(product_item[2])
188 |
189 | self.INSTOCK.append(deepcopy(product_item))
190 | if ping and self.timeout.ping(product_item) and not self.firstScrape:
191 | print(f"[{SITE}_{self.endpoint}] {product_item[0]} got restocked")
192 | self.logger.info(msg=f"[{SITE}_{self.endpoint}] {product_item[0]} got restocked")
193 | for group in self.groups:
194 | #Send Ping to each Group
195 | threadrunner.run(
196 | self.discord_webhook,
197 | group=group,
198 | pid=product['pid'],
199 | title=product['title'],
200 | thumbnail=product['image'],
201 | sizes=product['variants'],
202 | )
203 | else:
204 | # Remove old version of the product
205 | self.remove(product_item[2])
206 |
207 | def run(self):
208 |         """
209 |         Initiates the monitor
210 |         """
211 |         urllib3.disable_warnings()
212 |
213 | print(f'STARTING {SITE}_{self.endpoint} MONITOR')
214 |
215 | while True:
216 | try:
217 | startTime = time.time()
218 |
219 | # Makes request to site and stores products
220 | items = self.scrape_site()
221 |
222 | #Remove duplicates
223 | items = self.removeduplicate(items)
224 |
225 | for product in items:
226 | if product["pid"] not in self.blacksku:
227 | if len(self.keywords) == 0:
228 | # If no keywords set, checks whether item status has changed
229 | self.comparitor(product)
230 |
231 | else:
232 | # For each keyword, checks whether particular item status has changed
233 | for key in self.keywords:
234 | if key.lower() in product['title'].lower():
235 | self.comparitor(product)
236 |
237 |
238 | # Allows changes to be notified
239 | self.firstScrape = False
240 |
241 | self.logger.info(msg=f'[{SITE}_{self.endpoint}] Checked in {time.time()-startTime} seconds')
242 |
243 | items.clear()
244 |
245 | # User set delay
246 | time.sleep(float(self.delay))
247 |
248 | except Exception as e:
249 | print(f"[{SITE}_{self.endpoint}] Exception found: {traceback.format_exc()}")
250 | self.logger.error(e)
251 | time.sleep(4)
--------------------------------------------------------------------------------
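wethenew rotates through the configured bearer tokens with getAuth(), a simple round-robin over settings["auth"] so consecutive requests do not reuse the same account. The same rotation as a standalone class (a sketch; TokenRotator is a hypothetical name, the pointer logic mirrors getAuth() above):

```python
class TokenRotator:
    # Round-robin over a list of bearer tokens, mirroring wethenew.getAuth().
    # Illustrative standalone version; the monitor keeps the pointer on self.

    def __init__(self, tokens):
        if not tokens:
            raise ValueError("at least one token is required")
        self.tokens = tokens
        self.pointer = -1  # first call returns tokens[0]

    def next(self):
        self.pointer = 0 if self.pointer == len(self.tokens) - 1 else self.pointer + 1
        return self.tokens[self.pointer]

# Example:
# rot = TokenRotator(["tokenA", "tokenB"])
# rot.next()  # "tokenA"
# rot.next()  # "tokenB"
# rot.next()  # "tokenA" again
```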