├── LICENSE
├── README.md
├── main.py
├── captchaharvester.py
└── main2.py

/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2017 Azerpas

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# nike_Raffle_FR
Script made to mass-register accounts for Nike raffles. It is optimized for the French raffles but can easily be adapted to the UK, US, etc.

# Steps:

There are two scripts: one using Selenium and one using the Requests Python module (faster).
I recommend using Selenium, as Nike uses New Relic Browser on the raffle websites to monitor how long users spend on the front end, back-end timings, browser type and version, and so on.

I did not have time to try to simulate the JavaScript logs New Relic Browser expects, so I recommend sticking with Selenium even if it is not as fast as Requests.

1. Add your accounts at the end of the script (keep the syntax as shown)
2. Run captchaharvester.py in the same folder as the main script
3. Run the main script

## REQUIREMENTS:
- requests
- beautifulsoup
- pickle (part of the Python standard library, nothing to install)
- selenium

## TO DO:
- proxy support

## Installation
I have been asked about this a lot.

- Check your Python version, it must be 2.7:

```python --version```

- Install pip:

```sudo easy_install pip```

- Restart your terminal, then install the BeautifulSoup, requests and selenium modules:

```sudo pip install requests BeautifulSoup selenium```

- Download Chromedriver:

https://sites.google.com/a/chromium.org/chromedriver/

- Place it in your Python path (or in your PATH), for example:

```/Library/Python/2.7```

## Use
- Set your 2Captcha API key in captchaharvester.py and add your accounts in main.py or main2.py

- Launch the main script:

```python main.py``` or ```python main2.py```

- Launch the Captcha Harvester:

```python captchaharvester.py```

Note that I will not include an option to generate dot-trick Gmail or custom-domain e-mail addresses; you will need to generate them first and then add them yourself.
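For reference, an entry in the `accounts` list at the bottom of main.py / main2.py looks like this (the values are placeholders; in main2.py a shoe size of "18" tells the script to pick a random EU size):

```python
accounts = [
    {"fname": "pete", "lname": "james", "mail": "petejames@gmail.com",
     "phone": "+33612334455", "birthdate": "01/01/1998", "shoesize": "42"},
    # add one dictionary per account you want to enter in the raffle
]
```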
56 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import requests, json, time, random, datetime, threading, pickle 3 | 4 | sitekey = "6LcMxjMUAAAAALhKgWsmmRM2hAFzGSQqYcpmFqHx" 5 | 6 | 7 | def log(event): 8 | d = datetime.datetime.now().strftime("%H:%M:%S") 9 | print("Raffle by Azerpas :: " + str(d) + " :: " + event) 10 | 11 | class Raffle(object): 12 | def __init__(self): 13 | self.s = requests.session() 14 | # "https://colette.sneakers-raffle.fr/","https://starcow.sneakers-raffle.fr/" 15 | self.shoes = [ 16 | #{"url":"product/nike-air-jordan-1/","shoe_id":"2","shoe_name":"Nike Air Jordan 1","imgURL":"AirJordan.jpg"}, 17 | #{"url":"product/nike-blazer/","shoe_id":"3","shoe_name":"Nike Blazer","imgURL":"Blazer.jpg"}, 18 | {"url":"product/air-jordan-1-white/","shoe_id":"14","shoe_name":"The Ten: Air Jordan 1","imgURL":"AirJordan@100cropped.jpg"}] 19 | self.sites = [ 20 | #{"url":"https://shinzo.sneakers-raffle.fr/","siteid":"2","nomtemplate":"nike-raffle-confirm-shinzo"}, 21 | #{"url":"https://thebrokenarm.sneakers-raffle.fr/","siteid":"3","nomtemplate":"nike-raffle-confirm-the-broken-arm"}, 22 | #{"url":"https://colette.sneakers-raffle.fr/","siteid":"4","nomtemplate":"nike-raffle-confirm-colette"}, 23 | {"url":"https://off---white.sneakers-raffle.fr/","siteid":"8","nomtemplate":"nike-raffle-confirm-off-white-popup"} 24 | ] 25 | # interval etc 26 | self.api = "https://api.sneakers-raffle.fr/submit" 27 | 28 | def register(self,identity): 29 | # For each site... 30 | for sts in self.sites: 31 | # register to each shoes. 32 | for dshoes in self.shoes: 33 | 34 | # getting captcha from threading harvester 35 | d = datetime.datetime.now().strftime('%H:%M') 36 | log("Getting Captcha") 37 | flag = False 38 | while flag != True: 39 | d = datetime.datetime.now().strftime('%H:%M') 40 | try: 41 | file = open(str(d)+'.txt','r') #r as reading only 42 | flag = True 43 | except IOError: 44 | time.sleep(2) 45 | log("No captcha available(1)") 46 | flag = False 47 | try: 48 | FileList = pickle.load(file) #FileList the list where i want to pick out the captcharep 49 | except: 50 | log("Can't open file") 51 | while len(FileList) == 0: #if len(FileList) it will wait for captcha scraper 52 | d = datetime.datetime.now().strftime('%H:%M') 53 | try: 54 | file = open(str(d)+'.txt','r') 55 | FileList = pickle.load(file) 56 | if FileList == []: 57 | log("No captcha available(2)") 58 | time.sleep(3) 59 | except IOError as e: 60 | log("No file, waiting...") 61 | print(e) 62 | time.sleep(3) 63 | captchaREP = random.choice(FileList) 64 | FileList.remove(captchaREP) 65 | file = open(str(d)+'.txt','w') 66 | pickle.dump(FileList,file) 67 | log("Captcha retrieved") 68 | 69 | headers = { 70 | "authority":"api.sneakers-raffle.fr", 71 | "method":"OPTIONS", 72 | "path":"/submit", 73 | "scheme":"https", 74 | "accept":"*/*", 75 | "accept-encoding":"gzip, deflate, br", 76 | "accept-language":"fr-FR,fr;q=0.9,en-US;q=0.8,en;q=0.7", 77 | "access-control-request-headers":"content-type", 78 | "access-control-request-method":"POST", 79 | "origin": sts['url'], 80 | "user-agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.167 Safari/537.36" 81 | } 82 | 83 | rep = self.s.options(self.api,headers=headers) 84 | print(rep) 85 | print(rep.text) 86 | 87 | # captcha 88 | headers = { 89 | "authority":"api.sneakers-raffle.fr", 90 | 
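                # "authority", "method", "path" and "scheme" mimic Chrome's HTTP/2
                # pseudo-headers (":authority", ":method", ... in DevTools); requests
                # simply sends them along as ordinary request headers here.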
"method":"POST", 91 | "path":"/submit", 92 | "scheme":"https", 93 | "accept":"application/json, text/plain, */*", 94 | "accept-encoding":"gzip, deflate, br", 95 | "accept-language":"fr-FR,fr;q=0.9,en-US;q=0.8,en;q=0.7", 96 | "content-type":"application/json", 97 | "origin": sts['url'], 98 | "referer": sts['url'] + dshoes['url'], 99 | "user-agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.167 Safari/537.36"} 100 | 101 | payload = {"first_name":identity['fname'], 102 | "last_name":identity['lname'], 103 | "email":identity['mail'], 104 | "phone":identity['phone'], 105 | "birthdate":identity['birthdate'], 106 | "shoesize_id":identity['shoesize'], #### SIZE 107 | "completed_captcha":captchaREP, 108 | "shoe_id":dshoes['shoe_id'], 109 | "retailer_id":sts['siteid'], 110 | "g-recaptcha-response":captchaREP, 111 | "cc":"on", 112 | "mail":{ 113 | "key":"tEUI-jW_JN_7y1h1B9bNJA", 114 | "template_name":sts['nomtemplate'], 115 | "template_content":[{"name":"example name","content":"example content"}], 116 | "message":{ 117 | "subject":"Confirmation", 118 | "from_email":"verify@sneakers-raffle.fr", 119 | "from_name":"Sneakers Raffle", 120 | "to":[{"email":identity['mail'],"type":"to"}], 121 | "headers":{"Reply-To":"no.reply@sneakers-raffle.fr"}, 122 | "merge_language":"handlebars", 123 | "global_merge_vars":[{"name":"shoe_name","content":dshoes['shoe_name']},{"name":"shoe_image","content":sts['url']+"app/uploads/2018/02/"+dshoes['imgURL']},{"name":"firstname"},{"name":"pickup_date","content":"11 November"}] 124 | } 125 | } 126 | } 127 | 128 | req = self.s.post(self.api,headers=headers,json=payload) 129 | print(req) 130 | 131 | 132 | if __name__ == "__main__": 133 | ra = Raffle() 134 | accounts = [ 135 | # ["36","36.5","37.5","38","38.5","39","40","40.5","41","42","42.5","43","44","44.5","45","45.5","46","47","47.5","48.5","49.5"] 136 | {"fname":"pete","lname":"james","mail":"petejames@gmail.com","phone":"+33612334455","birthdate":"01/01/1998","shoesize":"42",}, 137 | ] 138 | for i in accounts: 139 | ra.register(i) 140 | -------------------------------------------------------------------------------- /captchaharvester.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import random 3 | import sys 4 | import threading 5 | import time 6 | import requests, json, datetime, time, BeautifulSoup, pickle 7 | 8 | # How many threads? 
9 | Hthreads = 10 10 | 11 | sitekeyEnabled = False 12 | 13 | repeat = True 14 | repeatTime = '17:30' #end of the loop 15 | # To-Add, how many does it have to run, False True, d is showing hour and minute 16 | # in format '16:18' 17 | ####### 18 | 19 | CaptchaList = [] 20 | active_threads = 0 21 | sitekey = '6LcMxjMUAAAAALhKgWsmmRM2hAFzGSQqYcpmFqHx' # 6LeWwRkUAAAAAOBsau7KpuC9AV-6J8mhw4AjC3Xz 22 | API_KEY = '' # ENTER YOUR API KEY 23 | captcha_url = 'https://shinzo.sneakers-raffle.fr/' # https://api.sneakers-raffle.fr/submit 24 | headers = { 25 | 'host': 'www.supremenewyork.com', 26 | 'If-None-Match': '"*"', 27 | 'Accept': 'application/json', 28 | 'Proxy-Connection': 'keep-alive', 29 | 'Accept-Encoding': 'gzip, deflate', 30 | 'Accept-Language': 'en-us', 31 | 'Content-Type': 'application/x-www-form-urlencoded', 32 | 'Origin': 'http://www.supremenewyork.com', 33 | 'Connection': 'keep-alive', 34 | 'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_2 like Mac OS X) AppleWebKit/602.3.12 (KHTML, like Gecko) Version/10.0 Mobile/14C92 Safari/602.1', 35 | 'Referer': 'http://www.supremenewyork.com/mobile' 36 | } 37 | 38 | def main(): 39 | global CaptchaList 40 | global sitekey 41 | global API_KEY 42 | global captcha_url 43 | global headers 44 | 45 | log('Welcome') 46 | if sitekeyEnabled == True: 47 | log('Retriving Sitekey') 48 | sitekey = get_sitekey(captcha_url) 49 | 50 | d = datetime.datetime.now().strftime('%H:%M') # print -> 16:18 51 | # Shitty coding 52 | if repeat == True: 53 | while not str(d) == repeatTime: 54 | for i in range(0,Hthreads): 55 | t = threading.Thread(target=get_captcha, args=(API_KEY,sitekey,captcha_url)) 56 | t.daemon = True 57 | t.start() 58 | time.sleep(0.1) 59 | # ce while empêche le while repeatTime de se terminer... 60 | while not active_threads == 0 or active_threads == 1: 61 | log('Active Threads ---------- ' + str(active_threads)) 62 | timeout = [] 63 | timeout.append(active_threads) 64 | if timeout.count(active_threads) == 10: 65 | break 66 | time.sleep(5) 67 | d = datetime.datetime.now().strftime('%H:%M') 68 | timeout = [] 69 | 70 | else: 71 | for i in range(0,Hthreads): 72 | t = threading.Thread(target=get_captcha, args=(API_KEY,sitekey,captcha_url)) 73 | t.daemon = True 74 | t.start() 75 | time.sleep(0.1) 76 | while not active_threads == 0 or active_threads == 1: 77 | log('Active Threads ---------- ' + str(active_threads)) 78 | timeout = [] 79 | timeout.append(active_threads) 80 | if timeout.count(active_threads) == 20: 81 | break 82 | time.sleep(5) 83 | 84 | # Only tests to check if it's saving and working good 85 | """print CaptchaList 86 | d = datetime.datetime.now().strftime('%H:%M') 87 | with open(str(d)+'.txt','r') as f: 88 | trump = pickle.load(f) 89 | item = random.choice(trump) 90 | print trump 91 | print item""" 92 | 93 | def log(event): 94 | print('Captcha by Azerpas :: ' + str(datetime.datetime.now().strftime('%H:%M:%S')) + ' :: ' + str(event)) 95 | 96 | 97 | def get_sitekey(url): 98 | if sitekeyEnabled == False: 99 | log('Sitekey scraping is disabled, using the default value') 100 | else: 101 | session = requests.session() 102 | log('Scraping sitekey') 103 | session.get(url, headers=headers) 104 | ##### finding captcha sitekey with BeautifulSoup #### 105 | 106 | def get_captcha(API_KEY,sitekey,captcha_url): 107 | global active_threads 108 | 109 | active_threads += 1 110 | 111 | session = requests.session() 112 | session.cookies.clear() 113 | randomID = random.getrandbits(16) 114 | log('Generating Captcha for task ID: ' + str(randomID)) 115 | 
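    # 2captcha flow: in.php submits the job and replies "OK|<captcha id>",
    # then res.php is polled until it replies "OK|<g-recaptcha-response token>"
    # instead of "CAPCHA_NOT_READY". A solved token only stays usable for
    # roughly two minutes, which is why captchas are saved per HH:MM file below.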
captcha_id = session.post("http://2captcha.com/in.php?key={}&method=userrecaptcha&googlekey={}&pageurl={}".format(API_KEY, sitekey, captcha_url)).text.split('|')[1] 116 | recaptcha_answer = session.get("http://2captcha.com/res.php?key={}&action=get&id={}".format(API_KEY, captcha_id)).text 117 | while 'CAPCHA_NOT_READY' in recaptcha_answer: 118 | print(recaptcha_answer) 119 | time.sleep(3) 120 | recaptcha_answer = session.get("http://2captcha.com/res.php?key={}&action=get&id={}".format(API_KEY, captcha_id)).text 121 | try: 122 | recaptcha_answer = recaptcha_answer.split('|')[1] 123 | except IndexError: 124 | print("Captcha error") 125 | return 126 | log('Captcha successfully obtained, task ID: ' + str(randomID)) 127 | saveCaptcha(recaptcha_answer,randomID) 128 | log('Task ID ' + str(randomID) + ' is closing...') 129 | active_threads -= 1 130 | 131 | def saveCaptcha(recaptcha_answer, ID): 132 | d = datetime.datetime.now().strftime('%H:%M') 133 | log("Saving Captcha into '" + str(d) + ".txt', valid for 2 minutes") 134 | try : 135 | file = open(str(d)+'.txt','r') 136 | print('Txt already exists, task ID: ' + str(ID)) 137 | try: 138 | Filelist = pickle.load(file) 139 | except EOFError: 140 | print("--------------------") 141 | print("Captcha error") 142 | print("--------------------") 143 | return 144 | Filelist.append(recaptcha_answer) 145 | file = open(str(d)+'.txt','w') 146 | pickle.dump(Filelist,file) 147 | #file.write(Filelist) 148 | #file.write(str(recaptcha_answer)) 149 | #file.write('\n') 150 | except IOError as e: 151 | print('Creating txt, task ID: ' + str(ID)) 152 | file = open(str(d)+'.txt','w') 153 | Filelist = [] 154 | Filelist.append(recaptcha_answer) 155 | #file.write(Filelist) 156 | pickle.dump(Filelist,file) 157 | #file.write('\n') 158 | print('Captcha successfuly saved, task ID: ' + str(ID)) 159 | CaptchaList.append(recaptcha_answer) 160 | 161 | if __name__ == "__main__": 162 | main() 163 | -------------------------------------------------------------------------------- /main2.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import requests, json, time, random, datetime, threading, pickle, os 3 | from selenium import webdriver 4 | from termcolor import colored 5 | 6 | sitekey = "6LcMxjMUAAAAALhKgWsmmRM2hAFzGSQqYcpmFqHx" 7 | 8 | 9 | """ 10 | def notify(title, subtitle, message, sound): 11 | t = '-title {!r}'.format(title) 12 | s = '-subtitle {!r}'.format(subtitle) 13 | m = '-message {!r}'.format(message) 14 | so = '-sound {!r}'.format(sound) 15 | os.system('terminal-notifier {}'.format(' '.join([m, t, s, so]))) 16 | """ 17 | 18 | def log(event): 19 | d = datetime.datetime.now().strftime("%H:%M:%S") 20 | print("Raffle by Azerpas :: " + str(d) + " :: " + event) 21 | 22 | class Raffle(object): 23 | def __init__(self): 24 | self.s = requests.session() 25 | # "https://colette.sneakers-raffle.fr/","https://starcow.sneakers-raffle.fr/" 26 | self.shoes = [ 27 | #{"url":"product/nike-air-jordan-1/","shoe_id":"2","shoe_name":"Nike Air Jordan 1","imgURL":"AirJordan.jpg"}, 28 | #{"url":"product/nike-blazer/","shoe_id":"3","shoe_name":"Nike Blazer","imgURL":"Blazer.jpg"} 29 | #{"url":"product/nike-air-max-90/","shoe_id":"6","shoe_name":"Nike Air Max 90","imgURL":"AirMax90.jpg"}, 30 | #{"url":"product/nike-air-presto/","shoe_id":"7","shoe_name":"Nike Air Presto","imgURL":"AirPresto.jpg"}, 31 | #{"url":"product/nike-react-hyperdunk/","shoe_id":"8","shoe_name":"Nike React Hyperdunk","imgURL":"ReactHyperdunk.jpg"}, 32 | 
#{"url":"product/nike-zoom-vaporfly/","shoe_id":"9","shoe_name":"Nike Zoom Vaporfly","imgURL":"ZoomVaporfly.jpg"}, 33 | #{"url":"product/nike-vapor-max/","shoe_id":"10","shoe_name":"Nike Vapormax","imgURL":"Vapormax.jpg"}, 34 | #{"url":"product/nike-force-1-low/","shoe_id":"11","shoe_name":"Nike Air Force 1 Low","imgURL":"AirForce1.jpg"}, 35 | #{"url":"product/product/nike-air-max-97/","shoe_id":"12","shoe_name":"Nike Air Max 97","imgURL":"AirMax97.jpg"}, 36 | {"url":"product/air-jordan-1-white/","shoe_id":"14","shoe_name":"The Ten: Air Jordan 1","imgURL":"AirJordan@100cropped.jpg"}, 37 | ] 38 | self.count = 0 39 | self.sites = [ 40 | #{"url":"https://shinzo.sneakers-raffle.fr/","siteid":"2","nomtemplate":"nike-raffle-confirm-shinzo"}, 41 | #{"url":"https://thebrokenarm.sneakers-raffle.fr/","siteid":"3","nomtemplate":"nike-raffle-confirm-the-broken-arm"}, 42 | #{"url":"https://colette.sneakers-raffle.fr/","siteid":"4","nomtemplate":"nike-raffle-confirm-colette"}, 43 | #{"url":"https://starcow.sneakers-raffle.fr/","siteid":"5","nomtemplate":"nike-raffle-confirm-starcow"} 44 | {"url":"https://off---white.sneakers-raffle.fr/","siteid":"8","nomtemplate":"nike-raffle-confirm-off---white"} 45 | ] 46 | self.api = "https://api.sneakers-raffle.fr/submit" 47 | self.driver = webdriver.Firefox() #webdriver.Chrome() #service_args 48 | # interval etc 49 | 50 | def register(self,identity): 51 | log("Entering: " + str(identity['fname']) +" "+ str(identity['lname'])) 52 | # For each site... 53 | for sts in self.sites: 54 | # register to each shoes. 55 | for dshoes in self.shoes: 56 | log("Entering: " + dshoes['shoe_name'] + " on " + sts['url']) 57 | d = datetime.datetime.now().strftime('%H:%M') 58 | log("Getting Captcha") 59 | flag = False 60 | while flag != True: 61 | d = datetime.datetime.now().strftime('%H:%M') 62 | try: 63 | file = open(str(d)+'.txt','r') #r as reading only 64 | flag = True 65 | except IOError: 66 | time.sleep(2) 67 | log("No captcha available(1)") 68 | flag = False 69 | try: 70 | FileList = pickle.load(file) #FileList the list where i want to pick out the captcharep 71 | except: 72 | log("Can't open file") 73 | while len(FileList) == 0: #if len(FileList) it will wait for captcha scraper 74 | d = datetime.datetime.now().strftime('%H:%M') 75 | try: 76 | file = open(str(d)+'.txt','r') 77 | FileList = pickle.load(file) 78 | if FileList == []: 79 | log("No captcha available(2)") 80 | time.sleep(3) 81 | except IOError as e: 82 | log("No file, waiting...") 83 | print(e) 84 | time.sleep(3) 85 | captchaREP = random.choice(FileList) 86 | FileList.remove(captchaREP) 87 | file = open(str(d)+'.txt','w') 88 | pickle.dump(FileList,file) 89 | log("Captcha retrieved") 90 | 91 | if sts['siteid'] == "5" and dshoes['shoe_id'] == "11": 92 | self.driver.get(sts['url']+"product/nike-air-force-1/") 93 | else: 94 | self.driver.get(sts['url']+dshoes['url']) 95 | self.driver.find_element_by_xpath("""/html/body/div[2]/div/div[2]/div/form/div[1]/p[1]/input""").send_keys(identity['fname']) 96 | self.driver.find_element_by_xpath("""/html/body/div[2]/div/div[2]/div/form/div[1]/p[2]/input""").send_keys(identity['lname']) 97 | self.driver.find_element_by_xpath("""/html/body/div[2]/div/div[2]/div/form/div[1]/p[3]/input""").send_keys(identity['mail']) 98 | self.driver.find_element_by_xpath("""/html/body/div[2]/div/div[2]/div/form/div[1]/p[4]/input""").send_keys(identity['phone']) 99 | self.driver.find_element_by_xpath("""/html/body/div[2]/div/div[2]/div/form/div[1]/p[5]/input""").send_keys(identity['birthdate']) 100 | 
if identity['shoesize'] == "18": 101 | log("Changing shoe size....") 102 | identity['shoesize'] = random.choice(["36","36.5","37.5","38","38.5","39","40","40.5","41","42","42.5","43","44","44.5","45","45.5","46","47","47.5","48.5","49.5"]) #["6","9","10","11","12"]) #"4","5", 103 | log("Shoe size is now = " + str(identity['shoesize'])) 104 | for option in self.driver.find_elements_by_tag_name('option'): 105 | if option.text.lower() == identity['shoesize'].lower(): 106 | option.click() 107 | break 108 | self.driver.execute_script("""document.getElementById("g-recaptcha-response").innerHTML="%s";""" % captchaREP) 109 | self.driver.execute_script("""onSubmit("%s");""" % captchaREP) 110 | self.driver.execute_script("""document.getElementById("c1").click();""") 111 | time.sleep(random.uniform(3.4,4.5)) 112 | try: 113 | self.driver.execute_script("""document.querySelector('.raffle-form .form-valid input[type="submit"]').click();""") 114 | log("Registered") 115 | self.count += 1 116 | except Exception as e: 117 | print(e) 118 | log("Already registered") 119 | self.driver.execute_script("""onSubmit("%s");""" % captchaREP) 120 | try: 121 | self.driver.execute_script("""document.querySelector('.raffle-form .form-valid input[type="submit"]').click();""") 122 | print(colored("Registered",'red', attrs=['bold'])) 123 | self.count += 1 124 | except Exception as e: 125 | print(e) 126 | log("Problem while trying to register") 127 | time.sleep(1) 128 | self.driver.delete_all_cookies() 129 | 130 | 131 | if __name__ == "__main__": 132 | ra = Raffle() 133 | accounts = [ 134 | # ["36","36.5","37.5","38","38.5","39","40","40.5","41","42","42.5","43","44","44.5","45","45.5","46","47","47.5","48.5","49.5"] 135 | {"fname":"Frederic","lname":"Ronaldo","mail":"fredericronaldo@yahoo.com","phone":"+33612603602","birthdate":"23/12/1997","shoesize":"42"}, 136 | ] 137 | # catpcha 138 | z = 0 139 | for i in accounts: 140 | z += 1 141 | print("----------------------------------") 142 | log("Sign in : " + str(i['fname']) +" "+ str(i['lname'])) 143 | log("Account N°" + str(z) + " of " + str(len(accounts))) 144 | print("----------------------------------") 145 | ra.register(i) 146 | # Dumb IP changing process as I mostly work manual you can't add proxy support easily 147 | """ 148 | if z % 4 == 0: 149 | 150 | log("ip needs to be changed") 151 | notify(title = 'NIKE RAFFLE', 152 | subtitle = 'Proxy changing', 153 | message = 'Need to change proxy to continue', 154 | sound = 'default') 155 | raw_input("++") 156 | """ 157 | 158 | log("Signed to: " + str(ra.count) + " raffles" ) 159 | --------------------------------------------------------------------------------