├── requirements.txt
├── README.md
└── puzzle.py

/requirements.txt:
--------------------------------------------------------------------------------
ecdsa
base58
git+https://github.com/mcdallas/cryptotools.git
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Bitcoin Puzzle Bruteforce (Multithreaded)

## Introduction
The goal of this program is to bruteforce the following puzzle:
https://privatekeyfinder.io/bitcoin-puzzle/

## Installation
This script is written in Python 3.
To use this script, you'll need to install git and pip.
Then, run this command to install all required libraries.
```shell
pip3 install -r requirements.txt
```

## Run it
As it is a very long process, I strongly recommend you to run this command with nohup
```shell
nohup python3 puzzle.py &
```

## Parameters
I've designed it to be multithreaded and very flexible.
There are some parameters at the beginning of the code that you can modify in order to make the process fit your needs.
24 | 25 | ```python 26 | # Parameters : 27 | 28 | # If you don't need to recreate the dataset, set it to False 29 | need_setup_dataset = True 30 | 31 | # Number of thread (max 2 per core, more will be useless) 32 | nb_thread_max = 3 33 | 34 | # Number of address that a unique thread will have to process (WARNING : the lower this value is, the bigger the dataset will be) 35 | addr_per_thread = 7000000000 36 | 37 | #Number of temp file to create (the higher this value is, the lower RAM is used) 38 | num_of_temp_file = 100 39 | 40 | #The searched address 41 | pub_addr_searched = "16jY7qLJnxb7CHZyqBP8qca9d51gAjyXQN" 42 | 43 | # Private key range in which you want to search the address (the private key will automatically be completed with 0 before ex: 00[...]00ffffffffffffffff) 44 | start = "8000000000000000" 45 | end = "ffffffffffffffff" 46 | ``` 47 | 48 | ## Donations 49 | If this program help you or if it make you win some money here is my BTC address.. We never know ;) 50 | bc1qpst40fj88652akszrr2w2unfwl37kgdm5d7pfj 51 | 52 | -------------------------------------------------------------------------------- /puzzle.py: -------------------------------------------------------------------------------- 1 | import os 2 | import ecdsa 3 | import binascii 4 | import hashlib 5 | import base58 6 | import random 7 | import time 8 | import codecs 9 | import multiprocessing 10 | from cryptotools.BTC import PrivateKey, Address 11 | 12 | # Parameters : 13 | 14 | # If you don't need to recreate the dataset, set it to False 15 | need_setup_dataset = True 16 | 17 | # Number of thread (max 2 per core, more will be useless) 18 | nb_thread_max = 8 19 | 20 | # Number of address that a unique thread will have to process (WARNING : the lower this value is, the bigger the dataset will be) 21 | addr_per_thread = 7000 22 | 23 | #Number of temp file to create (the higher this value is, the lower RAM is used) 24 | num_of_temp_file = 10 25 | 26 | #The searched address 27 | pub_addr_searched = 
"13zb1hQbWVsc2S7ZTZnP2G4undNNpdh5so" 28 | 29 | # Range in wich you want to search the address 30 | start = "1" 31 | end = "186A00" 32 | 33 | nomber_ite = int((int(end, 16) - int(start, 16))/addr_per_thread) 34 | 35 | def hash160(hex_str): 36 | sha = hashlib.sha256() 37 | rip = hashlib.new('ripemd160') 38 | sha.update(hex_str) 39 | rip.update( sha.digest() ) 40 | return rip.hexdigest() # .hexdigest() is hex ASCII 41 | 42 | def get_addr(private_key): 43 | Private_key = bytes.fromhex(private_key) 44 | 45 | signing_key = ecdsa.SigningKey.from_string(Private_key, curve = ecdsa.SECP256k1) 46 | verifying_key = signing_key.get_verifying_key() 47 | public_key = bytes.fromhex("04") + verifying_key.to_string() 48 | public_key = public_key.hex() 49 | 50 | if (ord(bytearray.fromhex(public_key[-2:])) % 2 == 0): 51 | pubkey_compressed = '02' 52 | else: 53 | pubkey_compressed = '03' 54 | pubkey_compressed += public_key[2:66] 55 | hex_str = bytearray.fromhex(pubkey_compressed) 56 | 57 | # Obtain key: 58 | 59 | key_hash = '00' + hash160(hex_str) 60 | 61 | # Obtain signature: 62 | 63 | sha = hashlib.sha256() 64 | sha.update( bytearray.fromhex(key_hash) ) 65 | checksum = sha.digest() 66 | sha = hashlib.sha256() 67 | sha.update(checksum) 68 | checksum = sha.hexdigest()[0:8] 69 | 70 | return str(base58.b58encode( bytes(bytearray.fromhex(key_hash + checksum)))) 71 | 72 | def thread_f(start_l, thread_no, q): 73 | # Progress bar 74 | progress = -0.01 75 | 76 | for e in range(0, addr_per_thread) : 77 | # Show progress (can be disabled) 78 | if e%(addr_per_thread/10000) == 0: 79 | progress += 0.01 80 | f = open("nohup.out", "a") 81 | f.write(str(thread_no) + " => " + str(progress) + "%\n") 82 | f.close() 83 | 84 | # Getting the current private key 85 | current = str(hex(int(start_l,16) + e)[2:]) 86 | 87 | while(len(current) < 64): 88 | current = "0" + current 89 | 90 | #Searching the address associated 91 | addrp2pkh = (get_addr(current)[2:])[:-1] 92 | 93 | # If you find the result, it will 
be written in this file 94 | if(addrp2pkh == pub_addr_searched): 95 | f = open("result.txt", "a") 96 | f.write(current) 97 | f.close() 98 | print("found") 99 | 100 | q.put((thread_no, "finished")) 101 | 102 | return 103 | 104 | def setup_dataset(from_file_no = 0, to_file_no = num_of_temp_file): 105 | # Create a dataset and shuffle it 106 | # If you have a memory error in this part, you can increase the value of the num_of_temp_file variable 107 | 108 | print("Setting up dataset from file " + str(from_file_no) + " to file " + str(to_file_no) + "..") 109 | 110 | step = int((int(end, 16) - int(start, 16)) / num_of_temp_file) 111 | from_addr = step * from_file_no + int(start, 16) 112 | to_addr = step * to_file_no + int(start, 16) - 1 113 | 114 | files = [] 115 | file_selected = 0 116 | for e in range(from_file_no, to_file_no): 117 | files.append(open("todo_" + str(e) + ".txt", "w")) 118 | 119 | for e in range(from_addr, to_addr, addr_per_thread): 120 | files[file_selected].write(hex(e)[2:]+"\n") 121 | file_selected = 0 if file_selected == (to_file_no - from_file_no)-1 else file_selected+1 122 | 123 | for e in range(0, to_file_no-from_file_no): 124 | files[e].close() 125 | 126 | print("Done !\n") 127 | 128 | 129 | def shuffle_dataset(from_file_no = 0, to_file_no = num_of_temp_file): 130 | # We shuffle each temp file and we join them into one bigger file 131 | 132 | print("Shuffling") 133 | 134 | file_list = [e for e in range(from_file_no, to_file_no)] 135 | 136 | # We shuffle each file 137 | for e in range(from_file_no, to_file_no): 138 | file = open("todo_" + str(e) + ".txt", "r") 139 | content = file.read().split("\n") 140 | file.close() 141 | 142 | random.shuffle(content) 143 | file = open("todo_" + str(e) + ".txt", "w") 144 | for line in content: 145 | file.write(line + "\n") 146 | file.close() 147 | 148 | def join_dataset(from_file_no = 0, to_file_no = num_of_temp_file): 149 | # We create a list of open files 150 | 151 | file_iterators = [] 152 | for file in 
range(from_file_no, to_file_no): 153 | file_iterators.append(open("todo_" + str(file) + ".txt")) 154 | 155 | # We randomly read these values and apend it to the main file 156 | todo_file = open("todo.txt", "w") 157 | while len(file_iterators) > 0: 158 | file_no = random.randint(0, len(file_iterators)-1) 159 | file_iterator = file_iterators[file_no] 160 | try: 161 | line = next(file_iterator) 162 | 163 | if line == "\n": 164 | continue 165 | 166 | todo_file.write(line) 167 | except Exception as e: 168 | # If the file is empty 169 | file_iterator.close() 170 | file_iterators.remove(file_iterator) 171 | 172 | todo_file.close() 173 | 174 | for e in range(from_file_no, to_file_no): 175 | # We remove temp files 176 | os.remove("todo_" + str(e) + ".txt") 177 | 178 | print("Done !") 179 | 180 | if __name__ == "__main__": 181 | 182 | time1 = time.time() 183 | 184 | thread_l = [] 185 | 186 | if need_setup_dataset: 187 | for e in range(nb_thread_max): 188 | from_file_no = int(e * (num_of_temp_file/nb_thread_max)) 189 | to_file_no = int((e+1) * (num_of_temp_file/nb_thread_max)) 190 | x = multiprocessing.Process(target=setup_dataset, args=(from_file_no, to_file_no,)) 191 | x.start() 192 | thread_l.append(x) 193 | 194 | for e in thread_l: 195 | e.join() 196 | 197 | thread_l = [] 198 | for e in range(nb_thread_max): 199 | from_file_no = int(e * (num_of_temp_file/nb_thread_max)) 200 | to_file_no = int((e+1) * (num_of_temp_file/nb_thread_max)) 201 | x = multiprocessing.Process(target=shuffle_dataset, args=(from_file_no, to_file_no,)) 202 | x.start() 203 | thread_l.append(x) 204 | 205 | for e in thread_l: 206 | e.join() 207 | 208 | thread_l = [] 209 | join_dataset() 210 | 211 | 212 | todo_f = open("todo.txt", 'r') 213 | count = 0 214 | active_t = 0 215 | q = multiprocessing.SimpleQueue() 216 | 217 | for count in range(0, nomber_ite+1): 218 | line = next(todo_f)[:-1] 219 | 220 | if active_t != nb_thread_max: 221 | print(count, "/", nomber_ite) 222 | x = 
multiprocessing.Process(target=thread_f, args=(line, active_t, q,)) 223 | x.start() 224 | active_t+=1 225 | 226 | else: 227 | q.get() 228 | count-=1 229 | active_t-=1 230 | 231 | 232 | time2 = time.time() 233 | print('%s function took', (time2-time1)*1000.0, 'ms') 234 | --------------------------------------------------------------------------------