import cv2
import os
import torch
from mss import mss
import time
import numpy as np


def cooldown(cooldown_bool, wait):
    """Set cooldown_bool[0] back to True after *wait* seconds.

    Shared helper for toggle / save cooldowns; may be called inline
    (blocking, as the save loop below does) or from a worker thread.
    """
    time.sleep(wait)
    cooldown_bool[0] = True


def yolo_label_line(class_id, xmin, ymin, xmax, ymax, frame_w, frame_h):
    """Format one detection (pixel box) as a YOLO txt label line.

    YOLO labels are "<class> <x_center> <y_center> <width> <height>",
    every coordinate normalised to [0, 1] by the frame dimensions.
    Replaces the original in-place arithmetic that reused xmin/ymax as
    both pixel corners and normalised centres.
    """
    box_w = abs(xmax - xmin) / frame_w
    box_h = abs(ymax - ymin) / frame_h
    x_center = (xmin + xmax) / 2 / frame_w
    y_center = (ymin + ymax) / 2 / frame_h
    return f"{class_id} {x_center} {y_center} {box_w} {box_h}\n"


if __name__ == "__main__":
    file_name = "icebox_"
    img_output = r"C:\Users\PyPit\OneDrive\Desktop\Auto_ano\dataset\images\train"    # image output path
    label_output = r"C:\Users\PyPit\OneDrive\Desktop\Auto_ano\dataset\labels\train"  # label output path
    img_num = 0  # running index used in the saved file names

    MONITOR_WIDTH = 1920   # base res
    MONITOR_HEIGHT = 1080  # base res
    MONITOR_SCALE = 5      # capture 1/SCALE of each monitor dimension, centred on screen
    region = (int(MONITOR_WIDTH / 2 - MONITOR_WIDTH / MONITOR_SCALE / 2),
              int(MONITOR_HEIGHT / 2 - MONITOR_HEIGHT / MONITOR_SCALE / 2),
              int(MONITOR_WIDTH / 2 + MONITOR_WIDTH / MONITOR_SCALE / 2),
              int(MONITOR_HEIGHT / 2 + MONITOR_HEIGHT / MONITOR_SCALE / 2))
    x, y, width, height = region
    x_length = width - x   # screenshot width (px)
    y_length = height - y  # screenshot height (px)

    # Local yolov5 checkout + custom weights -- TODO confirm paths on the target machine.
    model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Documents\CODE\Valorant_arduino\yolov5',
                           'custom',
                           path=r"C:\Users\PyPit\OneDrive\Documents\CODE\Valorant_arduino\scripts\best.pt",
                           source='local')
    model.conf = 0.40   # confidence threshold
    model.iou = 0.65    # NMS IoU (overlap) threshold
    model.max_det = 10  # fixed: was `maxdet`, which yolov5's AutoShape silently ignores
    model.amp = True    # mixed-precision inference

    save_cooldown = [True]
    print(region)
    with mss() as sct:
        while True:
            if save_cooldown[0]:  # skip grabbing while the save cooldown is active
                screenshot = np.array(sct.grab(region))
                df = model(screenshot, size=640).pandas().xyxy[0]  # detections as a dataframe
                # Build one YOLO label line per detection (at most 10, matching
                # model.max_det) instead of the old bare try/except over a fixed
                # range(10) that silently swallowed every error.
                lines = []
                for i in range(min(len(df), 10)):
                    lines.append(yolo_label_line(int(df.iloc[i, 5]),          # class id
                                                 int(df.iloc[i, 0]), int(df.iloc[i, 1]),  # xmin, ymin
                                                 int(df.iloc[i, 2]), int(df.iloc[i, 3]),  # xmax, ymax
                                                 x_length, y_length))
                    print(img_num)  # progress feedback, once per detection
                if lines:  # only save frames that actually contain detections
                    with open(os.path.join(label_output, f"{file_name}_{img_num}.txt"), 'w') as f:
                        f.writelines(lines)
                    cv2.imwrite(os.path.join(img_output, f"{file_name}_{img_num}.jpg"), screenshot)
                    img_num += 1
                    save_cooldown[0] = False
                    cooldown(save_cooldown, 0.5)  # blocking 0.5 s pause between saves
            cv2.imshow("frame", screenshot)
            if cv2.waitKey(1) == ord('q'):
                cv2.destroyAllWindows()
                break
-------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__12.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__12.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__13.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__13.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__14.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__14.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__15.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__15.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__2.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__3.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__3.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__4.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__5.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__6.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__6.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__7.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__7.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__8.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__8.jpg 
-------------------------------------------------------------------------------- /Auto_ano/dataset/images/train/icebox__9.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/Auto_ano/dataset/images/train/icebox__9.jpg -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__0.txt: -------------------------------------------------------------------------------- 1 | 1 0.0078125 0.44907407407407407 0.015625 0.027777777777777776 2 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__1.txt: -------------------------------------------------------------------------------- 1 | 1 0.2604166666666667 0.43287037037037035 0.020833333333333332 0.05092592592592592 2 | 0 0.25911458333333337 0.5856481481481481 0.0859375 0.36574074074074076 3 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__10.txt: -------------------------------------------------------------------------------- 1 | 1 0.8984375 0.5810185185185186 0.020833333333333332 0.05092592592592592 2 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__11.txt: -------------------------------------------------------------------------------- 1 | 0 0.6575520833333334 0.962962962962963 0.018229166666666668 0.07407407407407407 2 | 1 0.6575520833333334 0.9398148148148148 0.0078125 0.018518518518518517 3 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__12.txt: -------------------------------------------------------------------------------- 1 | 0 0.31380208333333337 0.8425925925925926 0.07552083333333333 0.3148148148148148 2 | 
-------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__13.txt: -------------------------------------------------------------------------------- 1 | 1 0.16796875 0.4791666666666667 0.0234375 0.05092592592592592 2 | 0 0.1640625 0.6296296296296297 0.08854166666666667 0.3611111111111111 3 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__14.txt: -------------------------------------------------------------------------------- 1 | 0 0.02734375 0.6018518518518519 0.0234375 0.046296296296296294 2 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__15.txt: -------------------------------------------------------------------------------- 1 | 0 0.48828125 0.13194444444444445 0.09635416666666667 0.2638888888888889 2 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__2.txt: -------------------------------------------------------------------------------- 1 | 0 0.30078125 0.5810185185185185 0.09114583333333333 0.36574074074074076 2 | 1 0.3072916666666667 0.42824074074074076 0.020833333333333332 0.05092592592592592 3 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__3.txt: -------------------------------------------------------------------------------- 1 | 0 0.2890625 0.6458333333333334 0.08854166666666667 0.35648148148148145 2 | 1 0.2916666666666667 0.49537037037037035 0.020833333333333332 0.046296296296296294 3 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__4.txt: -------------------------------------------------------------------------------- 1 | 1 0.20703125 0.6041666666666667 0.0234375 0.05092592592592592 2 | 0 0.20182291666666666 0.7546296296296295 
0.09114583333333333 0.37037037037037035 3 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__5.txt: -------------------------------------------------------------------------------- 1 | 0 0.19140625 0.6736111111111112 0.09635416666666667 0.35648148148148145 2 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__6.txt: -------------------------------------------------------------------------------- 1 | 0 0.22265625 0.5833333333333333 0.0859375 0.3611111111111111 2 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__7.txt: -------------------------------------------------------------------------------- 1 | 0 0.27734375 0.46759259259259256 0.09114583333333333 0.35185185185185186 2 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__8.txt: -------------------------------------------------------------------------------- 1 | 1 0.5169270833333334 0.2013888888888889 0.0234375 0.05092592592592592 2 | 0 0.5130208333333334 0.3495370370370371 0.08854166666666667 0.35648148148148145 3 | -------------------------------------------------------------------------------- /Auto_ano/dataset/labels/train/icebox__9.txt: -------------------------------------------------------------------------------- 1 | 1 0.87109375 0.29398148148148145 0.0234375 0.05092592592592592 2 | 0 0.8645833333333334 0.4444444444444444 0.08854166666666667 0.3611111111111111 3 | -------------------------------------------------------------------------------- /Auto_ano/delete_rand.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | os.chdir(r'C:\Users\PyPit\OneDrive\Documents\CODE\Valorant_Auto_anotate\dataset copy\images\train') 4 | 5 | for i,f in enumerate(os.listdir()): 6 | 7 | 
print(f) 8 | 9 | if i%3 == 0: 10 | os.remove(f); -------------------------------------------------------------------------------- /Auto_ano/slipt.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | 5 | os.chdir(r'train folder path here') 6 | 7 | 8 | 9 | # the code will loop over all the files in your train folder and will move 1/5 of them to your val folder, you can change this number moved by changing the number by the mod operator. 10 | for i,f in enumerate(os.listdir()): 11 | 12 | print(f) 13 | 14 | if i%5 == 0: 15 | shutil.move(f, r"put val folder path here"+"\\"+f) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Valorant-AI-cheats 2 | This code is for my course on YouTube where I teach people how to make external cheats. 3 | I use Yolov5 in Nvidia TensorRT format to optimize speed. The program also has a UI using tkinter. 4 | I utilise an Arduino Leonardo connected to a USB Host shield to mask the computer-generated movement as mouse movement.
import sys
sys.path.append(r'C:\Users\PyPit\OneDrive\Desktop\cheats')
import dxshot as dxcam
import torch
import cv2
import numpy as np
import time
import math
import keyboard
import threading
import serial
import tkinter as tk
import pywintypes
import win32api
import win32con


def cooldown(cooldown_bool, wait):
    """Set cooldown_bool[0] back to True after *wait* seconds.

    Run in a worker thread so hotkey toggles / serial sends are rate
    limited without blocking the capture loop.
    """
    time.sleep(wait)
    cooldown_bool[0] = True


def labels():
    """Create the transparent, always-on-top tkinter overlay.

    Builds the FPS / FOV / feature-state labels (published as globals for
    the main loop), applies win32 extended window styles so the overlay is
    click-through and never steals focus, then enters tkinter's mainloop.
    Must run in its own thread.
    """
    global fps_label, trigger_label, assist_label, silent_label, fov_label

    def overlay_label(text, fg):
        # Every Label shares tkinter's default root; configure that root as a
        # borderless topmost window with black as the transparent colour key.
        lbl = tk.Label(text=text, font=('Tahoma', '10'), fg=fg, bg='black')
        lbl.master.overrideredirect(True)
        lbl.master.lift()
        lbl.master.wm_attributes("-topmost", True)
        lbl.master.wm_attributes("-disabled", True)
        lbl.master.wm_attributes("-transparentcolor", "black")
        lbl.pack()
        return lbl

    fps_label = overlay_label(" ", 'white')
    fps_label.master.geometry("+14+16")  # park the overlay near the top-left corner
    fov_label = overlay_label(f"FOV: {activation_range}", 'white')
    trigger_label = overlay_label("Triggerbot: Unactive", 'red')
    assist_label = overlay_label("Aim Assist: Unactive", 'red')
    silent_label = overlay_label("Silent aim: Unactive", 'red')

    # Click-through / non-activating / topmost extended styles, applied to the
    # same windows the original code touched.
    ex_style = (win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED
                | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST
                | win32con.WS_EX_TRANSPARENT)
    for lbl in (fps_label, assist_label, trigger_label):
        hwnd = pywintypes.HANDLE(int(lbl.master.frame(), 16))
        win32api.SetWindowLong(hwnd, win32con.GWL_EXSTYLE, ex_style)

    fps_label.mainloop()


SENS = 0.313             # in-game mouse sensitivity
HUMANIZE = 0.5
AIM_SPEED = 1 * (1 / SENS)
# Fudge factor applied to aim deltas, indexed by MONITOR_SCALE.
target_multiply = [0, 1.01, 1.025, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05]
serialcomm = serial.Serial("COM3", 115200, timeout=0)  # Arduino Leonardo link
activation_range = 100   # aim-assist FOV radius in pixels

ui = threading.Thread(target=labels, args=())
ui.start()

MONITOR_WIDTH = 1920   # game res
MONITOR_HEIGHT = 1080  # game res
MONITOR_SCALE = 5      # capture 1/SCALE of each monitor dimension, centred
region = (int(MONITOR_WIDTH / 2 - MONITOR_WIDTH / MONITOR_SCALE / 2),
          int(MONITOR_HEIGHT / 2 - MONITOR_HEIGHT / MONITOR_SCALE / 2),
          int(MONITOR_WIDTH / 2 + MONITOR_WIDTH / MONITOR_SCALE / 2),
          int(MONITOR_HEIGHT / 2 + MONITOR_HEIGHT / MONITOR_SCALE / 2))
print(region)
x, y, width, height = region
screenshot_center = [int((width - x) / 2), int((height - y) / 2)]

# Feature flags and their single-element-list debounce cooldowns.
triggerbot = False
triggerbot_toggle = [True]
aim_assist = False
aim_assist_toggle = [True]
send_next = [True]
silent_aim = False
silent_aim_not_cooldown = [True]
silent_toggle = [True]
no_fov_cooldown = [True]

model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Desktop\cheats\yolov5', 'custom',
                       path=r'C:\Users\PyPit\OneDrive\Desktop\cheats\best_new.engine',
                       source='local').cuda()
model.conf = 0.25
model.max_det = 10  # fixed: was `maxdet`, which yolov5's AutoShape silently ignores
model.amp = True
model.classes = [1]  # detect class 1 only -- presumably heads; confirm against training labels

camera = dxcam.create(output_idx=0, output_color="BGRA")

start_time = time.time()
FPS_INTERVAL = 1  # seconds between FPS refreshes (was `x`, which shadowed the region unpack above)
counter = 0

while True:
    closest_part_distance = 100000
    closest_part = -1
    screenshot = camera.grab(region)
    if screenshot is None:
        continue  # dxcam returns None when the frame did not change
    df = model(screenshot, size=384).pandas().xyxy[0]

    counter += 1
    if (time.time() - start_time) > FPS_INTERVAL:
        fps_label.config(text="Fps:" + str(int(counter / (time.time() - start_time))))
        counter = 0
        start_time = time.time()

    # Pick the detection whose centre is closest to the crosshair.  Bounded
    # loop replaces the old bare try/except over a fixed range(10).
    for i in range(min(len(df), 10)):
        xmin = int(df.iloc[i, 0])
        ymin = int(df.iloc[i, 1])
        xmax = int(df.iloc[i, 2])
        ymax = int(df.iloc[i, 3])
        center_x = (xmax - xmin) / 2 + xmin
        center_y = (ymax - ymin) / 2 + ymin
        distance = math.dist([center_x, center_y], screenshot_center)
        if int(distance) < closest_part_distance:
            closest_part_distance = distance
            closest_part = i
        # cv2.rectangle(screenshot, (xmin, ymin), (xmax, ymax), (255, 0, 0), 3)

    # --- hotkey toggles, each debounced through a cooldown thread ---
    if keyboard.is_pressed('alt'):
        if triggerbot_toggle[0]:
            triggerbot = not triggerbot
            if triggerbot:
                trigger_label.config(text="Triggerbot: Active", fg='green')
            else:
                trigger_label.config(text="Triggerbot: Unactive", fg='red')
            print(triggerbot)
            triggerbot_toggle[0] = False
            threading.Thread(target=cooldown, args=(triggerbot_toggle, 0.2)).start()

    if keyboard.is_pressed('`'):
        if aim_assist_toggle[0]:
            aim_assist = not aim_assist
            if aim_assist:
                assist_label.config(text="Aim Assist: Active", fg='green')
            else:
                assist_label.config(text="Aim Assist: Unactive", fg='red')
            print(aim_assist)
            aim_assist_toggle[0] = False
            threading.Thread(target=cooldown, args=(aim_assist_toggle, 0.2)).start()

    if keyboard.is_pressed('p'):
        if silent_toggle[0]:
            silent_aim = not silent_aim
            if silent_aim:
                silent_label.config(text="Silent Aim: Active", fg='green')
            else:
                silent_label.config(text="Silent Aim: Unactive", fg='red')
            print(silent_aim)
            silent_toggle[0] = False
            threading.Thread(target=cooldown, args=(silent_toggle, 0.2)).start()

    elif keyboard.is_pressed('up') and no_fov_cooldown[0]:
        activation_range += 5
        fov_label.config(text=f"FOV: {activation_range}")
        no_fov_cooldown[0] = False
        threading.Thread(target=cooldown, args=(no_fov_cooldown, 0.05)).start()

    elif keyboard.is_pressed('down') and no_fov_cooldown[0]:
        activation_range -= 5
        fov_label.config(text=f"FOV: {activation_range}")
        no_fov_cooldown[0] = False
        threading.Thread(target=cooldown, args=(no_fov_cooldown, 0.05)).start()

    if closest_part != -1:
        xmin = df.iloc[closest_part, 0]
        ymin = df.iloc[closest_part, 1]
        xmax = df.iloc[closest_part, 2]
        ymax = df.iloc[closest_part, 3]
        head_center_list = [(xmax - xmin) / 2 + xmin, (ymax - ymin) / 2 + ymin]

        # Triggerbot: fire when the crosshair sits inside the closest box.
        if triggerbot and screenshot_center[0] in range(int(xmin), int(xmax)) \
                and screenshot_center[1] in range(int(ymin), int(ymax)):
            serialcomm.write("shoot".encode())

        # Silent aim: one snap move; the shot itself is handled by the sketch.
        if silent_aim and silent_aim_not_cooldown[0]:
            xdif = (head_center_list[0] - screenshot_center[0]) * AIM_SPEED * target_multiply[MONITOR_SCALE]
            ydif = (head_center_list[1] - screenshot_center[1]) * AIM_SPEED * target_multiply[MONITOR_SCALE]
            serialcomm.write(f"silent{int(xdif)}:{int(ydif)}".encode())
            silent_aim_not_cooldown[0] = False
            threading.Thread(target=cooldown, args=(silent_aim_not_cooldown, 0.2)).start()

        # Aim assist: nudge toward the target only while LMB is held (0x01).
        if aim_assist and closest_part_distance < activation_range and send_next[0] \
                and win32api.GetAsyncKeyState(0x01) & 0x8000 > 0:
            xdif = (head_center_list[0] - screenshot_center[0]) * AIM_SPEED * target_multiply[MONITOR_SCALE]
            ydif = (head_center_list[1] - screenshot_center[1]) * AIM_SPEED * target_multiply[MONITOR_SCALE]
            xdif *= 0.2 if abs(xdif) > 20 else 0.4  # damp large corrections harder
            ydif *= 0.2 if abs(ydif) > 20 else 0.4
            serialcomm.write(f"{int(xdif)}:{int(ydif)}".encode())
            # send_next[0] = False
            # threading.Thread(target=cooldown, args=(send_next, 0.06)).start()

    # cv2.imshow("frame", screenshot)
    # if cv2.waitKey(1) == ord('l'):
    #     cv2.destroyAllWindows()
    #     break
from mss import mss
import torch
import cv2
import numpy as np
import time
import math
import keyboard
import threading
import serial


def cooldown(cooldown_bool, wait):
    """Set cooldown_bool[0] back to True after *wait* seconds (run in a thread)."""
    time.sleep(wait)
    cooldown_bool[0] = True


SENS = 0.313            # in-game mouse sensitivity
AIM_SPEED = 1 * (1 / SENS)
# Fudge factor applied to aim deltas, indexed by MONITOR_SCALE.
target_multiply = [0, 1.01, 1.025, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05, 1.05]
serialcomm = serial.Serial("COM3", 115200, timeout=0)  # Arduino Leonardo link
activation_range = 100  # aim-assist FOV radius in pixels

MONITOR_WIDTH = 1920   # game res
MONITOR_HEIGHT = 1080  # game res
MONITOR_SCALE = 5      # capture 1/SCALE of each monitor dimension, centred
region = (int(MONITOR_WIDTH / 2 - MONITOR_WIDTH / MONITOR_SCALE / 2),
          int(MONITOR_HEIGHT / 2 - MONITOR_HEIGHT / MONITOR_SCALE / 2),
          int(MONITOR_WIDTH / 2 + MONITOR_WIDTH / MONITOR_SCALE / 2),
          int(MONITOR_HEIGHT / 2 + MONITOR_HEIGHT / MONITOR_SCALE / 2))
x, y, width, height = region
screenshot_center = [int((width - x) / 2), int((height - y) / 2)]

triggerbot = False
triggerbot_toggle = [True]
aim_assist = False
aim_assist_toggle = [True]
send_next = [True]

model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Desktop\cheats\yolov5', 'custom',
                       path=r'C:\Users\PyPit\OneDrive\Desktop\cheats\half.engine',
                       source='local')
model.conf = 0.40
model.max_det = 10  # fixed: was `maxdet`, which yolov5's AutoShape silently ignores
model.amp = True    # fixed: was `apm`, a typo that silently did nothing
model.classes = [1]  # detect class 1 only -- presumably heads; confirm against training labels

start_time = time.time()
FPS_INTERVAL = 1  # seconds between FPS printouts (was `x`, which shadowed the region unpack)
counter = 0

with mss() as stc:
    while True:
        closest_part_distance = 100000
        closest_part = -1
        screenshot = np.array(stc.grab(region))
        df = model(screenshot, size=736).pandas().xyxy[0]

        counter += 1
        if (time.time() - start_time) > FPS_INTERVAL:
            print("fps:" + str(int(counter / (time.time() - start_time))))
            counter = 0
            start_time = time.time()

        # Pick the detection whose centre is closest to the crosshair.  Bounded
        # loop replaces the old bare try/except over a fixed range(10).
        for i in range(min(len(df), 10)):
            xmin = int(df.iloc[i, 0])
            ymin = int(df.iloc[i, 1])
            xmax = int(df.iloc[i, 2])
            ymax = int(df.iloc[i, 3])
            center_x = (xmax - xmin) / 2 + xmin
            center_y = (ymax - ymin) / 2 + ymin
            distance = math.dist([center_x, center_y], screenshot_center)
            if int(distance) < closest_part_distance:
                closest_part_distance = distance
                closest_part = i
            # cv2.rectangle(screenshot, (xmin, ymin), (xmax, ymax), (255, 0, 0), 3)

        # --- hotkey toggles, debounced through cooldown threads ---
        if keyboard.is_pressed('`'):
            if triggerbot_toggle[0]:
                triggerbot = not triggerbot
                print(triggerbot)
                triggerbot_toggle[0] = False
                threading.Thread(target=cooldown, args=(triggerbot_toggle, 0.2)).start()

        if keyboard.is_pressed('alt'):
            if aim_assist_toggle[0]:
                aim_assist = not aim_assist
                print(aim_assist)
                aim_assist_toggle[0] = False
                threading.Thread(target=cooldown, args=(aim_assist_toggle, 0.2)).start()

        if closest_part != -1:
            xmin = df.iloc[closest_part, 0]
            ymin = df.iloc[closest_part, 1]
            xmax = df.iloc[closest_part, 2]
            ymax = df.iloc[closest_part, 3]
            head_center_list = [(xmax - xmin) / 2 + xmin, (ymax - ymin) / 2 + ymin]

            # Triggerbot: fire when the crosshair sits inside the closest box.
            if triggerbot and screenshot_center[0] in range(int(xmin), int(xmax)) \
                    and screenshot_center[1] in range(int(ymin), int(ymax)):
                serialcomm.write("shoot".encode())

            # Aim assist: send a relative move toward the target.
            if aim_assist and closest_part_distance < activation_range and send_next[0]:
                xdif = (head_center_list[0] - screenshot_center[0]) * AIM_SPEED * target_multiply[MONITOR_SCALE]
                ydif = (head_center_list[1] - screenshot_center[1]) * AIM_SPEED * target_multiply[MONITOR_SCALE]
                serialcomm.write(f"{int(xdif)}:{int(ydif)}".encode())
                send_next[0] = False
                threading.Thread(target=cooldown, args=(send_next, 0.05)).start()

        # cv2.imshow("frame", screenshot)
        # if cv2.waitKey(1) == ord('l'):
        #     cv2.destroyAllWindows()
        #     break
| Serial.println("Start"); 24 | Serial.setTimeout(1); 25 | if (Usb.Init() == -1) 26 | Serial.println("OSC did not start."); 27 | delay(200); 28 | 29 | if (!Hid.SetReportParser(0, &Mou)) 30 | ErrorMessage (PSTR("SetReportParser"), 1); 31 | } 32 | 33 | void loop() { 34 | 35 | if (Serial.available()) 36 | { 37 | 38 | 39 | 40 | String data = Serial.readString(); 41 | if (data =="shoot"){ 42 | Mouse.click(); 43 | } 44 | else if (data.substring(0,6) == "silent") 45 | { 46 | data.remove(0,6); 47 | index = 0; 48 | num_size = data.indexOf(":", index); 49 | dx = data.substring(index,num_size).toInt(); 50 | data.remove(0,num_size+1); 51 | dy = data.toInt(); 52 | dxn = dx *-1; 53 | dyn = dy *-1; 54 | 55 | if (dx > 0){ 56 | while (dx > 127) 57 | { 58 | dx -= 127; 59 | Mouse.move(127,0); 60 | } 61 | Mouse.move(dx,0); 62 | } 63 | else if (dx < 0){ 64 | while (dx < -127) 65 | { 66 | dx += 127; 67 | Mouse.move(-127,0); 68 | } 69 | Mouse.move(dx,0); 70 | } 71 | if (dy > 0){ 72 | while (dy > 127) 73 | { 74 | dy -= 127; 75 | Mouse.move(0,127); 76 | } 77 | Mouse.move(0,dy); 78 | } 79 | else if (dy < 0){ 80 | while (dy < -127) 81 | { 82 | dy += 127; 83 | Mouse.move(0,-127); 84 | } 85 | Mouse.move(0,dy); 86 | } 87 | Mouse.click(); 88 | if (dxn > 0){ 89 | while (dxn > 127) 90 | { 91 | dxn -= 127; 92 | Mouse.move(127,0); 93 | } 94 | Mouse.move(dxn,0); 95 | } 96 | else if (dxn < 0){ 97 | while (dxn < -127) 98 | { 99 | dxn += 127; 100 | Mouse.move(-127,0); 101 | } 102 | Mouse.move(dxn,0); 103 | } 104 | if (dyn > 0){ 105 | while (dyn > 127) 106 | { 107 | dyn -= 127; 108 | Mouse.move(0,127); 109 | } 110 | Mouse.move(0,dyn); 111 | } 112 | else if (dyn < 0){ 113 | while (dyn < -127) 114 | { 115 | dyn += 127; 116 | Mouse.move(0,-127); 117 | } 118 | Mouse.move(0,dyn); 119 | } 120 | } 121 | 122 | else{ 123 | index = 0; 124 | num_size = data.indexOf(":", index); 125 | dx = data.substring(index,num_size).toInt(); 126 | data.remove(0,num_size+1); 127 | dy = data.toInt(); 128 | 129 | if (dx > 0){ 130 | 
while (dx > jump) 131 | { 132 | dx -= jump; 133 | Mouse.move(jump,0); 134 | } 135 | Mouse.move(dx,0); 136 | } 137 | else if (dx < 0){ 138 | while (dx < -jump) 139 | { 140 | dx += jump; 141 | Mouse.move(-jump,0); 142 | } 143 | Mouse.move(dx,0); 144 | } 145 | if (dy > 0){ 146 | while (dy > jump) 147 | { 148 | dy -= jump; 149 | Mouse.move(0,jump); 150 | } 151 | Mouse.move(0,dy); 152 | } 153 | else if (dy < 0){ 154 | while (dy < -jump) 155 | { 156 | dy += jump; 157 | Mouse.move(0,-jump); 158 | } 159 | Mouse.move(0,dy); 160 | } 161 | } 162 | 163 | 164 | 165 | } 166 | Usb.Task(); 167 | } -------------------------------------------------------------------------------- /scripts/arduino-contact_aimbot/arduino-contact_aimbot.ino: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include "hidmouserptparser.h" 5 | 6 | USB Usb; 7 | HIDUniversal Hid(&Usb); 8 | HIDMouseEvents MouEvents; 9 | HIDMouseReportParser Mou(&MouEvents); 10 | 11 | int dx; 12 | int dy; 13 | int dxn; 14 | int dyn; 15 | int index; 16 | int num_size; 17 | int jump = 127; 18 | 19 | 20 | void setup() { 21 | Mouse.begin(); 22 | Serial.begin(115200); 23 | Serial.println("Start"); 24 | Serial.setTimeout(1); 25 | if (Usb.Init() == -1) 26 | Serial.println("OSC did not start."); 27 | delay(200); 28 | 29 | if (!Hid.SetReportParser(0, &Mou)) 30 | ErrorMessage (PSTR("SetReportParser"), 1); 31 | } 32 | 33 | void loop() { 34 | 35 | if (Serial.available()) 36 | { 37 | 38 | 39 | 40 | String data = Serial.readString(); 41 | if (data =="shoot"){ 42 | Mouse.click(); 43 | } 44 | else{ 45 | 46 | index = 0; 47 | num_size = data.indexOf(":", index); 48 | dx = data.substring(index,num_size).toInt(); 49 | data.remove(0,num_size+1); 50 | dy = data.toInt(); 51 | 52 | if (dx > 0){ 53 | while (dx > jump) 54 | { 55 | dx -= jump; 56 | Mouse.move(jump,0); 57 | } 58 | Mouse.move(dx,0); 59 | } 60 | else if (dx < 0){ 61 | while (dx < -jump) 62 | { 63 | dx += jump; 64 | 
Mouse.move(-jump,0); 65 | } 66 | Mouse.move(dx,0); 67 | } 68 | if (dy > 0){ 69 | while (dy > jump) 70 | { 71 | dy -= jump; 72 | Mouse.move(0,jump); 73 | } 74 | Mouse.move(0,dy); 75 | } 76 | else if (dy < 0){ 77 | while (dy < -jump) 78 | { 79 | dy += jump; 80 | Mouse.move(0,-jump); 81 | } 82 | Mouse.move(0,dy); 83 | } 84 | 85 | } 86 | 87 | 88 | 89 | } 90 | Usb.Task(); 91 | } -------------------------------------------------------------------------------- /scripts/best.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/scripts/best.pt -------------------------------------------------------------------------------- /scripts/best_nano.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/scripts/best_nano.pt -------------------------------------------------------------------------------- /scripts/best_nano_new.engine: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/scripts/best_nano_new.engine -------------------------------------------------------------------------------- /scripts/best_nano_new.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/scripts/best_nano_new.onnx -------------------------------------------------------------------------------- /scripts/best_nano_new.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyturtle/Valorant-AI-cheats/8a2ef26da5a8c694cf0799199f2602e74f63267b/scripts/best_nano_new.pt 
-------------------------------------------------------------------------------- /scripts/detections.py: -------------------------------------------------------------------------------- 1 | from typing import Counter 2 | from mss import mss 3 | import torch 4 | import cv2 5 | import numpy as np 6 | import time 7 | 8 | 9 | 10 | MONITOR_WIDTH = 1920#game res 11 | MONITOR_HEIGHT = 1080#game res 12 | MONITOR_SCALE = 5#how much the screen shot is downsized by eg. 5 would be one fifth of the monitor dimensions 13 | region = (int(MONITOR_WIDTH/2-MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2-MONITOR_HEIGHT/MONITOR_SCALE/2),int(MONITOR_WIDTH/2+MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2+MONITOR_HEIGHT/MONITOR_SCALE/2)) 14 | 15 | 16 | model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Desktop\cheats\yolov5' , 'custom', path= r'C:\Users\PyPit\OneDrive\Desktop\cheats\best.pt',source='local') 17 | model.conf = 0.40 18 | model.maxdet = 10 19 | model.apm = True 20 | 21 | 22 | 23 | start_time = time.time() 24 | x = 1 25 | counter = 0 26 | 27 | 28 | 29 | 30 | 31 | with mss() as stc: 32 | while True: 33 | screenshot = np.array(stc.grab(region)) 34 | df = model(screenshot, size=736).pandas().xyxy[0] 35 | 36 | counter+= 1 37 | if(time.time() - start_time) > x: 38 | fps = "fps:"+ str(int(counter/(time.time() - start_time))) 39 | print(fps) 40 | counter = 0 41 | start_time = time.time() 42 | 43 | 44 | 45 | for i in range(0,10): 46 | try: 47 | xmin = int(df.iloc[i,0]) 48 | ymin = int(df.iloc[i,1]) 49 | xmax = int(df.iloc[i,2]) 50 | ymax = int(df.iloc[i,3]) 51 | 52 | cv2.rectangle(screenshot,(xmin,ymin),(xmax,ymax), (255,0,0),3) 53 | except: 54 | print("",end="") 55 | 56 | cv2.imshow("frame",screenshot) 57 | if(cv2.waitKey(1) == ord('l')): 58 | cv2.destroyAllWindows() 59 | break -------------------------------------------------------------------------------- /scripts/dxcam_main.py: -------------------------------------------------------------------------------- 1 | import 
dxcam 2 | import torch 3 | import cv2 4 | import numpy as np 5 | import time 6 | import math 7 | import keyboard 8 | import threading 9 | import serial 10 | import tkinter as tk 11 | import pywintypes 12 | import win32api 13 | import win32con 14 | 15 | 16 | 17 | def cooldown(cooldown_bool,wait): 18 | time.sleep(wait) 19 | cooldown_bool[0] = True 20 | 21 | 22 | def labels(): 23 | #This function contains all the labels used and is threaded so tikinter can run a ui 24 | global fps_label 25 | global trigger_label 26 | global assist_label 27 | global silent_label 28 | global fov_label 29 | fps_label = tk.Label(text = " ", font=('Tahoma','10'), fg='white', bg='black') 30 | fps_label.master.overrideredirect(True) 31 | fps_label.master.geometry("+14+16") 32 | fps_label.master.lift() 33 | fps_label.master.wm_attributes("-topmost", True) 34 | fps_label.master.wm_attributes("-disabled", True) 35 | fps_label.master.wm_attributes("-transparentcolor", "black") 36 | fps_label.pack() 37 | fov_label = tk.Label(text = f"FOV: {activation_range}", font=('Tahoma','10'), fg='white', bg='black') 38 | fov_label.master.overrideredirect(True) 39 | fov_label.master.lift() 40 | fov_label.master.wm_attributes("-topmost", True) 41 | fov_label.master.wm_attributes("-disabled", True) 42 | fov_label.master.wm_attributes("-transparentcolor", "black") 43 | fov_label.pack() 44 | trigger_label = tk.Label(text = "Triggerbot: Unactive", font=('Tahoma','10'), fg='red', bg='black') 45 | trigger_label.master.overrideredirect(True) 46 | trigger_label.master.lift() 47 | trigger_label.master.wm_attributes("-topmost", True) 48 | trigger_label.master.wm_attributes("-disabled", True) 49 | trigger_label.master.wm_attributes("-transparentcolor", "black") 50 | trigger_label.pack() 51 | assist_label = tk.Label(text = "Aim Assist: Unactive", font=('Tahoma','10'), fg='red', bg='black') 52 | assist_label.master.overrideredirect(True) 53 | assist_label.master.lift() 54 | assist_label.master.wm_attributes("-topmost", 
True) 55 | assist_label.master.wm_attributes("-disabled", True) 56 | assist_label.master.wm_attributes("-transparentcolor", "black") 57 | assist_label.pack() 58 | silent_label = tk.Label(text = "Silent aim: Unactive", font=('Tahoma','10'), fg='red', bg='black') 59 | silent_label.master.overrideredirect(True) 60 | silent_label.master.lift() 61 | silent_label.master.wm_attributes("-topmost", True) 62 | silent_label.master.wm_attributes("-disabled", True) 63 | silent_label.master.wm_attributes("-transparentcolor", "black") 64 | silent_label.pack() 65 | 66 | 67 | hWindow = pywintypes.HANDLE(int(fps_label.master.frame(), 16)) 68 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 69 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 70 | hWindow = pywintypes.HANDLE(int(assist_label.master.frame(), 16)) 71 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 72 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 73 | hWindow = pywintypes.HANDLE(int(trigger_label.master.frame(), 16)) 74 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 75 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 76 | fps_label.mainloop() 77 | 78 | 79 | 80 | SENS = 0.313 81 | AIM_SPEED = 1*(1/SENS) 82 | target_multiply = [0,1.01,1.025,1.05,1.05,1.05,1.05,1.05,1.05,1.05,1.05] 83 | serialcomm = serial.Serial("COM3",115200,timeout = 0) 84 | activation_range = 100 85 | 86 | ui = threading.Thread(target=labels, args=()) 87 | ui.start() 88 | 89 | MONITOR_WIDTH = 1920#game res 90 | MONITOR_HEIGHT = 1080#game res 91 | MONITOR_SCALE = 5#how much the screen shot is downsized by eg. 
5 would be one fifth of the monitor dimensions 92 | region = (int(MONITOR_WIDTH/2-MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2-MONITOR_HEIGHT/MONITOR_SCALE/2),int(MONITOR_WIDTH/2+MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2+MONITOR_HEIGHT/MONITOR_SCALE/2)) 93 | x,y,width,height = region 94 | screenshot_center = [int((width-x)/2),int((height-y)/2)] 95 | triggerbot = False 96 | triggerbot_toggle = [True] 97 | aim_assist = False 98 | aim_assist_toggle = [True] 99 | send_next = [True] 100 | silent_aim = False 101 | silent_aim_not_cooldown = [True] 102 | silent_toggle = [True] 103 | no_fov_cooldown = [True] 104 | model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Desktop\cheats\yolov5' , 'custom', path= r'C:\Users\PyPit\OneDrive\Desktop\cheats\half.engine',source='local').cpu() 105 | model.conf = 0.40 106 | model.maxdet = 10 107 | model.apm = True 108 | model.classes = [1] 109 | camera = dxcam.create(output_idx=0, output_color="BGRA") 110 | 111 | start_time = time.time() 112 | x = 1 113 | counter = 0 114 | 115 | 116 | 117 | 118 | 119 | 120 | while True: 121 | closest_part_distance = 100000 122 | closest_part = -1 123 | screenshot = camera.grab(region) 124 | if screenshot is None: continue 125 | df = model(screenshot, size=736).pandas().xyxy[0] 126 | 127 | counter+= 1 128 | if(time.time() - start_time) > x: 129 | fps = "Fps:"+ str(int(counter/(time.time() - start_time))) 130 | fps_label.config(text=fps) 131 | counter = 0 132 | start_time = time.time() 133 | 134 | 135 | 136 | for i in range(0,10): 137 | try: 138 | xmin = int(df.iloc[i,0]) 139 | ymin = int(df.iloc[i,1]) 140 | xmax = int(df.iloc[i,2]) 141 | ymax = int(df.iloc[i,3]) 142 | 143 | centerX = (xmax-xmin)/2+xmin 144 | centerY = (ymax-ymin)/2+ymin 145 | 146 | distance = math.dist([centerX,centerY],screenshot_center) 147 | 148 | if int(distance) < closest_part_distance: 149 | closest_part_distance = distance 150 | closest_part = i 151 | 152 | # cv2.rectangle(screenshot,(xmin,ymin),(xmax,ymax), 
(255,0,0),3) 153 | except: 154 | print("",end="") 155 | 156 | 157 | if keyboard.is_pressed('`'): 158 | if triggerbot_toggle[0] == True: 159 | triggerbot = not triggerbot 160 | if triggerbot: 161 | trigger_label.config(text = "Triggerbot: Active", fg= 'green') 162 | else: 163 | trigger_label.config(text = "Triggerbot: Unactive", fg= 'red') 164 | print(triggerbot) 165 | triggerbot_toggle[0] = False 166 | thread = threading.Thread(target=cooldown, args=(triggerbot_toggle,0.2,)) 167 | thread.start() 168 | 169 | if keyboard.is_pressed('alt'): 170 | if aim_assist_toggle[0] == True: 171 | aim_assist = not aim_assist 172 | if aim_assist: 173 | assist_label.config(text = "Aim Assist: Active", fg= 'green') 174 | else: 175 | assist_label.config(text = "Aim Assist: Unactive", fg= 'red') 176 | print(aim_assist) 177 | aim_assist_toggle[0] = False 178 | thread = threading.Thread(target=cooldown, args=(aim_assist_toggle,0.2,)) 179 | thread.start() 180 | 181 | if keyboard.is_pressed('p'): 182 | if silent_toggle[0] == True: 183 | silent_aim = not silent_aim 184 | if silent_aim: 185 | silent_label.config(text = "Silent Aim: Active", fg= 'green') 186 | else: 187 | silent_label.config(text = "Silent Aim: Unactive", fg= 'red') 188 | print(silent_aim) 189 | silent_toggle[0] = False 190 | thread = threading.Thread(target=cooldown, args=(silent_toggle,0.2,)) 191 | thread.start() 192 | 193 | elif keyboard.is_pressed('up') and no_fov_cooldown[0] == True: 194 | activation_range += 5 195 | fov_label.config(text=f"FOV: {activation_range}") 196 | no_fov_cooldown[0] = False 197 | thread = threading.Thread(target=cooldown, args=(no_fov_cooldown,0.05,)) 198 | thread.start() 199 | 200 | elif keyboard.is_pressed('down') and no_fov_cooldown[0] == True: 201 | activation_range -= 5 202 | fov_label.config(text=f"FOV: {activation_range}") 203 | no_fov_cooldown[0] = False 204 | thread = threading.Thread(target=cooldown, args=(no_fov_cooldown,0.05,)) 205 | thread.start() 206 | 207 | if closest_part != -1: 
208 | xmin = df.iloc[closest_part,0] 209 | ymin = df.iloc[closest_part,1] 210 | xmax = df.iloc[closest_part,2] 211 | ymax = df.iloc[closest_part,3] 212 | 213 | head_center_list = [(xmax-xmin)/2+xmin,(ymax-ymin)/2+ymin] 214 | if triggerbot == True and screenshot_center[0] in range(int(xmin),int(xmax)) and screenshot_center[1] in range(int(ymin),int(ymax)): 215 | serialcomm.write("shoot".encode()) 216 | 217 | if silent_aim == True and silent_aim_not_cooldown[0] == True: 218 | xdif = (head_center_list[0]-screenshot_center[0])*AIM_SPEED*target_multiply[MONITOR_SCALE] 219 | ydif = (head_center_list[1]-screenshot_center[1])*AIM_SPEED*target_multiply[MONITOR_SCALE] 220 | data = f"silent{int(xdif)}:{int(ydif)}" 221 | serialcomm.write(data.encode()) 222 | silent_aim_not_cooldown[0] = False 223 | thread = threading.Thread(target=cooldown, args=(silent_aim_not_cooldown,0.2,)) 224 | thread.start() 225 | 226 | if aim_assist == True and closest_part_distance < activation_range and send_next[0] == True: 227 | xdif = (head_center_list[0] - screenshot_center[0])*AIM_SPEED*target_multiply[MONITOR_SCALE] 228 | ydif = (head_center_list[1] - screenshot_center[1])*AIM_SPEED*target_multiply[MONITOR_SCALE] 229 | data = f"{int(xdif)}:{int(ydif)}" 230 | serialcomm.write(data.encode()) 231 | send_next[0] = False 232 | thread = threading.Thread(target=cooldown, args=(send_next,0.06,)) 233 | thread.start() 234 | 235 | 236 | 237 | 238 | a 239 | # cv2.imshow("frame",screenshot) 240 | # if(cv2.waitKey(1) == ord('l')): 241 | # cv2.destroyAllWindows() 242 | # break -------------------------------------------------------------------------------- /scripts/fov_control.py: -------------------------------------------------------------------------------- 1 | from mss import mss 2 | import torch 3 | import cv2 4 | import numpy as np 5 | import time 6 | import math 7 | import keyboard 8 | import threading 9 | import serial 10 | 11 | def cooldown(cooldown_bool,wait): 12 | time.sleep(wait) 13 | 
cooldown_bool[0] = True 14 | 15 | 16 | SENS = 0.313 17 | AIM_SPEED = 1*(1/SENS) 18 | target_multiply = [0,1.01,1.025,1.05,1.05,1.05,1.05,1.05,1.05,1.05,1.05] 19 | serialcomm = serial.Serial("COM3",115200,timeout = 0) 20 | activation_range = 100 21 | 22 | 23 | MONITOR_WIDTH = 1920#game res 24 | MONITOR_HEIGHT = 1080#game res 25 | MONITOR_SCALE = 5#how much the screen shot is downsized by eg. 5 would be one fifth of the monitor dimensions 26 | region = (int(MONITOR_WIDTH/2-MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2-MONITOR_HEIGHT/MONITOR_SCALE/2),int(MONITOR_WIDTH/2+MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2+MONITOR_HEIGHT/MONITOR_SCALE/2)) 27 | x,y,width,height = region 28 | screenshot_center = [int((width-x)/2),int((height-y)/2)] 29 | triggerbot = False 30 | triggerbot_toggle = [True] 31 | aim_assist = False 32 | aim_assist_toggle = [True] 33 | send_next = [True] 34 | silent_aim = False 35 | silent_aim_not_cooldown = [True] 36 | silent_toggle = [True] 37 | no_fov_cooldown = [True] 38 | model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Desktop\cheats\yolov5' , 'custom', path= r'C:\Users\PyPit\OneDrive\Desktop\cheats\half.engine',source='local') 39 | model.conf = 0.40 40 | model.maxdet = 10 41 | model.apm = True 42 | model.classes = [1] 43 | 44 | 45 | start_time = time.time() 46 | x = 1 47 | counter = 0 48 | 49 | 50 | 51 | 52 | 53 | with mss() as stc: 54 | while True: 55 | closest_part_distance = 100000 56 | closest_part = -1 57 | screenshot = np.array(stc.grab(region)) 58 | df = model(screenshot, size=736).pandas().xyxy[0] 59 | 60 | counter+= 1 61 | if(time.time() - start_time) > x: 62 | fps = "fps:"+ str(int(counter/(time.time() - start_time))) 63 | print(fps) 64 | counter = 0 65 | start_time = time.time() 66 | 67 | 68 | 69 | for i in range(0,10): 70 | try: 71 | xmin = int(df.iloc[i,0]) 72 | ymin = int(df.iloc[i,1]) 73 | xmax = int(df.iloc[i,2]) 74 | ymax = int(df.iloc[i,3]) 75 | 76 | centerX = (xmax-xmin)/2+xmin 77 | centerY = (ymax-ymin)/2+ymin 
78 | 79 | distance = math.dist([centerX,centerY],screenshot_center) 80 | 81 | if int(distance) < closest_part_distance: 82 | closest_part_distance = distance 83 | closest_part = i 84 | 85 | # cv2.rectangle(screenshot,(xmin,ymin),(xmax,ymax), (255,0,0),3) 86 | except: 87 | print("",end="") 88 | 89 | 90 | if keyboard.is_pressed('`'): 91 | if triggerbot_toggle[0] == True: 92 | triggerbot = not triggerbot 93 | print(triggerbot) 94 | triggerbot_toggle[0] = False 95 | thread = threading.Thread(target=cooldown, args=(triggerbot_toggle,0.2,)) 96 | thread.start() 97 | 98 | if keyboard.is_pressed('alt'): 99 | if aim_assist_toggle[0] == True: 100 | aim_assist = not aim_assist 101 | print(aim_assist) 102 | aim_assist_toggle[0] = False 103 | thread = threading.Thread(target=cooldown, args=(aim_assist_toggle,0.2,)) 104 | thread.start() 105 | 106 | if keyboard.is_pressed('p'): 107 | if silent_toggle[0] == True: 108 | silent_aim = not silent_aim 109 | print(silent_aim) 110 | silent_toggle[0] = False 111 | thread = threading.Thread(target=cooldown, args=(silent_toggle,0.2,)) 112 | thread.start() 113 | 114 | elif keyboard.is_pressed('up') and no_fov_cooldown[0] == True: 115 | activation_range += 5 116 | no_fov_cooldown[0] = False 117 | thread = threading.Thread(target=cooldown, args=(no_fov_cooldown,0.05,)) 118 | thread.start() 119 | 120 | elif keyboard.is_pressed('down') and no_fov_cooldown[0] == True: 121 | activation_range -= 5 122 | no_fov_cooldown[0] = False 123 | thread = threading.Thread(target=cooldown, args=(no_fov_cooldown,0.05,)) 124 | thread.start() 125 | 126 | if closest_part != -1: 127 | xmin = df.iloc[closest_part,0] 128 | ymin = df.iloc[closest_part,1] 129 | xmax = df.iloc[closest_part,2] 130 | ymax = df.iloc[closest_part,3] 131 | 132 | head_center_list = [(xmax-xmin)/2+xmin,(ymax-ymin)/2+ymin] 133 | if triggerbot == True and screenshot_center[0] in range(int(xmin),int(xmax)) and screenshot_center[1] in range(int(ymin),int(ymax)): 134 | 
serialcomm.write("shoot".encode()) 135 | 136 | if silent_aim == True and silent_aim_not_cooldown[0] == True: 137 | xdif = (head_center_list[0]-screenshot_center[0])*AIM_SPEED*target_multiply[MONITOR_SCALE] 138 | ydif = (head_center_list[1]-screenshot_center[1])*AIM_SPEED*target_multiply[MONITOR_SCALE] 139 | data = f"silent{int(xdif)}:{int(ydif)}" 140 | serialcomm.write(data.encode()) 141 | silent_aim_not_cooldown[0] = False 142 | thread = threading.Thread(target=cooldown, args=(silent_aim_not_cooldown,0.2,)) 143 | thread.start() 144 | 145 | if aim_assist == True and closest_part_distance < activation_range and send_next[0] == True: 146 | xdif = (head_center_list[0] - screenshot_center[0])*AIM_SPEED*target_multiply[MONITOR_SCALE] 147 | ydif = (head_center_list[1] - screenshot_center[1])*AIM_SPEED*target_multiply[MONITOR_SCALE] 148 | data = f"{int(xdif)}:{int(ydif)}" 149 | serialcomm.write(data.encode()) 150 | send_next[0] = False 151 | thread = threading.Thread(target=cooldown, args=(send_next,0.05,)) 152 | thread.start() 153 | 154 | 155 | 156 | 157 | 158 | # cv2.imshow("frame",screenshot) 159 | # if(cv2.waitKey(1) == ord('l')): 160 | # cv2.destroyAllWindows() 161 | # break -------------------------------------------------------------------------------- /scripts/main.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import cv2 3 | import numpy as np 4 | from mss import mss 5 | import win32api 6 | import win32con 7 | import math 8 | import keyboard 9 | import time 10 | import threading 11 | import tkinter as tk 12 | import pywintypes 13 | import serial 14 | 15 | 16 | 17 | AIM_SPEED = 1#aim speed/sens for aimbot 18 | activation_range = 20#activation range for aim assist 19 | serialcomm = serial.Serial('COM3',115200, timeout = 0.1)#com port for arduino 20 | MONITOR_WIDTH = 1920#base res 21 | MONITOR_HEIGHT = 1080#base res 22 | MONITOR_SCALE = 5#how much the screen shot is downsized by eg. 
5 would be one fifth of the monitor dimensions 23 | 24 | 25 | 26 | 27 | def cooldown(cooldown_bool,wait): 28 | #cooldown threed for toggels or cooldowns 29 | time.sleep(wait) 30 | cooldown_bool[0] = True 31 | 32 | def shoot(): 33 | #shoots 34 | data = "shoot" 35 | serialcomm.write(data.encode()) 36 | print(data) 37 | 38 | def labels(): 39 | #This function contains all the labels used and is threaded so tikinter can run a ui 40 | global fps_label 41 | global trigger_label 42 | global assist_label 43 | global silent_label 44 | global fov_label 45 | fps_label = tk.Label(text = " ", font=('Tahoma','10'), fg='white', bg='black') 46 | fps_label.master.overrideredirect(True) 47 | fps_label.master.geometry("+14+16") 48 | fps_label.master.lift() 49 | fps_label.master.wm_attributes("-topmost", True) 50 | fps_label.master.wm_attributes("-disabled", True) 51 | fps_label.master.wm_attributes("-transparentcolor", "black") 52 | fps_label.pack() 53 | fov_label = tk.Label(text = f"FOV: {activation_range}", font=('Tahoma','10'), fg='white', bg='black') 54 | fov_label.master.overrideredirect(True) 55 | fov_label.master.lift() 56 | fov_label.master.wm_attributes("-topmost", True) 57 | fov_label.master.wm_attributes("-disabled", True) 58 | fov_label.master.wm_attributes("-transparentcolor", "black") 59 | fov_label.pack() 60 | trigger_label = tk.Label(text = "Triggerbot: Unactive", font=('Tahoma','10'), fg='red', bg='black') 61 | trigger_label.master.overrideredirect(True) 62 | trigger_label.master.lift() 63 | trigger_label.master.wm_attributes("-topmost", True) 64 | trigger_label.master.wm_attributes("-disabled", True) 65 | trigger_label.master.wm_attributes("-transparentcolor", "black") 66 | trigger_label.pack() 67 | assist_label = tk.Label(text = "Aim Assist: Unactive", font=('Tahoma','10'), fg='red', bg='black') 68 | assist_label.master.overrideredirect(True) 69 | assist_label.master.lift() 70 | assist_label.master.wm_attributes("-topmost", True) 71 | 
assist_label.master.wm_attributes("-disabled", True) 72 | assist_label.master.wm_attributes("-transparentcolor", "black") 73 | assist_label.pack() 74 | silent_label = tk.Label(text = "Silent aim: Unactive", font=('Tahoma','10'), fg='red', bg='black') 75 | silent_label.master.overrideredirect(True) 76 | silent_label.master.lift() 77 | silent_label.master.wm_attributes("-topmost", True) 78 | silent_label.master.wm_attributes("-disabled", True) 79 | silent_label.master.wm_attributes("-transparentcolor", "black") 80 | silent_label.pack() 81 | 82 | 83 | hWindow = pywintypes.HANDLE(int(fps_label.master.frame(), 16)) 84 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 85 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 86 | hWindow = pywintypes.HANDLE(int(assist_label.master.frame(), 16)) 87 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 88 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 89 | hWindow = pywintypes.HANDLE(int(trigger_label.master.frame(), 16)) 90 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 91 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 92 | fps_label.mainloop() 93 | 94 | 95 | 96 | 97 | 98 | if __name__ == "__main__": 99 | #creating fps overlay for the program 100 | ui = threading.Thread(target=labels, args=()) 101 | ui.start() 102 | 103 | 104 | 105 | """ 106 | Main part of the program 107 | """ 108 | target_multiply= [0,1.01,1.025,1.05,1.05,1.05] #depending on the screen ratio how much you move the mouse 109 | no_fov_cooldown = [True] 110 | triggerbot = False 111 | triggerbot_toggle = [True] 112 | silent_aim = False 113 | silent_aim_not_cooldown = [True] 114 | silent_toggle = [True] 115 | aim_assist = False 116 | 
send_next = [True] 117 | aim_assist_toggle = [True] 118 | model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Documents\CODE\Valorant_arduino\yolov5', 'custom', path=r"C:\Users\PyPit\OneDrive\Documents\CODE\Valorant_arduino\scripts\best_nano_new.engine", source='local').eval().cuda()#loading model onto gpu 119 | model.conf = 0.40# model confidance threshold 120 | model.iou = 0.65# overlap threshhold threshold 121 | model.classes = [1] # which classe the model detects 122 | model.maxdet = 10# max detections 123 | model.amp = True# amps model 124 | region = (int(MONITOR_WIDTH/2-MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2-MONITOR_HEIGHT/MONITOR_SCALE/2),int(MONITOR_WIDTH/2+MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2+MONITOR_HEIGHT/MONITOR_SCALE/2)) 125 | x,y,width,height = region 126 | screenshot_center = [int((width-x)/2),int((height-y)/2)] 127 | 128 | 129 | print(region) 130 | start_time = time.time() 131 | x = 1 132 | counter = 0 133 | # (int(monitor_width/2-monitor_width/monitor_scale/2),int(monitor_height/2-monitor_height/monitor_height/2),int(monitor_width/2+monitor_width/monitor_scale/2),int(monitor_height/2+monitor_height/monitor_height/2)) 134 | # (730,360,1190,710) 135 | # {"top": 350, "left": 640, "width": 640, "height":400} 136 | 137 | with mss() as stc: 138 | while True: 139 | 140 | screenshot = np.array(stc.grab(region)) 141 | counter+=1 142 | if (time.time() - start_time) > x : 143 | fps = "FPS: "+str(int(counter / (time.time() - start_time))) 144 | fps_label.config(text=fps) 145 | counter = 0 146 | start_time = time.time() 147 | df= model(screenshot, size=640).pandas().xyxy[0] 148 | closest_part_distance = 1000000 149 | closest_part =-1 150 | 151 | 152 | for i in range(0,10): 153 | try: 154 | 155 | xmin = int(df.iloc[i,0]) 156 | ymin = int(df.iloc[i,1]*1.01) 157 | xmax = int(df.iloc[i,2]) 158 | ymax = int(df.iloc[i,3]*1.01) 159 | 160 | 161 | # cv2.putText(screenshot,str(int(float(df.iloc[i,4])*100)), (xmin, ymin), cv2.FONT_HERSHEY_SIMPLEX, 
1,(255,255,255),1,1) 162 | 163 | # cv2.rectangle(screenshot, (xmin, ymin), (xmax, ymax), (255,0,0), 1) 164 | 165 | 166 | 167 | 168 | 169 | 170 | centerX = (xmax-xmin)/2+xmin 171 | centerY = (ymax-ymin)/2+ymin 172 | 173 | distance = math.dist([centerX, centerY],[screenshot_center[0],screenshot_center[1]]) 174 | 175 | 176 | 177 | if int(distance) x: 62 | fps = "fps:"+ str(int(counter/(time.time() - start_time))) 63 | print(fps) 64 | counter = 0 65 | start_time = time.time() 66 | 67 | 68 | 69 | for i in range(0,10): 70 | try: 71 | xmin = int(df.iloc[i,0]) 72 | ymin = int(df.iloc[i,1]) 73 | xmax = int(df.iloc[i,2]) 74 | ymax = int(df.iloc[i,3]) 75 | 76 | centerX = (xmax-xmin)/2+xmin 77 | centerY = (ymax-ymin)/2+ymin 78 | 79 | distance = math.dist([centerX,centerY],screenshot_center) 80 | 81 | if int(distance) < closest_part_distance: 82 | closest_part_distance = distance 83 | closest_part = i 84 | 85 | # cv2.rectangle(screenshot,(xmin,ymin),(xmax,ymax), (255,0,0),3) 86 | except: 87 | print("",end="") 88 | 89 | 90 | if keyboard.is_pressed('`'): 91 | if triggerbot_toggle[0] == True: 92 | triggerbot = not triggerbot 93 | print(triggerbot) 94 | triggerbot_toggle[0] = False 95 | thread = threading.Thread(target=cooldown, args=(triggerbot_toggle,0.2,)) 96 | thread.start() 97 | 98 | elif keyboard.is_pressed('alt'): 99 | if aim_assist_toggle[0] == True: 100 | aim_assist = not aim_assist 101 | print(aim_assist) 102 | aim_assist_toggle[0] = False 103 | thread = threading.Thread(target=cooldown, args=(aim_assist_toggle,0.2,)) 104 | thread.start() 105 | 106 | elif keyboard.is_pressed('p'): 107 | if silent_aim_toggle[0] == True: 108 | silent_aim = not silent_aim 109 | print(silent_aim) 110 | silent_aim_toggle[0] = False 111 | thread = threading.Thread(target=cooldown, args=(silent_aim_toggle,0.2,)) 112 | thread.start() 113 | 114 | elif keyboard.is_pressed('up'): 115 | if no_fov_cooldown[0] == True: 116 | activation_range += 5 117 | print(activation_range) 118 | no_fov_cooldown[0] = 
[False] 119 | thread = threading.Thread(target=cooldown, args=(no_fov_cooldown,0.05,)) 120 | thread.start() 121 | 122 | elif keyboard.is_pressed('down'): 123 | if no_fov_cooldown[0] == True: 124 | activation_range -= 5 125 | print(activation_range) 126 | no_fov_cooldown[0] = [False] 127 | thread = threading.Thread(target=cooldown, args=(no_fov_cooldown,0.05,)) 128 | thread.start() 129 | 130 | if closest_part != -1: 131 | xmin = df.iloc[closest_part,0] 132 | ymin = df.iloc[closest_part,1] 133 | xmax = df.iloc[closest_part,2] 134 | ymax = df.iloc[closest_part,3] 135 | 136 | head_center_list = [(xmax-xmin)/2+xmin,(ymax-ymin)/2+ymin] 137 | if triggerbot == True and screenshot_center[0] in range(int(xmin),int(xmax)) and screenshot_center[1] in range(int(ymin),int(ymax)): 138 | serialcomm.write("shoot".encode()) 139 | 140 | if silent_aim == True and silent_aim_not_cooldown[0] == True: 141 | xdif = (head_center_list[0] - screenshot_center[0])*AIM_SPEED*target_multiply[MONITOR_SCALE] 142 | ydif = (head_center_list[1] - screenshot_center[1])*AIM_SPEED*target_multiply[MONITOR_SCALE] 143 | data = f"silent{int(xdif)}:{int(ydif)}" 144 | serialcomm.write(data.encode()) 145 | silent_aim_not_cooldown[0] = False 146 | thread = threading.Thread(target=cooldown, args=(silent_aim_not_cooldown,0.8,)) 147 | thread.start() 148 | 149 | 150 | if aim_assist == True and closest_part_distance < activation_range and send_next[0] == True: 151 | xdif = (head_center_list[0] - screenshot_center[0])*AIM_SPEED*target_multiply[MONITOR_SCALE] 152 | ydif = (head_center_list[1] - screenshot_center[1])*AIM_SPEED*target_multiply[MONITOR_SCALE] 153 | data = f"{int(xdif)}:{int(ydif)}" 154 | serialcomm.write(data.encode()) 155 | send_next[0] = False 156 | thread = threading.Thread(target=cooldown, args=(send_next,0.05,)) 157 | thread.start() 158 | 159 | 160 | 161 | 162 | 163 | # cv2.imshow("frame",screenshot) 164 | # if(cv2.waitKey(1) == ord('l')): 165 | # cv2.destroyAllWindows() 166 | # break 
-------------------------------------------------------------------------------- /scripts/slipt.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | 5 | os.chdir(r'train folder path here') 6 | 7 | 8 | 9 | # the code will loop over all the files in your train folder and will move 1/5 of them to your val folder, you can change this number moved by changing the number by the mod operator. 10 | for i,f in enumerate(os.listdir()): 11 | 12 | print(f) 13 | 14 | if i%5 == 0: 15 | shutil.move(f, r"put val folder path here"+"\\"+f) -------------------------------------------------------------------------------- /scripts/test.py: -------------------------------------------------------------------------------- 1 | import dxcam 2 | import torch 3 | import cv2 4 | import numpy as np 5 | from mss import mss 6 | import win32api 7 | import win32con 8 | import math 9 | import keyboard 10 | import time 11 | import threading 12 | import tkinter as tk 13 | import pywintypes 14 | import serial 15 | 16 | SENS = 0.313 17 | AIM_SPEED = 1*(1/SENS)#aim speed/sens for aimbot 18 | activation_range = 20#activation range for aim assist 19 | serialcomm = serial.Serial('COM3',115200, timeout = 0)#com port for arduino 20 | MONITOR_WIDTH = 1920#base res 21 | MONITOR_HEIGHT = 1080#base res 22 | MONITOR_SCALE = 1.3#how much the screen shot is downsized by eg. 
5 would be one fifth of the monitor dimensions 23 | 24 | 25 | 26 | 27 | def cooldown(cooldown_bool,wait): 28 | #cooldown threed for toggels or cooldowns 29 | time.sleep(wait) 30 | cooldown_bool[0] = True 31 | 32 | def shoot(): 33 | #Sends Arduino message to fire 34 | data = "shoot" 35 | serialcomm.write(data.encode()) 36 | # print(data) 37 | 38 | def labels(): 39 | #This function contains all the labels used and is threaded so tikinter can run a ui 40 | global fps_label 41 | global trigger_label 42 | global assist_label 43 | global silent_label 44 | global fov_label 45 | fps_label = tk.Label(text = " ", font=('Tahoma','10'), fg='white', bg='black') 46 | fps_label.master.overrideredirect(True) 47 | fps_label.master.geometry("+14+16") 48 | fps_label.master.lift() 49 | fps_label.master.wm_attributes("-topmost", True) 50 | fps_label.master.wm_attributes("-disabled", True) 51 | fps_label.master.wm_attributes("-transparentcolor", "black") 52 | fps_label.pack() 53 | fov_label = tk.Label(text = f"FOV: {activation_range}", font=('Tahoma','10'), fg='white', bg='black') 54 | fov_label.master.overrideredirect(True) 55 | fov_label.master.lift() 56 | fov_label.master.wm_attributes("-topmost", True) 57 | fov_label.master.wm_attributes("-disabled", True) 58 | fov_label.master.wm_attributes("-transparentcolor", "black") 59 | fov_label.pack() 60 | trigger_label = tk.Label(text = "Triggerbot: Unactive", font=('Tahoma','10'), fg='red', bg='black') 61 | trigger_label.master.overrideredirect(True) 62 | trigger_label.master.lift() 63 | trigger_label.master.wm_attributes("-topmost", True) 64 | trigger_label.master.wm_attributes("-disabled", True) 65 | trigger_label.master.wm_attributes("-transparentcolor", "black") 66 | trigger_label.pack() 67 | assist_label = tk.Label(text = "Aim Assist: Unactive", font=('Tahoma','10'), fg='red', bg='black') 68 | assist_label.master.overrideredirect(True) 69 | assist_label.master.lift() 70 | assist_label.master.wm_attributes("-topmost", True) 71 | 
assist_label.master.wm_attributes("-disabled", True) 72 | assist_label.master.wm_attributes("-transparentcolor", "black") 73 | assist_label.pack() 74 | silent_label = tk.Label(text = "Silent aim: Unactive", font=('Tahoma','10'), fg='red', bg='black') 75 | silent_label.master.overrideredirect(True) 76 | silent_label.master.lift() 77 | silent_label.master.wm_attributes("-topmost", True) 78 | silent_label.master.wm_attributes("-disabled", True) 79 | silent_label.master.wm_attributes("-transparentcolor", "black") 80 | silent_label.pack() 81 | 82 | 83 | hWindow = pywintypes.HANDLE(int(fps_label.master.frame(), 16)) 84 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 85 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 86 | hWindow = pywintypes.HANDLE(int(assist_label.master.frame(), 16)) 87 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 88 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 89 | hWindow = pywintypes.HANDLE(int(trigger_label.master.frame(), 16)) 90 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 91 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 92 | fps_label.mainloop() 93 | 94 | 95 | 96 | 97 | 98 | if __name__ == "__main__": 99 | #creating fps overlay for the program 100 | ui = threading.Thread(target=labels, args=()) 101 | ui.start() 102 | 103 | 104 | 105 | """ 106 | Main part of the program 107 | """ 108 | target_multiply= [0,1.01,1.025,1.05,1.05,1.05,1.05,1.05,1.05,1.05] #depending on the screen ratio how much you move the mouse 109 | no_fov_cooldown = [True] 110 | triggerbot = False 111 | triggerbot_toggle = [True] 112 | silent_aim = False 113 | silent_aim_not_cooldown = [True] 114 | silent_aim_toggle = [True] 115 | 
aim_assist = False 116 | send_next = [True] 117 | aim_assist_toggle = [True] 118 | model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Documents\CODE\Valorant_arduino\yolov5', 'custom', path=r"C:\Users\PyPit\OneDrive\Documents\CODE\Valorant_arduino\scripts\best_nano.engine", _verbose=False, source= "local").eval().cuda()#loading model onto gpu 119 | model.conf = 0.25# model confidance threshold 120 | # model.iou = 0.45# overlap threshhold threshold 121 | model.classes = [0,1] # which classe the model detects 122 | model.maxdet = 30# max detections 123 | model.amp = True# amps model 124 | camera = dxcam.create(device_idx=0, output_idx=0, output_color= "BGRA") #making the dxcam 125 | region = (int(MONITOR_WIDTH/2-MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2-MONITOR_HEIGHT/MONITOR_SCALE/2),int(MONITOR_WIDTH/2+MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2+MONITOR_HEIGHT/MONITOR_SCALE/2)) 126 | x,y,width,height = region 127 | screenshot_center = [int((width-x)/2),int((height-y)/2)] 128 | 129 | 130 | print(region) 131 | start_time = time.time() 132 | x = 1 133 | counter = 0 134 | # (int(monitor_width/2-monitor_width/monitor_scale/2),int(monitor_height/2-monitor_height/monitor_height/2),int(monitor_width/2+monitor_width/monitor_scale/2),int(monitor_height/2+monitor_height/monitor_height/2)) 135 | # (768, 432, 1152, 648) 136 | # {"top": 350, "left": 640, "width": 640, "height":400} 137 | 138 | while True: 139 | 140 | screenshot = camera.grab(region) 141 | if screenshot is None: continue 142 | counter+=1 143 | if (time.time() - start_time) > x : 144 | fps = "FPS: "+str(int(counter / (time.time() - start_time))) 145 | fps_label.config(text=fps) 146 | counter = 0 147 | start_time = time.time() 148 | df= model(screenshot, size=640).pandas().xyxy[0] 149 | closest_part_distance = 1000000 150 | closest_part =-1 151 | 152 | 153 | for i in range(0,30): 154 | try: 155 | 156 | xmin = int(df.iloc[i,0]) 157 | ymin = int(df.iloc[i,1]) 158 | xmax = int(df.iloc[i,2]) 159 | ymax 
= int(df.iloc[i,3]) 160 | 161 | 162 | # cv2.putText(screenshot,str(int(float(df.iloc[i,4])*100)), (xmin, ymin), cv2.FONT_HERSHEY_SIMPLEX, 1,(255,255,255),1,1) 163 | if int(df.iloc[i,5]) == 0: 164 | cv2.rectangle(screenshot, (xmin, ymin), (xmax, ymax),(0,0,255) , 2) 165 | head_center_list = [int((xmax-xmin)/2+xmin),int((ymax-ymin)/2+ymin)] 166 | cv2.line(screenshot,(screenshot_center[0],screenshot_center[1]*2),tuple(head_center_list),(255,255,255),2) 167 | else: 168 | cv2.rectangle(screenshot, (xmin, ymin), (xmax, ymax), (0,255,255), 2) 169 | 170 | 171 | 172 | 173 | 174 | 175 | centerX = (xmax-xmin)/2+xmin 176 | centerY = (ymax-ymin)/2+ymin 177 | 178 | distance = math.dist([centerX, centerY],[screenshot_center[0],screenshot_center[1]]) 179 | 180 | 181 | 182 | if int(distance) x: 48 | fps = "fps:"+ str(int(counter/(time.time() - start_time))) 49 | print(fps) 50 | counter = 0 51 | start_time = time.time() 52 | 53 | 54 | 55 | for i in range(0,10): 56 | try: 57 | xmin = int(df.iloc[i,0]) 58 | ymin = int(df.iloc[i,1]) 59 | xmax = int(df.iloc[i,2]) 60 | ymax = int(df.iloc[i,3]) 61 | 62 | centerX = (xmax-xmin)/2+xmin 63 | centerY = (ymax-ymin)/2+ymin 64 | 65 | distance = math.dist([centerX,centerY],screenshot_center) 66 | 67 | if int(distance) < closest_part_distance: 68 | closest_part_distance = distance 69 | closest_part = i 70 | 71 | # cv2.rectangle(screenshot,(xmin,ymin),(xmax,ymax), (255,0,0),3) 72 | except: 73 | print("",end="") 74 | 75 | 76 | if keyboard.is_pressed('`'): 77 | if triggerbot_toggle[0] == True: 78 | triggerbot = not triggerbot 79 | print(triggerbot) 80 | triggerbot_toggle[0] = False 81 | thread = threading.Thread(target=cooldown, args=(triggerbot_toggle,0.2,)) 82 | thread.start() 83 | 84 | if closest_part != -1: 85 | xmin = df.iloc[closest_part,0] 86 | ymin = df.iloc[closest_part,1] 87 | xmax = df.iloc[closest_part,2] 88 | ymax = df.iloc[closest_part,3] 89 | if triggerbot == True and screenshot_center[0] in range(int(xmin),int(xmax)) and 
screenshot_center[1] in range(int(ymin),int(ymax)): 90 | keyboard.press_and_release("k") 91 | 92 | 93 | 94 | 95 | 96 | # cv2.imshow("frame",screenshot) 97 | # if(cv2.waitKey(1) == ord('l')): 98 | # cv2.destroyAllWindows() 99 | # break -------------------------------------------------------------------------------- /scripts/ui.py: -------------------------------------------------------------------------------- 1 | from mss import mss 2 | import torch 3 | import cv2 4 | import numpy as np 5 | import time 6 | import math 7 | import keyboard 8 | import threading 9 | import serial 10 | import tkinter as tk 11 | import pywintypes 12 | import win32api 13 | import win32con 14 | 15 | 16 | 17 | def cooldown(cooldown_bool,wait): 18 | time.sleep(wait) 19 | cooldown_bool[0] = True 20 | 21 | 22 | def labels(): 23 | #This function contains all the labels used and is threaded so tikinter can run a ui 24 | global fps_label 25 | global trigger_label 26 | global assist_label 27 | global silent_label 28 | global fov_label 29 | fps_label = tk.Label(text = " ", font=('Tahoma','10'), fg='white', bg='black') 30 | fps_label.master.overrideredirect(True) 31 | fps_label.master.geometry("+14+16") 32 | fps_label.master.lift() 33 | fps_label.master.wm_attributes("-topmost", True) 34 | fps_label.master.wm_attributes("-disabled", True) 35 | fps_label.master.wm_attributes("-transparentcolor", "black") 36 | fps_label.pack() 37 | fov_label = tk.Label(text = f"FOV: {activation_range}", font=('Tahoma','10'), fg='white', bg='black') 38 | fov_label.master.overrideredirect(True) 39 | fov_label.master.lift() 40 | fov_label.master.wm_attributes("-topmost", True) 41 | fov_label.master.wm_attributes("-disabled", True) 42 | fov_label.master.wm_attributes("-transparentcolor", "black") 43 | fov_label.pack() 44 | trigger_label = tk.Label(text = "Triggerbot: Unactive", font=('Tahoma','10'), fg='red', bg='black') 45 | trigger_label.master.overrideredirect(True) 46 | trigger_label.master.lift() 47 | 
trigger_label.master.wm_attributes("-topmost", True) 48 | trigger_label.master.wm_attributes("-disabled", True) 49 | trigger_label.master.wm_attributes("-transparentcolor", "black") 50 | trigger_label.pack() 51 | assist_label = tk.Label(text = "Aim Assist: Unactive", font=('Tahoma','10'), fg='red', bg='black') 52 | assist_label.master.overrideredirect(True) 53 | assist_label.master.lift() 54 | assist_label.master.wm_attributes("-topmost", True) 55 | assist_label.master.wm_attributes("-disabled", True) 56 | assist_label.master.wm_attributes("-transparentcolor", "black") 57 | assist_label.pack() 58 | silent_label = tk.Label(text = "Silent aim: Unactive", font=('Tahoma','10'), fg='red', bg='black') 59 | silent_label.master.overrideredirect(True) 60 | silent_label.master.lift() 61 | silent_label.master.wm_attributes("-topmost", True) 62 | silent_label.master.wm_attributes("-disabled", True) 63 | silent_label.master.wm_attributes("-transparentcolor", "black") 64 | silent_label.pack() 65 | 66 | 67 | hWindow = pywintypes.HANDLE(int(fps_label.master.frame(), 16)) 68 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 69 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 70 | hWindow = pywintypes.HANDLE(int(assist_label.master.frame(), 16)) 71 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 72 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 73 | hWindow = pywintypes.HANDLE(int(trigger_label.master.frame(), 16)) 74 | exStyle = win32con.WS_EX_COMPOSITED | win32con.WS_EX_LAYERED | win32con.WS_EX_NOACTIVATE | win32con.WS_EX_TOPMOST | win32con.WS_EX_TRANSPARENT 75 | win32api.SetWindowLong(hWindow, win32con.GWL_EXSTYLE, exStyle) 76 | fps_label.mainloop() 77 | 78 | 79 | 80 | SENS = 0.313 81 | AIM_SPEED = 1*(1/SENS) 82 | target_multiply = 
[0,1.01,1.025,1.05,1.05,1.05,1.05,1.05,1.05,1.05,1.05] 83 | serialcomm = serial.Serial("COM3",115200,timeout = 0) 84 | activation_range = 100 85 | 86 | ui = threading.Thread(target=labels, args=()) 87 | ui.start() 88 | 89 | MONITOR_WIDTH = 1920#game res 90 | MONITOR_HEIGHT = 1080#game res 91 | MONITOR_SCALE = 5#how much the screen shot is downsized by eg. 5 would be one fifth of the monitor dimensions 92 | region = (int(MONITOR_WIDTH/2-MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2-MONITOR_HEIGHT/MONITOR_SCALE/2),int(MONITOR_WIDTH/2+MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2+MONITOR_HEIGHT/MONITOR_SCALE/2)) 93 | x,y,width,height = region 94 | screenshot_center = [int((width-x)/2),int((height-y)/2)] 95 | triggerbot = False 96 | triggerbot_toggle = [True] 97 | aim_assist = False 98 | aim_assist_toggle = [True] 99 | send_next = [True] 100 | silent_aim = False 101 | silent_aim_not_cooldown = [True] 102 | silent_toggle = [True] 103 | no_fov_cooldown = [True] 104 | model = torch.hub.load(r'C:\Users\PyPit\OneDrive\Desktop\cheats\yolov5' , 'custom', path= r'C:\Users\PyPit\OneDrive\Desktop\cheats\half.engine',source='local') 105 | model.conf = 0.40 106 | model.maxdet = 10 107 | model.apm = True 108 | model.classes = [1] 109 | 110 | 111 | start_time = time.time() 112 | x = 1 113 | counter = 0 114 | 115 | 116 | 117 | 118 | 119 | with mss() as stc: 120 | while True: 121 | closest_part_distance = 100000 122 | closest_part = -1 123 | screenshot = np.array(stc.grab(region)) 124 | df = model(screenshot, size=736).pandas().xyxy[0] 125 | 126 | counter+= 1 127 | if(time.time() - start_time) > x: 128 | fps = "Fps:"+ str(int(counter/(time.time() - start_time))) 129 | fps_label.config(text=fps) 130 | counter = 0 131 | start_time = time.time() 132 | 133 | 134 | 135 | for i in range(0,10): 136 | try: 137 | xmin = int(df.iloc[i,0]) 138 | ymin = int(df.iloc[i,1]) 139 | xmax = int(df.iloc[i,2]) 140 | ymax = int(df.iloc[i,3]) 141 | 142 | centerX = (xmax-xmin)/2+xmin 143 | 
centerY = (ymax-ymin)/2+ymin 144 | 145 | distance = math.dist([centerX,centerY],screenshot_center) 146 | 147 | if int(distance) < closest_part_distance: 148 | closest_part_distance = distance 149 | closest_part = i 150 | 151 | # cv2.rectangle(screenshot,(xmin,ymin),(xmax,ymax), (255,0,0),3) 152 | except: 153 | print("",end="") 154 | 155 | 156 | if keyboard.is_pressed('`'): 157 | if triggerbot_toggle[0] == True: 158 | triggerbot = not triggerbot 159 | if triggerbot: 160 | trigger_label.config(text = "Triggerbot: Active", fg= 'green') 161 | else: 162 | trigger_label.config(text = "Triggerbot: Unactive", fg= 'red') 163 | print(triggerbot) 164 | triggerbot_toggle[0] = False 165 | thread = threading.Thread(target=cooldown, args=(triggerbot_toggle,0.2,)) 166 | thread.start() 167 | 168 | if keyboard.is_pressed('alt'): 169 | if aim_assist_toggle[0] == True: 170 | aim_assist = not aim_assist 171 | if aim_assist: 172 | assist_label.config(text = "Aim Assist: Active", fg= 'green') 173 | else: 174 | assist_label.config(text = "Aim Assist: Unactive", fg= 'red') 175 | print(aim_assist) 176 | aim_assist_toggle[0] = False 177 | thread = threading.Thread(target=cooldown, args=(aim_assist_toggle,0.2,)) 178 | thread.start() 179 | 180 | if keyboard.is_pressed('p'): 181 | if silent_toggle[0] == True: 182 | silent_aim = not silent_aim 183 | if silent_aim: 184 | silent_label.config(text = "Silent Aim: Active", fg= 'green') 185 | else: 186 | silent_label.config(text = "Silent Aim: Unactive", fg= 'red') 187 | print(silent_aim) 188 | silent_toggle[0] = False 189 | thread = threading.Thread(target=cooldown, args=(silent_toggle,0.2,)) 190 | thread.start() 191 | 192 | elif keyboard.is_pressed('up') and no_fov_cooldown[0] == True: 193 | activation_range += 5 194 | fov_label.config(text=f"FOV: {activation_range}") 195 | no_fov_cooldown[0] = False 196 | thread = threading.Thread(target=cooldown, args=(no_fov_cooldown,0.05,)) 197 | thread.start() 198 | 199 | elif keyboard.is_pressed('down') and 
no_fov_cooldown[0] == True: 200 | activation_range -= 5 201 | fov_label.config(text=f"FOV: {activation_range}") 202 | no_fov_cooldown[0] = False 203 | thread = threading.Thread(target=cooldown, args=(no_fov_cooldown,0.05,)) 204 | thread.start() 205 | 206 | if closest_part != -1: 207 | xmin = df.iloc[closest_part,0] 208 | ymin = df.iloc[closest_part,1] 209 | xmax = df.iloc[closest_part,2] 210 | ymax = df.iloc[closest_part,3] 211 | 212 | head_center_list = [(xmax-xmin)/2+xmin,(ymax-ymin)/2+ymin] 213 | if triggerbot == True and screenshot_center[0] in range(int(xmin),int(xmax)) and screenshot_center[1] in range(int(ymin),int(ymax)): 214 | serialcomm.write("shoot".encode()) 215 | 216 | if silent_aim == True and silent_aim_not_cooldown[0] == True: 217 | xdif = (head_center_list[0]-screenshot_center[0])*AIM_SPEED*target_multiply[MONITOR_SCALE] 218 | ydif = (head_center_list[1]-screenshot_center[1])*AIM_SPEED*target_multiply[MONITOR_SCALE] 219 | data = f"silent{int(xdif)}:{int(ydif)}" 220 | serialcomm.write(data.encode()) 221 | silent_aim_not_cooldown[0] = False 222 | thread = threading.Thread(target=cooldown, args=(silent_aim_not_cooldown,0.2,)) 223 | thread.start() 224 | 225 | if aim_assist == True and closest_part_distance < activation_range and send_next[0] == True: 226 | xdif = (head_center_list[0] - screenshot_center[0])*AIM_SPEED*target_multiply[MONITOR_SCALE] 227 | ydif = (head_center_list[1] - screenshot_center[1])*AIM_SPEED*target_multiply[MONITOR_SCALE] 228 | data = f"{int(xdif)}:{int(ydif)}" 229 | serialcomm.write(data.encode()) 230 | send_next[0] = False 231 | thread = threading.Thread(target=cooldown, args=(send_next,0.05,)) 232 | thread.start() 233 | 234 | 235 | 236 | 237 | 238 | # cv2.imshow("frame",screenshot) 239 | # if(cv2.waitKey(1) == ord('l')): 240 | # cv2.destroyAllWindows() 241 | # break -------------------------------------------------------------------------------- /scripts/video-frames.py: 
-------------------------------------------------------------------------------- 1 | import cv2 2 | import os 3 | 4 | if __name__ == "__main__": 5 | video_folder = r"C:\Users\PyPit\OneDrive\Desktop\cheats\videos" 6 | export_folder = r"C:\Users\PyPit\OneDrive\Desktop\cheats\exported frames" 7 | 8 | 9 | os.chdir(export_folder)# changes directory to exports 10 | MONITOR_WIDTH = 1920#game res 11 | MONITOR_HEIGHT = 1080#game res 12 | 13 | 14 | MONITOR_SCALE = 5#how much the screen shot is downsized by eg. 5 would be one fifth of the monitor dimensions 15 | region = (int(MONITOR_WIDTH/2-MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2-MONITOR_HEIGHT/MONITOR_SCALE/2),int(MONITOR_WIDTH/2+MONITOR_WIDTH/MONITOR_SCALE/2),int(MONITOR_HEIGHT/2+MONITOR_HEIGHT/MONITOR_SCALE/2)) 16 | x,y,width,height = region 17 | 18 | for video_name in os.listdir(video_folder): 19 | vidcap = cv2.VideoCapture(video_folder+"\\"+video_name)#assigning current video 20 | success, image = vidcap.read()# reads in a frame 21 | count = 0 22 | frame = 0 23 | while success: 24 | frame += 1 25 | success, image = vidcap.read() 26 | if (frame % 20 == 0):# only activates every 20 frames 27 | image = image[y:height,x:width]#crops image to scale cordinates 28 | cv2.imwrite(video_name+"frame%d.jpg" % count, image) # save frame as JPEG file 29 | success, image = vidcap.read() 30 | print('Read a new frame: ', success) 31 | count += 1#amount of frames 32 | --------------------------------------------------------------------------------