├── Arduino ├── FakeKeyboard │ └── FakeKeyboard.ino ├── FakeMouse │ └── FakeMouse.ino └── libraries │ └── readme.txt ├── LICENSE ├── Navigation ├── Han.jpg ├── MapCaptureTest.py ├── MapComputeTest.py ├── Maps │ ├── Bind1.jpg │ ├── Bind2.jpg │ ├── Haven1.jpg │ ├── Haven2.jpg │ ├── Split.jpg │ ├── Split1.jpg │ ├── Split2.jpg │ └── Split_Heaven Unblocked.jpg ├── NavigationTest.py ├── Thumbs.db └── ValNav.py ├── README.md ├── ReadMeFiles ├── Valorant Aimbot Demo.mp4 ├── Valorant.gif └── map demo.png ├── mouseHost.py ├── requirements.txt ├── valkset.json └── valkyrie.py /Arduino/FakeKeyboard/FakeKeyboard.ino: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | void setup() { 4 | // put your setup code here, to run once: 5 | Serial.begin(2000000); //115200//250000//2000000 6 | Keyboard.begin(); 7 | //AbsoluteMouse.begin(); 8 | } 9 | 10 | void loop() { 11 | if (Serial.available() > 0) 12 | { 13 | char in = Serial.read(); 14 | switch(in) { 15 | case 'A': 16 | Keyboard.press(KEY_UP); 17 | break; 18 | case 'B': 19 | Keyboard.press(KEY_DOWN); 20 | break; 21 | case 'C': 22 | Keyboard.press(KEY_LEFT); 23 | break; 24 | case 'D': 25 | Keyboard.press(KEY_RIGHT); 26 | break; 27 | case 'E': 28 | Keyboard.press('p'); 29 | break; 30 | case 'F': 31 | Keyboard.press('o'); 32 | break; 33 | case 'G': 34 | Keyboard.release(KEY_UP); 35 | break; 36 | case 'H': 37 | Keyboard.release(KEY_DOWN); 38 | break; 39 | case 'I': 40 | Keyboard.release(KEY_LEFT); 41 | break; 42 | case 'J': 43 | Keyboard.release(KEY_RIGHT); 44 | break; 45 | case 'K': 46 | Keyboard.release('p'); 47 | break; 48 | case 'L': 49 | Keyboard.release('o'); 50 | break; 51 | default: 52 | break; 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /Arduino/FakeMouse/FakeMouse.ino: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | void setup() { 4 | // put your setup code here, to run once: 5 | Serial.begin(2000000); //115200//250000//2000000 6 | Mouse.begin(); 7 | //Keyboard.begin(); 8 | //AbsoluteMouse.begin(); 9 | } 10 | 11 | void loop() { 12 | if (Serial.available() > 0) 13 | { 14 | char in = Serial.read(); 15 | char bray[3]; // was 3 16 | String sign; 17 | String dataString; 18 | 19 | if (in == 'X' || in == 'Y' || in == 'C' || in == 'U' || in == 'A' 20 | || in == 'B' || in == 'D' || in == 'E' || in == 'S'){ // || in == 'K' 21 | int count = 0; 22 | sign = in; 23 | while (true) 24 | { 25 | if (Serial.available() > 0) 26 | { 27 | bray[count] = Serial.read(); 28 | if (bray[count] != 10){ 29 | count ++; 30 | if (count >= 3){ // was 3 31 | String stri(bray); 32 | dataString = stri; 33 | break; 34 | } 35 | } 36 | } 37 | } 38 | //byte data = byte(in); 39 | //String strData = (String)data; 40 | 41 | //String sign = dataString.substring(0,1); 42 | //int x = dataString.substring(1,4).toInt(); //was 1 to 5 43 | int x = dataString.toInt(); //was 1 to 5 44 | int iterations = 0; 45 | //.println(x); 46 | if (x > 124){ 47 | iterations = int(x / 124); 48 | x = x % 124; 49 | } 50 | 51 | if (sign == "C"){ 52 | MoveMouse((-1*x),0); 53 | MoveMouseIteration(-120, 0, iterations); 54 | } 55 | else if (sign == "U"){ 56 | MoveMouse(0,(-1*x)); 57 | MoveMouseIteration(0, -120, iterations); 58 | } 59 | else if (sign == "X"){ 60 | MoveMouse(x,0); 61 | MoveMouseIteration(120, 0, iterations); 62 | } 63 | else if (sign == "Y"){ 64 | MoveMouse(0,x); 65 | MoveMouseIteration(0, 120, iterations); 66 | } 67 | else if (sign == "A"){ 68 | 
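// 'A' and 'B' correspond to the "A000"/"B000" packets the host sends for left-button press/release;
// Mouse.press()/Mouse.release() with no argument default to the left button (MOUSE_LEFT).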
Mouse.press(); 69 | } 70 | else if (sign == "B"){ 71 | Mouse.release(); 72 | } 73 | else if (sign == "D"){ 74 | Mouse.press(MOUSE_RIGHT); 75 | } 76 | else if (sign == "E"){ 77 | Mouse.release(MOUSE_RIGHT); 78 | } 79 | /*else if (sign == "K"){ 80 | switch(x) { 81 | case 0: 82 | Keyboard.press(KEY_UP); 83 | break; 84 | case 1: 85 | Keyboard.press(KEY_DOWN); 86 | break; 87 | case 2: 88 | Keyboard.press(KEY_LEFT); 89 | break; 90 | case 3: 91 | Keyboard.press(KEY_RIGHT); 92 | break; 93 | case 4: 94 | Keyboard.release(KEY_UP); 95 | break; 96 | case 5: 97 | Keyboard.release(KEY_DOWN); 98 | break; 99 | case 6: 100 | Keyboard.release(KEY_LEFT); 101 | break; 102 | case 7: 103 | Keyboard.release(KEY_RIGHT); 104 | break; 105 | default: 106 | break; 107 | } 108 | }*/ 109 | else if (sign == "S"){ 110 | delay(x); 111 | } 112 | } 113 | } 114 | } 115 | 116 | void MoveMouseIteration(int x, int y, int iteration){ 117 | for (int i = 0; i < iteration; i++){ 118 | MoveMouse(x, y); 119 | } 120 | } 121 | 122 | void MoveMouse(int x, int y){ 123 | //if (x < 10 || y < 10){ //2 124 | // scal = 0.75f; 125 | //} 126 | //scal = 1.5f; 127 | //else{ 128 | // scal = 0.98f; 129 | //} 130 | //else{ 131 | // scal = 1.2f; 132 | //} 133 | //int scal = 0.515f; 134 | //Mouse.move((x*scal),(y*scal)); 135 | //Serial.print(x); 136 | Mouse.move(x,y); 137 | //delay(10); 138 | } 139 | -------------------------------------------------------------------------------- /Arduino/libraries/readme.txt: -------------------------------------------------------------------------------- 1 | https://github.com/NicoHood/HID -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 ChrisFirerabbit 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Navigation/Han.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Han.jpg -------------------------------------------------------------------------------- /Navigation/MapCaptureTest.py: -------------------------------------------------------------------------------- 1 | import time 2 | import mss 3 | import numpy as np 4 | import cv2 5 | import math 6 | import keyboard 7 | import serial 8 | 9 | # Map Size 1.1 10 | # Map Zoom 0.9 11 | 12 | 13 | ser = serial.Serial('COM6', baudrate=2000000, timeout=1) 14 | 15 | 16 | def scan_circle(img, radius, x_offset, y_offset): 17 | for theta in range(360): 18 | x = int(radius * math.cos(theta)) + x_offset 19 | y = int(radius * math.sin(theta)) + y_offset 20 | # mask_marked = cv2.circle(mask_marked, (x, y), 1, (1, 255, 1), 1) 21 | #print(theta) 22 | if img[y, x] != 0: 23 | # print(radius) 24 | return x, y 25 | return None 26 | 27 | 28 | def send_rotation(degree): 29 | move_per_degree = 10626/360 30 | move = move_per_degree * degree 31 | mod = move 32 | val = 0 33 | if move > 999: 34 | val = int(move / 999) 35 | mod = int(move % 999) 36 | 37 | send_cord = "" 38 | 39 | for n in range(val): 40 | send_cord += "X999" 41 | send_cord += parse_cords("X", mod) 42 | 43 | encode_and_send(send_cord) 44 | # ser.write(str("X999X999X999X999X999X999X999X999X999X999X636").encode()) 45 | # ser.write(str("X999X999X999X999X999X999X999X999X999X999X635").encode()) 35 to 36 46 | 47 | 48 | def encode_and_send(str_en): 49 | ser.write(str(str_en).encode()) 50 | 51 | 52 | def parse_cords(sign, value): # Fix Cords in valmain 53 | value = str(value) 54 | cords = str(sign + (value.zfill(3))) 55 | return cords 56 | 57 | 58 | def map_range(value, leftMin, leftMax, rightMin, rightMax): 59 | # Figure out how 'wide' each range is 60 | leftSpan = leftMax - leftMin 61 | rightSpan = rightMax - rightMin 62 | 63 | # Convert the left range into a 0-1 range (float) 64 | valueScaled = float(value - leftMin) / float(leftSpan) 65 | 66 | # Convert the 0-1 range into a value in the right range. 
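# e.g. map_range(-90, -0, -180, 0, 180) == 90.0; angle() below uses this remap to fold
# atan2's (-180, 180] output into a single 0-360 range.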
67 | return rightMin + (valueScaled * rightSpan) 68 | 69 | 70 | def angle(point_a, point_b): 71 | 72 | #if point_b[0] - point_a[0] == 0: 73 | # b_slope = 0.00000001 74 | #else: 75 | # b_slope = (point_b[0] - point_a[0]) 76 | #slope = (point_b[1] - point_a[1]) / b_slope 77 | 78 | #return math.degrees(math.atan(slope)) 79 | deg = math.degrees(math.atan2((point_b[1] - point_a[1]), (point_b[0] - point_a[0]))) 80 | if deg < 0: 81 | deg = map_range(deg, -0, -180, 0, 180) 82 | # deg = map_range(deg, -0, -180, 180, 360) 83 | else: 84 | deg = map_range(deg, 0, 180, 360, 180) 85 | # deg = map_range(deg, 0, 180, 0, 180) 86 | if deg == 360: 87 | deg = 0 88 | 89 | return deg 90 | 91 | 92 | with mss.mss() as sct: 93 | while True: 94 | monitor = {"left": 50, "top": 25, "width": 375, "height": 400} 95 | mask = np.array(sct.grab(monitor)) 96 | mask_marked = mask.copy() 97 | # mask = cv2.inRange(cv2.cvtColor(np.array(mask), cv2.COLOR_BGR2HSV), 98 | # np.array([29, 80, 220]), 99 | # np.array([30, 100, 255])) 100 | mask = cv2.inRange(cv2.cvtColor(np.array(mask), cv2.COLOR_BGR2HSV), 101 | np.array([29, 80, 220]), 102 | np.array([30, 100, 255])) 103 | 104 | mask = cv2.dilate(mask, np.ones((2, 2), np.uint8), iterations=1) 105 | 106 | #circles = cv2.HoughCircles(mask, cv2.HOUGH_GRADIENT, 1, 20, 107 | # param1=10, param2=15, minRadius=0, maxRadius=0) 108 | circles = cv2.HoughCircles(mask, cv2.HOUGH_GRADIENT, 1, 20, 109 | param1=1, param2=15, minRadius=0, maxRadius=0) 110 | 111 | if circles is not None: 112 | circles = np.uint16(np.around(circles)) 113 | for i in circles[0, :]: 114 | # draw the outer circle 115 | cv2.circle(mask_marked, (i[0], i[1]), i[2], (0, 255, 0), 2) 116 | # draw the center of the circle 117 | cv2.circle(mask_marked, (i[0], i[1]), 2, (0, 0, 255), 3) 118 | 119 | r = 18 120 | # r = 20 121 | while True: 122 | largest_offset = scan_circle(mask, r, circles[0][0][0], circles[0][0][1]) 123 | if largest_offset is None: 124 | r -= 1 125 | else: 126 | break 127 | #largest_offset = scan_circle(mask, 15, circles[0][0][0], circles[0][0][1]) 128 | #print(largest_offset) 129 | mask_marked = cv2.circle(mask_marked, largest_offset, 2, (255, 1, 1), 2) 130 | 131 | 132 | 133 | 134 | # mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, np.ones((2, 2))) 135 | cv2.imshow("mask7", mask) 136 | cv2.imshow("mask8", mask_marked) 137 | #cv2.waitKey(2000) 138 | if circles is not None and largest_offset is not None: 139 | rot = angle((circles[0][0][0], circles[0][0][1]), largest_offset) 140 | send_rotation(rot) 141 | # send_rotation(largest_offset[2]) 142 | # print(largest_offset[2]) 143 | 144 | cv2.waitKey(500) 145 | # send_rotation(360) 146 | 147 | #if keyboard.is_pressed('k') and largest_offset is not None: 148 | # align 149 | 150 | if keyboard.is_pressed('='): 151 | cv2.destroyAllWindows() 152 | break 153 | -------------------------------------------------------------------------------- /Navigation/MapComputeTest.py: -------------------------------------------------------------------------------- 1 | import time 2 | import mss 3 | import numpy as np 4 | import cv2 5 | import math 6 | import keyboard 7 | 8 | # Map Size 1.1 9 | # Map Zoom 0.9 10 | 11 | 12 | def raycast(img, point_a, point_b): 13 | # assume black is void/ boundary 14 | if point_b[0] - point_a[0] == 0: 15 | b_slope = 0.00000001 16 | else: 17 | b_slope = (point_b[0] - point_a[0]) 18 | 19 | slope = (point_b[1] - point_a[1])/b_slope 20 | y_intercept = point_b[1] - (slope * point_b[0]) 21 | # print(point_b[0] - point_a[0]) 22 | ##print(point_a[0] - point_b[0]) 23 | 
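# Line-of-sight check: step from point_b toward point_a along y = slope*x + y_intercept one
# x pixel at a time, then (when slope != 0) one y pixel at a time; any pure-black pixel is
# treated as a wall and the cast fails.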
# print(point_a[0]) 24 | ##print(point_b[0]) 25 | traversal = -1 26 | 27 | if point_a[0] > point_b[0]: 28 | traversal = 1 29 | 30 | for x in range(point_b[0], point_a[0], traversal): 31 | y = int(int(slope * x) + y_intercept) 32 | # img = cv2.circle(img, (x, y), 2, (1, 255, 1), 2) 33 | # print(str(x) + "," + str(int(slope * x))) 34 | # print(str(img[y, x])) 35 | if img[y, x][0] == 0 and img[y, x][1] == 0 and img[y, x][2] == 0: 36 | return False 37 | 38 | if slope != 0: 39 | if point_a[1] > point_b[1]: 40 | traversal = 1 41 | else: 42 | traversal = -1 43 | 44 | for y in range(point_b[1], point_a[1], traversal): 45 | x = int((y - y_intercept) / slope) 46 | # img = cv2.circle(img, (x, y), 2, (1, 255, 1), 2) 47 | # print(str(x) + "," + str(int(slope * x))) 48 | # print(str(img[y, x])) 49 | if img[y, x][0] == 0 and img[y, x][1] == 0 and img[y, x][2] == 0: 50 | return False 51 | 52 | return True 53 | 54 | 55 | def draw_raycast(img, point_a, point_b): 56 | # assume black is void/ boundary 57 | if point_b[0] - point_a[0] == 0: 58 | b_slope = 0.00000001 59 | else: 60 | b_slope = (point_b[0] - point_a[0]) 61 | 62 | slope = (point_b[1] - point_a[1])/b_slope 63 | y_intercept = point_b[1] - (slope * point_b[0]) 64 | traversal = -1 65 | 66 | if point_a[0] > point_b[0]: 67 | traversal = 1 68 | 69 | for x in range(point_b[0], point_a[0], traversal): 70 | y = int(int(slope * x) + y_intercept) 71 | img = cv2.circle(img, (x, y), 1, (1, 255, 1), 1) 72 | if img[y, x][0] == 0 and img[y, x][1] == 0 and img[y, x][2] == 0: 73 | return False 74 | 75 | if slope != 0: 76 | if point_a[1] > point_b[1]: 77 | traversal = 1 78 | else: 79 | traversal = -1 80 | 81 | for y in range(point_b[1], point_a[1], traversal): 82 | x = int((y - y_intercept) / slope) 83 | img = cv2.circle(img, (x, y), 1, (1, 255, 1), 1) 84 | if img[y, x][0] == 0 and img[y, x][1] == 0 and img[y, x][2] == 0: 85 | return False 86 | 87 | return img 88 | 89 | 90 | def angle(point_a, point_b): 91 | 92 | if point_b[0] - point_a[0] == 0: 93 | b_slope = 0.00000001 94 | else: 95 | b_slope = (point_b[0] - point_a[0]) 96 | slope = (point_b[1] - point_a[1]) / b_slope 97 | 98 | return math.degrees(math.atan(slope)) 99 | 100 | 101 | def distance(point_a, point_b): 102 | x = (point_b[0] - point_a[0]) 103 | y = (point_b[1] - point_a[1]) 104 | return math.sqrt((x * x) + (y * y)) 105 | 106 | 107 | with mss.mss() as sct: 108 | monitor = {"left": 50, "top": 25, "width": 375, "height": 400} 109 | mask = cv2.imread("Maps\Split.jpg") 110 | 111 | filtered_mask = cv2.inRange(cv2.cvtColor(np.array(mask), cv2.COLOR_BGR2HSV), 112 | np.array([0, 50, 100]), 113 | np.array([10, 255, 255])) # 200 114 | 115 | filtered_mask_site = cv2.inRange(cv2.cvtColor(np.array(mask), cv2.COLOR_BGR2HSV), 116 | np.array([110, 50, 100]), 117 | np.array([130, 255, 255])) # 200 118 | 119 | mask_markup = mask.copy() 120 | 121 | 122 | # indices = np.nonzero(filtered_mask) 123 | 124 | rows, cols = filtered_mask.shape 125 | point_candidates = [] 126 | 127 | for i in range(0, rows): 128 | for j in range(0, cols): 129 | if filtered_mask[i, j] != 0: 130 | point_candidates.append((j, i)) 131 | 132 | for i in range(0, rows): 133 | for j in range(0, cols): 134 | if filtered_mask_site[i, j] != 0: 135 | point_candidates.append((j, i)) 136 | 137 | print(str(point_candidates)) 138 | 139 | #print(str(raycast(mask, point_candidates[0], point_candidates[4]))) 140 | #print(str(raycast(mask, point_candidates[0], point_candidates[2]))) 141 | 142 | waypoints = [] 143 | waypoints_possibilities = [] 144 | 145 | z = True 146 | 
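# Build a visibility graph: every marker pixel found by the two color filters becomes a waypoint,
# and a pair of waypoints is linked only if raycast() finds an unobstructed straight line between
# them. This is O(n^2) raycasts, so it gets slow as more markers are placed.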
if z: 147 | for x in range(len(point_candidates)): 148 | waypoints.append(point_candidates[x]) 149 | temp_possibility_list = [] 150 | for y in range(len(point_candidates)): 151 | if raycast(mask, point_candidates[x], point_candidates[y]): 152 | temp_possibility_list.append(point_candidates[y]) 153 | # mask_markup = cv2.line(mask_markup, point_candidates[x], point_candidates[y], (255, 5, 5), 1) 154 | mask_markup = draw_raycast(mask_markup, point_candidates[x], point_candidates[y]) 155 | print("RAY: " + str(x) + ", " + str(y)) 156 | # print(str(waypoints_possibilities)) 157 | waypoints_possibilities.append(temp_possibility_list) 158 | 159 | #mask_markup = cv2.line(mask_markup, point_candidates[9], point_candidates[2], (255, 5, 5), 1) 160 | #mask_markup = cv2.line(mask_markup, point_candidates[0], point_candidates[4], (255, 5, 5), 1) 161 | 162 | #print(str(indices)) 163 | 164 | 165 | #while True: 166 | 167 | # break 168 | 169 | for x in range(len(waypoints)): 170 | for i in range(len(waypoints_possibilities[x])): 171 | print(str(angle(waypoints[x], waypoints_possibilities[x][i]))) 172 | 173 | while True: 174 | cv2.imshow("mask", mask_markup) 175 | cv2.imshow("mask2", filtered_mask) 176 | cv2.imshow("mask3", filtered_mask_site) 177 | 178 | 179 | cv2.waitKey(100) -------------------------------------------------------------------------------- /Navigation/Maps/Bind1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Maps/Bind1.jpg -------------------------------------------------------------------------------- /Navigation/Maps/Bind2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Maps/Bind2.jpg -------------------------------------------------------------------------------- /Navigation/Maps/Haven1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Maps/Haven1.jpg -------------------------------------------------------------------------------- /Navigation/Maps/Haven2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Maps/Haven2.jpg -------------------------------------------------------------------------------- /Navigation/Maps/Split.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Maps/Split.jpg -------------------------------------------------------------------------------- /Navigation/Maps/Split1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Maps/Split1.jpg -------------------------------------------------------------------------------- /Navigation/Maps/Split2.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Maps/Split2.jpg -------------------------------------------------------------------------------- /Navigation/Maps/Split_Heaven Unblocked.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Maps/Split_Heaven Unblocked.jpg -------------------------------------------------------------------------------- /Navigation/NavigationTest.py: -------------------------------------------------------------------------------- 1 | import time 2 | import mss 3 | import numpy as np 4 | import cv2 5 | import math 6 | import keyboard 7 | 8 | # Map Size 1.1 9 | # Map Zoom 0.9 10 | 11 | with mss.mss() as sct: 12 | while True: 13 | monitor = {"left": 50, "top": 25, "width": 375, "height": 400} 14 | mask = np.array(sct.grab(monitor)) 15 | cv2.imshow("mask", mask) 16 | cv2.waitKey(100) 17 | if keyboard.is_pressed('x'): 18 | cv2.imwrite("Han.jpg", mask) 19 | if keyboard.is_pressed('='): 20 | cv2.destroyAllWindows() 21 | break 22 | -------------------------------------------------------------------------------- /Navigation/Thumbs.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/Navigation/Thumbs.db -------------------------------------------------------------------------------- /Navigation/ValNav.py: -------------------------------------------------------------------------------- 1 | import keyboard 2 | import time 3 | import mss 4 | import numpy as np 5 | import cv2 6 | import math 7 | import serial 8 | 9 | 10 | # Map Size 1.1 11 | # Map Zoom 0.9 12 | 13 | ser = serial.Serial('COM6', baudrate=2000000, timeout=1) 14 | 15 | 16 | def raycast(img, point_a, point_b): 17 | # assume black is void/ boundary 18 | if point_b[0] - point_a[0] == 0: 19 | b_slope = 0.00000001 20 | else: 21 | b_slope = (point_b[0] - point_a[0]) 22 | 23 | slope = (point_b[1] - point_a[1])/b_slope 24 | y_intercept = point_b[1] - (slope * point_b[0]) 25 | # print(point_b[0] - point_a[0]) 26 | ##print(point_a[0] - point_b[0]) 27 | # print(point_a[0]) 28 | ##print(point_b[0]) 29 | traversal = -1 30 | 31 | if point_a[0] > point_b[0]: 32 | traversal = 1 33 | 34 | for x in range(point_b[0], point_a[0], traversal): 35 | y = int(int(slope * x) + y_intercept) 36 | # img = cv2.circle(img, (x, y), 2, (1, 255, 1), 2) 37 | # print(str(x) + "," + str(int(slope * x))) 38 | # print(str(img[y, x])) 39 | if img[y, x][0] == 0 and img[y, x][1] == 0 and img[y, x][2] == 0: 40 | return False 41 | 42 | if slope != 0: 43 | if point_a[1] > point_b[1]: 44 | traversal = 1 45 | else: 46 | traversal = -1 47 | 48 | for y in range(point_b[1], point_a[1], traversal): 49 | x = int((y - y_intercept) / slope) 50 | # img = cv2.circle(img, (x, y), 2, (1, 255, 1), 2) 51 | # print(str(x) + "," + str(int(slope * x))) 52 | # print(str(img[y, x])) 53 | if img[y, x][0] == 0 and img[y, x][1] == 0 and img[y, x][2] == 0: 54 | return False 55 | 56 | return True 57 | 58 | 59 | def draw_raycast(img, point_a, point_b): 60 | # assume black is void/ boundary 61 | if point_b[0] - point_a[0] == 0: 62 | b_slope = 0.00000001 63 | else: 64 | b_slope = (point_b[0] - point_a[0]) 65 | 66 | slope = (point_b[1] - point_a[1])/b_slope 67 | 
y_intercept = point_b[1] - (slope * point_b[0]) 68 | traversal = -1 69 | 70 | if point_a[0] > point_b[0]: 71 | traversal = 1 72 | 73 | for x in range(point_b[0], point_a[0], traversal): 74 | y = int(int(slope * x) + y_intercept) 75 | img = cv2.circle(img, (x, y), 1, (1, 255, 1), 1) 76 | if img[y, x][0] == 0 and img[y, x][1] == 0 and img[y, x][2] == 0: 77 | return False 78 | 79 | if slope != 0: 80 | if point_a[1] > point_b[1]: 81 | traversal = 1 82 | else: 83 | traversal = -1 84 | 85 | for y in range(point_b[1], point_a[1], traversal): 86 | x = int((y - y_intercept) / slope) 87 | img = cv2.circle(img, (x, y), 1, (1, 255, 1), 1) 88 | if img[y, x][0] == 0 and img[y, x][1] == 0 and img[y, x][2] == 0: 89 | return False 90 | 91 | return img 92 | 93 | 94 | def scan_circle(img, radius, x_offset, y_offset): 95 | for theta in range(360): 96 | x = int(radius * math.cos(theta)) + x_offset 97 | y = int(radius * math.sin(theta)) + y_offset 98 | # mask_marked = cv2.circle(mask_marked, (x, y), 1, (1, 255, 1), 1) 99 | #print(theta) 100 | if img[y, x] != 0: 101 | # print(radius) 102 | return x, y 103 | return None 104 | 105 | 106 | def send_rotation(degree): 107 | move_per_degree = 10626/360 108 | 109 | sign = "X" 110 | if degree < 0: 111 | sign = "C" 112 | 113 | degree = math.fabs(degree) 114 | 115 | move = move_per_degree * degree 116 | mod = move 117 | val = 0 118 | if move > 999: 119 | val = int(move / 999) 120 | mod = int(move % 999) 121 | 122 | send_cord = "" 123 | 124 | for n in range(val): 125 | send_cord += sign + "999" 126 | send_cord += parse_cords(sign, mod) 127 | 128 | encode_and_send(send_cord) 129 | # ser.write(str("X999X999X999X999X999X999X999X999X999X999X636").encode()) 130 | # ser.write(str("X999X999X999X999X999X999X999X999X999X999X635").encode()) 35 to 36 131 | 132 | 133 | def encode_and_send(str_en): 134 | ser.write(str(str_en).encode()) 135 | 136 | 137 | def parse_cords(sign, value): # Fix Cords in valmain 138 | value = str(value) 139 | cords = str(sign + (value.zfill(3))) 140 | return cords 141 | 142 | 143 | def map_range(value, leftMin, leftMax, rightMin, rightMax): 144 | # Figure out how 'wide' each range is 145 | leftSpan = leftMax - leftMin 146 | rightSpan = rightMax - rightMin 147 | 148 | # Convert the left range into a 0-1 range (float) 149 | valueScaled = float(value - leftMin) / float(leftSpan) 150 | 151 | # Convert the 0-1 range into a value in the right range. 
152 | return rightMin + (valueScaled * rightSpan) 153 | 154 | 155 | def angle(point_a, point_b): 156 | 157 | #if point_b[0] - point_a[0] == 0: 158 | # b_slope = 0.00000001 159 | #else: 160 | # b_slope = (point_b[0] - point_a[0]) 161 | #slope = (point_b[1] - point_a[1]) / b_slope 162 | 163 | #return math.degrees(math.atan(slope)) 164 | deg = math.degrees(math.atan2((point_b[1] - point_a[1]), (point_b[0] - point_a[0]))) 165 | if deg < 0: 166 | deg = map_range(deg, -0, -180, 0, 180) 167 | # deg = map_range(deg, -0, -180, 180, 360) 168 | else: 169 | deg = map_range(deg, 0, 180, 360, 180) 170 | # deg = map_range(deg, 0, 180, 0, 180) 171 | if deg == 360: 172 | deg = 0 173 | 174 | return deg 175 | 176 | 177 | def distance(point_a, point_b): 178 | x = (point_b[0] - point_a[0]) 179 | y = (point_b[1] - point_a[1]) 180 | return math.sqrt((x * x) + (y * y)) 181 | 182 | 183 | def find_closest_point(point_a, point_list): 184 | point = None 185 | best_dis = 999999999 186 | for n in range(len(point_list)): 187 | dis = distance(point_a, point_list[n]) 188 | if dis < best_dis: 189 | best_dis = dis 190 | point = point_list[n] 191 | return point 192 | 193 | 194 | with mss.mss() as sct: 195 | monitor = {"left": 50, "top": 25, "width": 375, "height": 400} 196 | mask = cv2.imread("Maps\Split.jpg") 197 | 198 | filtered_mask = cv2.inRange(cv2.cvtColor(np.array(mask), cv2.COLOR_BGR2HSV), 199 | np.array([0, 50, 100]), 200 | np.array([10, 255, 255])) # 200 201 | 202 | filtered_mask_site = cv2.inRange(cv2.cvtColor(np.array(mask), cv2.COLOR_BGR2HSV), 203 | np.array([110, 50, 100]), 204 | np.array([130, 255, 255])) # 200 205 | 206 | mask_markup = mask.copy() 207 | 208 | 209 | # indices = np.nonzero(filtered_mask) 210 | 211 | rows, cols = filtered_mask.shape 212 | point_candidates = [] 213 | 214 | for i in range(0, rows): 215 | for j in range(0, cols): 216 | if filtered_mask[i, j] != 0: 217 | point_candidates.append((j, i)) 218 | 219 | for i in range(0, rows): 220 | for j in range(0, cols): 221 | if filtered_mask_site[i, j] != 0: 222 | point_candidates.append((j, i)) 223 | 224 | print(str(point_candidates)) 225 | 226 | #print(str(raycast(mask, point_candidates[0], point_candidates[4]))) 227 | #print(str(raycast(mask, point_candidates[0], point_candidates[2]))) 228 | 229 | waypoints = [] 230 | waypoints_possibilities = [] 231 | 232 | z = True 233 | if z: 234 | for x in range(len(point_candidates)): 235 | waypoints.append(point_candidates[x]) 236 | temp_possibility_list = [] 237 | for y in range(len(point_candidates)): 238 | if raycast(mask, point_candidates[x], point_candidates[y]): 239 | temp_possibility_list.append(point_candidates[y]) 240 | # mask_markup = cv2.line(mask_markup, point_candidates[x], point_candidates[y], (255, 5, 5), 1) 241 | mask_markup = draw_raycast(mask_markup, point_candidates[x], point_candidates[y]) 242 | print("RAY: " + str(x) + ", " + str(y)) 243 | # print(str(waypoints_possibilities)) 244 | waypoints_possibilities.append(temp_possibility_list) 245 | 246 | for x in range(len(waypoints)): 247 | for i in range(len(waypoints_possibilities[x])): 248 | print(str(angle(waypoints[x], waypoints_possibilities[x][i]))) 249 | 250 | while keyboard.is_pressed('k') is False: 251 | cv2.imshow("mask", mask_markup) 252 | cv2.imshow("mask2", filtered_mask) 253 | cv2.imshow("mask3", filtered_mask_site) 254 | cv2.waitKey(100) 255 | cv2.destroyAllWindows() 256 | 257 | while True: 258 | mask = np.array(sct.grab(monitor)) 259 | mask_marked = mask.copy() 260 | mask = cv2.inRange(cv2.cvtColor(np.array(mask), 
cv2.COLOR_BGR2HSV), 261 | np.array([29, 80, 220]), 262 | np.array([30, 100, 255])) 263 | 264 | mask = cv2.dilate(mask, np.ones((2, 2), np.uint8), iterations=1) 265 | circles = cv2.HoughCircles(mask, cv2.HOUGH_GRADIENT, 1, 20, 266 | param1=1, param2=15, minRadius=0, maxRadius=0) 267 | 268 | if circles is not None: 269 | circles = np.uint16(np.around(circles)) 270 | for i in circles[0, :]: 271 | cv2.circle(mask_marked, (i[0], i[1]), i[2], (0, 255, 0), 2) 272 | cv2.circle(mask_marked, (i[0], i[1]), 2, (0, 0, 255), 3) 273 | 274 | r = 18 275 | player_point = (circles[0][0][0], circles[0][0][1]) 276 | while True: 277 | largest_offset = scan_circle(mask, r, player_point[0], player_point[1]) 278 | if largest_offset is None: 279 | r -= 1 280 | else: 281 | break 282 | mask_marked = cv2.circle(mask_marked, largest_offset, 2, (255, 1, 1), 2) 283 | 284 | #rot = angle((circles[0][0][0], circles[0][0][1]), largest_offset) 285 | #send_rotation(rot) 286 | 287 | closest_point = find_closest_point(player_point, waypoints) 288 | mask_marked = cv2.line(mask_marked, player_point, closest_point, (5, 255, 5), 5) 289 | 290 | player_angle = angle(player_point, largest_offset) 291 | destination_angle = angle(player_point, closest_point) 292 | 293 | desired_rotation = player_angle - destination_angle 294 | 295 | send_rotation(desired_rotation) 296 | print(desired_rotation) 297 | #print(player_angle) 298 | #print(destination_angle) 299 | # mask_marked = draw_raycast(mask_marked, player_point, closest_point) 300 | 301 | # cv2.imshow("mask7", mask) 302 | cv2.imshow("colored", mask_marked) 303 | 304 | cv2.waitKey(500) 305 | # send_rotation(360) 306 | 307 | #if keyboard.is_pressed('k') and largest_offset is not None: 308 | # align 309 | 310 | if keyboard.is_pressed('='): 311 | cv2.destroyAllWindows() 312 | break -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Valorant-Aim-and-Navigation-Bot 2 | 3 | **FOR EDUCATIONAL PURPOSES ONLY** 4 | 5 | **For educational purposes in image processing, serial communication, and other related topics** 6 | 7 | **Obvious errors and slowdowns have been purposefully introduced, so solving them can be a learning experience.** 8 | 9 | Please use within reason: the practice range or user-hosted testing environments. I am not responsible for your ban. Valorant had not patched this method from the Beta up to the date of this project's initial posting **(I haven't played Valorant since posting this, therefore I cannot verify whether it still works)**. This method can be mitigated through intelligent environment coloring, character and ability coloring (character outlines), or through VANGUARD. This is a low-maintenance cheat; if you really wanted a reliable cheat, you would not use this method anyway, so look elsewhere. 10 | 11 | ![Gif](ReadMeFiles/Valorant.gif) 12 | 13 | ## How it works? 14 | 15 | * Aimbot 16 | * This is a color-based aimbot written in Python that works in fullscreen mode and is optimized for the Phantom and Vandal. It has multiple color filters that adjust automatically once a target is detected. Target detection occurs when the right color is seen by the first filter over a large-FOV detection range. Once the first filter's conditions are met, the second filter applies: the detection range shrinks and the color filtering becomes more lenient.
Because the head of an enemy character is usually its highest point, we scan from the top left to the bottom right; at the first purple pixel found, filtering switches to a more accurate mode and a smaller FOV, resulting in quicker detection on the second pass through the pixel array. The size of the FOV is determined by whether `Shift` is held down, and automatic recoil compensation is implemented. The vertical size of the target determines the perceived distance: the closer the target, the quicker the gun fires. The further away the target, the more the keyboard will attempt to counter-strafe and stop the character's movement before firing the weapon. The aim offset from the first pixel is also determined by the target's distance, so as to attempt headshots on ponytailed characters. 17 | * Mouse movements will be locked when firing. Mouse commands are sent over LAN, therefore firewalls must have the relevant ports open. Once a mouse movement command is sent from PC 1 to PC 2, PC 2 forwards it to the Arduino over serial. 18 | 19 | * Navigation 20 | * Incomplete; generates map data based on where red dots are placed to help guide the bot. 21 | ![Pic](ReadMeFiles/map%20demo.png?raw=true "Demo") 22 | 23 | ## Limitations 24 | 25 | This demonstration requires 2 separate computers and 2 separate Arduino units. Computing speed depends on the PC, but the code is reasonably optimized for speed. Works only at a 1080p screen size on the Valorant PC and scales poorly with resolution. Moving targets are harder to hit. The developers of Valorant could opt to create similarly colored surroundings or make abilities of a similar color to render this method obsolete. 26 | 27 | ## Setup 28 | 29 | Python PIP Installs 30 | ``` 31 | pip3 install -r requirements.txt 32 | ``` 33 | 34 | The following values in **mouseHost.py** should be scaled to your monitor's resolution (not the resolution of the PC running Valorant). 35 | ```python 36 | mon_width = 3840 37 | mon_height = 2160 38 | ``` 39 | 40 | **valkset.json** 41 | ```json 42 | {"host-ip": "IP of Mouse Host", "host-port": "6767", 43 | "client-ip": "IP of PC Running Valorant", "client-port": "6767"} 44 | ``` 45 | 46 | Flash the respective Arduino sketch to each Arduino; afterwards the COM ports may need to be altered accordingly (a pyserial snippet for listing ports is shown under Usage below). 47 | * Required Arduino library: https://github.com/NicoHood/HID 48 | 49 | valkyrie.py 50 | ```python 51 | serMouse = serial.Serial('COM3', baudrate=2000000, writeTimeout=0) 52 | serKeyboard = serial.Serial('COM4', baudrate=2000000, writeTimeout=0) 53 | ``` 54 | 55 | How the files and Arduinos should be arranged 56 | ``` 57 | PC 1 58 | ├── mouseHost.py 59 | └── valkset.json 60 | ``` 61 | 62 | ``` 63 | PC 2 (Running Valorant) 64 | ├── valkyrie.py 65 | ├── valkset.json 66 | ├── Arduino For Mouse Emulation 67 | └── Arduino For Keyboard Emulation 68 | ``` 69 | 70 | Use PC 1's mouse and PC 2's keyboard 71 | 72 | Valorant settings should be as follows 73 | * Set Game to 0.5 sense 74 | * Set Game to 0.84 or 0.85 zoom sense, 0.9 for tracking moving targets (Weapon Dependent) 75 | * Set Game to 1920 x 1080 76 | * Set Game to Show Purple 77 | * Set Game to Hide Corpses 78 | * Set Game to Transparent Cursor 79 | * Set Game FPS to MAX 80 | * Set Walk to P (secondary) 81 | * Set Zoom to O (secondary) 82 | * Set Move Up to Up Arrow 83 | * Set Move Down to Down Arrow 84 | * Set Move Left to Left Arrow 85 | * Set Move Right to Right Arrow 86 | * (Optional) Set AA to MSAA 4x for long range precision 87 | 88 | ## Usage (Startup) 89 | 90 | Verified to work on Windows.
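The two Arduinos' COM port numbers referenced in Setup can be checked with pyserial (already pinned in requirements.txt); this is only a convenience snippet, not part of the bot itself:
```python
# List the serial ports so the two Arduinos' COM numbers can be plugged into valkyrie.py
from serial.tools import list_ports

for port in list_ports.comports():
    print(port.device, port.description)
```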
91 | 92 | Run on the machine running Valorant 93 | ``` 94 | ./valkyrie.py 95 | ``` 96 | 97 | 98 | Run on the machine hosting mouse inputs 99 | ``` 100 | ./mouseHost.py 101 | ``` 102 | 103 | ## Usage (Mouse Host PC) 104 | 105 | * Press ``` - ``` Key to exit 106 | * Press ``` ` ``` Key to pause mouse lock/ grabber 107 | * Press ``` ESC ``` Key to unpause 108 | 109 | ## Usage (Valorant PC) 110 | 111 | * Modes 112 | * Press ``` ` ``` Key to pause and go into the mode selector 113 | * Press ``` ESC ``` Key to engage Mode 1: Automatic Auto Aim 114 | * Press ``` F1 ``` Key to engage Mode 2: Auto Aim when the shoot button is triggered, slowly moves the camera towards the target 115 | * Press ``` F2 ``` Key to engage Mode 3: Auto Aim when the shoot button is triggered, snaps to the target and snaps back immediately; the resulting effect should be (mostly) unnoticeable. 116 | * Other 117 | * Hold ``` [ ``` or ``` ] ``` Key to temporarily pause detection and auto aim when in Mode 1 118 | * Hold ``` Middle Mouse Button ``` when in Mode 1 to engage the spin bot; spin bot behavior varies slightly when moving vs. not moving 119 | * Hold ``` Shift ``` Key for single shot 120 | * Release ``` Shift ``` Key for spray and pray 121 | * Press ``` = ``` Key to terminate the program 122 | * Settings 123 | * Look in the terminal for guidance 124 | 125 | ## Thanks 126 | 127 | Thanks to all those who made this possible with simple-to-use Python packages. If I missed any license or credits, please let me know and I'll add them here. 128 | -------------------------------------------------------------------------------- /ReadMeFiles/Valorant Aimbot Demo.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/ReadMeFiles/Valorant Aimbot Demo.mp4 -------------------------------------------------------------------------------- /ReadMeFiles/Valorant.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/ReadMeFiles/Valorant.gif -------------------------------------------------------------------------------- /ReadMeFiles/map demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheRabbitProgram/Valorant-AimBot-and-Navigation-Bot/2fa0dddb30b2f496c1d00f111b55f9bd85c56de3/ReadMeFiles/map demo.png -------------------------------------------------------------------------------- /mouseHost.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import keyboard 3 | import pyautogui 4 | from pynput.mouse import Controller 5 | import math 6 | import time 7 | import win32api 8 | import json 9 | 10 | #mon_width = 1280 11 | #mon_height = 720 12 | 13 | settings = {} 14 | 15 | with open('valkset.json') as json_file: 16 | settings = json.load(json_file) 17 | 18 | mon_width = 3840 19 | mon_height = 2160 20 | 21 | mon_width = mon_width/2 22 | mon_height = mon_height/2 23 | 24 | host = settings['host-ip'] #server mouse is running On 25 | port = int(settings['host-port']) 26 | client = (settings['client-ip'], settings['client-port']) # CHANGE THIS 27 | 28 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 29 | s.bind((host, port)) 30 | 31 | def parse_cords(cords, sign, value): 32 | value = str(value) 33 | cords = str(sign + (value.zfill(3))) 34 | return cords 35 | 36 | 
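# Wire format illustration (values are examples): parse_cords("", "X", 37) returns "X037",
# i.e. sign letter + zero-padded 3-digit count, the same format FakeMouse.ino parses.
# "X"/"C" move right/left, "Y"/"U" move down/up, and "A000"/"B000" press/release the left button.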
print("Server Started") 37 | pyautogui.FAILSAFE = False 38 | pyautogui.PAUSE = 0 39 | state_left = win32api.GetKeyState(0x01) 40 | state_right = win32api.GetKeyState(0x02) 41 | state_middle = win32api.GetKeyState(0x04) 42 | pause_key = False 43 | paused = False 44 | while True: 45 | if paused is False: 46 | pos = pyautogui.position() 47 | #pyautogui.moveTo(1920, 1080) 48 | pyautogui.moveTo(mon_width, mon_height) 49 | 50 | x = (pos[0] - mon_width) * 1.3 51 | y = (pos[1] - mon_height) * 1.3 52 | 53 | cords = "" 54 | 55 | if x != 0: 56 | abs_x = round(math.fabs(x)) 57 | if abs_x != 0: 58 | if abs_x > 999: 59 | abs_x = 999 60 | if x > 0: 61 | cords += parse_cords(cords, "X", abs_x) 62 | else: 63 | cords += parse_cords(cords, "C", abs_x) 64 | 65 | if y != 0: 66 | abs_y = round(math.fabs(y)) 67 | if abs_y != 0: 68 | if abs_y > 999: 69 | abs_y = 999 70 | if y > 0: 71 | cords += parse_cords(cords, "Y", abs_y) 72 | else: 73 | cords += parse_cords(cords, "U", abs_y) 74 | 75 | if cords != "": 76 | data = str(cords) 77 | print(data) 78 | s.sendto(data.encode('utf-8'), client) 79 | 80 | if keyboard.is_pressed("[") or keyboard.is_pressed("]") or \ 81 | keyboard.is_pressed("1") or keyboard.is_pressed("2") or keyboard.is_pressed("3") or \ 82 | keyboard.is_pressed("4") or keyboard.is_pressed("5") or keyboard.is_pressed("6"): 83 | a = True 84 | else: 85 | a = False 86 | if a != pause_key: # Button state changed 87 | pause_key = a 88 | # print(a) 89 | if a: 90 | s.sendto("PAUS".encode('utf-8'), client) 91 | # print("PAUS") 92 | else: 93 | s.sendto("UPAU".encode('utf-8'), client) 94 | # print("UPAUS") 95 | 96 | a = win32api.GetKeyState(0x01) 97 | if a != state_left: # Button state changed 98 | state_left = a 99 | # print(a) 100 | if a < 0: 101 | s.sendto("A000".encode('utf-8'), client) 102 | # print('Left Button Pressed') 103 | else: 104 | s.sendto("B000".encode('utf-8'), client) 105 | # print('Left Button Released') 106 | 107 | b = win32api.GetKeyState(0x02) 108 | if b != state_right: # Button state changed 109 | state_right = b 110 | # print(b) 111 | if b < 0: 112 | s.sendto("RHTD".encode('utf-8'), client) # D000 113 | # print('Right Button Pressed') 114 | else: 115 | s.sendto("RHTU".encode('utf-8'), client) # E000 116 | # print('Right Button Released') 117 | 118 | c = win32api.GetKeyState(0x04) 119 | if c != state_middle: # Button state changed 120 | state_middle = c 121 | # print(c) 122 | if c < 0: 123 | s.sendto("SPND".encode('utf-8'), client) # D000 124 | #print('Middle Button Pressed') 125 | else: 126 | s.sendto("SPNU".encode('utf-8'), client) # E000 127 | #print('Middle Button Released') 128 | 129 | if keyboard.is_pressed('-'): 130 | exit(0) 131 | if keyboard.is_pressed('`'): 132 | paused = True 133 | if keyboard.is_pressed('Esc'): 134 | paused = False 135 | 136 | # n = (time.time() - last_time_main) 137 | # print(n) 138 | time.sleep(0.01) 139 | c.close() 140 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | opencv-python == 4.3.0.36 2 | mss == 6.0.0 3 | numpy == 1.19.0 4 | keyboard == 0.13.5 5 | pyserial == 3.4 6 | sockets == 1.0.0 7 | scikit-image == 0.16.2 8 | -------------------------------------------------------------------------------- /valkset.json: -------------------------------------------------------------------------------- 1 | {"host-ip": "192.168.1.2", "host-port": "6767", "client-ip": "192.168.1.3", "client-port": "6767"} 
-------------------------------------------------------------------------------- /valkyrie.py: -------------------------------------------------------------------------------- 1 | import time 2 | import cv2 3 | import mss 4 | import numpy as np 5 | import skimage 6 | #from imutils import contours 7 | #import argparse 8 | #import imutils 9 | import math 10 | import keyboard 11 | # import pyautogui 12 | #import mouse 13 | import serial # pyserial 14 | #from multiprocessing import Process, Queue 15 | #import rivalcfg 16 | import socket 17 | import _thread 18 | import json 19 | 20 | #import d3dshot 21 | #import paramiko 22 | 23 | # Set Game to 0.5 sense 24 | # Set Game to 0.84 # 0.85 zoom sense, 0.9 to tracking moving targets 25 | # Set Game to 1920 x 1080 26 | # Set Game to Show Purple 27 | # Set Game to Hide Corpses 28 | # Set Game to Transparent Cursor 29 | # Set Game FPS to 144 30 | # Set Walk to P (secondary) 31 | # Set Zoom to O (secondary) 32 | # Set Move Up to Up Arrow 33 | # Set Move Down to Down Arrow 34 | # Set Move Left to Left Arrow 35 | # Set Move Right to Right Arrow 36 | # (Optional) Set AA to MSAA 4x for long range precision 37 | 38 | # Global Variables 39 | 40 | monitor_width = 1920 # 1920 # 1280 41 | monitor_height = 1080 # 1080 # 720 42 | 43 | kill_reset_time_threshold = 0.05 44 | aim_down_activation_threshold = 40 # 25 for killscan # 31 45 | 46 | # Global Variables (DO NOT TOUCH) 47 | cd_time = time.time() 48 | cd_burst_time = time.time() 49 | burstcount = 0 50 | scope_state = False 51 | no_target = True 52 | alt_profile = False 53 | is_shooting = False 54 | spin_bot = False 55 | spin_counter = 0 56 | prediction_buffer = [0, 0] 57 | up_is_pressed = False 58 | down_is_pressed = False 59 | left_is_pressed = False 60 | right_is_pressed = False 61 | walk_is_pressed = False 62 | allow_mouse_inputs = True 63 | blocking_during_scan = False 64 | temp_pause = False 65 | 66 | tracking_range = 3 67 | subactive = "[ON]" 68 | mainactive = "[ON]" 69 | paused = True 70 | kill_reset_time = 0 71 | allow_tracking_resize = True 72 | last_time_main = time.time() 73 | frame_delay_options = [5, 10, 15, 20, 25] 74 | frame_delay = 3 75 | editing_selected = False 76 | predictive_smoothing = False 77 | passive_shoot_offset = 0 78 | passive_shoot_offset_apply = 0 79 | passive_hold_shoot_timer = time.time() 80 | last_deep_tracking_ms = 0 81 | deep_tracking_ms = 0 82 | enemy_seen_delay_ms = 0 83 | 84 | previous_process_time = 0 85 | 86 | enemy_seen_delay_ms_still = 50 87 | enemy_seen_delay_ms_walk = 100 88 | enemy_seen_delay_ms_run = 155 # 175 89 | 90 | distance_division_offset = [7, 9, 12] # 7, 9 91 | distance_threshold = [12, 9, 5] # 12, 9 92 | distance_max_threshold = [18, 18, 16] # 18, 18, 16 # 12, 9 93 | distance_names = ["Aim Low", "Aim Mid", "Aim High"] # 12, 9 94 | distance_selector = 1 95 | 96 | prediction_ratio = [1, 1.5, 2, 0.00001] 97 | prediction_selector = 3 98 | 99 | prediction_scalar = [1, 2.124] 100 | prediction_scalar_selector = 0 101 | 102 | was_in_paused_state = False 103 | target_nearest_post_paused_state = False 104 | 105 | filter_names = ["Basic Filter", "Advanced-Fast Filter", "Advanced-Slow Filter"] 106 | filter_selector = 1 107 | 108 | last_height = 0 109 | is_using_passive_aim = False 110 | is_using_passive_aim_lock = False 111 | manual_shooting = False 112 | manual_scope = False 113 | 114 | #use_d3d = False 115 | #use_ssh = False 116 | 117 | settings = {} 118 | 119 | with open('valkset.json') as json_file: 120 | settings = json.load(json_file) 121 | 122 | #host = 
'192.168.1.197' # client ip # CHANGE THIS 123 | host = settings['client-ip']#'192.168.1.15' # client ip # CHANGE THIS 124 | port = int(settings['client-port']) 125 | server = (settings['host-ip'], int(settings['host-port'])) #107 126 | 127 | #if use_ssh: 128 | # ssh = paramiko.SSHClient() 129 | # ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 130 | 131 | # Initial Initialization 132 | #ser = serial.Serial('COM6', baudrate=2000000, timeout=1) # 115200#250000#2000000 133 | serMouse = serial.Serial('COM3', baudrate=2000000, writeTimeout=0) # 115200#250000#2000000 134 | serKeyboard = serial.Serial('COM4', baudrate=2000000, writeTimeout=0) # 115200#250000#2000000 135 | 136 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 137 | s.bind((host, port)) 138 | 139 | # Aim Profiles 140 | """ 141 | class WeaponProfilesBak: 142 | class ProfileTemplate: 143 | def __int__(self, offset_x=5, offset_y=4, down_offset_y=45, fire_cd=0.1, ato_scope=False, ato_scope_adv=False): 144 | self.aim_offset_x = offset_x 145 | self.aim_offset_y = offset_y 146 | self.aim_down_offset_y = down_offset_y 147 | self.shoot_cd = fire_cd 148 | self.auto_scope = ato_scope 149 | self.auto_scope_adv = ato_scope_adv 150 | 151 | class BodyShot: 152 | Guardian = WeaponProfiles.ProfileTemplate(5, 4, 45, 0.1, False, False) 153 | 154 | class HeadShot: 155 | GuardianZoom = WeaponProfiles.ProfileTemplate(2, 4, 0, 0.375, False, True) 156 | Guardian = WeaponProfiles.ProfileTemplate(2, 4, 0, 0.375, False, False) 157 | Sheriff = WeaponProfiles.ProfileTemplate(2, 7, 0, 0.375, False, False) 158 | """ 159 | 160 | 161 | class WeaponProfiles: 162 | class BodyShot: 163 | class Guardian: 164 | aim_offset_x = 1 # 5 165 | aim_offset_y = 4 # 45 # 50 shoots between legs at mid range 166 | aim_down_offset_y = 45 # 60 167 | shoot_cd = 0.1 # 0.2 # 0.1 # 0.07 168 | auto_scope = False 169 | auto_scope_adv = False 170 | 171 | class GuardianZoom: 172 | aim_offset_x = 1 # 5 173 | aim_offset_y = 4 # 45 # 50 shoots between legs at mid range 174 | aim_down_offset_y = 45 # 60 175 | shoot_cd = 0.1 # 0.2 # 0.1 # 0.07 176 | auto_scope = False 177 | auto_scope_adv = True 178 | 179 | class HeadShot: 180 | class GuardianZoom: 181 | aim_offset_x = 1 # 0 # 2 # 7 was good 182 | aim_offset_y = 4 # 4 # 14 for anti simp # 7 Was original # 5 was original # 3 good against hanzo # 7 was good against sage # 15 # 13 is good 183 | #walk_aim_offset_y = 0 # 5 originally # 7 was pretty good 184 | aim_down_offset_y = 0 185 | #moving_aim_down_offset_y = 0 186 | #moving_aim_down_speed_multiplier = 1 187 | shoot_cd = 0.375 # 0.355 # 0.375 # 0.4 original 188 | #shoot_cd_strafe = 0.45 189 | auto_scope = False 190 | auto_scope_adv = True 191 | 192 | class Guardian: 193 | aim_offset_x = 1 # 0 # 2 # 7 was good 194 | aim_offset_y = 4 # 4 # 5 was original # 3 good against hanzo # 7 was good against sage # 15 # 13 is good 195 | #walk_aim_offset_y = 0 # 5 originally # 7 was pretty good 196 | aim_down_offset_y = 0 197 | #moving_aim_down_offset_y = 0 198 | #moving_aim_down_speed_multiplier = 1 199 | shoot_cd = 0.375 # 0.355 # 0.375 # 0.4 original 200 | #shoot_cd_strafe = 0.45 201 | auto_scope = False 202 | auto_scope_adv = False 203 | 204 | class GuardianNoXOffset: 205 | aim_offset_x = 0 # 7 was good 206 | aim_offset_y = 4 # 4 # 5 was original # 3 good against hanzo # 7 was good against sage # 15 # 13 is good 207 | #walk_aim_offset_y = 0 # 5 originally # 7 was pretty good 208 | aim_down_offset_y = 0 209 | #moving_aim_down_offset_y = 0 210 | #moving_aim_down_speed_multiplier = 1 211 | 
shoot_cd = 0.375 # 0.355 # 0.375 # 0.4 original 212 | #shoot_cd_strafe = 0.45 213 | auto_scope = False 214 | auto_scope_adv = False 215 | 216 | class Sheriff: 217 | aim_offset_x = 2 # 7 was good 218 | aim_offset_y = 7 # 5 was original # 3 good against hanzo # 7 was good against sage # 15 # 13 is good 219 | #walk_aim_offset_y = 0 # 7 was pretty good 220 | aim_down_offset_y = 0 221 | #moving_aim_down_offset_y = 0 222 | #moving_aim_down_speed_multiplier = 1 223 | shoot_cd = 0.375 # 0.5 original # 0.4 original 224 | #shoot_cd_strafe = 0.5 225 | # you need a burst cooldown timer 226 | auto_scope = False 227 | auto_scope_adv = False 228 | 229 | 230 | # Color Profiles 231 | class Colors: 232 | yellow = ((np.array([30, 160, 240]), np.array([30, 255, 255])), # aggressive filtering, for range 2-3 233 | (np.array([30, 160, 240]), np.array([30, 255, 255])), # gracious for range 0-1 234 | (np.array([30, 160, 240]), np.array([30, 255, 255]))) # lenient filtering for square mask in range 0-1 235 | 236 | purple = ((np.array([145, 150, 240]), np.array([150, 255, 255])), # aggressive filtering, for range 2-3 237 | (np.array([140, 70, 80]), np.array([155, 255, 255])), # gracious for range 0-1 238 | (np.array([140, 70, 80]), np.array([170, 255, 255]))) # lenient filtering for square mask in range 0-1 239 | 240 | antiphoenix = ((np.array([145, 150, 240]), np.array([150, 255, 255])), # aggressive filtering, for range 2-3 241 | (np.array([140, 70, 204]), np.array([150, 255, 255])), # gracious for range 0-1 242 | #(np.array([16, 194, 150]), np.array([17, 235, 200]))) # phoenix hair tracking 243 | (np.array([15, 194, 150]), np.array([18, 235, 200]))) # phoenix hair tracking 244 | 245 | primary = ((np.array([145, 180, 240]), np.array([149, 255, 255])), # HYPER aggressive filtering, for range 2-3 246 | (np.array([140, 70, 204]), np.array([150, 255, 255])), # gracious for range 0-1 247 | #(np.array([16, 194, 150]), np.array([17, 235, 200]))) # phoenix hair tracking 248 | (np.array([15, 194, 150]), np.array([18, 235, 200])),# phoenix hair tracking 249 | (np.array([140, 70, 80]), np.array([170, 255, 255])))# lenient purple 250 | 251 | 252 | aggressive = (([0,0,0], [0,0,0]), # Red 253 | (np.array([145, 110, 125]), np.array([150, 255, 255])), # Purple 254 | (np.array([145, 150, 240]), np.array([150, 255, 255])), # Purple Increase Precision 255 | (np.array([30, 150, 200]), np.array([30, 165, 255]))) # Yellow 256 | 257 | standard = (([0,0,0], [0,0,0]), # Red 258 | (np.array([140, 70, 100]), np.array([150, 255, 255])), # Purple 259 | (np.array([140, 70, 204]), np.array([150, 255, 255])), # 204 240 Purple Increase Precision 150 260 | (np.array([30, 125, 150]), np.array([30, 255, 255]))) # Yellow 261 | 262 | test = ((np.array([0, 0, 0]), np.array([0, 0, 0])), # Red 263 | (np.array([0, 0, 0]), np.array([255, 255, 0])), # blk 264 | (np.array([79, 180, 250]), np.array([80, 190, 255])), # lime green 265 | (np.array([0, 0, 245]), np.array([0, 0, 255])), # white 266 | (np.array([30, 125, 150]), np.array([30, 255, 255])), # Yellow 267 | (np.array([0, 0, 0]), np.array([0, 0, 0])), # Red 268 | (np.array([15, 100, 100]), np.array([16, 230, 230])), # Orng 269 | (np.array([15, 100, 100]), np.array([16, 230, 230])), # Orng 270 | (np.array([30, 125, 150]), np.array([30, 255, 255])), # Yellow 271 | (np.array([15, 234, 0]), np.array([20, 235, 255])), # phenix brown 272 | (np.array([17, 234, 0]), np.array([18, 235, 255])), # phenix hair specific 273 | (np.array([0, 125, 0]), np.array([10, 130, 255])), # phenix brown 274 | (np.array([15, 194, 
0]), np.array([20, 235, 255]))) # a bit gracious but works better at long range 275 | 276 | kill = ((np.array([70, 0, 200]), np.array([90, 50, 255])), # white lime 277 | (np.array([0, 0, 240]), np.array([0, 0, 255])), # white 278 | (np.array([79, 180, 250]), np.array([80, 190, 255])), # lime green 279 | (np.array([79, 180, 250]), np.array([80, 190, 255]))) # lime green 280 | 281 | 282 | # Pre Compute Resolution Profiles 283 | class MonitorProcessing: 284 | class ResolutionProfile: 285 | def __int__(self): 286 | self.width = 0 287 | self.height = 0 288 | self.height_offset = 0 289 | self.res_w = 0 290 | self.res_h = 0 291 | self.monitor = None 292 | self.monitor_d3d = None 293 | 294 | @staticmethod 295 | def monitor_pre_cal(monitor_width, monitor_height): 296 | mon = (MonitorProcessing.ResolutionProfile(), MonitorProcessing.ResolutionProfile(), MonitorProcessing.ResolutionProfile(), 297 | MonitorProcessing.ResolutionProfile(), MonitorProcessing.ResolutionProfile(), MonitorProcessing.ResolutionProfile(), 298 | MonitorProcessing.ResolutionProfile(), MonitorProcessing.ResolutionProfile(), MonitorProcessing.ResolutionProfile(), 299 | MonitorProcessing.ResolutionProfile()) 300 | 301 | half_width = math.floor((monitor_width / 2)) 302 | half_height = math.floor((monitor_height / 2)) 303 | 304 | mon[0].width = 100 # 70 # 100 305 | mon[0].height = 130 306 | mon[0].height_offset = 35 307 | 308 | mon[1].width = 100 309 | mon[1].height = 175 310 | mon[1].height_offset = 0 311 | 312 | mon[2].width = 800 # 800 # 500 # 600 313 | mon[2].height = 350 # 800 # 200 # 200 314 | mon[2].height_offset = -70 315 | 316 | mon[3].width = 800 # 800 # 600 317 | mon[3].height = 350 # 350 # 200 318 | mon[3].height_offset = -70 319 | 320 | mon[4].width = 0 # 600 321 | mon[4].height = 0 # 200 322 | mon[4].height_offset = 0 323 | 324 | mon[5].width = 0 # 600 325 | mon[5].height = 0 # 200 326 | mon[5].height_offset = 0 327 | 328 | mon[6].width = 100 # 600 329 | mon[6].height = 200 # 200 330 | mon[6].height_offset = 0 331 | 332 | mon[7].width = 75 # 600 333 | mon[7].height = 150 # 200 334 | mon[7].height_offset = 0 335 | 336 | mon[8].width = 150 # 600 337 | mon[8].height = 150 # 200 338 | mon[8].height_offset = 0 339 | 340 | mon[9].width = 250 # 600 341 | mon[9].height = 150 # 200 342 | mon[9].height_offset = 0 343 | # mon[3].width = 800 344 | # mon[3].height = 250 345 | 346 | for x in mon: 347 | x.res_w = math.floor(x.width / 2) 348 | x.res_h = math.floor(x.height / 2) 349 | 350 | x.monitor = {"left": (half_width - x.res_w), "top": (half_height - x.res_h + x.height_offset), 351 | "width": x.width, "height": x.height} 352 | 353 | x.monitor_d3d = ((half_width - x.res_w), (half_height - x.res_h + x.height_offset), 354 | (half_width + x.res_w), (half_height + x.res_h - x.height_offset)) 355 | 356 | print(x.monitor) 357 | 358 | return mon 359 | 360 | @staticmethod 361 | def monitor_kill_indicators(monitor_width, monitor_height): 362 | mon = (MonitorProcessing.ResolutionProfile(), MonitorProcessing.ResolutionProfile()) 363 | 364 | #mon[0].monitor = {"left": round(monitor_width * 0.53802083333), "top": round(monitor_height * 0.801851851), # original scans 365 | # "width": 1, "height": 1} 366 | #mon[1].monitor = {"left": round(monitor_width * 0.46145833333), "top": round(monitor_height * 0.802777777), 367 | # "width": 1, "height": 1} 368 | 369 | #mon[0].monitor = {"left": 885, "top": 867, 370 | # "width": 2, "height": 2} 371 | #mon[1].monitor = {"left": 1033, "top": 866, 372 | # "width": 2, "height": 2} 373 | 374 | mon[0].monitor = 
{"left": 885, "top": 909, 375 | "width": 1, "height": 1} 376 | mon[1].monitor = {"left": 1033, "top": 909, 377 | "width": 1, "height": 1} 378 | 379 | 380 | #mon[0].monitor = {"left": round(monitor_width * 0.4671875), "top": round(monitor_height * 0.7962962), # new scans 381 | # "width": 1, "height": 1} 382 | #mon[1].monitor = {"left": round(monitor_width * 0.53177083), "top": round(monitor_height * 0.7962962), 383 | # "width": 1, "height": 1} 384 | return mon 385 | 386 | @staticmethod 387 | def monitor_indicators_zone(monitor_width, monitor_height): 388 | mon = MonitorProcessing.ResolutionProfile() 389 | 390 | mon.monitor = {"left": 885, "top": 909, 391 | "width": 149, "height": 1} 392 | 393 | return mon 394 | 395 | 396 | # Prediction Engine 397 | class PredictionEngine: 398 | @staticmethod 399 | def add_cords_for_prediction(x): 400 | prediction_buffer[0] = prediction_buffer[1] 401 | prediction_buffer[1] = x 402 | 403 | @staticmethod 404 | def run_prediction_engine(): 405 | x = 0 406 | if prediction_buffer[0] == prediction_buffer[1]: 407 | x = prediction_buffer[0] 408 | 409 | cords = "" 410 | if x > 0: 411 | cords += HidController.parse_cords("X", x) 412 | else: 413 | cords += HidController.parse_cords("C", x) 414 | 415 | HidController.encode_and_send_mouse(cords) 416 | return x 417 | 418 | 419 | # Image Manipulation 420 | class ImageProcessing: 421 | class ImageCords: 422 | def __int__(self): 423 | self.img = None 424 | self.cords = (None, None) 425 | 426 | @staticmethod 427 | def screen_capture(tracking_range): 428 | #global use_d3d 429 | #global d 430 | #if use_d3d: 431 | # return d.screenshot(region=mon[tracking_range].monitor_d3d) 432 | #else: 433 | return np.array(sct.grab(mon[tracking_range].monitor)) 434 | 435 | class Filtering: 436 | @staticmethod 437 | def test(tracking_range, colors, array_value): # original purps filters 438 | color_mask = cv2.inRange(cv2.cvtColor(ImageProcessing.screen_capture(tracking_range), cv2.COLOR_BGR2HSV), 439 | colors[array_value][0], # 140,70,100 # 75 to 90 440 | colors[array_value][1]) # 200 441 | 442 | return color_mask 443 | 444 | @staticmethod 445 | def aggressive(tracking_range, colors): 446 | color_mask = cv2.inRange(cv2.cvtColor(ImageProcessing.screen_capture(tracking_range), cv2.COLOR_BGR2HSV), 447 | colors[0][0], 448 | colors[0][1]) # 200 449 | return color_mask 450 | 451 | @staticmethod 452 | def standard(tracking_range, colors): # original purps filters 453 | color_mask = cv2.inRange(cv2.cvtColor(ImageProcessing.screen_capture(tracking_range), cv2.COLOR_BGR2HSV), 454 | colors[1][0], #140,70,100 # 75 to 90 455 | colors[1][1]) # 200 456 | 457 | #color_mask2 = cv2.inRange(cv2.cvtColor(np.array(sct.grab(mon[tracking_range].monitor)), cv2.COLOR_BGR2HSV), 458 | # np.array(colors[1][0]), #140,70,100 # 75 to 90 459 | # np.array(colors[1][1])) # 200 460 | 461 | #color_mask = cv2.add(color_mask, color_mask2) 462 | 463 | return color_mask 464 | 465 | @staticmethod 466 | def advanced_split_standard(tracking_range, colors, height_upper, height_lower, width): # original purps filters 467 | return ImageProcessing.Filtering.advanced_split(tracking_range, colors, height_upper, height_lower, width, 1, 2) 468 | 469 | @staticmethod 470 | def advanced_split_standard_3(tracking_range, colors, height_upper, height_lower, width): # original purps filters 471 | return ImageProcessing.Filtering.advanced_split_3(tracking_range, colors, height_upper, height_lower, width, 1, 2, 3) 472 | 473 | @staticmethod 474 | def advanced_split_aggressive(tracking_range, colors, 
height_upper, height_lower, width): # original purps filters 475 | return ImageProcessing.Filtering.advanced_split(tracking_range, colors, height_upper, height_lower, width, 0, 1) 476 | 477 | @staticmethod 478 | def advanced_split(tracking_range, colors, height_upper, height_lower, width, color_1, color_2): # original purps filters 479 | #last_time_main = time.time() # timing 480 | capture = ImageProcessing.screen_capture(tracking_range) 481 | #n = (time.time() - last_time_main) # timing 482 | #print(str(n)) 483 | center = (mon[tracking_range].width/2, (mon[tracking_range].height/2) - mon[tracking_range].height_offset) 484 | color_mask = cv2.inRange(cv2.cvtColor(capture, cv2.COLOR_BGR2HSV), 485 | colors[color_1][0], 486 | colors[color_1][1]) 487 | 488 | color_mask_2 = cv2.inRange(cv2.cvtColor(capture, cv2.COLOR_BGR2HSV), 489 | colors[color_2][0], 490 | colors[color_2][1]) 491 | 492 | rect_mask = np.zeros(color_mask.shape, dtype="uint8") 493 | rect_mask = cv2.rectangle(rect_mask, (int(center[0]-(width/2)), int(center[1]-height_upper)), 494 | (int(center[0]+(width/2)), int(center[1]+height_lower)), (255, 24, 206), -1) 495 | 496 | color_mask_2 = cv2.bitwise_and(color_mask_2, color_mask_2, mask=rect_mask) 497 | 498 | img_out = cv2.add(color_mask, color_mask_2) 499 | 500 | return img_out # Find The Height of target in color_mask then use that to determine the crop size of color_mask_2 501 | # scan the cropped image, apply offsets to center the image, find first pixel, apply offsets to first pixel, recalcuate height 502 | 503 | #run kill indicator on another thread, usually u go get the kills, so yeah, u might as well? 504 | ###################################################################################################################################################################################### 505 | @staticmethod 506 | def advanced_split_3(tracking_range, colors, height_upper, height_lower, width, color_1, color_2, color_3): 507 | #global rect_mask 508 | 509 | capture = ImageProcessing.screen_capture(tracking_range) 510 | center = ( 511 | mon[tracking_range].width / 2, (mon[tracking_range].height / 2) - mon[tracking_range].height_offset) 512 | color_mask = cv2.inRange(cv2.cvtColor(capture, cv2.COLOR_BGR2HSV), 513 | colors[color_1][0], 514 | colors[color_1][1]) 515 | 516 | #if rect_mask is None: 517 | rect_mask = np.zeros(color_mask.shape, dtype="uint8") 518 | rect_mask = cv2.rectangle(rect_mask, (int(center[0] - (width / 2)), int(center[1] - height_upper)), 519 | (int(center[0] + (width / 2)), int(center[1] + height_lower)), (255, 24, 206), -1) 520 | #rect_mask_in = rect_mask.copy() 521 | 522 | color_mask_2 = cv2.inRange(cv2.cvtColor(capture, cv2.COLOR_BGR2HSV), 523 | colors[color_2][0], 524 | colors[color_2][1]) 525 | 526 | color_mask_3 = cv2.inRange(cv2.cvtColor(capture, cv2.COLOR_BGR2HSV), 527 | colors[color_3][0], 528 | colors[color_3][1]) 529 | 530 | color_mask_2 = cv2.bitwise_and(color_mask_2, color_mask_2, mask=rect_mask) 531 | color_mask_3 = cv2.bitwise_and(color_mask_3, color_mask_3, mask=rect_mask) 532 | 533 | img_out = cv2.add(color_mask, color_mask_2, color_mask_3) 534 | 535 | return img_out 536 | 537 | @staticmethod 538 | def multi_track(tracking_range): # original purps filters # PROOF OF CONCEPT 539 | #last_time_main = time.time() # timing 540 | 541 | color_picker = 2 542 | colors = (([], []), # Red 543 | ([145, 150, 240], [150, 255, 255]), # Purple Aggressive Increased Precision 544 | ([140, 70, 240], [150, 255, 255]), # Purple Increase Precision 545 | ([30, 125, 150], 
[30, 255, 255])) # Yellow 546 | 547 | color_mask = cv2.inRange(cv2.cvtColor(ImageProcessing.screen_capture(tracking_range), cv2.COLOR_BGR2HSV), 548 | colors[color_picker][0], #140,70,100 # 75 to 90 549 | colors[color_picker][1]) # 200 550 | 551 | color_mask = cv2.morphologyEx(color_mask, cv2.MORPH_CLOSE, np.ones((35, 15))) # np.ones((55, 20))) 552 | color_mask = cv2.morphologyEx(color_mask, cv2.MORPH_OPEN, np.ones((4, 4))) 553 | 554 | color_mask = ImageProcessing.split_targets(color_mask) 555 | 556 | #n = (time.time() - last_time_main) # timing 557 | #print(str(n)) 558 | 559 | return color_mask 560 | 561 | @staticmethod 562 | def scan_kill(mon_in, colors): 563 | #last_time_main = time.time() # timing 564 | 565 | img = np.array(sct.grab(mon_in[0].monitor)) 566 | img2 = np.array(sct.grab(mon_in[1].monitor)) 567 | 568 | #print((time.time() - last_time_main)) 569 | img_out = cv2.inRange(cv2.cvtColor(img, cv2.COLOR_BGR2HSV), 570 | colors[0][0], 571 | colors[0][1]) 572 | 573 | img_temp = cv2.inRange(cv2.cvtColor(img, cv2.COLOR_BGR2HSV), 574 | colors[1][0], 575 | colors[1][1]) 576 | img_out = cv2.add(img_out, img_temp) 577 | 578 | img_out2 = cv2.inRange(cv2.cvtColor(img2, cv2.COLOR_BGR2HSV), 579 | colors[0][0], 580 | colors[0][1]) 581 | 582 | img_temp = cv2.inRange(cv2.cvtColor(img2, cv2.COLOR_BGR2HSV), 583 | colors[1][0], 584 | colors[1][1]) 585 | img_out2 = cv2.add(img_out2, img_temp) 586 | # cv2.imshow("kill confirmed", color_mask) 587 | 588 | #print((time.time() - last_time_main)) 589 | return img_out, img_out2 590 | 591 | @staticmethod 592 | def kill_indicator_slow(mon_in, colors): 593 | mask, mask2 = ImageProcessing.Filtering.scan_kill(mon_in, colors) 594 | #m1 = False 595 | #m2 = False 596 | #if mask[0, 0] != 0 or mask[0, 1] != 0 or mask[1, 0] != 0 or mask[1, 1] != 0: 597 | # m1 = True 598 | #if mask2[0, 0] != 0 or mask2[0, 1] != 0 or mask2[1, 0] != 0 or mask2[1, 1] != 0: 599 | # m2 = True 600 | #if m1 is True and m2 is True: 601 | # return True 602 | #print(str(mask[0, 0]) + str(mask2[0, 0])) # + str(mask3[0, 0])) 603 | if mask[0, 0] != 0 and mask2[0, 0] != 0:# and mask3[0, 0] != 0: 604 | return True 605 | return False 606 | 607 | @staticmethod 608 | def kill_indicator(mon_in, colors): 609 | 610 | img = np.array(sct.grab(mon_in.monitor)) 611 | 612 | # print((time.time() - last_time_main)) 613 | img_out = cv2.inRange(cv2.cvtColor(img, cv2.COLOR_BGR2HSV), 614 | colors[0][0], 615 | colors[0][1]) 616 | 617 | img_temp = cv2.inRange(cv2.cvtColor(img, cv2.COLOR_BGR2HSV), 618 | colors[1][0], 619 | colors[1][1]) 620 | img_out = cv2.add(img_out, img_temp) 621 | 622 | if img_out[0, 0] != 0 and img_out[0, 148] != 0: # and mask3[0, 0] != 0: 623 | return True 624 | return False 625 | 626 | @staticmethod 627 | def crop(color_mask): # crop and search colormask2 628 | return 0 629 | 630 | @staticmethod 631 | def split_targets(color_mask): 632 | masks_array = [] 633 | labels = skimage.measure.label(color_mask, neighbors=8, background=0) 634 | mask = np.zeros(color_mask.shape, dtype="uint8") 635 | #countr = 1 636 | for label in np.unique(labels): 637 | if label == 0: 638 | continue 639 | labelMask = np.zeros(color_mask.shape, dtype="uint8") 640 | labelMask[labels == label] = 255 641 | numPixels = cv2.countNonZero(labelMask) 642 | 643 | #if numPixels > 1: 644 | #print(countr) 645 | #countr += 1 646 | masks_array.append([labelMask, numPixels]) 647 | 648 | ##img_out = find_first_pixel_npy_argmax_fast(labelMask) 649 | ##if img_out.cords[0] != None: 650 | ## cv2.putText(labelMask, str(len(masks_array)) + ": 
Alive", (img_out.cords[0], img_out.cords[1] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (209, 80, 0, 255)) 651 | 652 | #cv2.imshow("Set", labelMask) 653 | #cv2.waitKey(1000) 654 | ##mask = cv2.add(mask, labelMask) # reconstruct the mask 655 | 656 | #masks_array.sort() 657 | #masks_array = sorted(masks_array) 658 | 659 | masks_array.sort(key=lambda x: int(x[1]), reverse=True) 660 | 661 | for i in masks_array: 662 | #print(i[1]) 663 | 664 | img_out = ImageSearch.find_first_pixel_npy_argmax_fast(i[0]) 665 | if img_out.cords[0] is not None: 666 | x, y, w, h = cv2.boundingRect(i[0]) 667 | i[0] = cv2.rectangle(i[0], (x, y), (x + w, y + h), (209, 80, 0), 2) 668 | 669 | if ImageProcessing.check_life(w, h) == 1: 670 | stat = "Alive" 671 | else: 672 | stat = "Dead" 673 | 674 | cv2.putText(i[0], str(i[1]) + ": " + stat, (img_out.cords[0], img_out.cords[1] - 10), 675 | cv2.FONT_HERSHEY_SIMPLEX, 0.4, (209, 80, 0, 255)) 676 | 677 | mask = cv2.add(mask, i[0]) 678 | 679 | return mask 680 | # alt_color_mask = cv2.add(alt_color_mask, labelMask) 681 | #return alt_color_mask 682 | 683 | @staticmethod 684 | def check_life(w, h): 685 | if (w / h) > 1.6: # 3: 686 | return 0 687 | return 1 688 | 689 | # Image Details 690 | @staticmethod 691 | def apply_text(mask, textA, textB, textC, textD, textE, tracking_range): 692 | height = mon[tracking_range].height 693 | cv2.putText(mask, textA, (0, height - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (209, 80, 0, 255)) 694 | cv2.putText(mask, textB, (0, height - 20), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (209, 80, 0, 255)) 695 | cv2.putText(mask, textC, (0, height - 35), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (209, 80, 0, 255)) 696 | cv2.putText(mask, textD, (0, height - 50), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (209, 80, 0, 255)) 697 | cv2.putText(mask, textE, (0, height - 65), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (209, 80, 0, 255)) 698 | return mask 699 | 700 | 701 | """" 702 | @staticmethod 703 | def kill_detector(): 704 | temp_mon = MonitorProcessing.monitor_kill_indicators(monitor_width, monitor_height) 705 | temp_last = False 706 | temp_cnt = 0 707 | 708 | while True: 709 | new = ImageProcessing.kill_indicator(temp_mon, Colors.kill) 710 | if new is True and temp_last is False: 711 | temp_cnt += 1 712 | print("Kill: " + str(temp_cnt)) 713 | temp_last = new 714 | """ 715 | 716 | 717 | # Search 718 | class ImageSearch: 719 | @staticmethod 720 | def find_first_pixel(color_mask, color_mask_alt, wepprofile, tracking_range, alt_profile): 721 | # In case the code ever goes to C, consider this method 722 | second_pass = False 723 | sp_h = 0 724 | sp_v = 0 725 | if tracking_range == 0: 726 | scan_h = 2#2 # 4 # 2 # 2 # 2 727 | scan_v = 2#5 # 10 # 6 # 5 # 4 728 | second_pass = True 729 | sp_h = 1 730 | sp_v = 1 731 | elif tracking_range == 1: 732 | scan_h = 2 # 2 733 | scan_v = 5 # 4 734 | elif tracking_range >= 2: 735 | scan_h = 15 # 15 # 10 # use 1 FOR HIGH PRECISION! 
736 | scan_v = 16 # 8 737 | if alt_profile: 738 | second_pass = True 739 | sp_h = 2#2 #1 for development testing 740 | sp_v = 2 #2 741 | 742 | # Search Algorithm 743 | 744 | rows, cols = color_mask.shape 745 | img_cords = ImageProcessing.ImageCords() 746 | 747 | for i in range(0, rows, scan_h): # 2 #4 #200 max/ up - down/ vertical 748 | for j in range(0, cols, scan_v): # 4 #6 749 | if color_mask[i, j] != 0: 750 | # print(i, j) 751 | # Second Pass 752 | if second_pass: 753 | istart = 0 754 | jstart = 0 755 | jfin = 0 756 | if i > scan_h and j > scan_v: 757 | istart = scan_h 758 | jstart = scan_v 759 | if alt_profile and tracking_range < 2 and color_mask_alt is not None: 760 | if (i - (scan_h * 2)) > 0: 761 | istart *= 2 762 | if (j - (scan_v * 2)) > 0: 763 | jstart *= 2 764 | if (j + scan_v) < mon[tracking_range].width - 1: 765 | jfin = scan_v 766 | 767 | for k in range(i - istart, i, sp_h): # 2 #4 #200 up - down 768 | for p in range(j - jstart, j + jfin, sp_v): 769 | if color_mask_alt[k, p] != 0: 770 | i = k 771 | j = p 772 | break 773 | else: 774 | continue 775 | break 776 | 777 | color_mask = cv2.circle(color_mask, (j - jstart, i - istart), 5, (255, 24, 206), -1) # (j, i+15) #(j+10, i-5) 778 | 779 | x = j # + wepprofile.aim_offset_x # 5 780 | y = i # + wepprofile.aim_offset_y # 15 781 | 782 | color_mask = cv2.circle(color_mask, (x, y), 5, (255, 24, 206), -1) # (j, i+15) #(j+10, i-5) 783 | img_cords.cords = (x, y) 784 | break 785 | else: 786 | img_cords.cords = (None, None) 787 | continue 788 | break 789 | 790 | img_cords.img = color_mask 791 | 792 | return img_cords 793 | 794 | @staticmethod 795 | def find_first_pixel_npy(color_mask): 796 | img_cords = ImageProcessing.ImageCords() 797 | img_cords.img = color_mask 798 | 799 | zeros = color_mask#np.zeros((100, 100), dtype=np.uint8) 800 | #zeros[:5, :5] = 255 801 | 802 | #indices = np.where(zeros != 0) 803 | indices = np.nonzero(zeros) 804 | #coordinates = zip(indices[0], indices[1]) 805 | 806 | #print(indices[0], indices[1]) 807 | if len(indices[0]) > 1 and len(indices[1]) > 1: 808 | img_cords.cords = (indices[1][0], indices[0][0]) 809 | #print(img_cords.cords) 810 | else: 811 | img_cords.cords = (None, None) 812 | return img_cords 813 | 814 | @staticmethod 815 | def find_first_pixel_npy_argmax_fast(color_mask): # , wepprofile, tracking_range): 816 | img_cords = ImageProcessing.ImageCords() 817 | img_cords.img = color_mask#[::-1] 818 | img_cords.cords = (None, None) 819 | 820 | for i, ele in enumerate(np.argmax(color_mask, axis=1, )): 821 | if ele != 0 or color_mask[i][0] != 0: 822 | #img_cords.cords = (ele + wepprofile.aim_offset_x, 823 | # i + wepprofile.aim_offset_y + distance_offset + mon[tracking_range].height_offset) # 8 if offset is 7#10 was good for offset with deg besides fighting simp 824 | img_cords.cords = (ele, i) 825 | break 826 | # print(str(img_cords.cords)) 827 | return img_cords 828 | 829 | @staticmethod 830 | def find_first_pixel_npy_argmax(color_mask, wepprofile, tracking_range, mon, scope_state): 831 | #global mon 832 | #global scope_state 833 | global distance_division_offset 834 | global distance_threshold 835 | global distance_max_threshold 836 | global distance_selector 837 | global alt_profile 838 | #global last_height 839 | 840 | img_cords = ImageProcessing.ImageCords() 841 | img_cords.img = color_mask#[::-1] 842 | img_cords.cords = (None, None) 843 | enemy_height = None 844 | 845 | for i, ele in enumerate(np.argmax(color_mask, axis=1, )): 846 | if ele != 0 or color_mask[i][0] != 0: 847 | #if tracking_range < 1 
and alt_profile: 848 | #if tracking_range < 2: 849 | # height = ImageSearch.reverse_find_first_pixel_npy_argmax(color_mask, wepprofile) 850 | #else: 851 | # height = 0 852 | #last_time_main = time.time() 853 | 854 | height = ImageSearch.reverse_find_first_pixel_npy_argmax(color_mask, wepprofile) 855 | 856 | #n = (time.time() - last_time_main) # timing 857 | #print(str(n) + "TIME") 858 | 859 | enemy_height = mon[tracking_range].height - i - height 860 | #side_offset = 0 861 | distance_offset = (enemy_height/distance_division_offset[distance_selector]) - 4 # -4 /7 862 | if distance_offset < 0: 863 | distance_offset = 0 864 | #side_offset = -2 865 | elif distance_offset > distance_threshold[distance_selector]: #12 866 | distance_offset = distance_max_threshold[distance_selector] 867 | if alt_profile is False: # for running and gunning close range 868 | distance_offset += 10 869 | #if (enemy_height/mon[tracking_range].height) < 0.3 and wepprofile.auto_scope_adv: 870 | # scope_state = HidController.hold_scope(scope_state) 871 | if (enemy_height/mon[tracking_range].height) < 0.15 and wepprofile.auto_scope_adv: 872 | scope_state = HidController.hold_scope(scope_state) 873 | #print ("heee: " + str(enemy_height/mon[tracking_range].height)) 874 | 875 | #if distance_offset > 100: 876 | # distance_offset = 100 877 | #print(str(distance_offset)) 878 | #print(str(height) + ":" + str(i) + ":" + str(i-height)) 879 | #print(str(ele + wepprofile.aim_offset_x)) 880 | img_cords.cords = (ele + wepprofile.aim_offset_x, # + side_offset, 881 | i + wepprofile.aim_offset_y + distance_offset + mon[tracking_range].height_offset) # 8 if offset is 7#10 was good for offset with deg besides fighting simp 882 | #print(distance_offset) 883 | #img_cords.cords = (int(img_cords.cords[0]), int(img_cords.cords[1])) 884 | #img_cords.img = cv2.circle(img_cords.img, (ele, i), 5, (255, 24, 206), -1) 885 | break 886 | # print(str(img_cords.cords)) 887 | return img_cords, scope_state, enemy_height 888 | 889 | @staticmethod 890 | def reverse_find_first_pixel_npy_argmax(color_mask, wepprofile): 891 | height = 0 892 | #last_time_main = time.time() # timing 893 | #for i, ele in enumerate(np.argmax(color_mask[::-1], axis=1, )): 894 | for i, ele in enumerate(np.argmax(np.flipud(color_mask), axis=1)): 895 | if ele != 0 or color_mask[i][0] != 0: 896 | height = i 897 | # img_cords.img = cv2.circle(color_mask, img_cords.cords, 5, (255, 24, 206), -1) 898 | break 899 | # print(str(img_cords.cords)) 900 | #n = (time.time() - last_time_main) # timing 901 | #print(str(n) + "TIME") 902 | return height 903 | 904 | 905 | # Select Target and Kill 906 | class HidController: 907 | @staticmethod 908 | def move_to_target(target, center, wepprofile, allow_tracking_resize, alt_profile, predictive_smoothing, 909 | deep_tracking_ms, enemy_seen_delay_ms, en_height): 910 | 911 | global scope_state 912 | global tracking_range 913 | global aim_down_activation_threshold 914 | global no_target 915 | global spin_bot 916 | global spin_counter 917 | global up_is_pressed 918 | global down_is_pressed 919 | global left_is_pressed 920 | global right_is_pressed 921 | global walk_is_pressed 922 | global blocking_during_scan 923 | global allow_mouse_inputs 924 | global is_shooting 925 | global enemy_seen_delay_ms_still 926 | #global last_time_main 927 | #global frame_delay_options 928 | #global frame_delay 929 | #global previous_process_time 930 | #global enemy_seen_delay_ms_running_offset 931 | 932 | #global kill_mon 933 | #global kill_last 934 | #global kill_cnt 935 | 936 | 
#global d_down 937 | #global a_down 938 | 939 | if target[0] is None: # Pass through Mouse? 940 | #if tracking_range < 2: 941 | # is_shooting = HidController.arduino_unshoot(is_shooting) 942 | #else: 943 | # allow_mouse_inputs = True 944 | 945 | if tracking_range > 1: 946 | allow_mouse_inputs = True 947 | 948 | is_shooting = HidController.arduino_unshoot(is_shooting) 949 | 950 | if alt_profile: 951 | tracking_range = 2 952 | else: 953 | tracking_range = 3 954 | # print("No Target") 955 | HidController.reset_mouse() 956 | HidController.reset_tracking_timers() 957 | if wepprofile.auto_scope: # and tracking_range >= 3: 958 | scope_state = HidController.release_scope(scope_state) 959 | if wepprofile.auto_scope_adv: 960 | scope_state = HidController.release_scope(scope_state) 961 | # encode_and_send_mouse("X0000Y0000") 962 | # SKIP CV2.WAIT TIME BECAUSE THE MOUSE DIDNT MOVE!!!!!!!!!!!!!!!!!!!!!!!! 963 | 964 | if spin_bot: 965 | no_target = False 966 | movement_detected = False 967 | #encode_and_send_mouse("X999X120") use when not moving 968 | #encode_and_send_mouse("X966") 11 969 | #encode_and_send_mouse("X999X772") # 6 970 | if keyboard.is_pressed('a') or keyboard.is_pressed('d') or keyboard.is_pressed('w') or keyboard.is_pressed('s'): 971 | movement_detected = True 972 | 973 | if movement_detected: 974 | spin_counter += 1 975 | if spin_counter == 6: 976 | spin_counter = 0 977 | no_target = False 978 | HidController.encode_and_send_mouse("X999X772") 979 | else: 980 | HidController.encode_and_send_mouse("X999X120") 981 | spin_counter = 0 982 | 983 | else: 984 | #no_target = True 985 | #if spin_counter != 0: 986 | # spins_needed = 11 - spin_counter 987 | # spins = "" 988 | # for _x in range(spins_needed): 989 | #spins += "X999X772" 990 | # spins += "X966" 991 | # encode_and_send_mouse(spins) 992 | # spin_counter = 0 993 | if spin_counter != 0: 994 | no_target = False 995 | spin_counter += 1 996 | if spin_counter == 6: 997 | spin_counter = 0 998 | HidController.encode_and_send_mouse("X999X772") 999 | else: 1000 | no_target = True 1001 | else: 1002 | #if alt_profile: 1003 | allow_mouse_inputs = False 1004 | #else: 1005 | #allow_mouse_inputs = True 1006 | 1007 | x_dis = target[0] - center[0] 1008 | y_dis = target[1] - center[1] 1009 | 1010 | #print(x_dis) 1011 | 1012 | abs_x_dis = math.fabs(x_dis) 1013 | abs_y_dis = math.fabs(y_dis) 1014 | 1015 | #if tracking_range == 0: 1016 | # if abs_x_dis > 5 and abs_x_dis < 20: 1017 | #tim = (time.time() - last_time_main) 1018 | #print(tim) 1019 | # abs_x_dis = abs_x_dis * 2 #3 * (tim*100) 1020 | 1021 | #PredictionEngine.add_cords_for_prediction(x_dis) 1022 | 1023 | #if HidController.check_mouse_click(wepprofile) is True: # abs_x_dis > 2 or abs_y_dis > 2 and 1024 | # print(abs_x_dis, abs_y_dis) 1025 | 1026 | if wepprofile.auto_scope: # and tracking_range < 2: 1027 | scope_state = HidController.hold_scope(scope_state) 1028 | 1029 | # abs_x_dis = math.ceil(abs_x_dis * 0.55) # * 0.52) .51 was okay # 0.515 better # 0.52 better 1030 | # abs_y_dis = math.ceil(abs_y_dis * 0.55) # * 0.52) 1031 | # if tracking_range < 2: 1032 | # abs_x_dis = int(abs_x_dis * 0.9) # 0.49 best one s0 far # 45 is more accurate in high fps, 1033 | # # but requires multiple itterations maybe 1034 | # abs_y_dis = int(abs_y_dis * 2) # * 1) # 82 1035 | # else: 1036 | cmc = HidController.check_mouse_click(wepprofile) 1037 | if cmc or predictive_smoothing is False or tracking_range > 0:# or tracking_range == 1: 1038 | abs_x_dis = round(abs_x_dis * 2.124) # 2.124 # 1.062# 1.062 #1.124 was good 
when lagging # 1.07 good for close# * 0.54) # * 0.52) .51 was okay # 0.515 better # 0.52 better 1039 | abs_y_dis = round(abs_y_dis * 2.124) # 2.124 # 2.08 # * 0.6) # * 0.52) 1040 | #ssh_x_mouse = round(x_dis * 2.124) 1041 | #ssh_y_mouse = round(y_dis * 2.124) 1042 | else: 1043 | abs_x_dis = round(abs_x_dis * 1.7) # 1.5 is good too # 1.4 good but wonky 1044 | abs_y_dis = round(abs_y_dis) 1045 | 1046 | if abs_x_dis > 999: 1047 | abs_x_dis = 999 1048 | if abs_y_dis > 999: 1049 | abs_y_dis = 999 1050 | 1051 | cords = "" 1052 | 1053 | #print(ssh_x_mouse) 1054 | 1055 | if x_dis > 0: 1056 | cords += HidController.parse_cords("X", abs_x_dis) 1057 | else: 1058 | cords += HidController.parse_cords("C", abs_x_dis) 1059 | 1060 | if y_dis > 0: 1061 | cords += HidController.parse_cords("Y", abs_y_dis) 1062 | else: 1063 | cords += HidController.parse_cords("U", abs_y_dis) 1064 | 1065 | #ssh_x_mouse = "1" 1066 | #ssh_y_mouse = "0" 1067 | 1068 | #print(str(cords)) 1069 | 1070 | #HidController.ssh_move(ssh_x_mouse, ssh_y_mouse) 1071 | HidController.encode_and_send_mouse(cords) 1072 | 1073 | if allow_tracking_resize: 1074 | if alt_profile: 1075 | tracking_range = 0 1076 | else: 1077 | tracking_range = 1 1078 | 1079 | #print("height: " + str(en_height) + " X_Dis: " + str(x_dis)) 1080 | 1081 | if cmc: 1082 | if alt_profile: 1083 | if deep_tracking_ms > enemy_seen_delay_ms: # > for if range < 2 and >= for insta shoot 1084 | #HidController.simple_predictive_aim(abs_x_dis, x_dis, en_height) 1085 | #HidController.physics_predictive_aim(abs_x_dis, x_dis, True) 1086 | is_shooting = HidController.arduino_tap_shoot(is_shooting) 1087 | tracking_range = 2 1088 | HidController.reset_tracking_timers() 1089 | else: 1090 | if tracking_range < 2: 1091 | if enemy_seen_delay_ms == enemy_seen_delay_ms_still: 1092 | if deep_tracking_ms > 50: 1093 | HidController.simple_predictive_aim(abs_x_dis, x_dis, en_height) 1094 | #HidController.physics_predictive_aim(abs_x_dis, x_dis, False) 1095 | is_shooting = HidController.arduino_shoot_no_dup(is_shooting) 1096 | else: 1097 | if en_height > 65 and deep_tracking_ms > 50: 1098 | HidController.simple_predictive_aim(abs_x_dis, x_dis, en_height) 1099 | #HidController.physics_predictive_aim(abs_x_dis, x_dis, False) 1100 | is_shooting = HidController.arduino_shoot_no_dup(is_shooting) 1101 | elif deep_tracking_ms > 125: # 175 1102 | HidController.simple_predictive_aim(abs_x_dis, x_dis, en_height) 1103 | ##HidController.physics_predictive_aim(abs_x_dis, x_dis, False) 1104 | is_shooting = HidController.arduino_shoot_no_dup(is_shooting) 1105 | 1106 | blocking_during_scan = False 1107 | #print(deep_tracking_ms) 1108 | if alt_profile: 1109 | #if deep_tracking_ms > (enemy_seen_delay_ms + 50): 1110 | #tracking_range = 2 1111 | #HidController.reset_tracking_timers() 1112 | if cmc or HidController.time_after_last_shot() > (wepprofile.shoot_cd - 0.025):# and alt_profile: #0.50 for better accuracy 1113 | if keyboard.is_pressed('a'): 1114 | right_is_pressed = HidController.move_right_press(right_is_pressed) 1115 | else: 1116 | right_is_pressed = HidController.move_right_released(right_is_pressed) 1117 | 1118 | if keyboard.is_pressed('d'): 1119 | left_is_pressed = HidController.move_left_press(left_is_pressed) 1120 | else: 1121 | left_is_pressed = HidController.move_left_released(left_is_pressed) 1122 | 1123 | if keyboard.is_pressed('w'): 1124 | down_is_pressed = HidController.move_down_press(down_is_pressed) 1125 | else: 1126 | down_is_pressed = HidController.move_down_released(down_is_pressed) 1127 | 1128 
| if keyboard.is_pressed('s'): 1129 | up_is_pressed = HidController.move_up_press(up_is_pressed) 1130 | else: 1131 | up_is_pressed = HidController.move_up_released(up_is_pressed) 1132 | else: 1133 | up_is_pressed = HidController.move_up_released(up_is_pressed) 1134 | down_is_pressed = HidController.move_down_released(down_is_pressed) 1135 | left_is_pressed = HidController.move_left_released(left_is_pressed) 1136 | right_is_pressed = HidController.move_right_released(right_is_pressed) 1137 | walk_is_pressed = HidController.walk_released(walk_is_pressed) 1138 | else: 1139 | walk_is_pressed = HidController.walk_press(walk_is_pressed) 1140 | if deep_tracking_ms < enemy_seen_delay_ms: 1141 | if keyboard.is_pressed('a'): 1142 | right_is_pressed = HidController.move_right_press(right_is_pressed) 1143 | else: 1144 | right_is_pressed = HidController.move_right_released(right_is_pressed) 1145 | 1146 | if keyboard.is_pressed('d'): 1147 | left_is_pressed = HidController.move_left_press(left_is_pressed) 1148 | else: 1149 | left_is_pressed = HidController.move_left_released(left_is_pressed) 1150 | 1151 | if keyboard.is_pressed('w'): 1152 | down_is_pressed = HidController.move_down_press(down_is_pressed) 1153 | else: 1154 | down_is_pressed = HidController.move_down_released(down_is_pressed) 1155 | 1156 | if keyboard.is_pressed('s'): 1157 | up_is_pressed = HidController.move_up_press(up_is_pressed) 1158 | else: 1159 | up_is_pressed = HidController.move_up_released(up_is_pressed) 1160 | else: 1161 | up_is_pressed = HidController.move_up_released(up_is_pressed) 1162 | down_is_pressed = HidController.move_down_released(down_is_pressed) 1163 | left_is_pressed = HidController.move_left_released(left_is_pressed) 1164 | right_is_pressed = HidController.move_right_released(right_is_pressed) 1165 | 1166 | spin_counter = 0 1167 | no_target = False 1168 | # return False 1169 | 1170 | # > 60 medium far 1171 | # > 70 medium 1172 | # > 90 medium close 1173 | # > 120 or > 140 close 1174 | # 100 to 122 cap when head target 1175 | # 165 when full auto 1176 | 1177 | # target crouching reduces height by ~15% 1178 | 1179 | # Dis: 85, standstill: 0 ~ 4, running: 10 ~ 15 | 5.666666 | 0.1764 1180 | # Dis: 90, standstill: 0 ~ 4, running: 10 ~ 17 | 5.222222 | 0.1888 1181 | # Dis: 120, standstill: 0 ~ 4, running: 11 ~ 30 | 4 | 0.25 1182 | 1183 | # suggested ratio: 0.1411 for movment determination 1184 | 1185 | @staticmethod 1186 | def move_to_target_passive(target, center, wepprofile, alt_profile, en_height, lock): 1187 | 1188 | global scope_state 1189 | global tracking_range 1190 | global aim_down_activation_threshold 1191 | global no_target 1192 | global spin_bot 1193 | global spin_counter 1194 | global up_is_pressed 1195 | global down_is_pressed 1196 | global left_is_pressed 1197 | global right_is_pressed 1198 | global walk_is_pressed 1199 | # global blocking_during_scan 1200 | global allow_mouse_inputs 1201 | global is_shooting 1202 | global manual_shooting 1203 | #global skip_move_counter 1204 | 1205 | if target[0] is None: # Pass through Mouse? 
1206 | allow_mouse_inputs = True 1207 | #skip_move_counter = 0 1208 | HidController.reset_tracking_timers() 1209 | 1210 | if manual_shooting: 1211 | is_shooting = HidController.arduino_shoot_no_dup(is_shooting) 1212 | else: 1213 | is_shooting = HidController.arduino_unshoot(is_shooting) 1214 | 1215 | if manual_scope: 1216 | scope_state = HidController.hold_scope(scope_state) 1217 | else: 1218 | scope_state = HidController.release_scope(scope_state) 1219 | else: 1220 | x_dis = target[0] - center[0] 1221 | y_dis = target[1] - center[1] 1222 | abs_x_dis = math.fabs(x_dis) 1223 | abs_y_dis = math.fabs(y_dis) 1224 | 1225 | #cmc = HidController.check_mouse_click(wepprofile) 1226 | #if cmc or predictive_smoothing is False or tracking_range > 0: # or tracking_range == 1: 1227 | #if is_shooting is False: 1228 | abs_x_dis = round(abs_x_dis * 2.124) 1229 | abs_y_dis = round(abs_y_dis * 2.124)# * 0.75) 1230 | #else: 1231 | # abs_x_dis = round(abs_x_dis * 1.124) 1232 | # abs_y_dis = round(abs_y_dis * 1.124) 1233 | 1234 | if abs_x_dis > 999: 1235 | abs_x_dis = 999 1236 | if abs_y_dis > 999: 1237 | abs_y_dis = 999 1238 | 1239 | # abs_x_dis_rev = abs_x_dis 1240 | 1241 | #if abs_x_dis_rev > 20: 1242 | # abs_x_dis_rev = 20 1243 | #else: 1244 | # abs_x_dis_rev = 0 1245 | 1246 | cords = "" 1247 | cords_rev = "" 1248 | 1249 | if lock is True: 1250 | x_rev = 0.5 1251 | y_rev = 0.8 1252 | else: 1253 | x_rev = 1 1254 | y_rev = 1 1255 | 1256 | if x_dis > 0: 1257 | cords += HidController.parse_cords("X", abs_x_dis) 1258 | cords_rev += HidController.parse_cords("C", abs_x_dis * x_rev)#- abs_x_dis_rev) 1259 | else: 1260 | cords += HidController.parse_cords("C", abs_x_dis) 1261 | cords_rev += HidController.parse_cords("X", abs_x_dis * x_rev)#- abs_x_dis_rev) 1262 | 1263 | if y_dis > 0: 1264 | cords += HidController.parse_cords("Y", abs_y_dis) 1265 | cords_rev += HidController.parse_cords("U", abs_y_dis * y_rev) 1266 | else: 1267 | cords += HidController.parse_cords("U", abs_y_dis) 1268 | cords_rev += HidController.parse_cords("Y", abs_y_dis * y_rev) 1269 | 1270 | if manual_scope: 1271 | if scope_state is False and lock is True: 1272 | HidController.encode_and_send_mouse(cords) 1273 | cords = None 1274 | cords_rev = None 1275 | scope_state = HidController.hold_scope(scope_state) 1276 | else: 1277 | scope_state = HidController.release_scope(scope_state) 1278 | 1279 | if manual_shooting or spin_bot: 1280 | #if lock is True: 1281 | allow_mouse_inputs = False 1282 | #else: 1283 | #allow_mouse_inputs = True 1284 | HidController.MouseBuffer.playback() 1285 | HidController.MouseBuffer.reset() 1286 | if lock is False: 1287 | allow_mouse_inputs = True 1288 | 1289 | if cords is not None:# and skip_move_counter == 0: 1290 | HidController.encode_and_send_mouse(cords) 1291 | is_shooting = HidController.arduino_shoot(is_shooting) 1292 | #is_shooting = HidController.arduino_shoot_no_dup(is_shooting) 1293 | if cords_rev is not None: 1294 | HidController.encode_and_send_mouse(cords_rev) 1295 | #HidController.encode_and_send_mouse(cords) 1296 | walk_is_pressed = HidController.walk_press(walk_is_pressed) 1297 | if keyboard.is_pressed('a'): 1298 | right_is_pressed = HidController.move_right_press(right_is_pressed) 1299 | else: 1300 | right_is_pressed = HidController.move_right_released(right_is_pressed) 1301 | 1302 | if keyboard.is_pressed('d'): 1303 | left_is_pressed = HidController.move_left_press(left_is_pressed) 1304 | else: 1305 | left_is_pressed = HidController.move_left_released(left_is_pressed) 1306 | 1307 | if 
keyboard.is_pressed('w'): 1308 | down_is_pressed = HidController.move_down_press(down_is_pressed) 1309 | else: 1310 | down_is_pressed = HidController.move_down_released(down_is_pressed) 1311 | 1312 | if keyboard.is_pressed('s'): 1313 | up_is_pressed = HidController.move_up_press(up_is_pressed) 1314 | else: 1315 | up_is_pressed = HidController.move_up_released(up_is_pressed) 1316 | #skip_move_counter += 1 1317 | #if skip_move_counter > 3: 1318 | # skip_move_counter = 0 1319 | else: 1320 | allow_mouse_inputs = True 1321 | is_shooting = HidController.arduino_unshoot(is_shooting) 1322 | up_is_pressed = HidController.move_up_released(up_is_pressed) 1323 | down_is_pressed = HidController.move_down_released(down_is_pressed) 1324 | left_is_pressed = HidController.move_left_released(left_is_pressed) 1325 | right_is_pressed = HidController.move_right_released(right_is_pressed) 1326 | walk_is_pressed = HidController.walk_released(walk_is_pressed) 1327 | #skip_move_counter = 0 1328 | 1329 | spin_counter = 0 1330 | no_target = False 1331 | 1332 | @staticmethod 1333 | def simple_predictive_aim(abs_x_dis, x_dis, height): 1334 | global frame_delay_options 1335 | global frame_delay 1336 | global prediction_ratio 1337 | global prediction_selector 1338 | #print("height: " + str(height)) 1339 | if prediction_selector == 3: 1340 | return # [1, 1.5, 2, 0.00001] 1341 | #elif prediction_ratio[prediction_selector] == 2: 1342 | #movement check? 1343 | if x_dis == 0: 1344 | return 1345 | #if (height * 0.141176) < abs_x_dis: 1346 | #if height > 70 and 11 < abs_x_dis: # engage prediction 1347 | frame = frame_delay_options[frame_delay]/20 1348 | if height > 50 and (height * 0.141176 * frame) < abs_x_dis: # engage prediction 1349 | mov_dist = height * 0.25 * frame 1350 | if mov_dist > 999: 1351 | return 1352 | if x_dis > 0: 1353 | HidController.encode_and_send_mouse(HidController.parse_cords("X", mov_dist)) 1354 | else: 1355 | HidController.encode_and_send_mouse(HidController.parse_cords("C", mov_dist)) 1356 | print("Engaged_Prediction: " + str(mov_dist)) 1357 | 1358 | @staticmethod 1359 | def physics_predictive_aim(abs_x_dis, x_dis, take_prev_time): 1360 | global last_time_main 1361 | global frame_delay_options 1362 | global frame_delay 1363 | global prediction_ratio 1364 | global prediction_selector 1365 | global prediction_scalar 1366 | global prediction_scalar_selector 1367 | global previous_process_time 1368 | # testing done at capping 120 fps 1369 | # if en_height > 65: 1370 | if abs_x_dis >= (frame_delay_options[frame_delay]/prediction_ratio[prediction_selector]): # 10 at 20ms 1371 | if take_prev_time: 1372 | mov_vel = abs_x_dis / (previous_process_time + frame_delay_options[frame_delay]) # s = d/t 1373 | else: 1374 | mov_vel = abs_x_dis / (frame_delay_options[frame_delay]) # s = d/t 1375 | proc_tim = (time.time() - last_time_main) * 1000 1376 | mov_dist = mov_vel * proc_tim # d = s * t 1377 | mov_dist = round(mov_dist * prediction_scalar[prediction_scalar_selector]) # * 2.124 * 2) 1378 | if mov_dist != 0: 1379 | if mov_dist > 100: 1380 | return 1381 | #mov_dist = 0 1382 | if x_dis > 0: 1383 | HidController.encode_and_send_mouse(HidController.parse_cords("X", mov_dist)) 1384 | else: 1385 | HidController.encode_and_send_mouse(HidController.parse_cords("C", mov_dist)) 1386 | print("Base: " + str(abs_x_dis) + " | Time: " + str(proc_tim) + " | Result: " + str(mov_dist)) 1387 | 1388 | @staticmethod 1389 | def ssh_move(x, y): 1390 | max_int = 254 # 255 1391 | 1392 | max_int_x = max_int 1393 | max_int_y = max_int 1394 | 
1395 | if x < 0: 1396 | max_int_x = -1 * max_int 1397 | if y < 0: 1398 | max_int_y = -1 * max_int 1399 | 1400 | if x == 0 and y == 0: 1401 | return 1402 | 1403 | x_iter = math.floor(x / max_int_x) 1404 | x_rem = x % max_int 1405 | 1406 | y_iter = math.floor(y / max_int_y) 1407 | y_rem = y % max_int 1408 | 1409 | if x > y: 1410 | length = x_iter 1411 | else: 1412 | length = y_iter 1413 | 1414 | if x_rem != 0 or y_rem != 0: 1415 | length += 1 1416 | 1417 | for o in range(length): 1418 | x_in = 0 1419 | y_in = 0 1420 | 1421 | # print("o val: " + str(o) + "x_iter: " + str(x_iter) + " y_iter: " + str(y_iter)) 1422 | 1423 | if x_iter > o: 1424 | x_in = max_int_x 1425 | elif x_iter == o: 1426 | x_in = x_rem 1427 | 1428 | if y_iter > o: 1429 | y_in = max_int_y 1430 | elif y_iter == o: 1431 | y_in = y_rem 1432 | 1433 | print("cords: " + str(x_in) + ", " + str(y_in)) 1434 | # UNCOMMENT 1435 | #ssh.exec_command("virsh -c qemu:///system qemu-monitor-command Valorant-001 mouse_move " + str(x_in) + " " + str(y_in) +" --hmp") 1436 | 1437 | # Serial Communication 1438 | @staticmethod 1439 | def encode_and_send_mouse(str_en): 1440 | serMouse.write(str(str_en).encode()) 1441 | #pass 1442 | 1443 | @staticmethod 1444 | def encode_and_send_keyboard(str_en): 1445 | serKeyboard.write(str(str_en).encode()) 1446 | 1447 | @staticmethod 1448 | def parse_cords(sign, value): 1449 | value = str(value) 1450 | cords = str(sign + (value.zfill(3))) 1451 | return cords 1452 | 1453 | 1454 | # Shooting 1455 | @staticmethod 1456 | def arduino_shoot(is_shooting): 1457 | if is_shooting is False: 1458 | #if keyboard.is_pressed("["): 1459 | # return 1460 | #encode_and_send("S050B000A000") 1461 | HidController.encode_and_send_mouse("A000") 1462 | HidController.reset_mouse_after_shoot() 1463 | # CODE TO AIM DOWN MORE PER SHOT?! 1464 | return True 1465 | else: 1466 | HidController.encode_and_send_mouse("B000A000") 1467 | HidController.reset_mouse_after_shoot() 1468 | return True 1469 | 1470 | @staticmethod 1471 | def arduino_shoot_no_dup(is_shooting): 1472 | HidController.encode_and_send_mouse("A000") # no dupe 1473 | #HidController.encode_and_send_mouse("B000A000") 1474 | HidController.reset_mouse_after_shoot() 1475 | # CODE TO AIM DOWN MORE PER SHOT?! 1476 | return True 1477 | 1478 | @staticmethod 1479 | def arduino_unshoot(is_shooting): 1480 | if is_shooting is True: 1481 | HidController.encode_and_send_mouse("B000") 1482 | # CODE TO RESET AIM DOWN! 1483 | return False 1484 | return is_shooting 1485 | 1486 | @staticmethod 1487 | def arduino_tap_shoot(is_shooting): 1488 | 1489 | #if keyboard.is_pressed("["): 1490 | # return 1491 | #encode_and_send("S050B000A000B000") 1492 | HidController.encode_and_send_mouse("B000A000B000") 1493 | HidController.reset_mouse_after_shoot() 1494 | return False 1495 | # CODE TO AIM DOWN MORE PER SHOT?! 
1496 | # return is_shooting 1497 | 1498 | @staticmethod 1499 | def hold_scope(scope_state): 1500 | if scope_state is False: 1501 | # pyautogui.mouseDown(button='right') 1502 | HidController.encode_and_send_mouse("D000") 1503 | return True 1504 | return scope_state 1505 | 1506 | @staticmethod 1507 | def release_scope(scope_state): 1508 | if scope_state is True: 1509 | # pyautogui.mouseUp(button='right') 1510 | HidController.encode_and_send_mouse("E000") 1511 | return False 1512 | return scope_state 1513 | 1514 | @staticmethod 1515 | def reset_mouse(): 1516 | # global cd_time 1517 | global cd_burst_time 1518 | global burstcount 1519 | global is_shooting 1520 | 1521 | burstcount = 0 1522 | cd_burst_time = time.time() 1523 | is_shooting = False 1524 | 1525 | @staticmethod 1526 | def reset_mouse_after_shoot(): 1527 | global cd_time 1528 | global cd_burst_time 1529 | global burstcount 1530 | 1531 | cd_time = time.time() 1532 | cd_burst_time = time.time() 1533 | burstcount += 1 1534 | 1535 | @staticmethod 1536 | def check_mouse_click(wepprofile): 1537 | global cd_time 1538 | global cd_burst_time 1539 | global burstcount 1540 | 1541 | #if keyboard.is_pressed("["): # or tracking_range >= 3: # or keyboard.is_pressed(""): 1542 | # return False 1543 | if (time.time() - cd_time) > wepprofile.shoot_cd: 1544 | return True 1545 | return False 1546 | 1547 | @staticmethod 1548 | def time_after_last_shot(): 1549 | global cd_time 1550 | return (time.time() - cd_time) 1551 | 1552 | # Moving 1553 | @staticmethod 1554 | def move_up_press(is_pressed): 1555 | if is_pressed is False: 1556 | # pyautogui.keyDown("Up") 1557 | HidController.encode_and_send_keyboard("A") 1558 | return True 1559 | return is_pressed 1560 | 1561 | @staticmethod 1562 | def move_down_press(is_pressed): 1563 | if is_pressed is False: 1564 | # pyautogui.keyDown("Down") 1565 | HidController.encode_and_send_keyboard("B") 1566 | return True 1567 | return is_pressed 1568 | 1569 | @staticmethod 1570 | def move_left_press(is_pressed): 1571 | if is_pressed is False: 1572 | # pyautogui.keyDown("Left") 1573 | HidController.encode_and_send_keyboard("C") 1574 | return True 1575 | return is_pressed 1576 | 1577 | @staticmethod 1578 | def move_right_press(is_pressed): 1579 | if is_pressed is False: 1580 | # pyautogui.keyDown("Right") 1581 | HidController.encode_and_send_keyboard("D") 1582 | return True 1583 | return is_pressed 1584 | 1585 | @staticmethod 1586 | def walk_press(is_pressed): 1587 | if is_pressed is False: 1588 | # pyautogui.keyDown('p') 1589 | HidController.encode_and_send_keyboard("E") 1590 | return True 1591 | return is_pressed 1592 | 1593 | @staticmethod 1594 | def move_up_released(is_pressed): 1595 | if is_pressed is True: 1596 | # pyautogui.keyUp("Up") 1597 | HidController.encode_and_send_keyboard("G") 1598 | return False 1599 | return is_pressed 1600 | 1601 | @staticmethod 1602 | def move_down_released(is_pressed): 1603 | if is_pressed is True: 1604 | # pyautogui.keyUp("Down") 1605 | HidController.encode_and_send_keyboard("H") 1606 | return False 1607 | return is_pressed 1608 | 1609 | @staticmethod 1610 | def move_left_released(is_pressed): 1611 | if is_pressed is True: 1612 | # pyautogui.keyUp("Left") 1613 | HidController.encode_and_send_keyboard("I") 1614 | return False 1615 | return is_pressed 1616 | 1617 | @staticmethod 1618 | def move_right_released(is_pressed): 1619 | if is_pressed is True: 1620 | # pyautogui.keyUp("Right") 1621 | HidController.encode_and_send_keyboard("J") 1622 | return False 1623 | return is_pressed 1624 | 1625 | 
@staticmethod 1626 | def walk_released(is_pressed): 1627 | if is_pressed is True: 1628 | # pyautogui.keyUp('p') 1629 | HidController.encode_and_send_keyboard("K") 1630 | return False 1631 | return is_pressed 1632 | 1633 | class MouseBuffer: 1634 | buffer = [] 1635 | 1636 | @staticmethod 1637 | def add(cords): 1638 | if cords[0] == "X" or cords[0] == "Y" or cords[0] == "C" or cords[0] == "U": 1639 | HidController.MouseBuffer.buffer.append(cords) 1640 | 1641 | @staticmethod 1642 | def reset(): 1643 | HidController.MouseBuffer.buffer = [] 1644 | 1645 | @staticmethod 1646 | def playback(): 1647 | for x in HidController.MouseBuffer.buffer: 1648 | x = x.replace("C","J") 1649 | x = x.replace("U","K") 1650 | x = x.replace("X","C") 1651 | x = x.replace("Y","U") 1652 | x = x.replace("J","X") 1653 | x = x.replace("K","Y") 1654 | HidController.encode_and_send_mouse(x) 1655 | 1656 | # Threading 1657 | @staticmethod 1658 | def mouse_relay(): 1659 | global allow_mouse_inputs 1660 | global blocking_during_scan 1661 | global temp_pause 1662 | global spin_bot 1663 | global manual_shooting 1664 | global manual_scope 1665 | 1666 | while True: 1667 | #print("x") 1668 | #while blocking_during_scan: 1669 | #pass 1670 | # s.sendto(message.encode('utf-8'), server) 1671 | data, addr = s.recvfrom(1024) 1672 | data = data.decode('utf-8') 1673 | if data == "PAUS": 1674 | temp_pause = True 1675 | elif data == "UPAU": 1676 | temp_pause = False 1677 | elif data == "RHTD": 1678 | # pyautogui.keyDown('o') 1679 | # HidController.encode_and_send_keyboard("F") 1680 | if is_using_passive_aim is False: 1681 | serKeyboard.write(str("F").encode()) # O Key 1682 | manual_scope = True 1683 | elif data == "RHTU": 1684 | 1685 | # HidController.encode_and_send_keyboard("L") 1686 | if is_using_passive_aim is False: 1687 | serKeyboard.write(str("L").encode()) # O Key 1688 | manual_scope = False 1689 | elif data == "SPND": 1690 | spin_bot = True 1691 | elif data == "SPNU": 1692 | spin_bot = False 1693 | else: 1694 | if is_using_passive_aim: 1695 | if data == "A000": 1696 | manual_shooting = True 1697 | data = "" 1698 | elif data == "B000": 1699 | manual_shooting = False 1700 | data = "" 1701 | if allow_mouse_inputs: 1702 | # print("Received from server: " + data) 1703 | if data != "": 1704 | serMouse.write(str(data).encode()) 1705 | HidController.MouseBuffer.add(data) 1706 | #print(allow_mouse_inputs) 1707 | # time.sleep(0.001) 1708 | # message = input("-> ") 1709 | 1710 | @staticmethod 1711 | def reset_tracking_timers(): 1712 | global up_is_pressed 1713 | global down_is_pressed 1714 | global left_is_pressed 1715 | global right_is_pressed 1716 | global walk_is_pressed 1717 | global deep_tracking_ms 1718 | global last_deep_tracking_ms 1719 | 1720 | up_is_pressed = HidController.move_up_released(up_is_pressed) 1721 | down_is_pressed = HidController.move_down_released(down_is_pressed) 1722 | left_is_pressed = HidController.move_left_released(left_is_pressed) 1723 | right_is_pressed = HidController.move_right_released(right_is_pressed) 1724 | walk_is_pressed = HidController.walk_released(walk_is_pressed) 1725 | # non_track_counter = 0 1726 | deep_tracking_ms = 0 1727 | last_deep_tracking_ms = time.time() 1728 | 1729 | #cv2.floodFill(mask,mask1,(0,0),255) 1730 | 1731 | # Runtime 1732 | mon = MonitorProcessing.monitor_pre_cal(monitor_width, monitor_height) 1733 | subwepprofile = WeaponProfiles.HeadShot.Guardian 1734 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1735 | wepprofile = mainwepprofile 1736 | img_cords = 
ImageProcessing.ImageCords() 1737 | 1738 | print(serMouse.name) 1739 | print(serKeyboard.name) 1740 | _thread.start_new_thread(HidController.mouse_relay, ()) 1741 | print("Thread Started") 1742 | #try: 1743 | # print("Starting Mouse Relay Thread") 1744 | # thread.start_new_thread(mouse_relay) 1745 | #except: 1746 | # print("Cannot Start Thread") 1747 | #kill_mon = MonitorProcessing.monitor_kill_indicators(monitor_width, monitor_height) 1748 | kill_mon = MonitorProcessing.monitor_indicators_zone(monitor_width, monitor_height) 1749 | kill_last = False 1750 | kill_cnt = 0 1751 | 1752 | #skip_move_counter = 0 1753 | #if use_d3d is True: 1754 | # print("Using D3D") 1755 | # d = d3dshot.create(capture_output="numpy") 1756 | #else: 1757 | # print("Using MSS") 1758 | 1759 | with mss.mss() as sct: 1760 | settings_mask = ImageProcessing.Filtering.test(tracking_range, Colors.purple, 1) 1761 | while True: 1762 | if temp_pause or paused or keyboard.is_pressed("Tab"): # or keyboard.is_pressed("]") or keyboard.is_pressed("[") 1763 | allow_mouse_inputs = True 1764 | was_in_paused_state = True 1765 | spin_counter = 0 1766 | 1767 | if keyboard.is_pressed("esc"): 1768 | paused = False 1769 | is_using_passive_aim = False 1770 | elif keyboard.is_pressed("F1"): 1771 | paused = False 1772 | is_using_passive_aim = True 1773 | is_using_passive_aim_lock = True 1774 | elif keyboard.is_pressed("F2"): 1775 | paused = False 1776 | is_using_passive_aim = True 1777 | is_using_passive_aim_lock = False 1778 | tracking_range = 3 1779 | 1780 | is_shooting = HidController.arduino_unshoot(is_shooting) 1781 | HidController.reset_mouse() 1782 | HidController.reset_tracking_timers() 1783 | #scope_state = releasescope() 1784 | 1785 | up_is_pressed = HidController.move_up_released(up_is_pressed) 1786 | down_is_pressed = HidController.move_down_released(down_is_pressed) 1787 | left_is_pressed = HidController.move_left_released(left_is_pressed) 1788 | right_is_pressed = HidController.move_right_released(right_is_pressed) 1789 | walk_is_pressed = HidController.walk_released(walk_is_pressed) 1790 | # print("No Target") 1791 | 1792 | passive_hold_shoot_timer = (time.time() - passive_hold_shoot_timer) 1793 | passive_shoot_offset -= passive_hold_shoot_timer * 125 1794 | if passive_shoot_offset < -40: 1795 | passive_shoot_offset = -40 1796 | if passive_shoot_offset < 0 or wepprofile.aim_down_offset_y == 0 or alt_profile: 1797 | passive_shoot_offset_apply = 0 1798 | else: 1799 | passive_shoot_offset_apply = passive_shoot_offset 1800 | passive_hold_shoot_timer = time.time() 1801 | 1802 | if keyboard.is_pressed('`'): 1803 | editing_selected = False 1804 | paused = True 1805 | 1806 | # if wepprofile.auto_scope and scope_state is True: # and tracking_range >= 3: 1807 | # if scope_state is True: # and tracking_range >= 3: 1808 | scope_state = HidController.release_scope(scope_state) 1809 | 1810 | mask = ImageProcessing.Filtering.test(tracking_range, Colors.purple, 0) 1811 | #mask = settings_mask.copy() 1812 | 1813 | if keyboard.is_pressed("-"): 1814 | editing_selected = True 1815 | 1816 | if editing_selected: 1817 | if keyboard.is_pressed("m"): 1818 | editing_selected = False 1819 | elif keyboard.is_pressed('Tab'): 1820 | if keyboard.is_pressed('1'): 1821 | subwepprofile = WeaponProfiles.HeadShot.GuardianNoXOffset 1822 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1823 | elif keyboard.is_pressed('2'): 1824 | subwepprofile = WeaponProfiles.HeadShot.Guardian 1825 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1826 | elif 
keyboard.is_pressed('3'): 1827 | subwepprofile = WeaponProfiles.HeadShot.GuardianZoom 1828 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1829 | elif keyboard.is_pressed('4'): 1830 | subwepprofile = WeaponProfiles.HeadShot.GuardianZoom 1831 | mainwepprofile = WeaponProfiles.BodyShot.GuardianZoom 1832 | elif keyboard.is_pressed('5'): 1833 | subwepprofile = WeaponProfiles.HeadShot.Sheriff 1834 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1835 | elif keyboard.is_pressed('6'): 1836 | subwepprofile = WeaponProfiles.HeadShot.Guardian 1837 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1838 | elif keyboard.is_pressed('7'): 1839 | subwepprofile = WeaponProfiles.HeadShot.Guardian 1840 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1841 | elif keyboard.is_pressed('8'): 1842 | subwepprofile = WeaponProfiles.HeadShot.Guardian 1843 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1844 | elif keyboard.is_pressed('9'): 1845 | subwepprofile = WeaponProfiles.HeadShot.Guardian 1846 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1847 | elif keyboard.is_pressed('0'): 1848 | subwepprofile = WeaponProfiles.HeadShot.Guardian 1849 | mainwepprofile = WeaponProfiles.BodyShot.Guardian 1850 | elif keyboard.is_pressed("1"): 1851 | frame_delay = 0 1852 | elif keyboard.is_pressed("2"): 1853 | frame_delay = 1 1854 | elif keyboard.is_pressed("3"): 1855 | frame_delay = 2 1856 | elif keyboard.is_pressed("4"): 1857 | frame_delay = 3 1858 | elif keyboard.is_pressed("5"): 1859 | frame_delay = 4 1860 | elif keyboard.is_pressed("7"): 1861 | prediction_selector = 0 1862 | elif keyboard.is_pressed("8"): 1863 | prediction_selector = 1 1864 | elif keyboard.is_pressed("9"): 1865 | prediction_selector = 2 1866 | elif keyboard.is_pressed("0"): 1867 | prediction_selector = 3 1868 | 1869 | elif keyboard.is_pressed("["): 1870 | prediction_scalar_selector = 0 1871 | elif keyboard.is_pressed("]"): 1872 | prediction_scalar_selector = 1 1873 | 1874 | elif keyboard.is_pressed("k"): 1875 | #predictive_smoothing = True 1876 | filter_selector = 0 1877 | elif keyboard.is_pressed("l"): 1878 | #predictive_smoothing = False 1879 | filter_selector = 1 1880 | elif keyboard.is_pressed(":"): 1881 | filter_selector = 2 1882 | elif keyboard.is_pressed("i"): 1883 | distance_selector = 0 1884 | elif keyboard.is_pressed("o"): 1885 | distance_selector = 1 1886 | elif keyboard.is_pressed("p"): 1887 | distance_selector = 2 1888 | elif keyboard.is_pressed("h"): 1889 | target_nearest_post_paused_state = True 1890 | elif keyboard.is_pressed("j"): 1891 | target_nearest_post_paused_state = False 1892 | #elif keyboard.is_pressed("u"): 1893 | # enemy_seen_delay_ms_default = 125 1894 | #elif keyboard.is_pressed("i"): 1895 | # enemy_seen_delay_ms_default = 0 1896 | 1897 | mask = ImageProcessing.apply_text(mask, subactive + "[Tab + 1-9] Profile-Sub: " + str(subwepprofile), 1898 | "[Tab + 1-9] Profile: "+ str(mainwepprofile), 1899 | "[H/J] Target Close After Paused: " + str(target_nearest_post_paused_state) + 1900 | " | [I/O/P] Aim Adjustment: " + str(distance_names[distance_selector]) + 1901 | " | [7-0] Prediction: " + str(prediction_ratio[prediction_selector]) + 1902 | " | [[/]] Pred-Scalar: " + str(prediction_scalar[prediction_scalar_selector]), 1903 | "[1-5] Target Delay: " + str(frame_delay_options[frame_delay]) + 1904 | #" | [K/L] Overshoot: " + str(predictive_smoothing) + 1905 | " | [K/L/:] Filter: " + str(filter_names[filter_selector]) 1906 | #" | [O/P] Passive Mode: " + str(passive_mode) + 1907 | , "Paused: [esc] to Resume | [M] to 
leave edit", tracking_range) 1908 | 1909 | else: 1910 | mask = ImageProcessing.apply_text(mask, subactive + "Profile-Sub: " + str(subwepprofile), "Profile: " 1911 | + str(mainwepprofile), 1912 | "Aim Adjustment: " + str(distance_names[distance_selector]) + 1913 | " | Prediction: " + str(prediction_ratio[prediction_selector]) + 1914 | " | Pred-Scalar: " + str(prediction_scalar[prediction_scalar_selector]), 1915 | "Target Delay: " + str(frame_delay_options[frame_delay]) + 1916 | #" | Overshoot: " + str(predictive_smoothing) + 1917 | " | Filter: " + str(filter_names[filter_selector]) + 1918 | #" | Passive Mode: " + str(passive_mode) + 1919 | " | Target Close After Paused: " + str(target_nearest_post_paused_state) 1920 | , "Paused: [esc] to Resume | [-] to edit", tracking_range) 1921 | 1922 | cv2.imshow("Setura", mask) 1923 | 1924 | cv2.waitKey(1) 1925 | cv2.waitKey(750) 1926 | 1927 | if keyboard.is_pressed('='): 1928 | #c.close() 1929 | cv2.destroyAllWindows() 1930 | break 1931 | elif is_using_passive_aim is False: # non passive 1932 | # Should it target nearest to cursor after pause key hit?? 1933 | if was_in_paused_state and target_nearest_post_paused_state: 1934 | was_in_paused_state = False 1935 | #non_track_counter = 0 1936 | deep_tracking_ms = 0 1937 | last_deep_tracking_ms = time.time() 1938 | if alt_profile: 1939 | tracking_range = 0 1940 | else: 1941 | tracking_range = 1 1942 | # Mouse Buffer 1943 | if allow_mouse_inputs is False: 1944 | HidController.MouseBuffer.playback() 1945 | HidController.MouseBuffer.reset() 1946 | # Scan 1947 | last_time_main = time.time() # timing 1948 | blocking_during_scan = True 1949 | #tracking_range = 0 1950 | 1951 | if tracking_range < 2: 1952 | deep_tracking_ms += ((time.time() - last_deep_tracking_ms) * 1000) 1953 | last_deep_tracking_ms = time.time() 1954 | if filter_selector == 0: 1955 | mask = ImageProcessing.Filtering.advanced_split_standard(tracking_range, Colors.antiphoenix, 20, 0, 20) 1956 | elif filter_selector == 1: 1957 | mask = ImageProcessing.Filtering.advanced_split_standard_3(tracking_range, Colors.primary, 10, 10, 20) 1958 | elif filter_selector == 2: 1959 | mask = ImageProcessing.Filtering.advanced_split_standard_3(tracking_range, Colors.primary, 10, 10, 20) 1960 | 1961 | else: 1962 | # mask = ImageProcessing.Filtering.aggressive(tracking_range, Colors.purple) 1963 | if filter_selector == 0: 1964 | mask = ImageProcessing.Filtering.aggressive(tracking_range, Colors.purple) 1965 | elif filter_selector == 1: 1966 | mask = ImageProcessing.Filtering.aggressive(tracking_range, Colors.primary) 1967 | # mask = ImageProcessing.Filtering.advanced_split_standard(tracking_range, Colors.antiphoenix, 20, 0, 20) 1968 | elif filter_selector == 2: 1969 | # mask = ImageProcessing.Filtering.aggressive(tracking_range, Colors.purple) 1970 | # mask = ImageProcessing.Filtering.advanced_split_standard_3(tracking_range, Colors.primary, 10, 10, 20) 1971 | mask = ImageProcessing.Filtering.advanced_split_aggressive(tracking_range, Colors.primary, 10, 10, 1972 | 20) 1973 | 1974 | img_cords, scope_state, en_height = ImageSearch.find_first_pixel_npy_argmax(mask, wepprofile, tracking_range, mon, scope_state) 1975 | 1976 | # last_time_main = time.time() # timing 1977 | 1978 | HidController.move_to_target(img_cords.cords, (mon[tracking_range].res_w, mon[tracking_range].res_h - 1979 | passive_shoot_offset_apply), wepprofile, 1980 | allow_tracking_resize, alt_profile, predictive_smoothing, deep_tracking_ms, enemy_seen_delay_ms, en_height) 1981 | 1982 | 
previous_process_time = (time.time() - last_time_main) # timing 1983 | last_time_main = time.time() 1984 | #if tracking_range == 0: 1985 | # print(n) 1986 | 1987 | blocking_during_scan = False 1988 | 1989 | mask = img_cords.img 1990 | # Initial KeyPress Checker 1991 | if keyboard.is_pressed("Shift"): 1992 | wepprofile = subwepprofile 1993 | if alt_profile is False and (wepprofile.auto_scope_adv is False or wepprofile.auto_scope is False): 1994 | scope_state = HidController.release_scope(scope_state) 1995 | alt_profile = True 1996 | mainactive = "" 1997 | subactive = "[ON]" 1998 | else: 1999 | wepprofile = mainwepprofile 2000 | if alt_profile is True and (wepprofile.auto_scope_adv is False or wepprofile.auto_scope is False): 2001 | scope_state = HidController.release_scope(scope_state) 2002 | alt_profile = False 2003 | mainactive = "[ON]" 2004 | subactive = "" 2005 | 2006 | if keyboard.is_pressed('a') or keyboard.is_pressed('d') or keyboard.is_pressed('w') or keyboard.is_pressed('s'): 2007 | if alt_profile: 2008 | enemy_seen_delay_ms = enemy_seen_delay_ms_walk 2009 | else: 2010 | enemy_seen_delay_ms = enemy_seen_delay_ms_run 2011 | else: 2012 | enemy_seen_delay_ms = enemy_seen_delay_ms_still 2013 | # KeyPress Detection 2014 | if keyboard.is_pressed('`'): 2015 | paused = True 2016 | editing_selected = False 2017 | # Aim Down 2018 | passive_hold_shoot_timer = (time.time() - passive_hold_shoot_timer) 2019 | if is_shooting: 2020 | passive_shoot_offset += passive_hold_shoot_timer * 125 # 200 # * wepprofile.moving_aim_down_speed_multiplier # 150 2021 | if passive_shoot_offset > wepprofile.aim_down_offset_y: # + moving_aim_offset: 2022 | passive_shoot_offset = wepprofile.aim_down_offset_y # + moving_aim_offset 2023 | else: 2024 | passive_shoot_offset -= passive_hold_shoot_timer * 100 2025 | if passive_shoot_offset < - 20: # 40 #-60 #-20 #-40 2026 | passive_shoot_offset = -20 # 40 #-60 #-20 #-40 2027 | #print(str(passive_shoot_offset)) 2028 | if passive_shoot_offset < 0 or wepprofile.aim_down_offset_y == 0 or alt_profile: 2029 | passive_shoot_offset_apply = 0 2030 | else: 2031 | passive_shoot_offset_apply = passive_shoot_offset 2032 | passive_hold_shoot_timer = time.time() 2033 | # Kill Scanner 2034 | if tracking_range == 1: 2035 | l2time = time.time() 2036 | new = ImageProcessing.kill_indicator(kill_mon, Colors.kill) 2037 | if new is True and kill_last is False: 2038 | kill_cnt += 1 2039 | #print("Kill: " + str(kill_cnt)) 2040 | tracking_range = 3 2041 | kill_last = new 2042 | #print("scan time dif: "+str(time.time() - l2time)) 2043 | # Display 2044 | 2045 | mask = ImageProcessing.apply_text(mask, subactive + "Profile-Sub: " + str(subwepprofile), mainactive + "Profile: " 2046 | + str(mainwepprofile), 2047 | "Aim Adjustment: " + str(distance_names[distance_selector]) + 2048 | " | Prediction: " + str(prediction_ratio[prediction_selector]) + 2049 | " | Pred-Scalar: " + str(prediction_scalar[prediction_scalar_selector]), 2050 | "Target Delay: " + str(frame_delay_options[frame_delay]) + 2051 | #" | Overshoot: " + str(predictive_smoothing) + 2052 | " | Filter: " + str(filter_names[filter_selector]) + 2053 | #" | Passive Mode: " + str(passive_mode) + 2054 | " | Target Close After Paused: " + str(target_nearest_post_paused_state) 2055 | , "timing: " + str(previous_process_time), tracking_range) 2056 | 2057 | cv2.imshow("Setura", mask) 2058 | 2059 | proposed_delay = int((frame_delay_options[frame_delay]) - ((time.time() - last_time_main) * 1000)) 2060 | if proposed_delay < 1: 2061 | proposed_delay = 1 2062 
| 2063 | #print(proposed_delay) 2064 | 2065 | if no_target: 2066 | cv2.waitKey(1) 2067 | else: 2068 | cv2.waitKey(proposed_delay) # 5 is risky #15 was really good, but 5 works now? # 10 # desired_frame_time # 20 edge of good 2069 | cv2.waitKey(750) 2070 | 2071 | #cv2.waitKey(1000) 2072 | else: 2073 | #if manual_scope: 2074 | # tracking_range = 8 2075 | #else: 2076 | # tracking_range = 7 2077 | tracking_range = 8 2078 | # Mouse Buffer 2079 | #if allow_mouse_inputs is False: 2080 | # HidController.MouseBuffer.playback() 2081 | HidController.MouseBuffer.reset() 2082 | # Scan 2083 | last_time_main = time.time() # timing 2084 | blocking_during_scan = True 2085 | 2086 | #deep_tracking_ms += ((time.time() - last_deep_tracking_ms)*1000) 2087 | #last_deep_tracking_ms = time.time() 2088 | if filter_selector == 0: 2089 | mask = ImageProcessing.Filtering.advanced_split_standard(tracking_range, Colors.antiphoenix, 20, 0, 20) 2090 | elif filter_selector == 1: 2091 | mask = ImageProcessing.Filtering.advanced_split_standard_3(tracking_range, Colors.primary, 10, 10, 20) 2092 | elif filter_selector == 2: 2093 | mask = ImageProcessing.Filtering.advanced_split_standard_3(tracking_range, Colors.primary, 10, 10, 20) 2094 | 2095 | img_cords, scope_state, en_height = ImageSearch.find_first_pixel_npy_argmax(mask, wepprofile, 2096 | tracking_range, mon, 2097 | scope_state) 2098 | 2099 | # last_time_main = time.time() # timing 2100 | 2101 | HidController.move_to_target_passive(img_cords.cords, (mon[tracking_range].res_w, mon[tracking_range].res_h - 2102 | passive_shoot_offset_apply), wepprofile, alt_profile, en_height, is_using_passive_aim_lock) 2103 | 2104 | previous_process_time = (time.time() - last_time_main) # timing 2105 | last_time_main = time.time() 2106 | # if tracking_range == 0: 2107 | # print(n) 2108 | 2109 | blocking_during_scan = False 2110 | 2111 | mask = img_cords.img 2112 | # KeyPress Detection 2113 | if keyboard.is_pressed('`'): 2114 | paused = True 2115 | editing_selected = False 2116 | # Aim Down 2117 | passive_hold_shoot_timer = (time.time() - passive_hold_shoot_timer) 2118 | if manual_shooting or spin_bot: 2119 | passive_shoot_offset += passive_hold_shoot_timer * 125 # 200 # * wepprofile.moving_aim_down_speed_multiplier # 150 2120 | if passive_shoot_offset > wepprofile.aim_down_offset_y: # + moving_aim_offset: 2121 | passive_shoot_offset = wepprofile.aim_down_offset_y # + moving_aim_offset 2122 | else: 2123 | passive_shoot_offset -= passive_hold_shoot_timer * 100 2124 | if passive_shoot_offset < -40: # -60 #-20 #-40 2125 | passive_shoot_offset = -40 # -60 #-20 #-40 2126 | # print(str(passive_shoot_offset)) 2127 | if passive_shoot_offset < 0 or wepprofile.aim_down_offset_y == 0 or alt_profile: 2128 | passive_shoot_offset_apply = 0 2129 | else: 2130 | passive_shoot_offset_apply = passive_shoot_offset 2131 | passive_hold_shoot_timer = time.time() 2132 | 2133 | # Display 2134 | 2135 | mask = ImageProcessing.apply_text(mask, subactive + "Profile-Sub: " + str(subwepprofile), 2136 | mainactive + "Profile: " 2137 | + str(mainwepprofile), 2138 | "Aim Adjustment: " + str(distance_names[distance_selector]) + 2139 | " | Prediction: " + str(prediction_ratio[prediction_selector]) + 2140 | " | Pred-Scalar: " + str(prediction_scalar[prediction_scalar_selector]), 2141 | "Target Delay: " + str(frame_delay_options[frame_delay]) + 2142 | # " | Overshoot: " + str(predictive_smoothing) + 2143 | " | Filter: " + str(filter_names[filter_selector]) + 2144 | # " | Passive Mode: " + str(passive_mode) + 2145 | " | Target 
Close After Paused: " + str(target_nearest_post_paused_state) 2146 | , "timing: " + str(previous_process_time), tracking_range) 2147 | 2148 | cv2.imshow("Setura", mask) 2149 | 2150 | proposed_delay = int((frame_delay_options[frame_delay]) - ((time.time() - last_time_main) * 1000)) 2151 | if proposed_delay < 1: 2152 | proposed_delay = 1 2153 | 2154 | # print(proposed_delay) 2155 | 2156 | if no_target: 2157 | cv2.waitKey(1) 2158 | else: 2159 | cv2.waitKey(proposed_delay) 2160 | cv2.waitKey(750) 2161 | 2162 | 2163 | 2164 | 2165 | # to do: 2166 | # when moving the mouse faster than x speed, increase the tracking range 2167 | # test saving movement in a buffer and playing it back in reverse if an enemy is detected 2168 | # test right click stuff 2169 | 2170 | # make aimbot move speed depend on your movement speed --------------------------------------------------------------------------------
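
The display loops above derive proposed_delay by subtracting the scan/move time from the selected frame budget and clamping the result to 1 ms before passing it to cv2.waitKey. Below is a minimal sketch of that pacing logic pulled into a helper; the name compute_wait_ms is hypothetical and does not exist in valkyrie.py.

import time

def compute_wait_ms(frame_budget_ms, scan_started_at):
    # Time remaining in the frame budget after the scan and mouse-move work, in ms.
    elapsed_ms = (time.time() - scan_started_at) * 1000
    # cv2.waitKey(0) would block forever, so never return less than 1 ms.
    return max(int(frame_budget_ms - elapsed_ms), 1)

# Usage mirroring the loop: wait out the rest of the budget, or only 1 ms
# when no target was found this frame.
# delay = compute_wait_ms(frame_delay_options[frame_delay], last_time_main)
# cv2.waitKey(1 if no_target else delay)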
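
The first to-do item (widen the tracking range when the mouse is moving fast) could be approached roughly as sketched here. This is an illustrative assumption, not code from the repo: it presumes pywin32 is available for win32api.GetCursorPos, and estimate_mouse_speed, pick_tracking_range, and the 1500 px/s threshold are placeholder names and values.

import time
import win32api  # assumed dependency (pywin32), used only to sample the cursor position

def estimate_mouse_speed(prev_pos, prev_time):
    # Return (speed in px/s, new position, new timestamp) from two successive cursor samples.
    x, y = win32api.GetCursorPos()
    now = time.time()
    dt = max(now - prev_time, 1e-6)
    dx, dy = x - prev_pos[0], y - prev_pos[1]
    return ((dx * dx + dy * dy) ** 0.5) / dt, (x, y), now

def pick_tracking_range(speed, slow_range=0, fast_range=1, threshold_px_per_s=1500):
    # Use the wider capture region while the cursor is moving quickly.
    return fast_range if speed > threshold_px_per_s else slow_range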