├── README.md
├── dash.py
├── requirements.txt
├── sc2.png
├── scanlib
│   ├── __pycache__
│   │   └── socialmedia.cpython-310.pyc
│   ├── bitcoin.py
│   ├── compete.py
│   ├── contact.py
│   ├── githubscrape.py
│   ├── js.py
│   ├── scanwebsite.py
│   └── socialmedia.py
├── sites.py
└── test.py
    └── compete.py
/README.md:
--------------------------------------------------------------------------------
1 | ```
2 | ____ _ ____ _ _
3 | | _ \ / \ / ___|| | | |
4 | | | | |/ _ \ \___ \| |_| |
5 | | |_| / ___ \ ___) | _ |
6 | |____/_/ \_\____/|_| |_| 1.3
7 | ```
8 | DASH is a deep OSINT scanner. All you need is a username to start a scan. It sends requests quickly and can surface potential info (country, special data, and more...).
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 | ## Installation
17 |
18 | Use the package manager [pip](https://pip.pypa.io/en/stable/) to install the dependencies.
19 |
20 | ```bash
21 | git clone https://github.com/TheSadError/dash
22 | cd dash
23 | pip install -r requirements.txt
24 | python3 dash.py --u username
25 | ```
26 | **Found a bug? Please open an issue.**
27 | ## Usage
28 |
29 | ```python
30 | python3 dash.py --u username # start an OSINT scan for the given username
31 | python3 dash.py              # without --u, DASH will prompt for a username and then start the scan... Good luck!
32 | ```
33 | ## Contact :
34 | **Discord : thesaderror#1351**
35 |
36 | ## License
37 | [MIT](https://choosealicense.com/licenses/mit/)
38 |
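39 | ## How it works
40 |
41 | Each module in `scanlib` formats the username into a list of profile URLs and decides whether an account exists either from the HTTP status code or by comparing the page body with a known "user not found" page. A minimal sketch of that check (a hypothetical `check` helper, not the exact code in `scanlib`):
42 |
43 | ```python
44 | import requests
45 |
46 | def check(username, template, reference="status"):
47 |     url = template.format(username)                   # e.g. "https://github.com/{}" -> profile URL
48 |     if reference == "status":
49 |         return requests.get(url).status_code == 200   # 200 means the profile page exists
50 |     not_found = requests.get(reference).text          # body of a deliberately nonexistent profile
51 |     return requests.get(url).text != not_found        # a different body suggests the user exists
52 | ```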
--------------------------------------------------------------------------------
/dash.py:
--------------------------------------------------------------------------------
1 | from colorama import *
2 | import optparse
3 | import datetime
4 | from scanlib import socialmedia
5 | from scanlib import scanwebsite
6 | from scanlib import bitcoin
7 | from scanlib import contact
8 | from scanlib import compete
9 | import requests
10 | import socket
11 | import os
12 | def banner():
13 | print(Fore.BLUE+f"""
14 | _____ _______ _______ _______
15 | | \| _ | __| | |
16 | | -- | |__ | |
17 | |_____/|___|___|_______|___|___| 1.6
18 |
19 | [!] This OSINT Tool created and developed by TheSadError
20 | """)
21 |
22 | def start(username,time):
23 | os.system('cls' if os.name=='nt' else 'clear')
24 | banner()
25 | print(Fore.RED+f"""
26 | Contact :
27 |
28 | Github : https://github.com/TheSadError
29 | Youtube : https://www.youtube.com/channel/UCUfTuo3-85qD_7v1n-W98rw
30 | Discord : err0r#4018
31 | """)
32 | print(Fore.BLUE+f"\nStarting DASH OSINT scanner... ( https://github.com/TheSadError/DASH ) Time : {time}")
33 | socialmedia.scan(username)
34 | scanwebsite.scan(username)
35 | bitcoin.scan(username)
36 | contact.scan(username)
37 | compete.scan(username)
38 | print(f"""
39 |
40 |     [+] {Fore.RED}Thank you for using {Fore.BLUE}project dash. Don't forget to star and watch the repo for update announcements.
41 | """)
42 |
43 | def main():
44 | time= datetime.datetime.now()
45 |     parser = optparse.OptionParser("usage: python3 dash.py --u username")
46 |
47 | parser.add_option("-u","--u",dest = "username",type="string") # username parameter
48 | (options,args) = parser.parse_args()
49 | username = options.username
50 |     if username is None:
51 | username = input(Fore.BLUE+"[+] Username : ")
52 | start(username,time)
53 |
54 |
55 | if __name__ == "__main__":
56 | main()
57 |
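58 | # Example: python3 dash.py --u <username>   (run without --u to be prompted for the username)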
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | colorama
2 | requests
3 | # datetime and socket are part of the Python standard library and do not need to be installed
5 |
--------------------------------------------------------------------------------
/sc2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theahmadov/dash/788026a3a506389576eea3cf24cde987f43f0166/sc2.png
--------------------------------------------------------------------------------
/scanlib/__pycache__/socialmedia.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/theahmadov/dash/788026a3a506389576eea3cf24cde987f43f0166/scanlib/__pycache__/socialmedia.cpython-310.pyc
--------------------------------------------------------------------------------
/scanlib/bitcoin.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from colorama import *
4 | import socket
5 |
6 | udata = [
7 | "https://www.minds.com/{}", # 1
8 | "https://bitcoinforum.com/profile/{}/",# 2
9 | "https://www.analystforum.com/u/{}",# 3
10 | "http://www.money-talk.org/profile.php?mode=viewprofile&u={}" # 4
11 | ]
12 | sdata = [
13 | "status", # 1
14 | "https://bitcoinforum.com/profile/adminofwebsite/", # 2
15 | "status",# 3
16 | "http://www.money-talk.org/profile.php?mode=viewprofile&u=dawd"# 4
17 | ]
18 |
19 |
20 | def scan(username):
21 | print(Fore.BLUE+f"""
22 |
23 | Bitcoin & Financial Websites Scan :
24 |
25 | """)
26 | asia = 0
27 | western = 0
28 | for i in range(0,len(udata)):
29 | if sdata[i] == "status":
30 | url = udata[i].format(username)
31 | r = requests.get(url)
32 | if r.status_code == 200 :
33 | print(Fore.GREEN+f"[+] User Found : {url}")
34 | else:
35 | print(Fore.RED+f"[-] Not Found : {url}")
36 | else:
37 | url = udata[i].format(username)
38 | r1 = requests.get(url).text
39 | r2 = requests.get(sdata[i]).text
40 |             if r1 == r2:  # page matches the known "user not found" reference page
41 |                 print(Fore.RED+f"[-] Not Found : {url}")
42 |             else:
43 |                 print(Fore.GREEN+f"[+] User Found : {url}")
44 |
--------------------------------------------------------------------------------
/scanlib/compete.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from colorama import *
4 | import socket
5 |
6 | udata = [
7 | "https://www.chess.com/member/{}", # 1
8 | "https://dribbble.com/{}",# 2
9 | "https://{}.carbonmade.com/",# 3
10 | "https://www.duolingo.com/profile/{}", # 4
11 | "https://www.artstation.com/{}",# 5
12 | "https://www.behance.net/{}",# 6
13 | "https://www.buymeacoffee.com/{}",# 7
14 | "https://codepen.io/{}", # 8
15 | "https://www.colourlovers.com/lover/{}", # 9
16 | "https://lichess.org/@/{}",
17 | "https://www.lesswrong.com/users/{}",
18 | "https://{}.newgrounds.com/",
19 | "https://opensource.com/users/{}",
20 | "https://pastebin.com/u/{}",
21 | "https://www.polygon.com/users/{}",
22 | "https://www.sportlerfrage.net/nutzer/{}",
23 | "https://unsplash.com/@{}"
24 | ]
25 | sdata = [
26 | "status", # 1
27 | "status", # 2
28 | "status", # 3
29 | "status",# 4
30 | "status",# 5
31 | "status",# 6
32 | "status",# 7
33 | "status",# 8
34 | "status",# 9
35 | "status",
36 | "status",
37 | "status",
38 | "status",
39 | "status",
40 | "status",
41 | "status",
42 | "status"
43 | ]
44 |
45 |
46 | def scan(username):
47 | print(Fore.BLUE+f"""
48 |
49 | Competitive & Portfolio & Education & News Websites Scan :
50 |
51 | """)
52 | asia = 0
53 | western = 0
54 | for i in range(0,len(udata)):
55 | if sdata[i] == "status":
56 | url = udata[i].format(username)
57 | r = requests.get(url)
58 | if r.status_code == 200 :
59 | print(Fore.GREEN+f"[+] User Found : {url}")
60 | else:
61 | print(Fore.RED+f"[-] Not Found : {url}")
62 | else:
63 | url = udata[i].format(username)
64 | r1 = requests.get(url).text
65 | r2 = requests.get(sdata[i]).text
66 |             if r1 == r2:  # page matches the known "user not found" reference page
67 |                 print(Fore.RED+f"[-] Not Found : {url}")
68 |             else:
69 |                 print(Fore.GREEN+f"[+] User Found : {url}")
70 |
--------------------------------------------------------------------------------
/scanlib/contact.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from colorama import *
4 | import socket
5 |
6 | udata = [
7 | "https://ask.fm/{}", # 1
8 | "https://www.bikemap.net/en/u/{}/routes/created/",# 2
9 | "https://forum.dangerousthings.com/u/{}/summary",# 3
10 | "https://forums.envato.com/u/{}", # 4
11 | "https://www.cracked.com/members/{}",# 5
12 | "https://ask.fedoraproject.org/u/{}",# 6
13 | "https://community.cryptomator.org/u/{}",# 7
14 | "https://forum.ionicframework.com/u/{}", # 8
15 | "https://discourse.joplinapp.org/u/{}", # 9
16 | "https://forum.rclone.org/u/{}", # 10
17 | "https://forum.sublimetext.com/u/{}", # 11
18 | "https://discourse.wicg.io/u/{}", # 12
19 | "https://bionluk.com/{}", # 13
20 | "https://www.warriorforum.com/members/{}-1.html", # 14
21 | "https://forums.whonix.org/u/{}", # 15
22 | "https://quantnet.com/members/{}.1/",
23 | "https://lobste.rs/u/{}",
24 | "https://{}.livejournal.com/",
25 | "https://www.producthunt.com/@{}",
26 | "https://community.signalusers.org/u/{}"
27 | ]
28 | sdata = [
29 | "status", # 1
30 | "status", # 2
31 | "status", # 3
32 | "status",# 4
33 | "status",# 5
34 | "status",# 6
35 | "status",# 7
36 | "status",# 8
37 | "status",# 9
38 | "status", # 10
39 | "status",# 11
40 | "status",# 12
41 | "status", # 13
42 | "status", # 14
43 | "status", # 15
44 | "status",
45 | "status",
46 | "status",
47 | "status",
48 | "status"
49 | ]
50 |
51 |
52 | def scan(username):
53 | print(Fore.BLUE+f"""
54 |
55 | Contact & Forum Websites Scan :
56 |
57 | """)
58 | asia = 0
59 | western = 0
60 | for i in range(0,len(udata)):
61 | if sdata[i] == "status":
62 | url = udata[i].format(username)
63 | r = requests.get(url)
64 | if r.status_code == 200 :
65 | print(Fore.GREEN+f"[+] User Found : {url}")
66 | else:
67 | print(Fore.RED+f"[-] Not Found : {url}")
68 | else:
69 | url = udata[i].format(username)
70 | r1 = requests.get(url).text
71 | r2 = requests.get(sdata[i]).text
72 |             if r1 == r2:  # page matches the known "user not found" reference page
73 |                 print(Fore.RED+f"[-] Not Found : {url}")
74 |             else:
75 |                 print(Fore.GREEN+f"[+] User Found : {url}")
76 |
--------------------------------------------------------------------------------
/scanlib/githubscrape.py:
--------------------------------------------------------------------------------
1 | import json
2 | from re import I
3 | import requests
4 | from colorama import *
5 | import scanlib.js as js
6 | def getinfo(username):
7 | url = "https://api.github.com/users/{}".format(username)
8 |     data = requests.get(url).text  # raw API response, saved to error.json below
9 | js.jsd("error", data)
10 | need = [
11 | 'name',
12 | 'type',
13 | 'company',
14 | 'location',
15 | 'email'
16 | ]
17 | req = requests.get(url).json()
18 | dt = js.pdata(need, req)
19 | print(json.dumps(dt, indent= True))
20 |
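21 | # Example (hypothetical username): getinfo("octocat") prints the name, type, company, location and
22 | # email fields returned by https://api.github.com/users/octocat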
--------------------------------------------------------------------------------
/scanlib/js.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | def jsd(filename, json_data):  # write json_data to <filename>.json
4 | with open('{}.json'.format(filename), 'w') as fp:
5 | json.dump(json_data, fp , indent= True)
6 |
7 | def pdata(need, s):  # return only the keys of s that are listed in need
8 | Data = {}
9 | for (k, v) in s.items():
10 |
11 | if k in need:
12 | Data[k] = v
13 |
14 | return Data
15 |
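16 | # Example: pdata(["name", "email"], {"name": "Ada", "id": 1, "email": "a@b.c"})
17 | # returns {"name": "Ada", "email": "a@b.c"} (only the keys listed in `need` are kept)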
--------------------------------------------------------------------------------
/scanlib/scanwebsite.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from colorama import *
3 | from urllib.request import urlopen
4 | from urllib.error import *
5 |
6 | lst = [
7 | "http://{}.cf",
8 | "http://{}.tl",
9 | "http://{}.ml",
10 | "http://{}.com",
11 | "http://{}.org",
12 | "http://{}.me",
13 | "http://{}.info",
14 | "http://{}.net",
15 | "http://{}.online",
16 | "http://{}.site",
17 | "http://{}.tr",
18 | "http://{}.de",
19 | "http://{}.app",
20 | "http://{}.live",
21 | "http://{}.store",
22 | "http://{}.shop",
23 | "http://{}.website",
24 | "http://{}.xyz",
25 | "http://{}.plus",
26 | "http://{}.bet",
27 | "http://{}.group",
28 | "http://{}.io"
29 | ]
30 |
31 | def scan(username):
32 | print(Fore.BLUE+f"""
33 |
34 | Website Domain Scan :
35 |
36 | """)
37 | for i in range(0, len(lst)):
38 | url = lst[i].format(username)
39 |         try:
40 |             requests.get(url, timeout=5)   # the domain resolves and answers an HTTP request
41 |             found = True
42 |         except requests.exceptions.RequestException:
43 |             found = False
44 |         if found:
45 |             print(Fore.GREEN+f"[+] Website Found : {url}")
46 |             #print(Fore.RED+f"[!] Website Not Found : {url}")
47 |
48 |
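49 | # Example (hypothetical username): scan("example") checks http://example.cf, http://example.com, ...
50 | # and prints every domain that responds to an HTTP request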
--------------------------------------------------------------------------------
/scanlib/socialmedia.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from colorama import *
4 | import socket
5 | import scanlib.githubscrape as githubscrape
6 | udata = [
7 | "https://github.com/{}", # 1
8 | "https://instagram.com/{}",# 2
9 | "https://twitter.com/{}",# 3
10 | "https://t.me/{}", # 4
11 | "https://{}.tumblr.com/",# 5
12 | "https://tieba.baidu.com/f?kw={}",# 6
13 | "https://www.pinterest.com/{}",# 7
14 | "https://vk.com/{}", # 8
15 | "https://www.reddit.com/user/{}", # 9
16 | "https://mix.com/{}", # 10
17 | "https://{}.skyrock.com/", # 11
18 | "https://www.facebook.com/{}", # 12
19 | "https://bionluk.com/{}", # 13
20 | "https://gitlab.com/{}", # 14
21 | "https://www.tiktok.com/@{}", # 15
22 | "https://www.quora.com/{}", # 16
23 | "https://medium.com/@{}", # 17
24 | "https://digg.com/@{}", # 18
25 | "https://www.linkedin.com/in/{}", # 19
26 | "https://www.deviantart.com/{}", # 20
27 | "https://www.twitch.tv/{}" # 21
28 | ]
29 | sdata = [
30 | "status", # 1
31 | "status", # 2
32 | "status",# 3
33 | "status",# 4
34 | "status",# 5
35 | "status",# 6
36 | "status",# 7
37 | "status",# 8
38 | "https://www.reddit.com/user/admwkdamwkdamwkawd",# 9
39 | "https://mix.com/", # 10
40 | "status",# 11
41 | "status",# 12
42 | "status", # 13
43 | "https://gitlab.com/users/sign_in", # 14
44 | "status", # 15
45 | "status", # 16
46 | "status", # 17
47 | "status", # 18
48 | "status", # 19
49 | "status", # 20
50 | "status" # 21
51 | ]
52 |
53 |
54 | def scan(username):
55 | print(Fore.BLUE+f"""
56 |
57 | Social Media Scan :
58 |
59 | """)
60 | asia = 0
61 | western = 0
62 | for i in range(0,len(udata)):
63 | if sdata[i] == "status":
64 | url = udata[i].format(username)
65 | r = requests.get(url)
66 | if r.status_code == 200 :
67 | print(Fore.GREEN+f"[+] User Found : {url}")
68 | else:
69 | print(Fore.RED+f"[-] Not Found : {url}")
70 | else:
71 | url = udata[i].format(username)
72 | r1 = requests.get(url).text
73 | r2 = requests.get(sdata[i]).text
74 |             if r1 == r2:  # page matches the known "user not found" reference page
75 |                 print(Fore.RED+f"[-] Not Found : {url}")
76 |             else:
77 |                 print(Fore.GREEN+f"[+] User Found : {url}")
78 |
79 | print(Fore.BLUE+f"""
80 |
81 |     Potential Data :
82 | """)
83 | print(Fore.BLUE+"Data from Github : ")
84 | githubscrape.getinfo(username)
85 |
--------------------------------------------------------------------------------
/sites.py:
--------------------------------------------------------------------------------
1 | # Social Media Websites :
2 | udata = [
3 | "https://github.com/{}", # 1
4 | "https://instagram.com/{}",# 2
5 | "https://twitter.com/{}",# 3
6 | "https://t.me/{}", # 4
7 | "https://{}.tumblr.com/",# 5
8 | "https://tieba.baidu.com/f?kw={}",# 6
9 | "https://www.pinterest.com/{}",# 7
10 | "https://vk.com/{}", # 8
11 | "https://www.reddit.com/user/{}", # 9
12 | "https://mix.com/{}", # 10
13 | "https://{}.skyrock.com/", # 11
14 | "https://www.facebook.com/{}", # 12
15 | "https://bionluk.com/{}", # 13
16 | "https://gitlab.com/{}", # 14
17 | "https://www.tiktok.com/@{}", # 15
18 | "https://www.quora.com/{}", # 16
19 | "https://medium.com/@{}", # 17
20 | "https://digg.com/@{}", # 18
21 | "https://www.linkedin.com/in/{}", # 19
22 | "https://www.deviantart.com/{}", # 20
23 | "https://www.twitch.tv/{}" # 21
24 | ]
25 |
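26 | # Each entry is a str.format template, e.g. udata[0].format("octocat") -> "https://github.com/octocat"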
--------------------------------------------------------------------------------
/test.py/compete.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from colorama import *
4 | import socket
5 |
6 | udata = [
7 | "https://www.chess.com/member/{}", # 1
8 | "https://codeforces.com/profile/{}"# 2
9 | #"https://forum.dangerousthings.com/u/{}/summary"# 3
10 | #"https://t.me/{}", # 4
11 | #"https://{}.tumblr.com/",# 5
12 | #"https://tieba.baidu.com/f?kw={}",# 6
13 | #"https://www.pinterest.com/{}",# 7
14 | #"https://vk.com/{}", # 8
15 | #"https://www.reddit.com/user/{}", # 9
16 | #"https://mix.com/{}", # 10
17 | #"https://{}.skyrock.com/", # 11
18 | #"https://www.facebook.com/{}", # 12
19 | #"https://bionluk.com/{}", # 13
20 | #"https://gitlab.com/{}", # 14
21 | #"https://www.tiktok.com/@{}", # 15
22 | #"https://www.quora.com/{}", # 16
23 | #"https://medium.com/@{}", # 17
24 | #"https://digg.com/@{}", # 18
25 | #"https://www.linkedin.com/in/{}", # 19
26 | #"https://www.deviantart.com/{}", # 20
27 | #"https://www.twitch.tv/{}" # 21
28 | ]
29 | sdata = [
30 | "status", # 1
31 | "status" # 2
32 | #"status" # 3
33 | #"status",# 4
34 | #"status",# 5
35 | #"status",# 6
36 | #"status",# 7
37 | #"status",# 8
38 | #"https://www.reddit.com/user/admwkdamwkdamwkawd",# 9
39 | #"https://mix.com/", # 10
40 | #"status",# 11
41 | #"status",# 12
42 | #"status", # 13
43 | #"https://gitlab.com/users/sign_in", # 14
44 | #"status", # 15
45 | #"status", # 16
46 | #"status", # 17
47 | #"status", # 18
48 | #"status", # 19
49 | #"status", # 20
50 | #"status" # 21
51 | ]
52 |
53 |
54 | def scan(username):
55 | print(Fore.BLUE+f"""
56 |
57 | Competitive & Portfolio Websites Scan :
58 |
59 | """)
60 | asia = 0
61 | western = 0
62 | for i in range(0,len(udata)):
63 | if sdata[i] == "status":
64 | url = udata[i].format(username)
65 | r = requests.get(url)
66 | if r.status_code == 200 :
67 | print(Fore.GREEN+f"[+] User Found : {url}")
68 | else:
69 | print(Fore.RED+f"[-] Not Found : {url}")
70 | else:
71 | url = udata[i].format(username)
72 | r1 = requests.get(url).text
73 | r2 = requests.get(sdata[i]).text
74 |             if r1 == r2:  # page matches the known "user not found" reference page
75 |                 print(Fore.RED+f"[-] Not Found : {url}")
76 |             else:
77 |                 print(Fore.GREEN+f"[+] User Found : {url}")
78 |
79 | scan("apppppppppppppppppppppppppppppppppppposo")
--------------------------------------------------------------------------------