├── .gitignore
├── LICENSE
├── README.md
├── _config.yml
├── check_version.py
├── netizenship.py
├── poetry.lock
├── pyproject.toml
├── requirements.txt
├── setup.py
├── sneak.gif
└── test_.py

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
/venv/*
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2020 Rahul Raj

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

# Netizenship

![issues](https://img.shields.io/github/issues/rahulrajpl/netizenship)
![stars](https://img.shields.io/github/stars/rahulrajpl/netizenship?style=social)
![forks](https://img.shields.io/github/forks/rahulrajpl/netizenship?style=social)

A command-line tool to check whether a given username is registered on popular social media websites such as Facebook, Instagram, and Twitter. The current version is 0.2.3.

![sneakpeek](./sneak.gif)

## Installation

Install the tool with:

~~~
$ sudo pip3 install netizenship
~~~

To upgrade an existing installation, run:

~~~
$ sudo pip3 install --upgrade netizenship
~~~

## Usage

Once the tool is installed, run it with:

~~~
$ netizenship
~~~

and then enter the username to search for.
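
A typical session looks roughly like this (output illustrative; the exact sites, statuses, and counts depend on the username):

~~~
$ netizenship
Enter username: johndoe
         Github : SUCCESS (Status: 200 OK )
       Facebook : SUCCESS (Status: 200 OK )
        Twitter : FAIL    (Status: 404 Not Found )
...
Summary: User johndoe has membership in 21/38 websites
~~~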

## Contribute

This tool is at an early stage; 38 websites are covered so far. I highly appreciate improvements and suggestions. The scope for additions and improvements broadly includes:

- Add more websites
- Report broken links or bugs with certain websites
- Add email lookup as an option

### Thanks to contributors

- [Robert Putt](https://github.com/robputt796)

## License

![license](https://img.shields.io/github/license/rahulrajpl/netizenship)

Copyright (c) 2020 Rahul Raj

Read the full license [here](./LICENSE)
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
theme: jekyll-theme-modernist
--------------------------------------------------------------------------------
/check_version.py:
--------------------------------------------------------------------------------
# credits: https://stackoverflow.com/users/2650249/hoefling

import json
import urllib.request
import sys

try:
    from importlib.metadata import version
except ImportError:
    from importlib_metadata import version

from distutils.version import LooseVersion


def is_latest_version():
    """Return True if the installed version of the package named in
    sys.argv[1] matches the newest release published on PyPI."""
    name = sys.argv[1]
    installed_version = LooseVersion(version(name))

    # fetch package metadata from PyPI
    pypi_url = f'https://pypi.org/pypi/{name}/json'
    response = urllib.request.urlopen(pypi_url).read().decode()
    latest_version = max(LooseVersion(s) for s in json.loads(response)['releases'].keys())

    # print('package:', name, 'installed:', installed_version, 'latest:', latest_version)
    return installed_version == latest_version


if not is_latest_version():
    print('Do you want to update to the latest version?')
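
# Example invocation (assumed; the package name is read from sys.argv):
#   $ python3 check_version.py netizenship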
--------------------------------------------------------------------------------
/netizenship.py:
--------------------------------------------------------------------------------
#!/usr/bin/python3
"""
Tool to automatically check the membership of a given username
in popular websites.

Inspired by:
https://github.com/thelinuxchoice/userrecon/blob/master/userrecon.sh

MIT License

Copyright (c) 2020 Rahul Raj
"""

import requests
from termcolor import colored
from bs4 import BeautifulSoup
from multiprocessing.pool import ThreadPool
from pyfiglet import figlet_format
import json
import urllib.request


try:
    from importlib.metadata import version
except ImportError:
    from importlib_metadata import version

from distutils.version import LooseVersion


def check_latest_version():
    name = 'netizenship'
    installed_version = LooseVersion(version(name))

    # fetch package metadata from PyPI
    pypi_url = f'https://pypi.org/pypi/{name}/json'
    response = urllib.request.urlopen(pypi_url).read().decode()
    latest_version = max(LooseVersion(s) for s in json.loads(response)['releases'].keys())
    print(f'Current version: {installed_version}')

    if installed_version != latest_version:
        print(f'Version {latest_version} available. To continue using the '
              'tool, run "sudo pip3 install --upgrade netizenship"')
        exit()


def main():
    def banner(text, ch='=', length=78):
        spaced_text = ' %s ' % text
        banner = spaced_text.center(length, ch)
        print(banner)

    ascii_banner = figlet_format('Netizenship')
    print(ascii_banner)

    # Check the version status.
    check_latest_version()

    banner_text = "MIT License, Copyright (c) 2020 Rahul Raj"
    banner(banner_text)

    wiki_link = 'https://en.wikipedia.org/wiki/List_of_HTTP_status_codes'
    uname = input("Enter username: ")
    width = 15  # to pretty-print
    global counter
    counter = 0  # to count the number of successes
    page = requests.get(wiki_link)
    soup = BeautifulSoup(page.content, 'html.parser')
    user_agent = ('Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) '
                  'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130'
                  ' Mobile Safari/537.36')
    headers = {'user-agent': user_agent}

    def get_website_membership(site):

        def print_fail():
            print(site.rjust(width), ':', colored(state.ljust(width//2), 'red'), '(Status:', msg, ')')

        def print_success():
            print(site.rjust(width), ':', colored(state.ljust(width//2), 'green'), '(Status:', msg, ')')

        url = websites[site]
        global counter
        state = "FAIL"
        msg = '--exception--'

        # Suffix-style entries (e.g. '.wordpress.com') do not start with 'h'.
        if not url[:1] == 'h':
            link = 'https://' + uname + url
        else:
            link = url + uname

        try:
            # Youtube and Twitter are fetched without the custom user agent.
            if site in ('Youtube', 'Twitter'):
                response = requests.get(link)
            else:
                response = requests.get(link, headers=headers)
            # Look up a human-readable description of the status code on the
            # Wikipedia page fetched above (anchor ids are strings).
            tag = soup.find(id=str(response.status_code))
            msg = tag.find_parent('dt').text
            response.raise_for_status()

        except Exception:
            print_fail()

        else:
            res_soup = BeautifulSoup(response.content, 'html.parser')
            if site == 'Pastebin':
                if len(res_soup.find_all('h1')) == 0:
                    msg = 'broken URL'
                    print_fail()

                else:
                    state = 'SUCCESS'
                    counter += 1
                    print_success()

            elif site == 'Wordpress':
                if 'doesn’t exist' in res_soup.text or 'blocked' in res_soup.text:
                    msg = 'broken URL'
                    print_fail()
                else:
                    state = 'SUCCESS'
                    counter += 1
                    print_success()

            # elif site == 'Imgur':
            #     ToDo

            elif site == 'GitLab':
                if 'Sign in' in res_soup.title.text:
                    msg = 'broken URL'
                    print_fail()
                else:
                    state = 'SUCCESS'
                    counter += 1
                    print_success()
            elif site == 'HackerNews':
                if 'No such user.' in res_soup.text:
                    msg = 'No Such User!'
                    print_fail()
                else:
                    state = 'SUCCESS'
                    counter += 1
                    print_success()
            elif site == 'ProductHunt':
                if 'Page Not Found' in res_soup.text:
                    msg = 'No Such User!'
                    print_fail()
                else:
                    state = 'SUCCESS'
                    counter += 1
                    print_success()
            else:
                state = 'SUCCESS'
                counter += 1
                print_success()

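    # The mapping below uses the two URL styles handled above: a full
    # profile-URL prefix to which the username is appended, or a bare
    # domain suffix (starting with '.') that the username is prepended to
    # as a subdomain. Commented-out entries are pending fixes.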
    websites = {
        'Facebook': 'https://www.facebook.com/',
        'Twitter': 'https://twitter.com/',
        'Instagram': 'https://www.instagram.com/',
        'Youtube': 'https://www.youtube.com/user/',
        # 'Reddit': 'https://www.reddit.com/user/',  # ToDo (see test_.py)
        'ProductHunt': 'https://www.producthunt.com/@',
        'Pinterest': 'https://www.pinterest.com/',
        'Flickr': 'https://www.flickr.com/people/',
        'Vimeo': 'https://vimeo.com/',
        'Soundcloud': 'https://soundcloud.com/',
        'Disqus': 'https://disqus.com/',
        'Medium': 'https://medium.com/@',
        'AboutMe': 'https://about.me/',
        # 'Imgur': 'https://imgur.com/user/',  # ToDo: returns a landing page
        'Flipboard': 'https://flipboard.com/',
        'Slideshare': 'https://slideshare.net/',
        'Spotify': 'https://open.spotify.com/user/',
        'Scribd': 'https://www.scribd.com/',
        'Patreon': 'https://www.patreon.com/',
        'BitBucket': 'https://bitbucket.org/',
        'GitLab': 'https://gitlab.com/',
        'Github': 'https://www.github.com/',
        'GoodReads': 'https://www.goodreads.com/',
        'Instructables': 'https://www.instructables.com/member/',
        'Codecademy': 'https://www.codecademy.com/',
        'Gravatar': 'https://en.gravatar.com/',
        'Pastebin': 'https://pastebin.com/u/',
        'FourSquare': 'https://foursquare.com/',
        'TripAdvisor': 'https://tripadvisor.com/members/',
        'Wikipedia': 'https://www.wikipedia.org/wiki/User:',
        'HackerNews': 'https://news.ycombinator.com/user?id=',
        'CodeMentor': 'https://www.codementor.io/',
        'Trip': 'https://www.trip.skyscanner.com/user/',
        'Blogger': '.blogspot.com',
        'Wordpress': '.wordpress.com',
        'Tumblr': '.tumblr.com',
        # 'Deviantart': '.deviantart.com',
        # ^ This website is either blocking or delaying the script
        'LiveJournal': '.livejournal.com',
        'Slack': '.slack.com',
    }

    # Check all sites concurrently with a pool of 10 worker threads.
    p = ThreadPool(10)
    p.map(get_website_membership, list(websites.keys()))
    n_websites = len(websites)
    print('Summary: User {} has membership in {}/{} websites'
          .format(uname, counter, n_websites))
    banner('completed')


if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
[[package]]
name = "beautifulsoup4"
version = "4.8.2"
description = "Screen-scraping library"
category = "main"
optional = false
python-versions = "*"

[package.dependencies]
soupsieve = ">=1.2"

[package.extras]
html5lib = ["html5lib"]
lxml = ["lxml"]

[[package]]
name = "certifi"
version = "2019.11.28"
description = "Python package for providing Mozilla's CA Bundle."
20 | category = "main" 21 | optional = false 22 | python-versions = "*" 23 | 24 | [[package]] 25 | name = "chardet" 26 | version = "3.0.4" 27 | description = "Universal encoding detector for Python 2 and 3" 28 | category = "main" 29 | optional = false 30 | python-versions = "*" 31 | 32 | [[package]] 33 | name = "idna" 34 | version = "2.8" 35 | description = "Internationalized Domain Names in Applications (IDNA)" 36 | category = "main" 37 | optional = false 38 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 39 | 40 | [[package]] 41 | name = "importlib-metadata" 42 | version = "1.4.0" 43 | description = "Read metadata from Python packages" 44 | category = "main" 45 | optional = false 46 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 47 | 48 | [package.dependencies] 49 | zipp = ">=0.5" 50 | 51 | [package.extras] 52 | docs = ["sphinx", "rst.linker"] 53 | testing = ["packaging", "importlib-resources"] 54 | 55 | [[package]] 56 | name = "more-itertools" 57 | version = "8.1.0" 58 | description = "More routines for operating on iterables, beyond itertools" 59 | category = "main" 60 | optional = false 61 | python-versions = ">=3.5" 62 | 63 | [[package]] 64 | name = "pyfiglet" 65 | version = "0.8.post1" 66 | description = "Pure-python FIGlet implementation" 67 | category = "main" 68 | optional = false 69 | python-versions = "*" 70 | 71 | [[package]] 72 | name = "requests" 73 | version = "2.22.0" 74 | description = "Python HTTP for Humans." 75 | category = "main" 76 | optional = false 77 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 78 | 79 | [package.dependencies] 80 | certifi = ">=2017.4.17" 81 | chardet = ">=3.0.2,<3.1.0" 82 | idna = ">=2.5,<2.9" 83 | urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" 84 | 85 | [package.extras] 86 | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)"] 87 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 88 | 89 | [[package]] 90 | name = "soupsieve" 91 | version = "1.9.5" 92 | description = "A modern CSS selector implementation for Beautiful Soup." 93 | category = "main" 94 | optional = false 95 | python-versions = "*" 96 | 97 | [[package]] 98 | name = "termcolor" 99 | version = "1.1.0" 100 | description = "ANSII Color formatting for output in terminal." 101 | category = "main" 102 | optional = false 103 | python-versions = "*" 104 | 105 | [[package]] 106 | name = "urllib3" 107 | version = "1.25.8" 108 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
109 | category = "main" 110 | optional = false 111 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 112 | 113 | [package.extras] 114 | brotli = ["brotlipy (>=0.6.0)"] 115 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 116 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 117 | 118 | [[package]] 119 | name = "zipp" 120 | version = "2.0.0" 121 | description = "Backport of pathlib-compatible object wrapper for zip files" 122 | category = "main" 123 | optional = false 124 | python-versions = ">=3.6" 125 | 126 | [package.dependencies] 127 | more-itertools = "*" 128 | 129 | [package.extras] 130 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 131 | testing = ["pathlib2", "contextlib2", "unittest2"] 132 | 133 | [metadata] 134 | lock-version = "1.1" 135 | python-versions = "^3.7" 136 | content-hash = "8d479e18bc4588bf69ef0f8bdbc4e63bae258ae49fcf79873ae31dfc54f7d0c3" 137 | 138 | [metadata.files] 139 | beautifulsoup4 = [ 140 | {file = "beautifulsoup4-4.8.2-py2-none-any.whl", hash = "sha256:e1505eeed31b0f4ce2dbb3bc8eb256c04cc2b3b72af7d551a4ab6efd5cbe5dae"}, 141 | {file = "beautifulsoup4-4.8.2-py3-none-any.whl", hash = "sha256:9fbb4d6e48ecd30bcacc5b63b94088192dcda178513b2ae3c394229f8911b887"}, 142 | {file = "beautifulsoup4-4.8.2.tar.gz", hash = "sha256:05fd825eb01c290877657a56df4c6e4c311b3965bda790c613a3d6fb01a5462a"}, 143 | ] 144 | certifi = [ 145 | {file = "certifi-2019.11.28-py2.py3-none-any.whl", hash = "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3"}, 146 | {file = "certifi-2019.11.28.tar.gz", hash = "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"}, 147 | ] 148 | chardet = [ 149 | {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, 150 | {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, 151 | ] 152 | idna = [ 153 | {file = "idna-2.8-py2.py3-none-any.whl", hash = "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"}, 154 | {file = "idna-2.8.tar.gz", hash = "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"}, 155 | ] 156 | importlib-metadata = [ 157 | {file = "importlib_metadata-1.4.0-py2.py3-none-any.whl", hash = "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359"}, 158 | {file = "importlib_metadata-1.4.0.tar.gz", hash = "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8"}, 159 | ] 160 | more-itertools = [ 161 | {file = "more-itertools-8.1.0.tar.gz", hash = "sha256:c468adec578380b6281a114cb8a5db34eb1116277da92d7c46f904f0b52d3288"}, 162 | {file = "more_itertools-8.1.0-py3-none-any.whl", hash = "sha256:1a2a32c72400d365000412fe08eb4a24ebee89997c18d3d147544f70f5403b39"}, 163 | ] 164 | pyfiglet = [ 165 | {file = "pyfiglet-0.8.post1-py2.py3-none-any.whl", hash = "sha256:d555bcea17fbeaf70eaefa48bb119352487e629c9b56f30f383e2c62dd67a01c"}, 166 | {file = "pyfiglet-0.8.post1.tar.gz", hash = "sha256:c6c2321755d09267b438ec7b936825a4910fec696292139e664ca8670e103639"}, 167 | ] 168 | requests = [ 169 | {file = "requests-2.22.0-py2.py3-none-any.whl", hash = "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"}, 170 | {file = "requests-2.22.0.tar.gz", hash = "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"}, 171 | ] 172 | soupsieve = [ 173 | {file = "soupsieve-1.9.5-py2.py3-none-any.whl", hash = 
"sha256:bdb0d917b03a1369ce964056fc195cfdff8819c40de04695a80bc813c3cfa1f5"}, 174 | {file = "soupsieve-1.9.5.tar.gz", hash = "sha256:e2c1c5dee4a1c36bcb790e0fabd5492d874b8ebd4617622c4f6a731701060dda"}, 175 | ] 176 | termcolor = [ 177 | {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, 178 | ] 179 | urllib3 = [ 180 | {file = "urllib3-1.25.8-py2.py3-none-any.whl", hash = "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc"}, 181 | {file = "urllib3-1.25.8.tar.gz", hash = "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"}, 182 | ] 183 | zipp = [ 184 | {file = "zipp-2.0.0-py3-none-any.whl", hash = "sha256:57147f6b0403b59f33fd357f169f860e031303415aeb7d04ede4839d23905ab8"}, 185 | {file = "zipp-2.0.0.tar.gz", hash = "sha256:7ae5ccaca427bafa9760ac3cd8f8c244bfc259794b5b6bb9db4dda2241575d09"}, 186 | ] 187 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "netizenship" 3 | version = "0.2.3" 4 | description = "Tool to check the username with popular websites for membership" 5 | authors = ["Rahul Raj "] 6 | readme = "README.md" 7 | homepage = "https://github.com/rahulrajpl/netizenship" 8 | documentation = "https://github.com/rahulrajpl/netizenship/blob/master/README.md" 9 | 10 | [tool.poetry.dependencies] 11 | python = "^3.7" 12 | requests = "^2.22.0" 13 | beautifulsoup4 = "^4.8.2" 14 | pyfiglet = "^0.8.post1" 15 | termcolor = "^1.1.0" 16 | importlib_metadata = "^1.4.0" 17 | 18 | [tool.poetry.dev-dependencies] 19 | 20 | [tool.poetry.scripts] 21 | netizenship = 'netizenship:main' 22 | [build-system] 23 | requires = ["poetry>=0.12"] 24 | build-backend = "poetry.masonry.api" 25 | 26 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4==4.8.2 2 | certifi==2019.11.28 3 | chardet==3.0.4 4 | idna==2.8 5 | pyfiglet==0.8.post1 6 | requests==2.22.0 7 | soupsieve==1.9.5 8 | termcolor==1.1.0 9 | urllib3==1.25.7 10 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | setuptools.setup( 7 | name="netizenship-rahulrajpl", # Replace with your own username 8 | version="0.0.1", 9 | author="Rahul Raj", 10 | author_email="rahulrajpl@gmail.com", 11 | description="Tool for checking online presence, given a username", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://github.com/pypa/sampleproject", 15 | packages=setuptools.find_packages(), 16 | classifiers=[ 17 | "Programming Language :: Python :: 3", 18 | "License :: OSI Approved :: MIT License", 19 | "Operating System :: OS Independent", 20 | ], 21 | python_requires='>=3.6', 22 | ) -------------------------------------------------------------------------------- /sneak.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rahulrajpl/netizenship/32c98a9efeb67e07a775dd9fb7db883416ac6298/sneak.gif -------------------------------------------------------------------------------- /test_.py: 
--------------------------------------------------------------------------------
"""This is a test script to check the landing page for broken URLs
that return a 200 OK response.
"""

import requests
from bs4 import BeautifulSoup

url = "https://www.reddit.com/user/rahulrajpl1111"
headers = {'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) '
                         'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 '
                         'Mobile Safari/537.36'}

response = requests.get(url, headers=headers)
soup = BeautifulSoup(response.content, 'html.parser')

print(soup.prettify())
# print(response.status_code)

# print('----------------------')

# print(soup.find_all('div', {'class': '_3VTI5BOpJO70xoBKSqz3O9'}))
# print(soup.find_all('h3'))

# if 'The person may have been banned' in soup.text:
#     print("Page Not Found")
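
# A minimal working version of the check sketched above. Assumption: the
# marker text 'The person may have been banned' (taken from the note above)
# is what Reddit's 200 OK landing page shows for a missing or banned account.
if 'The person may have been banned' in soup.text:
    print("Page Not Found")
else:
    print("Possible member: HTTP", response.status_code)
--------------------------------------------------------------------------------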