├── gmail_analytics_check ├── _version.py ├── __init__.py ├── __main__.py ├── report.py ├── executor.py ├── cli.py └── core.py ├── run.py ├── Makefile ├── setup.py ├── LICENSE ├── README.md ├── .gitignore └── check.py /gmail_analytics_check/_version.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.0.1' 2 | -------------------------------------------------------------------------------- /gmail_analytics_check/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import __version__ 2 | from .cli import * -------------------------------------------------------------------------------- /run.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | from gmail_analytics_check import cli 3 | 4 | if __name__ == "__main__": 5 | cli.run() 6 | -------------------------------------------------------------------------------- /gmail_analytics_check/__main__.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | """ 3 | Entrypoint 4 | """ 5 | from cli import run 6 | 7 | if __name__ == "__main__": 8 | run() 9 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PROJECT_NAME=gmail_analytics_check 2 | LINT_FILES=gmail_analytics_check tests 3 | 4 | test: 5 | coverage run --source=./gmail_analytics_check -m pytest tests 6 | coverage report -m 7 | coverage html 8 | 9 | rerun-tests: 10 | pytest --lf -vv 11 | 12 | lint: 13 | @echo 'syntax errors or undefined names' 14 | flake8 --count --select=E9,F63,F7,F82 --show-source --statistics ${LINT_FILES} 15 | @echo 'warning' 16 | flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --ignore=E731,W503,E501 ${LINT_FILES} 17 | 18 | @echo 'mypy' 19 | mypy ${LINT_FILES} 20 | 21 | format: 22 | @echo 'black' 23 | black --skip-string-normalization ${LINT_FILES} 24 | 25 | clean: 26 | rm -rf reports htmcov dist build *.egg-info *.txt *.csv *.pdf 27 | 28 | install: 29 | pip3 install . 30 | 31 | rename: 32 | @python3 update.py -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | exec(open('gmail_analytics_check/_version.py').read()) 4 | 5 | with open('requirements.txt') as rf: 6 | requires = rf.read().splitlines() 7 | 8 | with open('README.md') as fh: 9 | long_description = fh.read() 10 | 11 | setup( 12 | name="gmail_analytics_check", 13 | version=__version__, 14 | description="A skeleton for OSINT CLI tool", 15 | long_description=long_description, 16 | long_description_content_type="text/markdown", 17 | url="https://github.com/soxoj/osint-cli-tool-skeleton", 18 | author="Soxoj", 19 | author_email="soxoj@protonmail.com", 20 | entry_points={'console_scripts': ['gmail_analytics_check = gmail_analytics_check.__init__:run']}, 21 | license="MIT", 22 | packages=find_packages(), 23 | include_package_data=True, 24 | install_requires=requires, 25 | ) 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 soxoj 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Gmail Analytics Check 2 | 3 | - Find GAIA ID for any Gmail 4 | - Resolve any alternate email of Gmail account to the primary email 5 | 6 | ## Setup 7 | 8 | You must do it only once for preparing `data.txt` file with long-living cookies. 9 | 10 | 1. Create a [Google Analytics project](https://analytics.google.com/analytics/web/#/). 11 | 2. Download [GHunt](https://github.com/mxrch/GHunt), run "check_and_gen" 12 | 3. Generate [cookies the most convenient way](https://github.com/mxrch/GHunt/tree/master#usage 13 | ). 14 | 4. Сopy resources/data.txt file to path with script. 15 | 16 | ## Usage 17 | 18 | **The tool based on the template [osint-cli-tool-skeleton](https://github.com/soxoj/osint-cli-tool-skeleton)**. Read its README to explore all the available functionality. 19 | 20 | ```sh 21 | $ ./run.py ceo@telegram.org 22 | 23 | Target: ceo@telegram.org 24 | Results found: 1 25 | 1) Gaia Id: 105057129383411154227 26 | Canonical Email: ceo@telegram.org 27 | 28 | ------------------------------ 29 | Total found: 1 30 | 31 | 32 | ``` 33 | 34 | ## How it works 35 | 36 | https://twitter.com/subfnSecurity/status/1255741950914727942 37 | 38 | See also: https://t.me/osint_mindset/62 39 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /gmail_analytics_check/report.py: -------------------------------------------------------------------------------- 1 | from colorama import init 2 | import csv 3 | import termcolor 4 | 5 | from .core import OutputData, OutputDataList 6 | 7 | 8 | # use Colorama to make Termcolor work on Windows too 9 | init() 10 | 11 | 12 | class Output: 13 | def __init__(self, data: OutputDataList, *args, **kwargs): 14 | self.data = data 15 | 16 | def put(self): 17 | pass 18 | 19 | 20 | class PlainOutput(Output): 21 | def __init__(self, *args, **kwargs): 22 | self.is_colored = kwargs.get('colored', True) 23 | super().__init__(*args, **kwargs) 24 | 25 | def colored(self, val, color): 26 | if not self.is_colored: 27 | return val 28 | 29 | return termcolor.colored(val, color) 30 | 31 | def put(self): 32 | text = '' 33 | total = 0 34 | olist = self.data 35 | 36 | for o in olist: 37 | i = o.input_data 38 | 39 | text += f'Target: {self.colored(str(i), "green")}\n' 40 | text += f'Results found: {len(o.results)}\n' 41 | 42 | for n, r in enumerate(o.results): 43 | text += f'{n+1}) ' 44 | total += 1 45 | 46 | for k in r.fields: 47 | key = k.title().replace('_', ' ') 48 | val = r.__dict__.get(k) 49 | if val is None: 50 | val = '' 51 | 52 | text += f'{self.colored(key, "yellow")}: {val}\n' 53 | 54 | text += '\n' 55 | 56 | text += '-'*30 + '\n' 57 | 58 | text += f'Total found: {total}\n' 59 | 60 | return text 61 | 62 | 63 | class TXTOutput(PlainOutput): 64 | def __init__(self, *args, **kwargs): 65 | self.filename = kwargs.get('filename', 'report.txt') 66 | super().__init__(*args, **kwargs) 67 | self.is_colored = False 68 | 69 | def put(self): 70 | text = super().put() 71 | with open(self.filename, 'w') as f: 72 | f.write(text) 73 | 74 | return f'Results were saved to file {self.filename}' 75 | 76 | 77 | class CSVOutput(Output): 78 | def __init__(self, *args, **kwargs): 79 | self.filename = kwargs.get('filename', 'report.csv') 80 | super().__init__(*args, **kwargs) 81 | 82 | def put(self): 83 | if not len(self.data) and not len(self.data[0].results): 84 | return '' 85 | 86 | fields = self.data[0].results[0].fields 87 | fieldnames = ['Target'] + [k.title().replace('_', ' ') for k in fields] 88 | 89 | with open(self.filename, 'w') as csvfile: 90 | writer = csv.DictWriter(csvfile, fieldnames=fieldnames, quoting=csv.QUOTE_ALL) 91 | writer.writeheader() 92 | 93 | for o in self.data: 94 | i = o.input_data 95 | row = {'Target': i} 96 | 97 | for r in o.results: 98 | for k in fields: 99 | key = k.title().replace('_', ' ') 100 | val = r.__dict__.get(k) 101 | row[key] = val 102 | 103 | writer.writerow(row) 104 | 105 | return f'Results were saved to file {self.filename}' 106 | -------------------------------------------------------------------------------- /check.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from hashlib import sha1 3 | import json 4 | import os 5 | import re 6 | import requests 7 | import sys 8 | import time 9 | 10 | 11 | URL = 'https://analyticssuitefrontend-pa.clients6.google.com/v1/usermanagement/users?alt=json&key={}' 12 | 13 | HEADERS = { 14 | 'authority': 'analyticssuitefrontend-pa.clients6.google.com', 15 | 'pragma': 'no-cache', 16 | 'cache-control': 'no-cache', 17 | 'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="96", "Google Chrome";v="96"', 18 | 'x-goog-encode-response-if-executable': 'base64', 19 | 'x-origin': 'https://analytics.google.com', 20 | 'x-clientdetails': 'appVersion=5.0%20(Macintosh%3B%20Intel%20Mac%20OS%20X%2010_15_7)%20AppleWebKit%2F537.36%20(KHTML%2C%20like%20Gecko)%20Chrome%2F96.0.4664.110%20Safari%2F537.36&platform=MacIntel&userAgent=Mozilla%2F5.0%20(Macintosh%3B%20Intel%20Mac%20OS%20X%2010_15_7)%20AppleWebKit%2F537.36%20(KHTML%2C%20like%20Gecko)%20Chrome%2F96.0.4664.110%20Safari%2F537.36', 21 | 'sec-ch-ua-mobile': '?0', 22 | 'content-type': 'application/json', 23 | 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36', 24 | 'x-requested-with': 'XMLHttpRequest', 25 | 'x-javascript-user-agent': 'google-api-javascript-client/1.1.0', 26 | 'x-goog-authuser': '0', 27 | 'x-referer': 'https://analytics.google.com', 28 | 'sec-ch-ua-platform': '"macOS"', 29 | 'accept': '*/*', 30 | 'origin': 'https://analyticssuitefrontend-pa.clients6.google.com', 31 | 'sec-fetch-site': 'same-origin', 32 | 'sec-fetch-mode': 'cors', 33 | 'sec-fetch-dest': 'empty', 34 | 'accept-language': 'en,ru-RU;q=0.9,ru;q=0.8,en-US;q=0.7', 35 | 'x-client-data': 'CIi2yQEIorbJAQjEtskBCKmdygEI0aDKAQidicsBCOvyywEInvnLAQjX/MsBCOeEzAEItYXMAQjLicwBCISNzAEIrY7MAQjSj8wBCNqQzAEYjp7LAQ==', 36 | } 37 | 38 | COOKIES = {} 39 | KEY_REGEXP = re.compile(r'gmsSuiteApiKey\\x22:\\x22(.+?)\\x22,\\x22') 40 | 41 | 42 | def use_ghunt_cookies(filename): 43 | global HEADERS, COOKIES, URL 44 | 45 | if not os.path.exists(filename): 46 | print(f'There is not file {filename} with Google cookies! Download GHunt, make "check_and_gen" and copy resources/data.txt file to this path.') 47 | print('https://github.com/mxrch/GHunt/tree/master#usage') 48 | return False 49 | 50 | data = json.load(open(filename)) 51 | HEADERS.update({'cookies': '; '.join([f'{k}={v}' for k,v in data['cookies'].items()])}) 52 | COOKIES = data["cookies"] 53 | 54 | return True 55 | 56 | def use_analytics_api_key(): 57 | global URL 58 | r = requests.get('https://analytics.google.com/analytics/web/', cookies=COOKIES) 59 | key = KEY_REGEXP.search(r.text).groups()[0] 60 | URL = URL.format(key) 61 | 62 | 63 | def use_analytics_auth(): 64 | global HEADERS 65 | cur_time = str(int(time.time())) 66 | auth_hash = sha1(' '.join([cur_time, COOKIES['SAPISID'], 'https://analytics.google.com']).encode()).hexdigest() 67 | HEADERS.update({'authorization': f'SAPISIDHASH {cur_time}_{auth_hash}'}) 68 | 69 | 70 | if __name__ == '__main__': 71 | if len(sys.argv) == 1: 72 | print('Usage: ./check.py alex@telegram.org') 73 | sys.exit(1) 74 | 75 | if not use_ghunt_cookies('data.txt'): 76 | sys.exit(2) 77 | 78 | use_analytics_api_key() 79 | use_analytics_auth() 80 | 81 | email = sys.argv[1] 82 | 83 | print(HEADERS) 84 | print(COOKIES) 85 | data = json.dumps({"email":[email]}) 86 | print(data) 87 | 88 | r = requests.post('https://httpbin.org/anything', headers=HEADERS, cookies=COOKIES, data=data) 89 | print(r.text) 90 | -------------------------------------------------------------------------------- /gmail_analytics_check/executor.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import time 4 | import tqdm 5 | import sys 6 | from typing import Iterable, Any, List, Tuple, Callable, Dict 7 | 8 | 9 | QueryDraft = Tuple[Callable, List, List] 10 | 11 | 12 | def create_task_func(): 13 | if sys.version_info.minor > 6: 14 | create_asyncio_task = asyncio.create_task 15 | else: 16 | loop = asyncio.get_event_loop() 17 | create_asyncio_task = loop.create_task 18 | return create_asyncio_task 19 | 20 | 21 | class AsyncExecutor: 22 | def __init__(self, *args, **kwargs): 23 | self.logger = kwargs.get('logger', logging.getLogger('osint-tool')) 24 | 25 | async def run(self, tasks: Iterable[QueryDraft]): 26 | start_time = time.time() 27 | results = await self._run(tasks) 28 | self.execution_time = time.time() - start_time 29 | self.logger.debug(f'Spent time: {self.execution_time}') 30 | return results 31 | 32 | async def _run(self, tasks: Iterable[QueryDraft]): 33 | await asyncio.sleep(0) 34 | 35 | 36 | class AsyncioSimpleExecutor(AsyncExecutor): 37 | def __init__(self, *args, **kwargs): 38 | super().__init__(*args, **kwargs) 39 | 40 | async def _run(self, tasks: Iterable[QueryDraft]): 41 | futures = [f(*args, **kwargs) for f, args, kwargs in tasks] 42 | return await asyncio.gather(*futures) 43 | 44 | 45 | class AsyncioProgressbarExecutor(AsyncExecutor): 46 | def __init__(self, *args, **kwargs): 47 | super().__init__(*args, **kwargs) 48 | 49 | async def _run(self, tasks: Iterable[QueryDraft]): 50 | futures = [f(*args, **kwargs) for f, args, kwargs in tasks] 51 | results = [] 52 | for f in tqdm.asyncio.tqdm.as_completed(futures): 53 | results.append(await f) 54 | return results 55 | 56 | 57 | class AsyncioProgressbarSemaphoreExecutor(AsyncExecutor): 58 | def __init__(self, *args, **kwargs): 59 | super().__init__(*args, **kwargs) 60 | self.semaphore = asyncio.Semaphore(kwargs.get('in_parallel', 1)) 61 | 62 | async def _run(self, tasks: Iterable[QueryDraft]): 63 | async def _wrap_query(q: QueryDraft): 64 | async with self.semaphore: 65 | f, args, kwargs = q 66 | return await f(*args, **kwargs) 67 | 68 | async def semaphore_gather(tasks: Iterable[QueryDraft]): 69 | coros = [_wrap_query(q) for q in tasks] 70 | results = [] 71 | for f in tqdm.asyncio.tqdm.as_completed(coros): 72 | results.append(await f) 73 | return results 74 | 75 | return await semaphore_gather(tasks) 76 | 77 | 78 | class AsyncioProgressbarQueueExecutor(AsyncExecutor): 79 | def __init__(self, *args, **kwargs): 80 | super().__init__(*args, **kwargs) 81 | self.workers_count = kwargs.get('in_parallel', 10) 82 | self.progress_func = kwargs.get('progress_func', tqdm.tqdm) 83 | self.queue = asyncio.Queue(self.workers_count) 84 | self.timeout = kwargs.get('timeout') 85 | 86 | async def worker(self): 87 | while True: 88 | try: 89 | f, args, kwargs = self.queue.get_nowait() 90 | except asyncio.QueueEmpty: 91 | return 92 | 93 | query_future = f(*args, **kwargs) 94 | query_task = create_task_func()(query_future) 95 | try: 96 | result = await asyncio.wait_for(query_task, timeout=self.timeout) 97 | except asyncio.TimeoutError: 98 | result = kwargs.get('default') 99 | 100 | self.results.append(result) 101 | self.progress.update(1) 102 | self.queue.task_done() 103 | 104 | async def _run(self, queries: Iterable[QueryDraft]): 105 | self.results: List[Any] = [] 106 | 107 | queries_list = list(queries) 108 | 109 | min_workers = min(len(queries_list), self.workers_count) 110 | 111 | workers = [create_task_func()(self.worker()) for _ in range(min_workers)] 112 | 113 | self.progress = self.progress_func(total=len(queries_list)) 114 | for t in queries_list: 115 | await self.queue.put(t) 116 | await self.queue.join() 117 | for w in workers: 118 | w.cancel() 119 | self.progress.close() 120 | return self.results 121 | -------------------------------------------------------------------------------- /gmail_analytics_check/cli.py: -------------------------------------------------------------------------------- 1 | """ 2 | Commandline interface 3 | """ 4 | import asyncio 5 | import logging 6 | import os 7 | import platform 8 | import sys 9 | 10 | from argparse import ArgumentParser, RawDescriptionHelpFormatter 11 | 12 | from .core import * 13 | from .report import * 14 | 15 | 16 | def setup_arguments_parser(): 17 | from aiohttp import __version__ as aiohttp_version 18 | from ._version import __version__ 19 | 20 | version_string = '\n'.join( 21 | [ 22 | f'%(prog)s {__version__}', 23 | f'Python: {platform.python_version()}', 24 | f'Aiohttp: {aiohttp_version}', 25 | ] 26 | ) 27 | 28 | parser = ArgumentParser( 29 | formatter_class=RawDescriptionHelpFormatter, 30 | description=f"OSINT tool v{__version__}\n" 31 | ) 32 | target_group = parser.add_argument_group( 33 | 'INPUT', 'Options for input data' 34 | ) 35 | target_group.add_argument( 36 | "target", 37 | nargs='*', 38 | metavar="TARGET", 39 | help="One or more target to get info by.", 40 | ) 41 | target_group.add_argument( 42 | "--target-list", 43 | action="store", 44 | dest="target_list_filename", 45 | default='', 46 | help="Path to text file with list of targets.", 47 | ) 48 | target_group.add_argument( 49 | "--targets-from-stdin", 50 | action="store_true", 51 | dest="target_list_stdin", 52 | default=False, 53 | help="Read all the lines from standard input.", 54 | ) 55 | out_group = parser.add_argument_group( 56 | 'OUTPUT', 'Options for output reports' 57 | ) 58 | out_group.add_argument( 59 | "--csv-report", 60 | "-oC", 61 | action="store", 62 | dest="csv_filename", 63 | default='', 64 | help="Path to file for saving CSV report.", 65 | ) 66 | out_group.add_argument( 67 | "--text-report", 68 | "-oT", 69 | action="store", 70 | dest="txt_filename", 71 | default='', 72 | help="Path to file for saving TXT report (grepable console output).", 73 | ) 74 | parser.add_argument( 75 | "--version", 76 | action="version", 77 | version=version_string, 78 | help="Display version information and dependencies.", 79 | ) 80 | parser.add_argument( 81 | "--timeout", 82 | action="store", 83 | metavar='TIMEOUT', 84 | dest="timeout", 85 | default=100, 86 | help="Time in seconds to wait for execution.", 87 | ) 88 | parser.add_argument( 89 | "--cookie-jar-file", 90 | metavar="COOKIE_FILE", 91 | dest="cookie_file", 92 | default='', 93 | help="File with cookies.", 94 | ) 95 | parser.add_argument( 96 | "--proxy", 97 | "-p", 98 | metavar='PROXY_URL', 99 | action="store", 100 | dest="proxy", 101 | default='', 102 | help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080", 103 | ) 104 | parser.add_argument( 105 | "--verbose", 106 | "-v", 107 | action="store_true", 108 | dest="verbose", 109 | default=False, 110 | help="Display extra information and metrics.", 111 | ) 112 | parser.add_argument( 113 | "--silent", 114 | "-s", 115 | action="store_true", 116 | dest="silent", 117 | default=False, 118 | help="Suppress console output.", 119 | ) 120 | parser.add_argument( 121 | "--info", 122 | "-vv", 123 | action="store_true", 124 | dest="info", 125 | default=False, 126 | help="Display extra/service information and metrics.", 127 | ) 128 | parser.add_argument( 129 | "--debug", 130 | "-vvv", 131 | "-d", 132 | action="store_true", 133 | dest="debug", 134 | default=False, 135 | help="Display extra/service/debug information and metrics, save responses in debug.log.", 136 | ) 137 | parser.add_argument( 138 | "--no-color", 139 | action="store_true", 140 | dest="no_color", 141 | default=False, 142 | help="Don't color terminal output", 143 | ) 144 | parser.add_argument( 145 | "--no-progressbar", 146 | action="store_true", 147 | dest="no_progressbar", 148 | default=False, 149 | help="Don't show progressbar.", 150 | ) 151 | 152 | return parser 153 | 154 | 155 | async def main(): 156 | # Logging 157 | log_level = logging.ERROR 158 | logging.basicConfig( 159 | format='[%(filename)s:%(lineno)d] %(levelname)-3s %(asctime)s %(message)s', 160 | datefmt='%H:%M:%S', 161 | level=log_level, 162 | ) 163 | logger = logging.getLogger('osint-cli-tool-skeleton') 164 | logger.setLevel(log_level) 165 | 166 | arg_parser = setup_arguments_parser() 167 | args = arg_parser.parse_args() 168 | 169 | if args.debug: 170 | log_level = logging.DEBUG 171 | elif args.info: 172 | log_level = logging.INFO 173 | elif args.verbose: 174 | log_level = logging.WARNING 175 | 176 | logger.setLevel(log_level) 177 | 178 | input_data = [] 179 | 180 | # read from file 181 | if args.target_list_filename: 182 | if not os.path.exists(args.target_list_filename): 183 | print(f'There is no file {args.target_list_filename}') 184 | else: 185 | with open(args.target_list_filename) as f: 186 | input_data = [InputData(t) for t in f.read().splitlines()] 187 | 188 | # or read from stdin 189 | # e.g. cat list.txt | ./run.py --targets-from-stdin 190 | elif args.target_list_stdin: 191 | for line in sys.stdin: 192 | input_data.append(InputData(line.strip())) 193 | 194 | # or read from arguments 195 | elif args.target: 196 | input_data = [InputData(t) for t in args.target] 197 | 198 | if not input_data: 199 | print('There are no targets to check!') 200 | sys.exit(1) 201 | 202 | # convert input to output 203 | processor = Processor( 204 | no_progressbar=args.no_progressbar, 205 | proxy=args.proxy, 206 | ) 207 | output_data = await processor.process(input_data) 208 | 209 | # console output 210 | if not args.silent: 211 | r = PlainOutput(output_data, colored=not args.no_color) 212 | print(r.put()) 213 | 214 | # save CSV report 215 | if args.csv_filename: 216 | r = CSVOutput(output_data, filename=args.csv_filename) 217 | print(r.put()) 218 | 219 | # save TXT report 220 | 221 | if args.txt_filename: 222 | r = TXTOutput(output_data, filename=args.txt_filename) 223 | print(r.put()) 224 | 225 | await processor.close() 226 | 227 | 228 | def run(): 229 | loop = asyncio.get_event_loop() 230 | loop.run_until_complete(main()) 231 | 232 | 233 | if __name__ == "__main__": 234 | run() -------------------------------------------------------------------------------- /gmail_analytics_check/core.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from hashlib import sha1 3 | import json 4 | import os 5 | import re 6 | import ssl 7 | import sys 8 | import time 9 | from typing import List, Any 10 | 11 | from aiohttp import TCPConnector, ClientSession 12 | from bs4 import BeautifulSoup as bs 13 | 14 | from .executor import AsyncioProgressbarQueueExecutor, AsyncioSimpleExecutor 15 | 16 | 17 | def create_ssl_context(proto=ssl.PROTOCOL_SSLv23, 18 | verify_mode=ssl.CERT_NONE, 19 | protocols=None, 20 | options=None, 21 | ciphers="ALL"): 22 | protocols = protocols or ('PROTOCOL_SSLv3','PROTOCOL_TLSv1', 23 | 'PROTOCOL_TLSv1_1','PROTOCOL_TLSv1_2') 24 | options = options or ('OP_CIPHER_SERVER_PREFERENCE','OP_SINGLE_DH_USE', 25 | 'OP_SINGLE_ECDH_USE','OP_NO_COMPRESSION') 26 | context = ssl.SSLContext(proto) 27 | context.verify_mode = verify_mode 28 | # reset protocol, options 29 | # context.protocol = 0 30 | context.options = 0 31 | # for p in protocols: 32 | # context.protocol |= getattr(ssl, p, 0) 33 | for o in options: 34 | context.options |= getattr(ssl, o, 0) 35 | context.set_ciphers(ciphers) 36 | return context 37 | 38 | 39 | class InputData: 40 | def __init__(self, value: str): 41 | self.value = value 42 | 43 | def __str__(self): 44 | return self.value 45 | 46 | def __repr__(self): 47 | return self.value 48 | 49 | 50 | class OutputData: 51 | def __init__(self, gaia_id, canonical_email, error): 52 | self.gaia_id = gaia_id 53 | self.canonical_email = canonical_email 54 | self.error = error 55 | 56 | @property 57 | def fields(self): 58 | fields = list(self.__dict__.keys()) 59 | fields.remove('error') 60 | 61 | return fields 62 | 63 | def __str__(self): 64 | error = '' 65 | if self.error: 66 | error = f' (error: {str(self.error)}' 67 | 68 | result = '' 69 | 70 | for field in self.fields: 71 | field_pretty_name = field.title().replace('_', ' ') 72 | value = self.__dict__.get(field) 73 | if value: 74 | result += f'{field_pretty_name}: {str(value)}\n' 75 | 76 | result += f'{error}' 77 | return result 78 | 79 | 80 | class OutputDataList: 81 | def __init__(self, input_data: InputData, results: List[OutputData]): 82 | self.input_data = input_data 83 | self.results = results 84 | 85 | def __repr__(self): 86 | return f'Target {self.input_data}:\n' + '--------\n'.join(map(str, self.results)) 87 | 88 | 89 | class Processor: 90 | URL = 'https://analyticssuitefrontend-pa.clients6.google.com/v1/usermanagement/users?alt=json&key={}' 91 | HEADERS_WEB = { 92 | 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36', 93 | 'accept-language': 'en,ru-RU;q=0.9,ru;q=0.8,en-US;q=0.7', 94 | 'sec-fetch-site': 'same-origin', 95 | 'sec-fetch-mode': 'cors', 96 | 'sec-fetch-dest': 'empty', 97 | 'x-referer': 'https://analytics.google.com', 98 | 'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="96", "Google Chrome";v="96"', 99 | 'sec-ch-ua-mobile': '?0', 100 | } 101 | HEADERS = { 102 | # 'authority': 'analyticssuitefrontend-pa.clients6.google.com', 103 | 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36', 104 | 'pragma': 'no-cache', 105 | 'accept-encoding': 'gzip, deflate', 106 | 'accept': '*/*', 107 | 'cache-control': 'no-cache', 108 | 'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="96", "Google Chrome";v="96"', 109 | 'x-goog-encode-response-if-executable': 'base64', 110 | 'x-origin': 'https://analytics.google.com', 111 | 'x-clientdetails': 'appVersion=5.0%20(Macintosh%3B%20Intel%20Mac%20OS%20X%2010_15_7)%20AppleWebKit%2F537.36%20(KHTML%2C%20like%20Gecko)%20Chrome%2F96.0.4664.110%20Safari%2F537.36&platform=MacIntel&userAgent=Mozilla%2F5.0%20(Macintosh%3B%20Intel%20Mac%20OS%20X%2010_15_7)%20AppleWebKit%2F537.36%20(KHTML%2C%20like%20Gecko)%20Chrome%2F96.0.4664.110%20Safari%2F537.36', 112 | 'sec-ch-ua-mobile': '?0', 113 | 'content-type': 'application/json', 114 | 'x-requested-with': 'XMLHttpRequest', 115 | 'x-javascript-user-agent': 'google-api-javascript-client/1.1.0', 116 | 'x-goog-authuser': '0', 117 | 'x-referer': 'https://analytics.google.com', 118 | 'sec-ch-ua-platform': '"macOS"', 119 | 'origin': 'https://analyticssuitefrontend-pa.clients6.google.com', 120 | 'sec-fetch-site': 'same-origin', 121 | 'sec-fetch-mode': 'cors', 122 | 'sec-fetch-dest': 'empty', 123 | 'accept-language': 'en,ru-RU;q=0.9,ru;q=0.8,en-US;q=0.7', 124 | 'x-client-data': 'CIi2yQEIorbJAQjEtskBCKmdygEI0aDKAQidicsBCOvyywEInvnLAQjX/MsBCOeEzAEItYXMAQjLicwBCISNzAEIrY7MAQjSj8wBCNqQzAEYjp7LAQ==', 125 | } 126 | COOKIES = {} 127 | KEY_REGEXP = re.compile(r'gmsSuiteApiKey\\x22:\\x22(.+?)\\x22,\\x22') 128 | 129 | def __init__(self, *args, **kwargs): 130 | from aiohttp_socks import ProxyConnector 131 | 132 | # make http client session 133 | proxy = kwargs.get('proxy') 134 | self.proxy = proxy 135 | if proxy: 136 | connector = ProxyConnector.from_url(proxy, ssl=False) 137 | else: 138 | connector = TCPConnector(ssl=False) 139 | # connector = TCPConnector( 140 | # ssl_context=create_ssl_context(), 141 | # use_dns_cache=True, 142 | # ) 143 | self.session = ClientSession( 144 | connector=connector, trust_env=False 145 | ) 146 | if kwargs.get('no_progressbar'): 147 | self.executor = AsyncioSimpleExecutor() 148 | else: 149 | self.executor = AsyncioProgressbarQueueExecutor() 150 | 151 | def use_ghunt_cookies(self, filename): 152 | if not os.path.exists(filename): 153 | print(f'There is not file {filename} with Google cookies! Download GHunt, make "check_and_gen" and copy resources/data.txt file to this path.') 154 | print('https://github.com/mxrch/GHunt/tree/master#usage') 155 | return False 156 | 157 | data = json.load(open(filename)) 158 | self.HEADERS.update({'cookie': '; '.join([k+'='+v.strip('"') for k,v in data['cookies'].items()])}) 159 | self.COOKIES = data["cookies"] 160 | 161 | return True 162 | 163 | async def use_analytics_api_key(self): 164 | r = await self.session.get('https://analytics.google.com/analytics/web/', headers=self.HEADERS_WEB, cookies=self.COOKIES) 165 | text = await r.text() 166 | key = self.KEY_REGEXP.search(text).groups()[0] 167 | self.URL = self.URL.format(key.strip('"')) 168 | 169 | def use_analytics_auth(self): 170 | cur_time = str(int(time.time())) 171 | auth_hash = sha1(' '.join([cur_time, self.COOKIES['SAPISID'], 'https://analytics.google.com']).encode()).hexdigest() 172 | self.HEADERS.update({'authorization': f'SAPISIDHASH {cur_time}_{auth_hash}'}) 173 | 174 | async def close(self): 175 | await self.session.close() 176 | 177 | async def request(self, input_data: InputData) -> OutputDataList: 178 | result = None 179 | error = None 180 | output_data = [] 181 | 182 | # gmail setup 183 | res = self.use_ghunt_cookies('data.txt') 184 | await self.use_analytics_api_key() 185 | self.use_analytics_auth() 186 | 187 | try: 188 | email = input_data.value 189 | data = json.dumps({"email":[email]}) 190 | r = await self.session.post(self.URL, headers=self.HEADERS, data=data) 191 | result = await r.json() 192 | 193 | for user_data in result.get('principal', []): 194 | gaia_id = user_data['user']['gaiaId'] 195 | canonical_email = user_data['user']['email'] 196 | output_data.append(OutputData(gaia_id, canonical_email, error)) 197 | 198 | if 'error' in result: 199 | print(error) 200 | # TODO: proper errors processing 201 | # output_data.append(OutputData('', '', result['error']['message'])) 202 | 203 | except Exception as e: 204 | error = e 205 | 206 | results = OutputDataList(input_data, output_data) 207 | 208 | return results 209 | 210 | 211 | async def process(self, input_data: List[InputData]) -> List[OutputDataList]: 212 | tasks = [ 213 | ( 214 | self.request, # func 215 | [i], # args 216 | {} # kwargs 217 | ) 218 | for i in input_data 219 | ] 220 | 221 | results = await self.executor.run(tasks) 222 | 223 | return results 224 | --------------------------------------------------------------------------------