├── .gitignore ├── LICENSE ├── README.adoc ├── backup_aircraft.py ├── doc └── S3.png ├── requirements.txt ├── secrets.conf └── src ├── __init__.py ├── adsb_exchange.py ├── aws.py ├── backup.py ├── twilio_api.py └── utilities.py /.gitignore: -------------------------------------------------------------------------------- 1 | # all generated files 2 | *.gz 3 | *.log 4 | 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | pip-wheel-metadata/ 28 | share/python-wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .nox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | *.py,cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | test 58 | .vscode 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ 136 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 cribdragg3r 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.adoc: -------------------------------------------------------------------------------- 1 | = PiAware Parser 2 | joshua.faust@sevrosecurity.com 3 | :toc: 4 | 5 | If you're using dump1090-fa on a Raspberry Pi to track aircraft movement, this package allows you to save aircraft information to your local system and/or AWS S3. 6 | 7 | Functionality to enrich your data with the https://rapidapi.com/adsbx/api/adsbexchange-com1/pricing[ADSBexchange API] has also been added. 8 | 9 | == How it works 10 | 11 | FlightAware's dump1090-fa maintains a dynamic JSON file, `/run/dump1090-fa/aircraft.json`, which holds the base information you see within your SkyAware web dashboard. This package reads the data from that file, writes it to a persistent gzipped CSV file, and continually checks for changes within `aircraft.json`. 12 | 13 | Essentially: 14 | 15 | . Read `aircraft.json` 16 | . Check if the user has requested to use the https://rapidapi.com/adsbx/api/adsbexchange-com1/pricing[ADSBexchange API]. 17 | .. This is to enrich the data if you do have access. 18 | . Write GZIPPED CSV data to a persistent file. 19 | .. Filename is: `aircraft_.csv.gz` 20 | . Check if `aircraft.json` has new data. 21 | 22 | == Additional APIs 23 | 24 | I've added some optional and configurable API integrations for users who want them. These are all turned off by default and are enabled via CLI flags.
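For example, to run the parser with all three integrations enabled at once (a hypothetical invocation, assuming you start the script with `python3` from the repository root and have filled in the matching sections of `secrets.conf`):

[source, text]
----
python3 backup_aircraft.py --useapi --s3 --twilio
----

The flags are independent, so any combination of them can be enabled.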
25 | 26 | === ADSB Exchange API 27 | [%collapsible] 28 | ==== 29 | 30 | https://rapidapi.com/adsbx/api/adsbexchange-com1/pricing[ADSBexchange API] allows us to enrich the local aircraft data from `aircraft.json` with several new fields. To fully utilize this API, you will need to purchase a monthly license, which will give you an API key that you can add to `secrets.conf`. 31 | 32 | Here is an example of the same record, non-enriched and enriched: 33 | 34 | .Not Enriched 35 | [source, json] 36 | ---- 37 | { 38 | "epoch": 1609112746.1, 39 | "icao": "ac6364", 40 | "ident": "DAL1936", 41 | "alt_baro": "11025", 42 | "alt_geom": "10475", 43 | "track": "4.5", 44 | "lat": "44.350596", 45 | "lon": "-93.20506" 46 | } 47 | ---- 48 | 49 | .Enriched 50 | [source, json] 51 | ---- 52 | { 53 | "epoch": 1609112746.1, 54 | "icao": "ac6364", 55 | "ident": "DAL1936", 56 | "alt_baro": "11025", 57 | "alt_geom": "10475", 58 | "track": "4.5", 59 | "lat": "44.350596", 60 | "lon": "-93.20506", 61 | "ac": [ 62 | { 63 | "postime": "1609112744641", 64 | "icao": "AC6364", 65 | "reg": "N898DN", 66 | "type": "B739", 67 | "wtc": "2", 68 | "spd": "309.9", 69 | "altt": "0", 70 | "alt": "11050", 71 | "galt": "11060", 72 | "talt": "7008", 73 | "lat": "44.348877", 74 | "lon": "-93.205261", 75 | "vsit": "0", 76 | "vsi": "-1152", 77 | "trkh": "0", 78 | "ttrk": "0", 79 | "trak": "4.4", 80 | "sqk": "6137", 81 | "call": "DAL1936", 82 | "gnd": "0", 83 | "trt": "5", 84 | "pos": "1", 85 | "mlat": "0", 86 | "tisb": "0", 87 | "sat": "0", 88 | "opicao": "DAL", 89 | "cou": "United States", 90 | "mil": "0", 91 | "interested": "0", 92 | "from": "LGA La Guardia New York United States", 93 | "to": "BOS General Edward Lawrence Logan Boston United States" 94 | } 95 | ], 96 | "total": 1, 97 | "ctime": 1609112746914 98 | } 99 | ---- 100 | ==== 101 | 102 | === AWS API 103 | [%collapsible] 104 | ==== 105 | I've built in AWS S3 upload capability. All you need to provide is your AWS account `access_key` and `secret_key` within `secrets.conf`, and supply the `--s3` CLI argument to tell the program to save data to S3. 106 | 107 | image::doc/S3.png[] 108 | ==== 109 | 110 | === Twilio API 111 | [%collapsible] 112 | ==== 113 | https://www.twilio.com[Twilio] allows us to send text messages to our phones. In this case, a text message is sent to your phone when: 114 | 115 | * The script is started 116 | * After an internet connection outage has been remediated 117 | * Every hour, to let you know your system is functioning properly. 118 | 119 | There are certainly more use cases for this in the future, but the main purpose is to let you know if/when your system breaks or goes down.
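Under the hood, these notifications go through `send_text_message()` in `src/twilio_api.py`, which wraps the official `twilio` Python client. Here is a minimal sketch of that call, where the credential and phone-number variables stand in for the values loaded from `secrets.conf`:

[source, python]
----
from twilio.rest import Client

# Placeholder variables: in this project they are read from secrets.conf
client = Client(account_sid, auth_token)
client.messages.create(
    to=f"+{to_phone_number}",       # number that receives the alert
    from_=f"+{from_phone_number}",  # your purchased Twilio number
    body="PiAware has been started!",
)
----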
120 | 121 | To configure Twilio, you'll need to purchase a number and add the following to `secrets.conf`: 122 | 123 | [source, conf] 124 | ---- 125 | account_sid = 126 | auth_token = 127 | to_phone_number = 128 | from_phone_number = 129 | ---- 130 | 131 | ==== 132 | 133 | 134 | == How to use 135 | 136 | * Python 3.7+ 137 | * `pip install -r requirements.txt` 138 | * If you're going to use the ADSBexchange API, make sure to add your API key to `secrets.conf` 139 | * If you want to upload your files to AWS S3, add your credentials to `secrets.conf` 140 | 141 | [source, text] 142 | ---- 143 | usage: backup_aircraft.py [-h] [-a] [-s] [-t] 144 | 145 | optional arguments: 146 | -h, --help show this help message and exit 147 | -a, --useapi Use the ADSBexchange api to enrich aircraft data 148 | -s, --s3 Upload compressed flight data to S3 149 | -t, --twilio Use Twilio to send text messages for outages and uptime info 150 | ---- 151 | -------------------------------------------------------------------------------- /backup_aircraft.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import os 3 | import logging 4 | import argparse 5 | import random 6 | import time 7 | 8 | import src.backup as backup 9 | import src.utilities as utils 10 | import src.twilio_api as twil 11 | 12 | from src.adsb_exchange import adsb_api_key_exists 13 | from src.aws import aws_api_keys_exist 14 | from datetime import datetime as dt 15 | from datetime import date, timedelta 16 | 17 | logname = f"aircraft_{date.today()}.log" 18 | logging.basicConfig(filename=logname, level=logging.INFO) 19 | LAST_MESSAGE_SENT = dt.now() 20 | 21 | 22 | # The user may have ADSBexchange API access and therefore can enrich their data if wanted: 23 | parser = argparse.ArgumentParser() 24 | parser.add_argument( 25 | "-a", 26 | "--useapi", 27 | action="store_true", 28 | required=False, 29 | dest="adsb_api", 30 | default=False, 31 | help="Use the ADSBexchange api to enrich aircraft data", 32 | ) 33 | parser.add_argument( 34 | "-s", 35 | "--s3", 36 | action="store_true", 37 | required=False, 38 | dest="aws_api", 39 | default=False, 40 | help="Upload compressed flight data to S3" 41 | ) 42 | parser.add_argument( 43 | "-t", 44 | "--twilio", 45 | action="store_true", 46 | required=False, 47 | dest="twi_api", 48 | default=False, 49 | help="Use Twilio to send text messages for outages and uptime info" 50 | ) 51 | args = parser.parse_args() 52 | 53 | if args.adsb_api: 54 | if not adsb_api_key_exists(): 55 | print("[!] You have not added a valid ADSBexchange API key to secrets.conf") 56 | exit(1) 57 | 58 | if args.aws_api: 59 | if not aws_api_keys_exist(): 60 | print("[!] You have not added valid AWS API keys to secrets.conf") 61 | exit(1) 62 | 63 | if args.twi_api: 64 | if not twil.twilio_api_keys_exist(): 65 | print("[!] You have not added valid Twilio API information to secrets.conf") 66 | exit(1) 67 | else: 68 | twil.send_text_message("PiAware has been started!") 69 | 70 | 71 | previous_file_hash = "" 72 | aircraft_file_path = "/run/dump1090-fa/aircraft.json" 73 | print("[i] Parser is Running.") 74 | 75 | while True: 76 | file_hash = utils.get_file_sha256(aircraft_file_path) 77 | 78 | if previous_file_hash != file_hash: 79 | previous_file_hash = file_hash 80 | backup.get_local_aircraft_data(aircraft_file_path, args.adsb_api, args.aws_api, args.twi_api) 81 | 82 | if args.twi_api: 83 | runtime = utils.get_time_delta_hours(dt.now(), LAST_MESSAGE_SENT) 84 | if runtime >= 1: 85 | twil.send_text_message("PiAware is Still Running Successfully!") 86 | LAST_MESSAGE_SENT = dt.now() 87 | 88 | t = random.uniform(3, 10) 89 | logging.info(f"[{dt.now()}]:Sleeping for {t} seconds") 90 | time.sleep(t) 91 | -------------------------------------------------------------------------------- /doc/S3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshfaust/piaware_parser/38dbb1b1a875520e1fad892071ef34e7c21c9c52/doc/S3.png -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | boto3==1.16.43 2 | botocore==1.19.43 3 | certifi==2020.12.5 4 | chardet==4.0.0 5 | configparser==5.0.1 6 | idna==2.10 7 | jmespath==0.10.0 8 | pkg-resources==0.0.0 9 | PyJWT==2.0.0 10 | python-dateutil==2.8.1 11 | pytz==2020.5 12 | requests==2.25.1 13 | s3transfer==0.3.3 14 | six==1.15.0 15 | twilio==6.50.1 16 | tzlocal==2.1 17 | urllib3>=1.26.4 -------------------------------------------------------------------------------- /secrets.conf: -------------------------------------------------------------------------------- 1 | [adsbexchange] 2 | key = None 3 | 4 | [aws] 5 | access_key = None 6 | secret_key = None 7 | 8 | [twilio] 9 | account_sid = None 10 | auth_token = None 11 | to_phone_number = None 12 | from_phone_number = None -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshfaust/piaware_parser/38dbb1b1a875520e1fad892071ef34e7c21c9c52/src/__init__.py -------------------------------------------------------------------------------- /src/adsb_exchange.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | import configparser 4 | import logging 5 | import src.utilities as utils 6 | 7 | 8 | def _get_api_key() -> str: 9 | """ 10 | Retrieves the ADSBexchange API key from secrets.conf 11 | """ 12 | config = configparser.ConfigParser() 13 | config.read("secrets.conf") 14 | return config.get("adsbexchange", "key") 15 | 16 | 17 | def adsb_api_key_exists() -> bool: 18 | """ 19 | Make sure that the API key has been entered into secrets.conf 20 | """ 21 | key = _get_api_key() 22 | if (key != "None"): 23 | return True 24 | return False 25 | 26 | 27 | def get_aircraft_by_icao(icao: str) -> dict: 28 | """ 29 | Queries ADSBexchange aircraft/flight data by ICAO 30 | """ 31 | try: 32 | uri = f"https://adsbexchange-com1.p.rapidapi.com/icao/{icao.upper()}/" 33 | headers = { 34 | 'x-rapidapi-key': _get_api_key(), 35 | 'x-rapidapi-host': "adsbexchange-com1.p.rapidapi.com" 36 | } 37 | r = requests.get(uri, headers=headers) 38 | aircraft_data
= r.json() 39 | return aircraft_data 40 | except requests.exceptions.ConnectionError as e: 41 | logging.error(f"ADSB HTTP Exception:{e}") 42 | return {} 43 | except requests.exceptions.HTTPError as e: 44 | logging.error(f"ADSB HTTP Exception:{e}") 45 | return {} 46 | except json.JSONDecodeError as e: 47 | logging.error(f"JSON_DECODING_ERROR: {r.text}") 48 | return {"error":r.text} 49 | 50 | 51 | def get_aircraft_by_registration(reg: str) -> dict: 52 | """ 53 | Queries ADSBexchange aircraft/flight data by registration 54 | """ 55 | try: 56 | uri = f"https://adsbexchange-com1.p.rapidapi.com/registration/{reg.upper()}/" 57 | headers = { 58 | 'x-rapidapi-key': _get_api_key(), 59 | 'x-rapidapi-host': "adsbexchange-com1.p.rapidapi.com" 60 | } 61 | r = requests.get(uri, headers=headers) 62 | aircraft_data = r.json() 63 | return aircraft_data 64 | except requests.exceptions.ConnectionError as e: 65 | logging.error(f"ADSB HTTP Exception:{e}") 66 | return {} 67 | except requests.exceptions.HTTPError as e: 68 | logging.error(f"ADSB HTTP Exception:{e}") 69 | return {} 70 | except json.JSONDecodeError as e: 71 | logging.error(f"JSON_DECODING_ERROR: {r.text}") 72 | return {"error":r.text} 73 | -------------------------------------------------------------------------------- /src/aws.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import logging 3 | import configparser 4 | from botocore.exceptions import ClientError, NoCredentialsError 5 | 6 | 7 | def _get_api_keys() -> tuple: 8 | config = configparser.ConfigParser() 9 | config.read("secrets.conf") 10 | access_key = config.get("aws", "access_key") 11 | secret_key = config.get("aws", "secret_key") 12 | return (secret_key, access_key) 13 | 14 | 15 | def aws_api_keys_exist() -> bool: 16 | """ 17 | Make sure that the API keys have been entered into secrets.conf 18 | """ 19 | sk, ak = _get_api_keys() 20 | if (sk != "None") and (ak != "None"): 21 | return True 22 | return False 23 | 24 | 25 | def check_bucket_exists(bucket_name: str) -> bool: 26 | """ 27 | Check if an AWS S3 bucket exists 28 | """ 29 | try: 30 | region="us-east-1" 31 | secret_key, access_key = _get_api_keys() 32 | s3 = boto3.client('s3', region_name=region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) 33 | response = s3.list_buckets() 34 | return bucket_name in [bucket["Name"] for bucket in response["Buckets"]] 35 | except ClientError as e: 36 | logging.error(e) 37 | exit(1) 38 | 39 | 40 | def create_bucket(bucket_name: str) -> bool: 41 | """ 42 | Create an S3 bucket in a defined region (us-east-1) that 43 | will hold our saved aircraft information.
44 | """ 45 | try: 46 | region="us-east-1" 47 | secret_key, access_key = _get_api_keys() 48 | s3_client = boto3.client('s3', aws_access_key_id=access_key, aws_secret_access_key=secret_key) 49 | s3_client.create_bucket(Bucket=bucket_name) 50 | except ClientError as e: 51 | logging.error(e) 52 | return False 53 | return True 54 | 55 | 56 | def upload_to_s3(bucket_name: str, local_file: str, s3_file_name: str) -> bool: 57 | """ 58 | Uploads the gzip file to S3 59 | """ 60 | try: 61 | region="us-east-1" 62 | secret_key, access_key = _get_api_keys() 63 | s3_client = boto3.client('s3', region_name=region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) 64 | response = s3_client.upload_file(local_file, bucket_name, s3_file_name) 65 | except ClientError as e: 66 | logging.error(e) 67 | return False 68 | return True -------------------------------------------------------------------------------- /src/backup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import logging 4 | import time 5 | import src.adsb_exchange as ads 6 | import src.utilities as utils 7 | import src.twilio_api as twil 8 | 9 | from datetime import datetime as dt 10 | from datetime import date, timedelta 11 | 12 | # GLOBAL 13 | START_TIME = dt.now() 14 | SEEN_AIRCRAFT = set() 15 | 16 | 17 | def check_if_duplicate(identifier: str) -> bool: 18 | """ 19 | Check to see if we've seen this aircraft before. 20 | """ 21 | global SEEN_AIRCRAFT 22 | if identifier in SEEN_AIRCRAFT: 23 | return True 24 | else: 25 | SEEN_AIRCRAFT.add(identifier) 26 | return False 27 | 28 | 29 | def reset_seen_aircraft(current_flight_identifiers: list) -> bool: 30 | """ 31 | Given a list of current flight identifiers, reset SEEN_AIRCRAFT 32 | and reload it with the current aircraft identifiers we've just seen 33 | in order to mitigate duplication. 34 | """ 35 | try: 36 | global START_TIME, SEEN_AIRCRAFT 37 | START_TIME = dt.now() 38 | SEEN_AIRCRAFT = set() 39 | 40 | for ident in current_flight_identifiers: 41 | SEEN_AIRCRAFT.add(ident) 42 | 43 | return True 44 | except Exception as e: 45 | logging.error(e) 46 | return False 47 | 48 | 49 | def get_script_runtime() -> float: 50 | """ 51 | Calculates the time difference, in whole hours, from when the program 52 | started to the current time 53 | """ 54 | global START_TIME 55 | time_delta = dt.now() - START_TIME 56 | time_delta_hours = divmod(time_delta.total_seconds(), 3600)[0] 57 | return time_delta_hours 58 | 59 | 60 | def get_local_aircraft_data(aircraft_file_path: str, using_ads_api: bool, using_aws_api: bool, using_twilio_api: bool) -> None: 61 | """ 62 | Takes boolean flags that dictate whether we should enrich our data, 63 | upload to S3, and send Twilio notifications. Extracts known values from the json stream that 64 | exists in the Raspberry Pi host file "aircraft.json" and writes 65 | the cleaned up data to a gzipped JSON file.
66 | """ 67 | try: 68 | global SEEN_AIRCRAFT 69 | output_filename = f"aircraft_{date.today()}.json.gz" 70 | current_flight_identifiers = [] # Holds most recent flight identifiers for deduplication 71 | 72 | # Pull in the aircraft.json file 73 | with open(aircraft_file_path, "r") as aircraft: 74 | file_contents = aircraft.read() 75 | json_data = json.loads(file_contents) 76 | 77 | local_flight_data = { 78 | "epoch": None, 79 | "icao": None, 80 | "ident": None, 81 | "alt_baro": None, 82 | "alt_geom": None, 83 | "track": None, 84 | "lat": None, 85 | "lon": None, 86 | } 87 | 88 | local_flight_data_keys = ["icao", "ident", "alt_baro", "alt_geom", "track", "lat", "lon"] 89 | local_aircraft_conf_keys = ["hex", "flight", "alt_baro", "alt_geom", "track", "lat", "lon"] 90 | local_flight_data["epoch"] = json_data["now"] 91 | for flight in json_data["aircraft"]: 92 | 93 | # load the cleaned values into the local dict 94 | for i, local_key in enumerate(local_flight_data_keys): 95 | try: 96 | local_flight_data[local_key] = str(flight[local_aircraft_conf_keys[i]]).strip() 97 | except KeyError as e: 98 | local_flight_data[local_key] = None 99 | 100 | flight_unique_identifier = utils.get_string_md5(f"{local_flight_data['icao']};{str(date.today())}") 101 | current_flight_identifiers.append(flight_unique_identifier) 102 | duplicate_check = check_if_duplicate(flight_unique_identifier) 103 | 104 | if not duplicate_check: 105 | # ADSBExchange API 106 | if using_ads_api: 107 | """ 108 | Check if we've seen this aircraft/flight before; if we have, skip the API query 109 | as those API calls cost $$. 110 | """ 111 | enriched_flight_data = ads.get_aircraft_by_icao(local_flight_data["icao"]) 112 | 113 | """ 114 | Empty dictionaries evaluate as False and in this case, an empty 115 | dict represents an HTTP error or a lost internet connection. We 116 | need to re-verify we have an internet connection and pause until 117 | we have re-established a connection. 118 | """ 119 | if not bool(enriched_flight_data): 120 | lost_connection = dt.now() 121 | logging.error("Starting Internet Connection Checks.") 122 | while not utils.check_internet_connection(): 123 | time.sleep(5) 124 | if using_twilio_api: 125 | twil.send_text_message(f"PiAware Looks to Have Lost Internet Connection at: {lost_connection.strftime('%Y-%m-%d %H:%M:%S')}. It's back up now.") 126 | enriched_flight_data = ads.get_aircraft_by_icao(local_flight_data["icao"]) 127 | 128 | local_flight_data.update(enriched_flight_data) 129 | 130 | # Write data to local file 131 | utils.write_to_gzip_file(output_filename, str(local_flight_data)) 132 | 133 | if using_aws_api: 134 | utils.write_to_s3(output_filename, "local-aircraft-data") 135 | 136 | # Check how long program has been running and reset SEEN_AIRCRAFT if needed 137 | runtime = get_script_runtime() 138 | if runtime >= 23.0: 139 | if using_twilio_api: 140 | twil.send_text_message(f"It's been 24 hours!\n- Aircraft Seen: {len(SEEN_AIRCRAFT)}\n- Currently Tracking: {len(current_flight_identifiers)}") 141 | if reset_seen_aircraft(current_flight_identifiers): 142 | logging.info(f"RUNTIME: {runtime} - Resetting SEEN_AIRCRAFT and START_TIME.") 143 | 144 | except KeyError as e: 145 | logging.error(f"[{dt.now()}]-KEY_ERROR:{e}") 146 | except TypeError as e: 147 | logging.error(f"[{dt.now()}]-TYPE_ERROR:{e}") 148 | -------------------------------------------------------------------------------- /src/twilio_api.py: -------------------------------------------------------------------------------- 1 | import configparser 2 | from twilio.rest import Client 3 | 4 | 5 | def _get_twilio_creds() -> tuple: 6 | """ 7 | Obtain the Twilio credentials from secrets.conf 8 | """ 9 | config = configparser.ConfigParser() 10 | config.read("secrets.conf") 11 | account_sid = config.get("twilio", "account_sid") 12 | auth_token = config.get("twilio", "auth_token") 13 | to_phone_number = config.get("twilio", "to_phone_number") 14 | from_phone_number = config.get("twilio", "from_phone_number") 15 | return (account_sid, auth_token, to_phone_number, from_phone_number) 16 | 17 | 18 | def twilio_api_keys_exist() -> bool: 19 | """ 20 | Make sure that the API keys have been entered into secrets.conf 21 | """ 22 | sk, ak, num_a, num_b = _get_twilio_creds() 23 | if (sk != "None") and (ak != "None") and (num_a != "None") and (num_b != "None"): 24 | return True 25 | return False 26 | 27 | 28 | def send_text_message(message: str) -> None: 29 | """ 30 | Sends a text message via the Twilio API 31 | """ 32 | sid, token, to_number, from_number = _get_twilio_creds() 33 | client = Client(sid, token) 34 | client.messages.create( 35 | to=f"+{to_number}", 36 | from_=f"+{from_number}", 37 | body=message 38 | ) -------------------------------------------------------------------------------- /src/utilities.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import logging 3 | import gzip 4 | import os 5 | import hashlib 6 | import src.aws as aws 7 | 8 | from datetime import datetime as dt 9 | 10 | 11 | def check_internet_connection() -> bool: 12 | """ 13 | Checks if we have an internet connection by submitting a 14 | GET request to google.com. 15 | """ 16 | try: 17 | r = requests.get("https://google.com") 18 | if r.status_code == 200: 19 | return True 20 | else: 21 | raise requests.exceptions.ConnectionError 22 | 23 | except requests.exceptions.ConnectionError as e: 24 | logging.error(f"NO INTERNET CONNECTION:{e}") 25 | return False 26 | except requests.exceptions.HTTPError as e: 27 | logging.error(f"INVALID HTTP RESPONSE:{e}") 28 | return False 29 | 30 | 31 | def write_to_gzip_file(filename: str, data: str) -> None: 32 | """ 33 | Takes a filename and data, and appends the data to the gzipped file.
34 | """ 35 | data_bytes = (data + "\n").encode("utf-8") 36 | with gzip.open(filename, "ab") as f: 37 | f.write(data_bytes) 38 | 39 | 40 | def write_to_s3(filename: str, bucket_name: str) -> None: 41 | """ 42 | Write a file to AWS S3 43 | """ 44 | if not aws.check_bucket_exists(bucket_name): 45 | if not aws.create_bucket(bucket_name): 46 | logging.error("Unable to create bucket") 47 | exit(1) 48 | if not aws.upload_to_s3(bucket_name, filename, filename): 49 | logging.error("Unable to upload gzipped file to S3") 50 | 51 | 52 | def get_file_sha256(file_path: str) -> str: 53 | """ 54 | Obtains a file's SHA-256 hash given a file path. 55 | """ 56 | try: 57 | if not os.path.isfile(file_path): 58 | raise Exception(f"{file_path} does not exist!") 59 | sha256_hash = hashlib.sha256() 60 | 61 | with open(file_path, "rb") as f: 62 | for chunk in iter(lambda: f.read(4096), b""): 63 | sha256_hash.update(chunk) 64 | 65 | file_hash = sha256_hash.hexdigest() 66 | 67 | return file_hash 68 | 69 | except Exception as e: 70 | logging.error(f"[{dt.now()}]-HASHING_ERROR: {e}") 71 | 72 | 73 | def get_string_md5(data: str) -> str: 74 | """ 75 | Create an MD5 hash from a string 76 | """ 77 | md5_hash = hashlib.md5(data.encode("utf-8")) 78 | return md5_hash.hexdigest() 79 | 80 | 81 | def get_time_delta_hours(start: dt, end: dt) -> float: 82 | """ 83 | Calculates the time difference, in whole hours, between two datetime objects 84 | """ 85 | time_delta = start - end 86 | time_delta_hours = divmod(time_delta.total_seconds(), 3600)[0] 87 | return time_delta_hours --------------------------------------------------------------------------------