├── tests ├── __init__.py ├── test_s3_connection.py ├── test_strava_api_connection.py └── test_mysql_connection.py ├── src ├── utilities │ ├── __init__.py │ ├── s3_utils.py │ ├── mysql_utils.py │ ├── redshift_utils.py │ └── strava_api_utils.py ├── validate_load_data.sh ├── build_data_model.py ├── copy_to_redshift_staging.py ├── redshift_staging_to_production.py ├── validator.py └── extract_strava_data.py ├── sql ├── validation │ ├── activity_dup_zero.sql │ ├── zscore_90_twosided.sql │ ├── activity_dup.sql │ ├── weekly_kudos_avg_zscore.sql │ └── weekly_activity_count_zscore.sql ├── tables │ ├── populate_last_extracted_table.sql │ └── create_redshift_table.sql └── data_models │ ├── average_kudos_by_workout.sql │ ├── yearly_statistics.sql │ ├── monthly_statistics.sql │ ├── percentage_weekly_kudos_change.sql │ └── build_monthly_data_model.sql ├── images ├── DAG.png ├── dashboard.png ├── dashboard_map.png ├── slack_output.png └── system_diagram.png ├── strava_data ├── 2022_06_19_export_file.csv └── 2022_06_18_export_file.csv ├── setup.py ├── pipeline.conf ├── .gitignore ├── airflow └── dags │ └── elt_strava_pipeline.py └── README.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/utilities/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /sql/validation/activity_dup_zero.sql: -------------------------------------------------------------------------------- 1 | SELECT 0; -------------------------------------------------------------------------------- /sql/validation/zscore_90_twosided.sql: -------------------------------------------------------------------------------- 1 | SELECT 1.645; -------------------------------------------------------------------------------- /images/DAG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jackmleitch/StravaDataPipline/HEAD/images/DAG.png -------------------------------------------------------------------------------- /images/dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jackmleitch/StravaDataPipline/HEAD/images/dashboard.png -------------------------------------------------------------------------------- /images/dashboard_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jackmleitch/StravaDataPipline/HEAD/images/dashboard_map.png -------------------------------------------------------------------------------- /images/slack_output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jackmleitch/StravaDataPipline/HEAD/images/slack_output.png -------------------------------------------------------------------------------- /images/system_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jackmleitch/StravaDataPipline/HEAD/images/system_diagram.png -------------------------------------------------------------------------------- /sql/tables/populate_last_extracted_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE last_extracted ( 2 | LastUpdated timestamp 3 | ); 4 | 5 | 
INSERT INTO last_extracted 6 | VALUES('2016-01-01 00:00:00'); -------------------------------------------------------------------------------- /tests/test_s3_connection.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from src.utilities.s3_utils import connect_s3 3 | 4 | 5 | def test_connect_s3(): 6 | s3 = connect_s3() 7 | assert s3, "S3 connection failed!" 8 | -------------------------------------------------------------------------------- /sql/validation/activity_dup.sql: -------------------------------------------------------------------------------- 1 | WITH activity_dups AS 2 | ( 3 | SELECT id, Count(*) 4 | FROM public.strava_activity_data 5 | GROUP BY id 6 | HAVING COUNT(*) > 1 7 | ) 8 | SELECT COUNT(*) 9 | FROM activity_dups; -------------------------------------------------------------------------------- /strava_data/2022_06_19_export_file.csv: -------------------------------------------------------------------------------- 1 | 7334530051|Father and son(‘s)|23336.3|6166|6814|451.7|Run|2|United Kingdom|2|26|0|2|3.785|6.162|86.5|22|140.3|165.0|40.0|2022-06-19 10:39:57|Europe/London|50.84448675625026|-0.3919993992894888 2 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | 3 | setup( 4 | name="StravaDataPipeline", 5 | packages=find_packages(), 6 | version="0.1.0", 7 | description="", 8 | author="Jack Leitch", 9 | license="MIT", 10 | ) -------------------------------------------------------------------------------- /sql/data_models/average_kudos_by_workout.sql: -------------------------------------------------------------------------------- 1 | SELECT workout_type, AVG(kudos_count) AS average_kudos, AVG(average_speed) AS average_speed 2 | FROM public.strava_activity_data 3 | WHERE type = 'Run' 4 | GROUP BY workout_type 5 | ORDER BY average_kudos DESC; -------------------------------------------------------------------------------- /src/validate_load_data.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | python src/validator.py sql/validation/activity_dup.sql sql/validation/activity_dup_zero.sql equals warn 3 | python src/validator.py sql/validation/weekly_activity_count_zscore.sql sql/validation/zscore_90_twosided.sql greater_equals warn 4 | python src/validator.py sql/validation/weekly_kudos_avg_zscore.sql sql/validation/zscore_90_twosided.sql greater_equals warn 5 | -------------------------------------------------------------------------------- /sql/data_models/yearly_statistics.sql: -------------------------------------------------------------------------------- 1 | SELECT EXTRACT(YEAR FROM start_date) AS activity_year, 2 | ROUND(SUM(distance)/1609) AS total_miles_ran, 3 | ROUND(SUM(moving_time)/(60*60)) AS total_running_time_hours, 4 | ROUND(SUM(total_elevation_gain)) AS total_elevation_gain_meters, 5 | ROUND(SUM(athlete_count)) AS total_people_ran_with, 6 | ROUND(AVG(athlete_count)) AS average_people_ran_with 7 | FROM public.strava_activity_data 8 | WHERE type='Run' 9 | GROUP BY activity_year 10 | ORDER BY activity_year; -------------------------------------------------------------------------------- /sql/data_models/monthly_statistics.sql: -------------------------------------------------------------------------------- 1 | SELECT DATE_TRUNC('month', start_date::date) AS activity_month, 2 | 
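-- distance is reported by the Strava API in meters and moving_time in seconds, hence the /1609 (miles) and /(60*60) (hours) conversions below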
ROUND(SUM(distance)/1609) AS total_miles_ran, 3 | ROUND(SUM(moving_time)/(60*60)) AS total_running_time_hours, 4 | ROUND(SUM(total_elevation_gain)) AS total_elevation_gain_meters, 5 | ROUND(SUM(athlete_count)) AS total_people_ran_with, 6 | ROUND(AVG(athlete_count)) AS average_people_ran_with 7 | FROM public.strava_activity_data 8 | WHERE type='Run' 9 | GROUP BY activity_month 10 | ORDER BY activity_month; 11 | -------------------------------------------------------------------------------- /src/build_data_model.py: -------------------------------------------------------------------------------- 1 | from src.utilities.redshift_utils import connect_redshift 2 | 3 | 4 | def build_data_model(sql_script_path: str) -> None: 5 | """Execute sql query to build data model.""" 6 | rs_conn = connect_redshift() 7 | cursor = rs_conn.cursor() 8 | sql_file = open(sql_script_path, "r") 9 | cursor.execute(sql_file.read()) 10 | cursor.close() 11 | 12 | 13 | if __name__ == "__main__": 14 | sql_script_path = "sql/data_models/build_monthly_data_model.sql" 15 | build_data_model(sql_script_path) 16 | -------------------------------------------------------------------------------- /src/utilities/s3_utils.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import configparser 3 | 4 | 5 | def connect_s3(): 6 | """Get the MySQL connection info and connect.""" 7 | # load the aws_boto_credentials values 8 | parser = configparser.ConfigParser() 9 | parser.read("pipeline.conf") 10 | access_key = parser.get("aws_boto_credentials", "access_key") 11 | secret_key = parser.get("aws_boto_credentials", "secret_key") 12 | bucket_name = parser.get("aws_boto_credentials", "bucket_name") 13 | # connect to s3 bucket 14 | s3 = boto3.client( 15 | "s3", aws_access_key_id=access_key, aws_secret_access_key=secret_key 16 | ) 17 | return s3 18 | -------------------------------------------------------------------------------- /sql/data_models/percentage_weekly_kudos_change.sql: -------------------------------------------------------------------------------- 1 | WITH weekly_kudos_count AS ( 2 | SELECT DATE_PART('week', start_date) AS week_of_year, 3 | workout_type, 4 | SUM(kudos_count) AS total_kudos 5 | FROM public.strava_activity_data 6 | WHERE type = 'Run' AND DATE_PART('year', start_date) = '2022' 7 | GROUP BY week_of_year, workout_type 8 | ), 9 | 10 | weekly_kudos_count_lag AS ( 11 | SELECT *, 12 | LAG(total_kudos) OVER(PARTITION BY workout_type ORDER BY week_of_year) 13 | AS previous_week_total_kudos 14 | FROM weekly_kudos_count 15 | ) 16 | 17 | SELECT *, 18 | COALESCE(ROUND(((total_kudos - previous_week_total_kudos)/previous_week_total_kudos)*100),0) 19 | AS percent_kudos_change 20 | FROM weekly_kudos_count_lag; -------------------------------------------------------------------------------- /pipeline.conf: -------------------------------------------------------------------------------- 1 | [mysql_config] 2 | hostname = localhost 3 | username = root 4 | password = xxxxxxxxxx 5 | port = 3306 6 | database = xxxxxxxxxx 7 | 8 | [strava_api_config] 9 | auth_url = https://www.strava.com/oauth/token 10 | activites_url = https://www.strava.com/api/v3/athlete/activities 11 | client_id = xxxxxxxxxx 12 | client_secret = xxxxxxxxxx 13 | refresh_token = xxxxxxxxxx 14 | 15 | [aws_boto_credentials] 16 | access_key = xxxxxxxxxx 17 | secret_key = xxxxxxxxxx 18 | bucket_name = strava-data-pipeline 19 | account_id = xxxxxxxxxx 20 | 21 | [aws_redshift_creds] 22 | database = dev 23 | username = root 24 
| password = xxxxxxxxxx 25 | host = xxxxxxxxxx 26 | port = 5439 27 | iam_role = RedshiftLoadRoleStrava 28 | table_name = public.strava_activity_data 29 | 30 | [slack_config] 31 | webhook_url = xxxxxxxxxx 32 | -------------------------------------------------------------------------------- /src/utilities/mysql_utils.py: -------------------------------------------------------------------------------- 1 | import pymysql 2 | import configparser 3 | 4 | 5 | def connect_mysql(): 6 | """Get the MySQL connection info and connect.""" 7 | parser = configparser.ConfigParser() 8 | parser.read("pipeline.conf") 9 | hostname = parser.get("mysql_config", "hostname") 10 | port = parser.get("mysql_config", "port") 11 | username = parser.get("mysql_config", "username") 12 | dbname = parser.get("mysql_config", "database") 13 | password = parser.get("mysql_config", "password") 14 | 15 | conn = pymysql.connect( 16 | host=hostname, user=username, password=password, db=dbname, port=int(port) 17 | ) 18 | if conn is None: 19 | print("Error connecting to the MySQL database") 20 | else: 21 | print("MySQL connection established!") 22 | return conn 23 | -------------------------------------------------------------------------------- /sql/tables/create_redshift_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS public.strava_activity_data ( 2 | "id" VARCHAR NULL PRIMARY KEY, 3 | "name" VARCHAR NULL, 4 | "distance" FLOAT NULL, 5 | "moving_time" FLOAT NULL, 6 | "elapsed_time" FLOAT NULL, 7 | "total_elevation_gain" DECIMAL NULL, 8 | "type" VARCHAR NULL, 9 | "workout_type" VARCHAR NULL, 10 | "location_country" VARCHAR NULL, 11 | "achievement_count" INTEGER NULL, 12 | "kudos_count" INTEGER NULL, 13 | "comment_count" INTEGER NULL, 14 | "athlete_count" INTEGER NULL, 15 | "average_speed" FLOAT NULL, 16 | "max_speed" FLOAT NULL, 17 | "average_cadence" FLOAT NULL, 18 | "average_temp" FLOAT NULL, 19 | "average_heartrate" FLOAT NULL, 20 | "max_heartrate" INTEGER NULL, 21 | "suffer_score" INTEGER NULL, 22 | "start_date" TIMESTAMP NULL, 23 | "timezone" VARCHAR NULL, 24 | "lat" FLOAT NULL, 25 | "lng" FLOAT NULL); -------------------------------------------------------------------------------- /sql/data_models/build_monthly_data_model.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS activity_summary_monthly ( 2 | activity_month numeric, 3 | total_miles_ran int, 4 | total_running_time_hours int, 5 | total_elevation_gain_meters int, 6 | total_people_ran_with int, 7 | avg_people_ran_with int, 8 | avg_kudos real, 9 | std_kudos real 10 | ); 11 | 12 | TRUNCATE activity_summary_monthly; 13 | 14 | INSERT INTO activity_summary_monthly 15 | SELECT DATE_TRUNC('month', start_date::date) AS activity_month, 16 | ROUND(SUM(distance)/1609) AS total_miles_ran, 17 | ROUND(SUM(moving_time)/(60*60)) AS total_running_time_hours, 18 | ROUND(SUM(total_elevation_gain)) AS total_elevation_gain_meters, 19 | ROUND(SUM(athlete_count)) AS total_people_ran_with, 20 | ROUND(AVG(athlete_count)) AS avg_people_ran_with, 21 | ROUND(AVG(kudos_count), 1) AS avg_kudos, 22 | ROUND(STDDEV(kudos_count), 1) AS std_kudos 23 | FROM public.strava_activity_data 24 | WHERE type='Run' 25 | GROUP BY activity_month 26 | ORDER BY activity_month; 27 | -------------------------------------------------------------------------------- /sql/validation/weekly_kudos_avg_zscore.sql: 
-------------------------------------------------------------------------------- 1 | with kudos_by_week AS ( 2 | SELECT 3 | date_trunc('week', start_date::date) AS activity_week, 4 | AVG(kudos_count) AS avg_weekly_kudos 5 | FROM public.strava_activity_data 6 | GROUP BY activity_week 7 | ORDER BY activity_week 8 | ), 9 | 10 | kudos_by_week_statistics AS ( 11 | SELECT 12 | AVG(avg_weekly_kudos) AS avg_weekly_kudos, 13 | STDDEV(avg_weekly_kudos) AS std_weekly_kudos 14 | FROM kudos_by_week 15 | ), 16 | 17 | staging_table_avg_kudos AS ( 18 | SELECT AVG(kudos_count) AS staging_avg_kudos 19 | FROM staging_table 20 | ), 21 | 22 | weekly_avg_kudos_zscore AS ( 23 | SELECT 24 | s.staging_avg_kudos AS staging_avg_kudos, 25 | p.avg_weekly_kudos AS avg_weekly_kudos, 26 | p.std_weekly_kudos as std_weekly_kudos, 27 | --compute zscore for weekly kudos average 28 | (staging_avg_kudos - avg_weekly_kudos) / std_weekly_kudos AS z_score 29 | FROM staging_table_avg_kudos s, kudos_by_week_statistics p 30 | ) 31 | 32 | SELECT ABS(z_score) AS two_sized_zscore 33 | FROM weekly_avg_kudos_zscore; -------------------------------------------------------------------------------- /sql/validation/weekly_activity_count_zscore.sql: -------------------------------------------------------------------------------- 1 | with activities_by_week AS ( 2 | SELECT 3 | date_trunc('week', start_date::date) AS activity_week, 4 | COUNT(*) AS activity_count 5 | FROM public.strava_activity_data 6 | GROUP BY activity_week 7 | ORDER BY activity_week 8 | ), 9 | 10 | activities_by_week_statistics AS ( 11 | SELECT 12 | AVG(activity_count) AS avg_activities_per_week, 13 | STDDEV(activity_count) AS std_activities_per_week 14 | FROM activities_by_week 15 | ), 16 | 17 | staging_table_weekly_count AS ( 18 | SELECT COUNT(*) AS staging_weekly_count 19 | FROM staging_table 20 | ), 21 | 22 | activity_count_zscore AS ( 23 | SELECT 24 | s.staging_weekly_count AS staging_table_count, 25 | p.avg_activities_per_week AS avg_activities_per_week, 26 | p.std_activities_per_week as std_activities_per_week, 27 | --compute zscore for weekly activity count 28 | (staging_table_count - avg_activities_per_week) / std_activities_per_week AS z_score 29 | FROM staging_table_weekly_count s, activities_by_week_statistics p 30 | ) 31 | 32 | SELECT ABS(z_score) AS two_sized_zscore 33 | FROM activity_count_zscore; -------------------------------------------------------------------------------- /tests/test_strava_api_connection.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from datetime import datetime 3 | 4 | from src.extract_strava_data import make_strava_api_request, extract_strava_activities 5 | from src.utilities.strava_api_utils import connect_strava 6 | 7 | 8 | @pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") 9 | def test_connect_strava(): 10 | header = connect_strava() 11 | assert isinstance( 12 | header, dict 13 | ), "Header returned is not dictionary. Connection has likley failed." 14 | assert ( 15 | header["Authorization"].endswith("Bearer ") == False 16 | ), "Access token not found." 17 | 18 | 19 | @pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") 20 | def test_make_strava_api_request(): 21 | header = connect_strava() 22 | response_json = make_strava_api_request(header=header, activity_num=1) 23 | assert isinstance(response_json, dict), "API should respond with a dictionary." 
24 | assert "id" in response_json.keys(), "Response dictionary does not contain id key." 25 | assert isinstance(response_json["id"], int), "Activity ID should be an integer." 26 | -------------------------------------------------------------------------------- /src/copy_to_redshift_staging.py: -------------------------------------------------------------------------------- 1 | import configparser 2 | 3 | from src.utilities.redshift_utils import connect_redshift, get_s3_and_iam_details 4 | 5 | 6 | def copy_to_redshift_staging( 7 | table_name: str, rs_conn, s3_file_path: str, role_string: str 8 | ) -> None: 9 | """Copy data from s3 into Redshift staging table.""" 10 | # write queries to execute on redshift 11 | create_temp_table = f"CREATE TABLE staging_table (LIKE {table_name});" 12 | sql_copy_to_temp = f"COPY staging_table FROM '{s3_file_path}' iam_role '{role_string}';" 13 | 14 | # execute queries 15 | cur = rs_conn.cursor() 16 | cur.execute(create_temp_table) 17 | cur.execute(sql_copy_to_temp) 18 | rs_conn.commit() 19 | 20 | 21 | if __name__ == "__main__": 22 | # get redshift table name 23 | parser = configparser.ConfigParser() 24 | parser.read("pipeline.conf") 25 | table_name = parser.get("aws_redshift_creds", "table_name") 26 | # copy s3 data to redshift staging table 27 | rs_conn = connect_redshift() 28 | s3_file_path, role_string = get_s3_and_iam_details() 29 | copy_to_redshift_staging(table_name, rs_conn, s3_file_path, role_string) 30 | -------------------------------------------------------------------------------- /src/redshift_staging_to_production.py: -------------------------------------------------------------------------------- 1 | import configparser 2 | 3 | from src.utilities.redshift_utils import connect_redshift 4 | 5 | 6 | def redshift_staging_to_production(table_name: str, rs_conn) -> None: 7 | """Copy data from Redshift staging table to production table.""" 8 | # if id already exists in table, we remove it and add new id record during load 9 | delete_from_table = f"DELETE FROM {table_name} USING staging_table WHERE {table_name}.id = staging_table.id;" 10 | insert_into_table = f"INSERT INTO {table_name} SELECT * FROM staging_table;" 11 | drop_temp_table = "DROP TABLE staging_table;" 12 | # execute queries 13 | cur = rs_conn.cursor() 14 | cur.execute(delete_from_table) 15 | cur.execute(insert_into_table) 16 | cur.execute(drop_temp_table) 17 | rs_conn.commit() 18 | 19 | 20 | if __name__ == "__main__": 21 | # get redshift table name 22 | parser = configparser.ConfigParser() 23 | parser.read("pipeline.conf") 24 | table_name = parser.get("aws_redshift_creds", "table_name") 25 | # copy redshift staging table to production table 26 | table_name = "public.strava_activity_data" 27 | rs_conn = connect_redshift() 28 | redshift_staging_to_production(table_name, rs_conn) 29 | -------------------------------------------------------------------------------- /src/utilities/redshift_utils.py: -------------------------------------------------------------------------------- 1 | import configparser 2 | import psycopg2 3 | from datetime import datetime 4 | from typing import Tuple 5 | 6 | 7 | def connect_redshift(): 8 | """Connect to the redshift cluster.""" 9 | parser = configparser.ConfigParser() 10 | parser.read("pipeline.conf") 11 | dbname = parser.get("aws_redshift_creds", "database") 12 | user = parser.get("aws_redshift_creds", "username") 13 | password = parser.get("aws_redshift_creds", "password") 14 | host = parser.get("aws_redshift_creds", "host") 15 | port = 
parser.get("aws_redshift_creds", "port") 16 | conn = psycopg2.connect( 17 | dbname=dbname, host=host, port=port, user=user, password=password 18 | ) 19 | return conn 20 | 21 | 22 | def get_s3_and_iam_details( 23 | date: datetime = datetime.today().strftime("%Y_%m_%d"), 24 | ) -> Tuple[str, str]: 25 | parser = configparser.ConfigParser() 26 | parser.read("pipeline.conf") 27 | account_id = parser.get("aws_boto_credentials", "account_id") 28 | iam_role = parser.get("aws_redshift_creds", "iam_role") 29 | bucket_name = parser.get("aws_boto_credentials", "bucket_name") 30 | s3_file_path = f"s3://{bucket_name}/strava_data/{date}_export_file.csv" 31 | role_string = f"arn:aws:iam::{account_id}:role/{iam_role}" 32 | return s3_file_path, role_string 33 | -------------------------------------------------------------------------------- /tests/test_mysql_connection.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from datetime import datetime 3 | from src.utilities.mysql_utils import connect_mysql 4 | from src.extract_strava_data import get_date_of_last_warehouse_update 5 | 6 | 7 | def test_connect_mysql(): 8 | conn = connect_mysql() 9 | assert conn, "Connection to MySQL not found." 10 | 11 | 12 | def test_basic_query(): 13 | conn = connect_mysql() 14 | test_query = """ 15 | SELECT * 16 | FROM last_extracted;""" 17 | mysql_cursor = conn.cursor() 18 | mysql_cursor.execute(test_query) 19 | result = mysql_cursor.fetchone() 20 | assert result, "SELECT * query failed." 21 | 22 | 23 | def test_last_extraction_table(): 24 | conn = connect_mysql() 25 | get_earliest_update_query = """ 26 | SELECT MIN(LastUpdated) 27 | FROM last_extracted;""" 28 | mysql_cursor = conn.cursor() 29 | mysql_cursor.execute(get_earliest_update_query) 30 | result = mysql_cursor.fetchone()[0] 31 | assert result == datetime( 32 | 2022, 1, 1, 0, 0 33 | ), "First date in updates tables is incorrect." 34 | 35 | 36 | def test_get_date_of_last_warehouse_update(): 37 | last_update, current_date = get_date_of_last_warehouse_update() 38 | 39 | assert isinstance( 40 | last_update, datetime 41 | ), "Last update date should be a datetime object." 42 | assert isinstance(current_date, str), "Last update date should be a string." 43 | current_datetime = datetime.strptime(current_date, "%Y-%m-%d %H:%M:%S") 44 | assert ( 45 | current_datetime > last_update 46 | ), "Current date should be bigger than last update date in database" 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # conf file 2 | pipeline.conf 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | pip-wheel-metadata/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # IPython 84 | profile_default/ 85 | ipython_config.py 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ -------------------------------------------------------------------------------- /src/utilities/strava_api_utils.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import configparser 3 | import urllib3 4 | import re 5 | 6 | from typing import Dict 7 | from datetime import datetime 8 | 9 | urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) 10 | 11 | 12 | def connect_strava() -> Dict[str, str]: 13 | """Get the Strava API connection info and return header.""" 14 | # get strava api info 15 | parser = configparser.ConfigParser() 16 | parser.read("pipeline.conf") 17 | auth_url = parser.get("strava_api_config", "auth_url") 18 | client_id = parser.get("strava_api_config", "client_id") 19 | client_secret = parser.get("strava_api_config", "client_secret") 20 | refresh_token = parser.get("strava_api_config", "refresh_token") 21 | 22 | # connect to API 23 | payload = { 24 | "client_id": client_id, 25 | "client_secret": client_secret, 26 | "refresh_token": refresh_token, 27 | "grant_type": "refresh_token", 28 | "f": "json", 29 | } 30 | res = requests.post(auth_url, data=payload, verify=False) 31 | access_token = res.json()["access_token"] 32 | header = {"Authorization": "Bearer " + access_token} 33 | return header 34 | 35 | 36 | def convert_strava_start_date(date: str) -> datetime: 37 | date_format = "%Y-%m-%dT%H:%M:%SZ" 38 | converted_date = datetime.strptime(date, date_format) 39 | return converted_date 40 | 41 | 42 | def parse_api_output(response_json: dict) -> list: 43 | """Parse output from Strava API.""" 44 | activity = [] 45 | cols_to_extract = [ 46 | "id", 47 | "name", 48 | "distance", 49 | "moving_time", 50 | "elapsed_time", 51 | 
"total_elevation_gain", 52 | "type", 53 | "workout_type", 54 | "location_country", 55 | "achievement_count", 56 | "kudos_count", 57 | "comment_count", 58 | "athlete_count", 59 | "average_speed", 60 | "max_speed", 61 | "average_cadence", 62 | "average_temp", 63 | "average_heartrate", 64 | "max_heartrate", 65 | "suffer_score", 66 | ] 67 | for col in cols_to_extract: 68 | try: 69 | activity.append(response_json[col]) 70 | # if col is not found in API repsonse 71 | except KeyError: 72 | activity.append(None) 73 | try: 74 | start_date = convert_strava_start_date(response_json["start_date"]) 75 | activity.append(start_date) 76 | except KeyError: 77 | activity.append(None) 78 | try: 79 | # remove timezone info 80 | timezone = response_json["timezone"] 81 | timezone = re.sub("[\(\[].*?[\)\]]", "", timezone) 82 | activity.append(timezone[1:]) 83 | except KeyError: 84 | activity.append(None) 85 | try: 86 | start_latlng = response_json["start_latlng"] 87 | if len(start_latlng) == 2: 88 | lat, lng = start_latlng[0], start_latlng[1] 89 | activity.append(lat) 90 | activity.append(lng) 91 | else: 92 | activity.append(None) 93 | activity.append(None) 94 | except KeyError: 95 | activity.append(None) 96 | activity.append(None) 97 | return activity 98 | -------------------------------------------------------------------------------- /airflow/dags/elt_strava_pipeline.py: -------------------------------------------------------------------------------- 1 | import os 2 | from airflow import DAG 3 | from airflow.operators.bash_operator import BashOperator 4 | from airflow.utils.dates import days_ago 5 | from datetime import timedelta, datetime 6 | 7 | os.chdir('/Users/Jack/Documents/projects/StravaDataPipeline/') 8 | 9 | schedule_interval = '@weekly' 10 | start_date = days_ago(1) 11 | 12 | default_args = {"owner": "airflow", "depends_on_past": False, "retries": 1} 13 | 14 | with DAG( 15 | dag_id='elt_strava_pipeline', 16 | description ='Strava data EtLT pipeline', 17 | schedule_interval=schedule_interval, 18 | default_args=default_args, 19 | start_date=start_date, 20 | catchup=True, 21 | max_active_runs=1, 22 | tags=['StravaELT'], 23 | ) as dag: 24 | 25 | extract_strava_data = BashOperator( 26 | task_id = 'extract_strava_data', 27 | bash_command = "src/extract_strava_data.py", 28 | dag = dag, 29 | ) 30 | extract_strava_data.doc_md = 'Extract Strava data and store as csv in S3 bucket.' 31 | 32 | copy_to_redshift_staging = BashOperator( 33 | task_id = 'copy_to_redshift_staging', 34 | bash_command = "src/copy_to_redshift_staging.py", 35 | dag = dag, 36 | ) 37 | copy_to_redshift_staging.doc_md = 'Copy S3 csv file to Redshift staging table.' 38 | 39 | validate_staging_data_dup = BashOperator( 40 | task_id = 'validate_staging_data_dup', 41 | bash_command = "python src/validator.py sql/validation/activity_dup.sql sql/validation/activity_dup_zero.sql equals warn", 42 | dag = dag, 43 | ) 44 | validate_staging_data_dup.doc_md = 'Validate data: check for duplicates.' 45 | 46 | validate_staging_data_weekly_activity_count = BashOperator( 47 | task_id = 'validate_staging_data_weekly_activity_count', 48 | bash_command = "python src/validator.py sql/validation/weekly_activity_count_zscore.sql sql/validation/zscore_90_twosided.sql greater_equals warn", 49 | dag = dag, 50 | ) 51 | validate_staging_data_weekly_activity_count.doc_md = 'Validate data: z-test weekly activity count.' 
52 | 53 | validate_staging_data_weekly_kudos_avg = BashOperator( 54 | task_id = 'validate_staging_data_weekly_kudos_avg', 55 | bash_command = "python src/validator.py sql/validation/weekly_kudos_avg_zscore.sql sql/validation/zscore_90_twosided.sql greater_equals warn", 56 | dag = dag, 57 | ) 58 | validate_staging_data_weekly_kudos_avg.doc_md = 'Validate data: z-test weekly kudos average' 59 | 60 | redshift_staging_to_production = BashOperator( 61 | task_id = 'redshift_staging_to_production', 62 | bash_command = "src/redshift_staging_to_production.py", 63 | dag = dag, 64 | ) 65 | redshift_staging_to_production.doc_md = 'Insert redshift staging table into production and remove duplicates.' 66 | 67 | build_data_model = BashOperator( 68 | task_id = 'build_data_model', 69 | bash_command = "src/build_data_model.py", 70 | dag = dag, 71 | ) 72 | build_data_model.doc_md = 'Build monthly statistics data model.' 73 | 74 | extract_strava_data >> copy_to_redshift_staging 75 | copy_to_redshift_staging >> validate_staging_data_dup 76 | copy_to_redshift_staging >> validate_staging_data_weekly_activity_count 77 | copy_to_redshift_staging >> validate_staging_data_weekly_kudos_avg 78 | validate_staging_data_dup >> redshift_staging_to_production 79 | validate_staging_data_weekly_activity_count >> redshift_staging_to_production 80 | validate_staging_data_weekly_kudos_avg >> redshift_staging_to_production 81 | redshift_staging_to_production >> build_data_model -------------------------------------------------------------------------------- /src/validator.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import requests 3 | import json 4 | import configparser 5 | from src.utilities.redshift_utils import connect_redshift 6 | 7 | 8 | def execute_test(db_conn, script_1: str, script_2: str, comp_operator: str) -> bool: 9 | """ 10 | Execute test made up of two scripts and a comparison operator 11 | :param comp_operator: comparison operator to compare script outcome 12 | (equals, greater_equals, greater, less_equals, less, not_equals) 13 | :return: True/False for test pass/fail 14 | """ 15 | # execute the 1st script and store the result 16 | cursor = db_conn.cursor() 17 | sql_file = open(script_1, "r") 18 | cursor.execute(sql_file.read()) 19 | record = cursor.fetchone() 20 | result_1 = record[0] 21 | db_conn.commit() 22 | cursor.close() 23 | 24 | # execute the 2nd script and store the result 25 | cursor = db_conn.cursor() 26 | sql_file = open(script_2, "r") 27 | cursor.execute(sql_file.read()) 28 | record = cursor.fetchone() 29 | result_2 = record[0] 30 | db_conn.commit() 31 | cursor.close() 32 | 33 | print("Result 1 = " + str(result_1)) 34 | print("Result 2 = " + str(result_2)) 35 | 36 | # compare values based on the comp_operator 37 | if comp_operator == "equals": 38 | return result_1 == result_2 39 | elif comp_operator == "greater_equals": 40 | return result_1 >= result_2 41 | elif comp_operator == "greater": 42 | return result_1 > result_2 43 | elif comp_operator == "less_equals": 44 | return result_1 <= result_2 45 | elif comp_operator == "less": 46 | return result_1 < result_2 47 | elif comp_operator == "not_equal": 48 | return result_1 != result_2 49 | 50 | # tests have failed if we make it here 51 | return False 52 | 53 | 54 | # test_result should be True/False 55 | def send_slack_notification( 56 | webhook_url: str, 57 | script_1: str, 58 | script_2: str, 59 | comp_operator: str, 60 | test_result: bool, 61 | ) -> bool: 62 | 63 | try: 64 | if test_result == 
True: 65 | message = ( 66 | f"Validation Test Passed!: {script_1} / {script_2} / {comp_operator}" 67 | ) 68 | else: 69 | message = ( 70 | f"Validation Test FAILED!: {script_1} / {script_2} / {comp_operator}" 71 | ) 72 | # send test result to Slack 73 | slack_data = {"text": message} 74 | response = requests.post( 75 | webhook_url, 76 | data=json.dumps(slack_data), 77 | headers={"Content-Type": "application/json"}, 78 | ) 79 | # if post request to Slack fails 80 | if response.status_code != 200: 81 | print(response) 82 | return False 83 | 84 | except Exception as e: 85 | print("Error sending slack notification") 86 | print(str(e)) 87 | return False 88 | 89 | 90 | if __name__ == "__main__": 91 | 92 | if len(sys.argv) == 2 and sys.argv[1] == "-h": 93 | print("Usage: python validator.py script1.sql script2.sql comparison_operator") 94 | print( 95 | "Valid comparison_operator values: (equals, greater_equals, greater, less_equals, less, not_equals)" 96 | ) 97 | exit(0) 98 | 99 | if len(sys.argv) != 5: 100 | print( 101 | "Usage: python validator.py script1.sql script2.sql comparison_operator severity_level" 102 | ) 103 | exit(-1) 104 | 105 | # load arguments 106 | script_1 = sys.argv[1] 107 | script_2 = sys.argv[2] 108 | comp_operator = sys.argv[3] 109 | sev_level = sys.argv[4] 110 | # execute test 111 | db_conn = connect_redshift() 112 | test_result = execute_test(db_conn, script_1, script_2, comp_operator) 113 | print("Result of test: " + str(test_result)) 114 | # load slack webhook_url 115 | parser = configparser.ConfigParser() 116 | parser.read("pipeline.conf") 117 | webhook_url = parser.get("slack_config", "webhook_url") 118 | send_slack_notification(webhook_url, script_1, script_2, comp_operator, test_result) 119 | # exit 120 | if sev_level == "halt": 121 | exit(1) 122 | else: 123 | exit(0) 124 | -------------------------------------------------------------------------------- /src/extract_strava_data.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import requests 3 | import time 4 | 5 | from typing import Dict, List, Tuple 6 | from datetime import datetime 7 | 8 | from src.utilities.mysql_utils import connect_mysql 9 | from src.utilities.strava_api_utils import ( 10 | connect_strava, 11 | convert_strava_start_date, 12 | parse_api_output, 13 | ) 14 | from src.utilities.s3_utils import connect_s3 15 | 16 | 17 | def get_date_of_last_warehouse_update() -> Tuple[datetime, str]: 18 | """ 19 | Get the datetime of last time data was extracted from Strava API 20 | by querying MySQL database and also return current datetime. 
21 | """ 22 | mysql_conn = connect_mysql() 23 | get_last_updated_query = """ 24 | SELECT COALESCE(MAX(LastUpdated), '1900-01-01') 25 | FROM last_extracted;""" 26 | mysql_cursor = mysql_conn.cursor() 27 | mysql_cursor.execute(get_last_updated_query) 28 | result = mysql_cursor.fetchone() 29 | last_updated_warehouse = datetime.strptime(result[0], "%Y-%m-%d %H:%M:%S") 30 | current_datetime = datetime.today().strftime("%Y-%m-%d %H:%M:%S") 31 | return last_updated_warehouse, current_datetime 32 | 33 | 34 | def make_strava_api_request( 35 | header: Dict[str, str], activity_num: int = 1 36 | ) -> Dict[str, str]: 37 | """Use Strava API to get recent page of new data.""" 38 | param = {"per_page": 1, "page": activity_num} 39 | api_response = requests.get( 40 | "https://www.strava.com/api/v3/athlete/activities", headers=header, params=param 41 | ).json() 42 | response_json = api_response[0] 43 | return response_json 44 | 45 | 46 | def extract_strava_activities(last_updated_warehouse: datetime) -> List[List]: 47 | """Connect to Strava API and get data up until last_updated_warehouse datetime.""" 48 | header = connect_strava() 49 | all_activities = [] 50 | activity_num = 1 51 | # while activity has not been extracted yet 52 | while True: 53 | # Strava has a rate limit of 100 requests every 15 mins 54 | if activity_num % 75 == 0: 55 | print("Rate limit hit, sleeping for 15 minutes...") 56 | time.sleep(15 * 60) 57 | try: 58 | response_json = make_strava_api_request(header, activity_num) 59 | # rate limit has exceeded, wait 15 minutes 60 | except KeyError: 61 | print("Rate limit hit, sleeping for 15 minutes...") 62 | time.sleep(15 * 60) 63 | response_json = make_strava_api_request(header, activity_num) 64 | date = response_json["start_date"] 65 | converted_date = convert_strava_start_date(date) 66 | if converted_date > last_updated_warehouse: 67 | activity = parse_api_output(response_json) 68 | all_activities.append(activity) 69 | activity_num += 1 70 | else: 71 | break 72 | return all_activities 73 | 74 | 75 | def save_data_to_csv(all_activities: List[List]) -> str: 76 | """Save extracted data to .csv file.""" 77 | todays_date = datetime.today().strftime("%Y_%m_%d") 78 | export_file_path = f"strava_data/{todays_date}_export_file.csv" 79 | with open(export_file_path, "w") as fp: 80 | csvw = csv.writer(fp, delimiter="|") 81 | csvw.writerows(all_activities) 82 | print("Strava data extracted from API!") 83 | return export_file_path 84 | 85 | 86 | def upload_csv_to_s3(export_file_path: str) -> None: 87 | """Upload extracted .csv file to s3 bucket.""" 88 | s3 = connect_s3() 89 | s3.upload_file(export_file_path, "strava-data-pipeline", export_file_path) 90 | print("Strava data uploaded to s3 bucket!") 91 | 92 | 93 | def save_extraction_date_to_database(current_datetime: datetime) -> None: 94 | """Update last extraction date in MySQL database to todays datetime.""" 95 | mysql_conn = connect_mysql() 96 | update_last_updated_query = """ 97 | INSERT INTO last_extracted (LastUpdated) 98 | VALUES (%s);""" 99 | mysql_cursor = mysql_conn.cursor() 100 | mysql_cursor.execute(update_last_updated_query, current_datetime) 101 | mysql_conn.commit() 102 | print("Extraction datetime added to MySQL database!") 103 | 104 | 105 | if __name__ == "__main__": 106 | last_updated_warehouse, current_datetime = get_date_of_last_warehouse_update() 107 | all_activities = extract_strava_activities(last_updated_warehouse) 108 | if all_activities: 109 | export_file_path = save_data_to_csv(all_activities) 110 | 
upload_csv_to_s3(export_file_path) 111 | save_extraction_date_to_database(current_datetime) 112 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Strava ELT Data Pipeline 2 | **EtLT of my own Strava data using the Strava API, MySQL, Python, S3, Redshift, and Airflow** 3 | 4 | ![system_diagram](https://github.com/jackmleitch/StravaDataPipline/blob/master/images/system_diagram.png) 5 | 6 | **I built an EtLT pipeline to ingest my [Strava data](https://www.strava.com/athletes/5028644) from the Strava API and load it into a [Redshift](https://aws.amazon.com/redshift/) data warehouse. This pipeline is then run once a week using [Airflow](https://airflow.apache.org) to extract any new activity data. The end goal is to use this data warehouse to build an automatically updating dashboard in Tableau and also to trigger automatic re-training of my [Strava Kudos Prediction model](https://github.com/jackmleitch/StravaKudos).** 7 | 8 | 9 | 10 | ## [Data Extraction](https://github.com/jackmleitch/StravaDataPipline/blob/master/src/extract_strava_data.py) 11 | 12 | My Strava activity data is first **ingested incrementally** using the [Strava API](https://developers.strava.com) and 13 | loaded into an **S3 bucket**. On each ingestion run, we query a MySQL database to get the date of the last extraction: 14 | 15 | ```python 16 | def get_date_of_last_warehouse_update() -> Tuple[datetime, str]: 17 | """ 18 | Get the datetime of last time data was extracted from Strava API 19 | by querying MySQL database and also return current datetime. 20 | """ 21 | mysql_conn = connect_mysql() 22 | get_last_updated_query = """ 23 | SELECT COALESCE(MAX(LastUpdated), '1900-01-01') 24 | FROM last_extracted;""" 25 | mysql_cursor = mysql_conn.cursor() 26 | mysql_cursor.execute(get_last_updated_query) 27 | result = mysql_cursor.fetchone() 28 | last_updated_warehouse = datetime.strptime(result[0], "%Y-%m-%d %H:%M:%S") 29 | current_datetime = datetime.today().strftime("%Y-%m-%d %H:%M:%S") 30 | return last_updated_warehouse, current_datetime 31 | ``` 32 | 33 | We then make repeated calls to the REST API using the `requests` library until we have all activity data between now and `last_updated_warehouse`. We include a `time.sleep()` command to comply with Strava's rate limit of 100 requests per 15 minutes. We also include `try: except:` blocks to combat 34 | missing data on certain activities.
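The request header used in these API calls comes from `connect_strava` in `src/utilities/strava_api_utils.py`, which exchanges the stored OAuth refresh token for a short-lived access token (shown here in condensed form); the extraction loop that uses it follows below.

```python
def connect_strava() -> Dict[str, str]:
    """Exchange the stored refresh token for an access token and build the auth header."""
    parser = configparser.ConfigParser()
    parser.read("pipeline.conf")
    # Strava's OAuth token endpoint swaps a refresh token for a fresh access token
    payload = {
        "client_id": parser.get("strava_api_config", "client_id"),
        "client_secret": parser.get("strava_api_config", "client_secret"),
        "refresh_token": parser.get("strava_api_config", "refresh_token"),
        "grant_type": "refresh_token",
        "f": "json",
    }
    res = requests.post(parser.get("strava_api_config", "auth_url"), data=payload, verify=False)
    access_token = res.json()["access_token"]
    return {"Authorization": "Bearer " + access_token}
```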
35 | 36 | ```python 37 | def make_strava_api_request( 38 | header: Dict[str, str], activity_num: int = 1 39 | ) -> Dict[str, str]: 40 | """Use Strava API to get recent page of new data.""" 41 | param = {"per_page": 1, "page": activity_num} 42 | api_response = requests.get( 43 | "https://www.strava.com/api/v3/athlete/activities", headers=header, params=param 44 | ).json() 45 | response_json = api_response[0] 46 | return response_json 47 | 48 | def extract_strava_activities(last_updated_warehouse: datetime) -> List[List]: 49 | """Connect to Strava API and get data up until last_updated_warehouse datetime.""" 50 | header = connect_strava() 51 | all_activities = [] 52 | activity_num = 1 53 | # while activity has not been extracted yet 54 | while True: 55 | # Strava has a rate limit of 100 requests every 15 mins 56 | if activity_num % 75 == 0: 57 | print("Rate limit hit, sleeping for 15 minutes...") 58 | time.sleep(15 * 60) 59 | try: 60 | response_json = make_strava_api_request(header, activity_num) 61 | # rate limit has exceeded, wait 15 minutes 62 | except KeyError: 63 | print("Rate limit hit, sleeping for 15 minutes...") 64 | time.sleep(15 * 60) 65 | response_json = make_strava_api_request(header, activity_num) 66 | date = response_json["start_date"] 67 | if date > last_updated_warehouse: 68 | activity = parse_api_output(response_json) 69 | all_activities.append(activity) 70 | activity_num += 1 71 | else: 72 | break 73 | return all_activities 74 | ``` 75 | 76 | Before exporting the data locally into a flat pipe-delimited `.csv` file, we perform a few minor transformations such as formatting dates and timezone columns. Hence the little 't' in EtLT! After we save the data, it is then uploaded to an S3 bucket for later loading into the data warehouse. 77 | 78 | ```python 79 | def save_data_to_csv(all_activities: List[List]) -> str: 80 | """Save extracted data to .csv file.""" 81 | todays_date = datetime.today().strftime("%Y_%m_%d") 82 | export_file_path = f"strava_data/{todays_date}_export_file.csv" 83 | with open(export_file_path, "w") as fp: 84 | csvw = csv.writer(fp, delimiter="|") 85 | csvw.writerows(all_activities) 86 | return export_file_path 87 | 88 | def upload_csv_to_s3(export_file_path: str) -> None: 89 | """Upload extracted .csv file to s3 bucket.""" 90 | s3 = connect_s3() 91 | s3.upload_file(export_file_path, "strava-data-pipeline", export_file_path) 92 | ``` 93 | 94 | Finally, we execute a query to update the MySQL database on the last date of extraction. 95 | 96 | ```python 97 | def save_extraction_date_to_database(current_datetime: datetime) -> None: 98 | """Update last extraction date in MySQL database to todays datetime.""" 99 | mysql_conn = connect_mysql() 100 | update_last_updated_query = """ 101 | INSERT INTO last_extracted (LastUpdated) 102 | VALUES (%s);""" 103 | mysql_cursor = mysql_conn.cursor() 104 | mysql_cursor.execute(update_last_updated_query, current_datetime) 105 | mysql_conn.commit() 106 | ``` 107 | 108 | ## [Data Loading](https://github.com/jackmleitch/StravaDataPipline/blob/master/src/copy_to_redshift_staging.py) 109 | 110 | Once the data is loaded into the S3 data lake it is then loaded into our **Redshift** data warehouse. We load the data in two parts: 111 | 112 | - We first load the data from the S3 bucket into a staging table with the same schema as our production table 113 | - We then perform validation tests between the staging table and the production table (see [here](#data-validation)). 
If all critical tests pass we then remove all duplicates between the two tables by first deleting them from the production table. The data from the staging table is then fully inserted into the production table. 114 | 115 | ```python 116 | def copy_to_redshift_staging(table_name: str, rs_conn, s3_file_path: str, role_string: str) -> None: 117 | """Copy data from s3 into Redshift staging table.""" 118 | # write queries to execute on redshift 119 | create_temp_table = f"CREATE TABLE staging_table (LIKE {table_name});" 120 | sql_copy_to_temp = f"COPY staging_table FROM {s3_file_path} iam_role {role_string};" 121 | 122 | # execute queries 123 | cur = rs_conn.cursor() 124 | cur.execute(create_temp_table) 125 | cur.execute(sql_copy_to_temp) 126 | rs_conn.commit() 127 | 128 | def redshift_staging_to_production(table_name: str, rs_conn) -> None: 129 | """Copy data from Redshift staging table to production table.""" 130 | # if id already exists in table, we remove it and add new id record during load 131 | delete_from_table = f"DELETE FROM {table_name} USING staging_table WHERE '{table_name}'.id = staging_table.id;" 132 | insert_into_table = f"INSERT INTO {table_name} SELECT * FROM staging_table;" 133 | drop_temp_table = "DROP TABLE staging_table;" 134 | # execute queries 135 | cur = rs_conn.cursor() 136 | cur.execute(delete_from_table) 137 | cur.execute(insert_into_table) 138 | cur.execute(drop_temp_table) 139 | rs_conn.commit() 140 | ``` 141 | 142 | ## [Data Validation](https://github.com/jackmleitch/StravaDataPipline/blob/master/src/validator.py) 143 | 144 | We implement a simple framework in python that is used to execute SQL-based data validation checks in our data pipeline. Although it lacks many features we would expect to see in a production environment, it is a good start and provides some insight into how we can improve our infrastructure. 145 | 146 | The `validatior.py` script executes a pair of SQL scripts on Redshift and compares the two based on a comparison operator (>, <, =). The test then passes or fails based on the outcome of the two executed scripts. We execute this validation step after we upload our newly ingested data to the staging table but before we insert this table into the production table. 147 | 148 | ```python 149 | def execute_test(db_conn, script_1: str, script_2: str, comp_operator: str) -> bool: 150 | """ 151 | Execute test made up of two scripts and a comparison operator 152 | :param comp_operator: comparison operator to compare script outcome 153 | (equals, greater_equals, greater, less_equals, less, not_equals) 154 | :return: True/False for test pass/fail 155 | """ 156 | # execute the 1st script and store the result 157 | cursor = db_conn.cursor() 158 | sql_file = open(script_1, "r") 159 | cursor.execute(sql_file.read()) 160 | record = cursor.fetchone() 161 | result_1 = record[0] 162 | db_conn.commit() 163 | cursor.close() 164 | 165 | # execute the 2nd script and store the result 166 | ... 167 | 168 | print("Result 1 = " + str(result_1)) 169 | print("Result 2 = " + str(result_2)) 170 | 171 | # compare values based on the comp_operator 172 | if comp_operator == "equals": return result_1 == result_2 173 | elif comp_operator == "greater_equals": return result_1 >= result_2 174 | ... 
175 | 176 | # tests have failed if we make it here 177 | return False 178 | ``` 179 | 180 | As a starting point, I implemented checks that look for duplicates, compare the total number of activities in the staging table (Airflow is set to execute at the end of each week) against the average historical weekly activity count, and compare the distribution of the Kudos Count metric against its historical distribution using the z-score. In other words, the last two queries check whether the values are within a 90% confidence interval in either direction of what's expected based on history. For example, the following query computes the z-score for the total activities uploaded in a given week (found in the staging table). 181 | 182 | ```sql 183 | with activities_by_week AS ( 184 | SELECT 185 | date_trunc('week', start_date::date) AS activity_week, 186 | COUNT(*) AS activity_count 187 | FROM public.strava_activity_data 188 | GROUP BY activity_week 189 | ORDER BY activity_week 190 | ), 191 | 192 | activities_by_week_statistics AS ( 193 | SELECT 194 | AVG(activity_count) AS avg_activities_per_week, 195 | STDDEV(activity_count) AS std_activities_per_week 196 | FROM activities_by_week 197 | ), 198 | 199 | staging_table_weekly_count AS ( 200 | SELECT COUNT(*) AS staging_weekly_count 201 | FROM staging_table 202 | ), 203 | 204 | activity_count_zscore AS ( 205 | SELECT 206 | s.staging_weekly_count AS staging_table_count, 207 | p.avg_activities_per_week AS avg_activities_per_week, 208 | p.std_activities_per_week as std_activities_per_week, 209 | --compute zscore for weekly activity count 210 | (staging_table_count - avg_activities_per_week) / std_activities_per_week AS z_score 211 | FROM staging_table_weekly_count s, activities_by_week_statistics p 212 | ) 213 | 214 | SELECT ABS(z_score) AS two_sized_zscore 215 | FROM activity_count_zscore; 216 | ``` 217 | 218 | By running 219 | 220 | ```sh 221 | python src/validator.py sql/validation/weekly_activity_count_zscore.sql sql/validation/zscore_90_twosided.sql greater_equals warn 222 | ``` 223 | 224 | in the terminal, we compare the z-score found in the previous query to the 90% confidence-interval z-score (`SELECT 1.645;`). The 'warn' at the end of the command tells the script not to exit with an error but to warn us instead. On the other hand, if we add 'halt' to the end, the script will exit with an error code and halt all further downstream tasks. 225 | 226 | We also implement a system to send a notification to a given Slack channel with the validation test results; this validation system was inspired by the Data Validation in Pipelines chapter of James Densmore's excellent Data Pipelines book.
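The webhook URL is not hard-coded: the main block of `validator.py` reads it from the `[slack_config]` section of `pipeline.conf` and passes it to the notification helper defined below.

```python
# load the Slack incoming-webhook URL from pipeline.conf and post the test result
parser = configparser.ConfigParser()
parser.read("pipeline.conf")
webhook_url = parser.get("slack_config", "webhook_url")
send_slack_notification(webhook_url, script_1, script_2, comp_operator, test_result)
```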
227 | 228 | ```python 229 | def send_slack_notification(webhook_url: str, script_1: str, script_2: str, 230 | comp_operator: str, test_result: bool) -> bool: 231 | try: 232 | if test_result == True: 233 | message = (f"Validation Test Passed!: {script_1} / {script_2} / {comp_operator}") 234 | else: 235 | message = (f"Validation Test FAILED!: {script_1} / {script_2} / {comp_operator}") 236 | # send test result to Slack 237 | slack_data = {"text": message} 238 | response = requests.post(webhook_url, data=json.dumps(slack_data), 239 | headers={"Content-Type": "application/json"}) 240 | # if post request to Slack fails 241 | if response.status_code != 200: 242 | print(response) 243 | return False 244 | except Exception as e: 245 | print("Error sending slack notification") 246 | print(str(e)) 247 | return False 248 | ``` 249 | 250 | We then combine all the tests to a shell script `validate_load_data.sh` that we run after loading the data from the S3 bucket to a staging table but before we insert this data into the production table. Running this pipeline on last week's data gives us the following output: 251 | ![slack](https://github.com/jackmleitch/StravaDataPipline/blob/master/images/slack_output.png) 252 | It's great to see that our second test failed because I didn't run anywhere near as much last week as I usually do! 253 | 254 | Although this validation framework is very basic, it is a good foundation that can be built upon at a later date. 255 | 256 | ## [Data Transformations](https://github.com/jackmleitch/StravaDataPipline/blob/master/src/build_data_model.py) 257 | 258 | Now the data has been ingested into the data warehouse, the next step in the pipeline is data transformations. Data transformations in this case include both non-contextual manipulation of the data and modeling of the data with context and logic in mind. The benefit of using the ELT methodology instead of the ETL framework, in this case, is that it gives us, the end-user, the freedom to transform the data the way we need as opposed to having a fixed data model that we cannot change (or at least not change without hassle). In my case, I am connecting my Redshift data warehouse to Tableau building out a dashboard. We can, for example, build a data model to extract monthly statistics: 259 | 260 | ```sql 261 | CREATE TABLE IF NOT EXISTS activity_summary_monthly ( 262 | activity_month numeric, 263 | ... 264 | std_kudos real 265 | ); 266 | 267 | TRUNCATE activity_summary_monthly; 268 | 269 | INSERT INTO activity_summary_monthly 270 | SELECT DATE_TRUNC('month', start_date::date) AS activity_month, 271 | ROUND(SUM(distance)/1609) AS total_miles_ran, 272 | ... 273 | ROUND(STDDEV(kudos_count), 1) AS std_kudos 274 | FROM public.strava_activity_data 275 | WHERE type='Run' 276 | GROUP BY activity_month 277 | ORDER BY activity_month; 278 | ``` 279 | 280 | We can also build more complicated data models. 
For example, we can get the week-by-week percentage change in total weekly kudos broken down by workout type: 281 | 282 | ```sql 283 | WITH weekly_kudos_count AS ( 284 | SELECT DATE_TRUNC('week', start_date::date) AS week_of_year, 285 | workout_type, 286 | SUM(kudos_count) AS total_kudos 287 | FROM public.strava_activity_data 288 | WHERE type = 'Run' AND DATE_PART('year', start_date) = '2022' 289 | GROUP BY week_of_year, workout_type 290 | ), 291 | 292 | weekly_kudos_count_lag AS ( 293 | SELECT *, 294 | LAG(total_kudos) OVER(PARTITION BY workout_type ORDER BY week_of_year) 295 | AS previous_week_total_kudos 296 | FROM weekly_kudos_count 297 | ) 298 | 299 | SELECT *, 300 | COALESCE(ROUND(((total_kudos - previous_week_total_kudos)/previous_week_total_kudos)*100),0) 301 | AS percent_kudos_change 302 | FROM weekly_kudos_count_lag; 303 | ``` 304 | 305 | A further direction to take this would be to utilize a 3rd party tool such as [dbt](https://www.getdbt.com) to implement data modeling. 306 | 307 | ## [Putting it All Together with Airflow](https://github.com/jackmleitch/StravaDataPipline/blob/master/airflow/dags/elt_strava_pipeline.py) 308 | 309 | We create a DAG to orchestrate our data pipeline. We set the pipeline to run weekly which means it will run once a week at midnight on Sunday morning. As seen in the diagram below, our DAG will: 310 | 311 | - First, extract any recent data using the Strava API and upload it to an S3 bucket 312 | - It will then load this data into a staging table in our Redshift cluster 313 | - The 3 validation tests will then be executed, messaging our Slack channel the results 314 | - The staging table will then be inserted into the production table, removing any duplicates in the process 315 | - Finally, a monthly aggregation data model will be created in a new table `activity_summary_monthly` 316 | 317 | ![dag](https://github.com/jackmleitch/StravaDataPipline/blob/master/images/DAG.png) 318 | 319 | ## Data Visualization 320 | 321 | With the data transformations done we were then able to build out an interactive dashboard using Tableau that updates automatically when new data gets ingested to the data warehouse, which is weekly. The dashboard I created was built to investigate how Kudos on my Strava activities changes over time and location. After building this project I shut down the Redshift server to not incur any costs but a screenshot of the dashboard can be seen below. 322 | ![dashboard](https://github.com/jackmleitch/StravaDataPipline/blob/master/images/dashboard.png) 323 | ![dashboard](https://github.com/jackmleitch/StravaDataPipline/blob/master/images/dashboard_map.png) 324 | 325 | ## [Unit Testing](https://github.com/jackmleitch/StravaDataPipline/tree/master/tests) 326 | 327 | Unit testing was performed using PyTest and all tests can be found in the tests directory. For example, below we see a unit test to test the `make_strava_api_request` function. It asserts that a dictionary response is received and also that the response contains an 'id' key that is an integer. 328 | 329 | ```python 330 | @pytest.mark.filterwarnings("ignore::urllib3.exceptions.InsecureRequestWarning") 331 | def test_make_strava_api_request(): 332 | header = connect_strava() 333 | response_json = make_strava_api_request(header=header, activity_num=1) 334 | assert "id" in response_json.keys(), "Response dictionary does not contain id key." 335 | assert isinstance(response_json, dict), "API should respond with a dictionary." 
336 | assert isinstance(response_json["id"], int), "Activity ID should be an integer." 337 | ``` 338 | 339 | ## Further Directions and Considerations 340 | - **Improve Airflow with Docker**: I could have used the docker image of Airflow to run the pipeline in a Docker container which would've made things more robust. This would also make deploying the pipeline at scale much easier! 341 | - **Implement more validation tests**: For a real production pipeline, I would implement more validation tests all through the pipeline. I could, for example, have used an open-source tool like [Great Expectations](https://greatexpectations.io/). 342 | - **Simplify the process**: The pipeline could probably be run in a much simpler way. An alternative could be to use Cron for orchestration and PostgreSQL or SQLite for storage. 343 | - **Data streaming**: To keep the Dashboard consistently up to date we could benefit from something like [Kafka](https://kafka.apache.org/). 344 | -------------------------------------------------------------------------------- /strava_data/2022_06_18_export_file.csv: -------------------------------------------------------------------------------- 1 | 7322776451|Like a Sunday, but on a Friday |16201.8|4333|4594|341.7|Run|0|United Kingdom|4|41|0|3|3.739|5.643|86.4|27|153.6|177.0|68.0|2022-06-17 08:36:46|Europe/London|50.8445505425334|-0.3920185938477516 2 | 7311897211|What goes up (must come down)|16108.3|3979|4006|178.0|Run|0|United Kingdom|4|38|0|1|4.048|6.913|87.3|26|154.8|176.0|72.0|2022-06-15 08:47:49|Europe/London|50.844477117061615|-0.3920391295105219 3 | 7309124560|Track Tuesdays |16191.4|3913|6807|27.0|Run|0|United Kingdom|2|42|3|1|4.138|7.812|86.4|25|152.0|181.0|59.0|2022-06-14 16:59:25|Europe/London|50.81664743833244|-0.40840077213943005 4 | 7300947554|yarn|13261.8|3669|3689|189.3|Run|0|United Kingdom|0|32|0|1|3.615|5.325|85.9|23|138.4|159.0|17.0|2022-06-13 08:51:03|Europe/London|50.84439321421087|-0.3921159915626049 5 | 7295978518|Always guaranteed to see someone you know 🦖 |20035.5|4999|5589|464.6|Run|0|United Kingdom|2|52|0|1|4.008|8.294|87.1|25|160.8|191.0|115.0|2022-06-12 09:37:12|Europe/London|50.84449429996312|-0.39204390719532967 6 | 7290341010|Knew that sausage role wasn’t a good idea|15081.9|3644|3696|281.6|Run|0|United Kingdom|8|44|1|1|4.139|5.97|87.8|26|174.8|193.0|147.0|2022-06-11 11:02:02|Europe/London|50.844409642741084|-0.392047930508852 7 | 7285886698|Tour de Findon|16108.2|4097|4257|153.0|Run|0|United Kingdom|4|38|3|1|3.932|5.825|86.4|24|155.6|183.0|70.0|2022-06-10 08:30:06|Europe/London|50.844393549486995|-0.39208573289215565 8 | 7280163361|oooo Findon fwends |16106.3|3989|4390|64.0|Run|0|United Kingdom|1|36|3|1|4.038|6.21|86.2|26|153.6|177.0|61.0|2022-06-09 11:29:16|Europe/London|50.844404278323054|-0.3920398000627756 9 | 7274388148|Home sweet home |14850.2|3623|3853|59.0|Run|0|United Kingdom|1|41|5|1|4.099|6.373|86.3|23|160.8|174.0|87.0|2022-06-08 09:41:17|Europe/London|50.8443105686456|-0.39195950143039227 10 | 7265090011|45 min spin class |0.0|2700|2700|0|Ride|12|United Kingdom|0|20|0|1|0.0|0||||||2022-06-06 13:30:00|Europe/London|| 11 | 7259821344|Accidentally gate crashed a fun run |14198.6|3679|3832|63.0|Run|0|United Kingdom|0|42|2|1|3.859|5.622|86.3|24|152.3|168.0|56.0|2022-06-05 14:00:15|America/New_York|42.31149449944496|-71.24005791731179 12 | 7254363932|Chipmunk szn 🐿 |16452.3|3875|3942|45.0|Run|0|United Kingdom|3|44|2|1|4.246|5.856|87.2|23|164.9|186.0|102.0|2022-06-04 14:10:53|America/New_York|42.31146038509905|-71.24012991786003 13 | 
7243323848|No rounding up |15807.9|3858|4193|45.0|Run|0|United Kingdom|1|35|4|1|4.097|5.506|86.9|22|149.6|164.0|46.0|2022-06-02 13:46:55|America/New_York|42.311519142240286|-71.2400468531996 14 | 7237626192|Running late |15285.1|3653|3826|47.0|Run|0|United Kingdom|2|39|0|1|4.184|5.718|87.0|22|158.6|177.0|79.0|2022-06-01 13:20:19|America/New_York|42.311461893841624|-71.23995766974986 15 | 7237626109|Last run in Boi|13430.3|3504|3655|22.0|Run|0|United Kingdom|1|19|2|2|3.833|6.232|87.0|12|147.8|164.0|37.0|2022-05-30 16:09:18|America/Boise|43.59923779964447|-116.19106334634125 16 | 7223151710|Wedding crashers 💒 |13497.8|3606|3714|22.0|Run|0|United Kingdom|0|34|0|1|3.743|5.756|86.5|15|134.2|155.0|17.0|2022-05-29 16:02:38|America/Boise|43.5992899350822|-116.19167815893888 17 | 7211995502|Absolute 🍑 of a day|13372.3|3600|3765|146.0|Run|0|United Kingdom|2|28|0|3|3.715|5.61|86.0|25|149.0|174.0|42.0|2022-05-27 16:03:11|America/Boise|43.59929303638637|-116.19164982810616 18 | 7201196131|Shane’s 🤑|13208.7|3562|3746|158.0|Run|0|United Kingdom|2|32|1|4|3.708|5.038|85.4|25|144.0|168.0|30.0|2022-05-25 15:35:04|America/Boise|43.59936151653528|-116.19166097603738 19 | 7190646603|Last council run |11852.7|3241|3571|14.0|Run|0|United Kingdom|0|36|0|5|3.657|6.836|86.0|20|146.7|165.0|31.0|2022-05-23 15:03:12|America/Boise|43.5992674715817|-116.1916462238878 20 | 7274990825|Evening Run|4774.2|1801|4278|0.0|Run||United Kingdom|0|1|0|1|2.651|5.868|||145.7|175.0|19.0|2022-05-17 02:04:38|America/Boise|43.60059189610183|-116.20267597027123 21 | 7136899590|It’s well ‘ot!|8602.5|2276|2407|4.0|Run|0|United Kingdom|0|19|0|6|3.78|4.665|87.0|26|143.1|162.0|17.0|2022-05-13 17:34:01|America/Los_Angeles|36.801706943660975|-119.69873735681176 22 | 7133978758|WD|4279.2|1351|1409|7.0|Run|0|United Kingdom|0|7|0|4|3.167|4.036|86.3|20|139.5|158.0|6.0|2022-05-13 06:02:32|America/Los_Angeles|36.852870248258114|-119.71146971918643 23 | 7134008507|Conference 10k - 12th|10000.0|1915|1915|0|Run|1|United Kingdom|0|71|4|1|5.222|0||||||2022-05-12 21:10:00|Europe/London|| 24 | 7133729416|WU|5165.9|1473|1632|8.0|Run|0|United Kingdom|0|5|0|5|3.507|4.042|85.9|23|139.9|152.0|7.0|2022-05-13 04:15:55|America/Los_Angeles|36.85295850969851|-119.71145085990429 25 | 7131720466|Shake oot|2032.9|617|641|1.0|Run|0|United Kingdom|0|7|0|8|3.295|3.62|85.6|26|134.8|146.0|2.0|2022-05-12 17:36:16|America/Los_Angeles|36.80155439302325|-119.69898671843112 26 | 7131719718|Post travel shake |2175.7|600|642|2.0|Run|0|United Kingdom|1|10|0|8|3.626|4.155|85.6|26|127.4|141.0|2.0|2022-05-12 00:34:07|America/Los_Angeles|36.80177559144795|-119.69872109591961 27 | 7125215326|Final premeet |8238.9|2110|2968|20.0|Run|0|United Kingdom|0|31|2|3|3.905|8.7|87.1|17|149.8|171.0|27.0|2022-05-11 14:32:31|America/Boise|43.59963443130255|-116.19163834489882 28 | 7121501184|Moocho|11275.0|2944|3025|20.0|Run|0|United Kingdom|3|17|0|4|3.83|5.005|86.6|18|147.5|163.0|33.0|2022-05-10 20:09:23|America/Boise|43.610051376745105|-116.19180455803871 29 | 7116885144|WD|3244.7|952|956|7.0|Run|0|United Kingdom|0|5|0|4|3.408|4.083|86.4|16|152.0|160.0|13.0|2022-05-09 22:20:43|America/Boise|43.60997937619686|-116.19159685447812 30 | 7116884852|Premeet 🥜|7303.4|1521|1547|1.0|Run|3|United Kingdom|0|19|0|7|4.802|8.013|90.7|16|136.9|163.0|8.0|2022-05-09 21:50:00|America/Boise|43.60958098433912|-116.19156542234123 31 | 7116884267|WU|5053.7|1348|2154|10.0|Run|0|United Kingdom|0|7|0|10|3.749|6.695|86.2|22|128.9|152.0|5.0|2022-05-09 21:10:00|America/Boise|43.60997786745429|-116.19166374206543 32 | 
7109522237|A more mercurial week than de Mello himself |17723.5|5095|6116|257.0|Run|0|United Kingdom|5|58|6|2|3.479|5.238|85.5|13|157.9|185.0|105.0|2022-05-08 14:41:14|America/Boise|43.59935070388019|-116.19169928133488 33 | 7098456682|WD|4955.6|1423|1597|26.0|Run|0|United Kingdom|0|10|0|6|3.482|4.382|86.2|26|148.4|162.0|15.0|2022-05-06 17:17:17|America/Boise|43.609696654602885|-116.19183699600399 34 | 7098324663|🦌|4949.3|1233|1255|23.0|Run|0|United Kingdom|0|21|0|10|4.014|8.303|83.4|23|140.2|161.0|6.0|2022-05-06 16:50:52|America/Boise|43.60916473902762|-116.1923492141068 35 | 7098282035|WU|5102.3|1409|2282|39.0|Run|0|United Kingdom|0|14|0|9|3.621|7.056|86.1|21|145.2|163.0|13.0|2022-05-06 16:09:35|America/Boise|43.610073421150446|-116.19188318029046 36 | 7092871960|Run with Dad!|18603.7|5082|6634|97.0|Run|0|United Kingdom|4|42|3|1|3.661|7.978|86.0|23|142.3|169.0|37.0|2022-05-05 14:33:23|America/Boise|43.59956528060138|-116.19167614728212 37 | 7087485574|Happy May 4th x|16235.2|4426|4623|18.0|Run|0|United Kingdom|0|34|0|5|3.668|5.926|85.7|16|148.6|167.0|60.0|2022-05-04 15:02:10|America/Boise|43.59938037581742|-116.19151915423572 38 | 7083736600|Foot all g 🤑|13428.3|3605|3688|15.0|Run|0|United Kingdom|2|31|0|2|3.725|6.662|86.1|23|148.3|162.0|43.0|2022-05-03 20:12:51|America/Boise|43.61008641310036|-116.19173599407077 39 | 7068228954|Fresno 5k DNF|2200.0|366|366|0|Run|1|United Kingdom|0|49|7|1|6.011|0||||||2022-04-30 20:11:00|Europe/London|| 40 | 7067941717|WU|4392.1|1266|1594|12.0|Run|0|United Kingdom|0|12|0|3|3.469|5.847|85.5|25|152.3|169.0|18.0|2022-05-01 03:18:52|America/Los_Angeles|36.853579273447394|-119.71291626803577 41 | 7066435841|Shakeout |3307.9|930|983|2.0|Run|0|United Kingdom|0|18|0|2|3.557|4.286|86.2|27|127.3|142.0|3.0|2022-04-30 17:32:33|America/Los_Angeles|36.801529666408896|-119.69892251305282 42 | 7061992663|Shake oot |2148.8|600|633|2.0|Run|0|United Kingdom|1|11|1|5|3.581|4.223|86.4|29|124.2|140.0|2.0|2022-04-30 00:03:48|America/Los_Angeles|36.8015812151134|-119.698880687356 43 | 7060213538|Premeet 🫡 |8078.2|2151|2962|23.0|Run|0|United Kingdom|0|30|0|3|3.756|7.037|87.8|17|152.7|168.0|33.0|2022-04-29 14:31:37|America/Boise|43.59927845187485|-116.19169224053621 44 | 7057053076|Heat sheet Sherman strikes again|12886.6|3399|3489|16.0|Run|0|United Kingdom|2|31|1|4|3.791|5.362|86.7|17|156.3|173.0|65.0|2022-04-28 21:09:19|America/Boise|43.609957080334425|-116.19155871681869 45 | 7049901296|Simp loop |16116.3|4348|4575|26.6|Run|0|United Kingdom|0|29|0|5|3.707|5.284||||||2022-04-27 14:31:26|America/Boise|43.599389512091875|-116.19174538180232 46 | 7046246315|WD|3257.0|923|955|33.0|Run|0|United Kingdom|0|5|0|2|3.529|4.267|87.3|22|156.1|165.0|18.0|2022-04-26 21:23:26|America/Boise|43.61020510084927|-116.19178745895624 47 | 7046231856|Pathetic fallacy |7491.4|1496|1606|15.0|Run|3|United Kingdom|0|33|0|10|5.008|7.748|90.3|22|148.1|166.0|15.0|2022-04-26 20:49:31|America/Boise|43.60959095880389|-116.19158369489014 48 | 7046168924|WU|5149.3|1402|2173|48.0|Run|0|United Kingdom|0|11|0|11|3.673|7.057|87.0|22|148.5|166.0|17.0|2022-04-26 20:10:08|America/Boise|43.60998876392841|-116.19166298769414 49 | 7044094427|It’s always refreshing seeing a duck |6482.1|1778|1795|9.0|Run|0|United Kingdom|0|25|0|2|3.646|7.402|87.1|18|146.3|157.0|15.0|2022-04-26 14:30:58|America/Boise|43.59928851015866|-116.1916446313262 50 | 7041095423|Dub|7527.5|2007|3173|48.0|Run|0|United Kingdom|0|28|0|8|3.751|8.654|86.8|27|132.7|150.0|8.0|2022-04-25 21:08:35|America/Boise|43.60993185080588|-116.19169123470783 51 | 
7039095828|A new trail! |13022.6|3673|3934|158.0|Run|0|United Kingdom|1|37|1|3|3.545|5.364|86.1|18|140.1|166.0|27.0|2022-04-25 14:31:15|America/Boise|43.5997714754194|-116.1917307972908 52 | 7029092533|bobs your uncle 👦🏻|21044.3|5899|6238|509.5|Run|2|United Kingdom|9|54|2|7|3.567|5.78|86.8|15|154.9|167.0|107.0|2022-04-23 15:06:59|America/Boise|43.60998574644327|-116.19163289666176 53 | 7023958196|WD|5252.2|1535|4904|21.0|Run|0|United Kingdom|1|7|0|2|3.422|4.908|86.6|19|150.1|164.0|22.0|2022-04-22 16:36:44|America/Boise|43.61004785634577|-116.19174596853554 54 | 7023957365|4 x 2k [lap jog], 4 x 150m strides 😎|12118.5|2410|2702|7.0|Run|3|United Kingdom|1|45|3|3|5.028|9.015|92.2|13|153.4|167.0|38.0|2022-04-22 15:47:35|America/Boise|43.60958140343428|-116.19161965325475 55 | 7023955647|WU|5181.8|1442|2122|17.0|Run|0|United Kingdom|0|8|0|3|3.593|7.107|86.8|18|139.5|171.0|10.0|2022-04-22 15:01:12|America/Boise|43.599653374403715|-116.19180397130549 56 | 7020038252|pee stop|6608.7|1783|1839|48.0|Run|0|United Kingdom|1|23|0|5|3.707|4.522|86.2|21|130.8|151.0|7.0|2022-04-21 21:10:59|America/Boise|43.6100170109421|-116.19168620556593 57 | 7018208333|grass |16343.1|4315|4431|17.0|Run|0|United Kingdom|2|37|0|3|3.788|5.148|86.6|15|153.6|180.0|70.0|2022-04-21 14:31:52|America/Boise|43.599466206505895|-116.19164546951652 58 | 7012927986|3 bears 🐻 |19040.2|5277|5639|289.3|Run|2|United Kingdom|1|48|1|3|3.608|5.124|86.1|12|152.4|174.0|81.0|2022-04-20 14:21:30|America/Boise|43.5995003208518|-116.19144346565008 59 | 7009400417|WD|3233.2|982|1019|22.0|Run|0|United Kingdom|0|5|0|4|3.292|4.278|86.7|21|143.9|162.0|7.0|2022-04-19 21:39:22|America/Boise|43.610201831907034|-116.19200287386775 60 | 7009399752|5 x k [2:30], 4 x 400 [70s] 🦊|8932.9|1901|2124|12.0|Run|3|United Kingdom|2|45|4|5|4.699|8.603|87.8|20|139.4|167.0|13.0|2022-04-19 20:54:28|America/Boise|43.60963177867234|-116.19165183976293 61 | 7009398265|WU|5186.9|1430|2396|35.0|Run|0|United Kingdom|0|5|0|9|3.627|7.398|85.8|23|131.8|151.0|5.0|2022-04-19 20:11:32|America/Boise|43.61004609614611|-116.19167841039598 62 | 7007343326|The geese have been on one recently |6447.0|1760|1775|6.0|Run|0|United Kingdom|1|25|0|3|3.663|6.218|86.6|15|154.3|178.0|30.0|2022-04-19 14:30:57|America/Boise|43.599505769088864|-116.1915790848434 63 | 7004240783|Sprints |7083.4|1868|2944|47.0|Run|0|United Kingdom|0|28|0|5|3.792|7.834|86.6|27|135.3|150.0|7.0|2022-04-18 21:09:45|America/Boise|43.61006487160921|-116.19187379255891 64 | 7002695921|usual from the usual |16174.2|4465|4945|183.0|Run|0|United Kingdom|4|36|2|4|3.622|5.632|86.0|17|150.6|172.0|61.0|2022-04-18 14:32:07|America/Boise|43.599514570087194|-116.19151228107512 65 | 6997835435|back in boi|12348.4|3413|3647|24.0|Run|0|United Kingdom|0|32|0|2|3.618|4.896|86.1|16|152.6|176.0|56.0|2022-04-17 16:02:39|America/Boise|43.599340645596385|-116.19161797687411 66 | 6993002466|Some fantastic trails once we got the route right|18133.2|5489|5835|301.0|Run|2|United Kingdom|0|62|5|5|3.304|5.512|85.3|15|150.9|170.0|79.0|2022-04-16 14:24:02|America/Los_Angeles|34.12156549282372|-117.77475798502564 67 | 6986819001|Didn’t manage to find the trail|9563.7|2863|3078|38.0|Run|0|United Kingdom|0|26|0|3|3.34|4.986|85.9|21|123.7|142.0|9.0|2022-04-15 16:05:23|America/Los_Angeles|34.07303100451827|-117.87631220184267 68 | 6983717782|Mt *Sac 10,000m invitational - 29:33|10000.0|1773|1773|0|Run|1|United Kingdom|0|66|3|1|5.64|0||||||2022-04-14 19:00:00|Europe/London|| 69 | 6983416231|WU|4901.5|1273|2066|7.0|Run|0|United 
Kingdom|0|16|0|3|3.85|6.716|86.3|21|148.1|163.0|13.0|2022-04-15 02:29:54|America/Los_Angeles|34.04471735469997|-117.83730994910002 70 | 6981679558|Shake oot |3157.6|900|964|2.0|Run|0|United Kingdom|0|19|0|6|3.508|4.416|85.9|24|144.7|161.0|7.0|2022-04-14 17:03:10|America/Los_Angeles|34.07277585938573|-117.87622821517289 71 | 6978028215|Walnut 🔩 |3225.6|938|950|26.0|Run|0|United Kingdom|0|17|0|4|3.439|4.812|84.8|25|127.6|144.0|3.0|2022-04-13 23:49:08|America/Los_Angeles|34.07305589877069|-117.87593945860863 72 | 6975773167|Why tf is there snow 😵‍💫|8443.2|2357|3621|31.0|Run|0|United Kingdom|0|31|0|6|3.582|6.598|86.7|13|131.1|155.0|10.0|2022-04-13 14:32:29|America/Boise|43.59956922009587|-116.19171981699765 73 | 6972384502|New 8 mile loop|12966.5|3545|3663|17.0|Run|0|United Kingdom|1|32|0|7|3.658|4.721|86.0|20|144.7|184.0|37.0|2022-04-12 20:10:08|America/Boise|43.61005607061088|-116.19164505042136 74 | 6967807416|WD|3302.9|1036|1045|17.0|Run|0|United Kingdom|0|7|0|4|3.188|4.104|85.9|15|154.4|166.0|18.0|2022-04-11 22:32:33|America/Boise|43.609928246587515|-116.19163348339498 75 | 6967122122|Premeet |8959.4|1807|1857|3.0|Run|3|United Kingdom|0|30|1|7|4.958|7.588|89.4|14|154.0|168.0|30.0|2022-04-11 21:50:19|America/Boise|43.60958450473845|-116.19158260524273 76 | 6967121212|WU|5164.4|1430|2253|34.0|Run|0|United Kingdom|0|12|0|9|3.611|7.007|86.2|12|149.5|184.0|16.0|2022-04-11 21:11:27|America/Boise|43.61002279445529|-116.19164194911718 77 | 6965059455|Double |6505.9|1801|1801|12.0|Run|0|United Kingdom|0|25|0|3|3.612|4.747|86.6|20|146.5|161.0|17.0|2022-04-11 14:30:54|America/Boise|43.599371491000056|-116.19158763438463 78 | 6961159931|little one to round off the week|8612.6|2354|2717|14.0|Run|0|United Kingdom|0|27|0|4|3.659|6.588|87.1|17|150.8|169.0|33.0|2022-04-10 16:01:00|America/Boise|43.59932438470423|-116.19157330133021 79 | 6955554387|Shortened long one |21108.9|5515|5756|37.0|Run|2|United Kingdom|2|47|0|4|3.828|5.224|87.1|12|154.5|170.0|98.0|2022-04-09 15:08:27|America/Boise|43.61004224047065|-116.1916602216661 80 | 6951268760|WD|3828.5|1051|1059|21.0|Run|0|United Kingdom|0|12|0|6|3.643|4.639|86.1|25|148.5|165.0|10.0|2022-04-08 21:31:01|America/Boise|43.61603940837085|-116.16000931710005 81 | 6951258010|Stay strapped or get clapped|10306.7|2431|2441|31.0|Run|0|United Kingdom|0|43|0|8|4.24|6.892|88.7|24|144.1|157.0|16.0|2022-04-08 20:44:36|America/Boise|43.616108894348145|-116.15987210534513 82 | 6951256900|WU|4978.6|1360|1608|87.0|Run|0|United Kingdom|1|12|0|7|3.661|6.002|86.3|25|149.9|163.0|17.0|2022-04-08 20:12:30|America/Boise|43.6101068649441|-116.19172752834857 83 | 6949822800|Baker Street|6444.4|1778|1778|8.0|Run|0|United Kingdom|0|29|0|3|3.624|5.908|86.5|15|147.6|160.0|19.0|2022-04-08 14:31:31|America/Boise|43.5992051102221|-116.1916427873075 84 | 6946966309|Double |8056.0|2197|2247|46.0|Run|0|United Kingdom|1|29|0|4|3.667|5.074|85.4|24|149.6|164.0|26.0|2022-04-07 21:09:33|America/Boise|43.610084652900696|-116.19167841039598 85 | 6945406146|footies |14376.2|3938|4170|198.0|Run|0|United Kingdom|0|28|0|2|3.651|5.61|86.5|17|144.0|162.0|29.0|2022-04-07 14:31:29|America/Boise|43.59929068945348|-116.19169131852686 86 | 6940852536|🤧 🤧 |19334.8|5100|5259|205.0|Run|2|United Kingdom|6|43|0|1|3.791|5.702|86.1|20|154.4|179.0|81.0|2022-04-06 15:02:51|America/Boise|43.59933695755899|-116.19158587418497 87 | 6937323868|WD|3033.0|867|872|15.0|Run|0|United Kingdom|0|10|0|1|3.498|4.49|86.8|15|152.6|163.0|13.0|2022-04-05 21:47:39|America/Boise|43.61000770702958|-116.19164354167879 88 | 6937256917|6 mile 
threshold (29:58), 6 x 200m |12883.1|2678|2970|16.0|Run|3|United Kingdom|0|43|0|4|4.811|8.606|91.4|14|149.0|161.0|28.0|2022-04-05 20:48:21|America/Boise|43.603730499744415|-116.19224770925939 89 | 6937255726|WU|4988.3|1406|2094|17.0|Run|0|United Kingdom|0|12|0|6|3.548|7.572|85.8|18|146.6|163.0|11.0|2022-04-05 20:11:31|America/Boise|43.61002321355045|-116.19164580479264 90 | 6935317426|double |6541.2|1772|1794|11.0|Run|0|United Kingdom|0|24|1|1|3.691|6.37|87.1|16|146.8|162.0|17.0|2022-04-05 14:41:08|America/Boise|43.60080094076693|-116.19003639556468 91 | 6932260101|sprints |8157.9|2151|3419|68.0|Run|0|United Kingdom|0|31|0|4|3.793|8.98|87.1|19|142.7|162.0|18.0|2022-04-04 21:11:56|America/Boise|43.610081216320395|-116.19170296937227 92 | 6930810362|in the bleak midwinter|16122.5|4278|4464|25.0|Run|0|United Kingdom|2|37|0|2|3.769|5.956|87.4|13|151.9|167.0|61.0|2022-04-04 14:32:24|America/Boise|43.59937115572393|-116.19157615117729 93 | 6926910551|Something new |11992.8|3406|3524|30.0|Run|0|United Kingdom|0|47|0|6|3.521|5.26|86.9|17|155.8|170.0|65.0|2022-04-03 16:02:40|America/Boise|43.59960467554629|-116.1916845291853 94 | 6921462843|gps extra |1300.0|300|300|0|Run||United Kingdom|0|7|0|1|4.333|0||||||2022-04-02 08:30:00|Europe/London|| 95 | 6921374986|some bloody marvellous single track |23767.1|7195|7388|621.3|Run|2|United Kingdom|0|58|1|3|3.303|6.788|86.4|17|157.6|183.0|145.0|2022-04-02 15:15:31|America/Los_Angeles|37.425962844863534|-122.3075564391911 96 | 6918126397|Stanford invitational 5000m - 14:05|5000.0|845|845|0|Run|1|United Kingdom|0|98|3|1|5.917|0||||||2022-04-01 21:00:52|Europe/London|| 97 | 6918112031|The campus is disgustingly legit |8051.8|2655|3558|23.0|Run|0|United Kingdom|3|31|0|3|3.033|6.405|85.7|18|152.6|170.0|40.0|2022-04-02 03:07:17|America/Los_Angeles|37.4318325240165|-122.16169338673353 98 | 6917929437|little pick up |1281.8|304|304|0.0|Run|0|United Kingdom|0|8|0|1|4.216|5.84|88.1|21|153.1|163.0|5.0|2022-04-02 02:05:52|America/Los_Angeles|37.43198591284454|-122.16130555607378 99 | 6917927602|WU|3865.3|1128|1194|6.0|Run|0|United Kingdom|0|8|0|3|3.427|4.522|86.5|24|148.9|165.0|12.0|2022-04-02 01:39:21|America/Los_Angeles|37.4318980704993|-122.16156648471951 100 | 6916581204|Shake oot|3232.7|992|1028|13.0|Run|0|United Kingdom|0|15|0|5|3.259|4.15|86.3|21|144.1|167.0|7.0|2022-04-01 17:31:43|America/Los_Angeles|37.40501219406724|-122.11901164613664 101 | 6913200155|yes this is nice |8441.2|2270|3222|62.0|Run|0|United Kingdom|3|34|0|4|3.719|8.382|86.6|25|141.7|157.0|13.0|2022-03-31 20:22:29|America/Los_Angeles|37.360717775300145|-122.12439894676208 102 | 6911412196|little wake and shake |4281.0|1200|1261|4.0|Run|0|United Kingdom|0|21|0|1|3.567|5.195|86.6|17|150.8|165.0|16.0|2022-03-31 13:45:48|America/Boise|43.59913654625416|-116.19168327189982 103 | 6907069113|Shane’s |16103.1|4504|4622|182.0|Run|0|United Kingdom|0|32|0|2|3.575|4.588|86.7|15|154.8|171.0|79.0|2022-03-30 14:21:46|America/Boise|43.59913570806384|-116.19159718975425 104 | 6903848534|WD|3305.9|985|999|14.0|Run|0|United Kingdom|0|5|0|4|3.356|4.096|87.6|24|153.5|167.0|15.0|2022-03-29 21:37:27|America/Boise|43.61005414277315|-116.19175250642002 105 | 6903848092|premeet, but 🥩 |10384.6|2349|2366|12.0|Run|3|United Kingdom|0|37|0|10|4.421|7.862|88.4|24|148.7|164.0|27.0|2022-03-29 20:49:58|America/Boise|43.60962800681591|-116.19163314811885 106 | 6903779903|WU|5064.5|1334|1757|37.0|Run|0|United Kingdom|0|7|0|9|3.796|6.846|87.0|24|141.9|161.0|9.0|2022-03-29 20:11:10|America/Boise|43.61005732789636|-116.19165410287678 
107 | 6901767391|Bridge to bridge |6464.2|1756|1756|8.0|Run|0|United Kingdom|1|23|0|3|3.681|5.153|87.3|16|153.7|167.0|29.0|2022-03-29 14:32:10|America/Boise|43.599442820996046|-116.19149761274457 108 | 6898652142|Stanford week bb|12891.3|3297|5250|60.0|Run|0|United Kingdom|0|22|0|4|3.91|8.184|86.6|22|145.6|177.0|32.0|2022-03-28 20:39:24|America/Boise|43.60960847698152|-116.19151043705642 109 | 6891854063|lap of lake |12007.9|3123|3187|64.0|Run|0|United Kingdom|2|30|0|1|3.845|5.047|86.9|15|158.7|183.0|65.0|2022-03-27 13:57:05|America/New_York|42.31138251721859|-71.2402662076056 110 | 6886843545|Lake Waban is actually legit |26066.3|6271|6660|104.0|Run|2|United Kingdom|4|41|2|1|4.157|6.212|88.0|18|152.6|173.0|99.0|2022-03-26 15:31:58|America/New_York|42.31169038452208|-71.24003696255386 111 | 6881983405|Some good trail-age |7020.6|1807|1836|27.0|Run|0|United Kingdom|1|22|0|1|3.885|4.933|86.9|20|150.9|165.0|23.0|2022-03-25 20:02:36|America/New_York|42.31167621910572|-71.24031976796687 112 | 6880553958|WD|5027.2|1315|1341|36.0|Run|0|United Kingdom|2|12|0|1|3.823|5.626|87.1|17|151.2|159.0|17.0|2022-03-25 14:55:19|America/New_York|42.302356380969286|-71.28200297243893 113 | 6880553924|5mi prog, 3 x k 🏴‍☠️|12601.8|2520|2665|16.0|Run|3|United Kingdom|5|43|4|1|5.001|7.986|91.0|13|153.9|173.0|41.0|2022-03-25 14:06:05|America/New_York|42.302038790658116|-71.28334592096508 114 | 6880553817|WU|5269.6|1397|1739|57.0|Run|0|United Kingdom|0|9|0|1|3.772|6.853|86.9|18|138.2|154.0|6.0|2022-03-25 13:32:39|America/New_York|42.311519645154476|-71.24009873718023 115 | 6877105319|wet double |8454.0|2209|2340|61.0|Run|0|United Kingdom|0|25|0|1|3.827|5.228|86.7|13|146.7|162.0|26.0|2022-03-24 19:39:52|America/New_York|42.31160086579621|-71.24022563919425 116 | 6875391695|very moist, very unenjoyable |16298.2|4172|4306|65.0|Run|0|United Kingdom|1|35|2|1|3.907|5.69|86.9|17|153.4|178.0|67.0|2022-03-24 13:46:20|America/New_York|42.31154487468302|-71.24005808494985 117 | 6870425497|reunited |21013.7|5095|5318|63.0|Run|2|United Kingdom|4|41|3|1|4.124|6.138|87.7|13|155.0|168.0|92.0|2022-03-23 14:00:37|America/New_York|42.31154336594045|-71.2402128148824 118 | 6865125879|WD|6446.0|1888|2297|114.0|Run|0|United Kingdom|0|11|0|6|3.414|4.898|87.1|18|154.1|170.0|32.0|2022-03-22 16:24:46|America/Boise|43.60889081843197|-116.19173725135624 119 | 6864919086|20 x 200 🦊|8724.8|1789|1789|1.0|Run|3|United Kingdom|0|35|0|15|4.877|9.44|93.6|15|155.0|171.0|30.0|2022-03-22 15:52:17|America/Boise|43.609590539708734|-116.19154153391719 120 | 6864917619|WU|5309.8|1434|2412|40.0|Run|0|United Kingdom|0|8|1|8|3.703|6.764|87.9|19|146.1|158.0|14.0|2022-03-22 15:08:50|America/Boise|43.61010678112507|-116.1916581261903 121 | 6862147501|“How longs a piece of wood”|8051.8|2201|2263|14.0|Run|0|United Kingdom|0|29|0|2|3.658|4.896|86.4|18|148.2|168.0|24.0|2022-03-21 22:32:56|America/Boise|43.59930602833629|-116.19105504825711 122 | 6860080180|10 but this time from the track |16128.1|4173|5150|214.0|Run|0|United Kingdom|8|34|0|7|3.865|8.323|87.4|17|153.8|169.0|70.0|2022-03-21 15:10:58|America/Boise|43.6101829726249|-116.19186549447477 123 | 6855141186|Deceptively cold |9716.1|2700|2821|9.0|Run|0|United Kingdom|0|31|0|5|3.599|4.945|86.7|14|152.2|169.0|41.0|2022-03-20 16:01:33|America/Boise|43.59929731115699|-116.19160037487745 124 | 6849705004|SLR 🧑‍🚀|32237.0|7649|8075|68.0|Run|0|United Kingdom|14|73|6|3|4.215|5.886|87.6|14|156.5|180.0|149.0|2022-03-19 15:07:19|America/Boise|43.61005112528801|-116.1918706074357 125 | 6845175755|junk miles 
|5384.0|1393|1490|8.0|Run|0|United Kingdom|2|26|1|1|3.865|6.302|86.2|21|152.3|163.0|20.0|2022-03-18 22:20:54|America/Boise|43.59939822927117|-116.1917033046484 126 | 6843825709|WD|5605.8|1581|2543|19.0|Run|0|United Kingdom|0|7|1|1|3.546|4.583|86.8|20|136.5|151.0|6.0|2022-03-18 16:12:21|America/Boise|43.616153402253985|-116.15988979116082 127 | 6843824430|20 x 1 min hills 👬|13764.6|3263|3270|157.6|Run|3|United Kingdom|0|45|2|2|4.218|6.836|90.1|12|153.9|172.0|54.0|2022-03-18 15:12:28|America/Boise|43.61608760431409|-116.15995424799621 128 | 6843820993|WU|5496.5|1598|2009|97.0|Run|0|United Kingdom|0|8|0|2|3.44|6.053|87.1|19|132.2|151.0|6.0|2022-03-18 14:31:39|America/Boise|43.599329413846135|-116.19171713478863 129 | 6840403360|nice lil dub|8150.8|2140|2191|9.0|Run|0|United Kingdom|0|23|0|4|3.809|5.025|86.1|18|151.4|166.0|30.0|2022-03-17 21:10:11|America/Boise|43.610212309286|-116.19180397130549 130 | 6838882692|Classic loop but the wrong way round |14509.6|4035|4526|188.0|Run|0|United Kingdom|0|32|0|4|3.596|6.674|86.8|11|150.5|167.0|52.0|2022-03-17 14:31:40|America/Boise|43.59943653456867|-116.19166089221835 131 | 6833547175|fOoThIlLs OnLy|19342.9|5187|5544|28.0|Run|2|United Kingdom|3|45|0|4|3.729|7.81|87.4|10|156.3|173.0|102.0|2022-03-16 14:20:22|America/Boise|43.59920519404113|-116.19167941622436 132 | 6830228759|WD|3317.5|1007|1052|16.0|Run|0|United Kingdom|0|13|0|2|3.294|4.792|87.3|17|150.3|161.0|13.0|2022-03-15 21:52:35|America/Boise|43.610027907416224|-116.19177689775825 133 | 6830138034|4 x 2k, 1k |15244.7|2946|3010|10.0|Run|3|United Kingdom|0|51|0|10|5.175|7.78|90.6|17|152.6|174.0|45.0|2022-03-15 20:52:44|America/Boise|43.60957955941558|-116.19167271070182 134 | 6830135764|WU|5121.1|1337|2276|42.0|Run|0|United Kingdom|0|19|0|12|3.83|7.322|87.7|21|141.3|155.0|8.0|2022-03-15 20:11:09|America/Boise|43.61001483164728|-116.1917204875499 135 | 6828022159|wet wet |6506.8|1738|1738|7.0|Run|0|United Kingdom|0|26|1|2|3.744|6.164|87.8|20|127.2|148.0|6.0|2022-03-15 14:32:15|America/Boise|43.59950660727918|-116.19161009788513 136 | 6824919903|sprints |8269.2|2117|2974|61.0|Run|0|United Kingdom|0|30|0|6|3.906|9.266|86.8|20|149.4|167.0|26.0|2022-03-14 21:12:46|America/Boise|43.61000619828701|-116.19160230271518 137 | 6823122723|MMMM|16463.6|4533|5154|197.0|Run|0|United Kingdom|5|41|0|6|3.632|5.802|87.5|13|143.6|165.0|44.0|2022-03-14 14:19:48|America/Boise|43.59909706749022|-116.191366603598 138 | 6819431150|El Classico |9704.6|2627|3270|12.0|Run|0|United Kingdom|1|39|0|3|3.694|5.594|88.0|16|140.6|166.0|15.0|2022-03-13 17:01:52|America/Boise|43.59939068555832|-116.19162049144506 139 | 6813872052|Long and strong 🐛|30835.1|7400|7679|66.0|Run|2|United Kingdom|13|70|1|3|4.167|6.14|88.0|16|153.6|179.0|121.0|2022-03-12 16:05:58|America/Boise|43.61009521409869|-116.19190212339163 140 | 6809333399|WD|3832.2|1089|1089|19.0|Run|0|United Kingdom|0|11|0|6|3.519|4.739|86.4|16|156.2|169.0|21.0|2022-03-11 22:46:48|America/Boise|43.61613538116217|-116.15988409146667 141 | 6809333134|20 of the finest 1 min hill reps 🐺|13690.6|3244|3248|157.0|Run|3|United Kingdom|0|46|0|15|4.22|8.464|90.3|14|154.4|167.0|56.0|2022-03-11 21:46:46|America/Boise|43.61607737839222|-116.15990412421525 142 | 6809331849|WU|5130.1|1390|1812|100.0|Run|0|United Kingdom|1|13|0|10|3.691|6.987|86.9|18|148.2|162.0|16.0|2022-03-11 21:14:10|America/Boise|43.610012736171484|-116.19171218946576 143 | 6807828472|Hills later |6490.0|1707|1707|6.0|Run|0|United Kingdom|3|21|0|2|3.802|4.753|87.4|16|147.5|161.0|18.0|2022-03-11 
15:31:45|America/Boise|43.59936537221074|-116.19159978814423 144 | 6804841154|Double.|8223.6|2249|2269|11.0|Run|0|United Kingdom|0|32|0|6|3.657|5.514|86.5|15|149.7|167.0|27.0|2022-03-10 22:10:43|America/Boise|43.609998235479|-116.19158771820366 145 | 6803704449|10 boys out this am! wowza|14437.9|3945|4114|232.0|Run|0|United Kingdom|6|40|0|8|3.66|5.848|87.2|10|151.3|170.0|54.0|2022-03-10 15:32:49|America/Boise|43.599399318918586|-116.19165644980967 146 | 6798339115|Absolute grind |19550.4|5400|5651|239.4|Run|0|United Kingdom|3|45|0|2|3.62|5.262|86.9|14|148.8|170.0|68.0|2022-03-09 15:19:57|America/Boise|43.59915456734598|-116.19156450033188 147 | 6794535694|WD|2105.0|621|779|6.0|Run|0|United Kingdom|0|7|0|6|3.39|4.228|87.1|13|156.8|168.0|12.0|2022-03-08 22:32:31|America/Boise|43.60427565872669|-116.1929327622056 148 | 6794500646|10mi progression |16100.9|3193|3202|29.0|Run|3|United Kingdom|5|60|0|6|5.043|6.96|91.4|13|156.7|174.0|62.0|2022-03-08 21:36:36|America/Boise|43.6037537176162|-116.19224360212684 149 | 6794499178|WU|3231.9|876|1338|3.0|Run|0|United Kingdom|0|12|0|5|3.689|5.115|86.4|16|140.6|154.0|4.0|2022-03-08 21:10:20|America/Boise|43.6096681561321|-116.19177220389247 150 | 6793969993|Dub|6450.9|1784|1784|8.0|Run|0|United Kingdom|0|24|0|4|3.616|5.491|87.4|17|148.6|164.0|19.0|2022-03-08 15:31:44|America/Boise|43.599287336692214|-116.1911337543279 151 | 6789403644|Striders |7110.0|1790|2823|66.0|Run|0|United Kingdom|0|23|0|8|3.972|8.666|87.1|21|143.1|169.0|14.0|2022-03-07 22:08:54|America/Boise|43.61007400788367|-116.19165594689548 152 | 6787789187|The council was 9 deep this am|16303.5|4415|4616|217.1|Run|0|United Kingdom|6|45|0|6|3.693|7.086|87.5|10|153.5|172.0|76.0|2022-03-07 15:33:26|America/Boise|43.59957743436098|-116.19170757941902 153 | 6784214923|debrief |11370.5|2887|3129|12.0|Run|0|United Kingdom|0|50|0|3|3.939|7.472|87.9|17|147.9|175.0|36.0|2022-03-06 18:33:00|America/Boise|43.59930803999305|-116.1916390992701 154 | 6778103876|big boise loop|29531.6|7139|7538|66.0|Run|2|United Kingdom|12|53|2|3|4.137|6.098|88.1|11|157.4|175.0|148.0|2022-03-05 16:17:51|America/Boise|43.6101689748466|-116.19183951057494 155 | 6773518758|WD|3792.7|1073|1073|18.0|Run|0|United Kingdom|0|7|0|10|3.535|4.324|86.7|15|152.6|162.0|16.0|2022-03-04 22:44:06|America/Boise|43.61607410944998|-116.15983723662794 156 | 6773518231|20 x 1 min hills 🦁|13005.5|3084|3117|147.2|Run|3|United Kingdom|0|44|0|14|4.217|7.682|89.4|13|155.8|168.0|59.0|2022-03-04 21:45:33|America/Boise|43.61603572033346|-116.15986338816583 157 | 6773516431|WU|4922.1|1343|1431|95.0|Run|0|United Kingdom|0|9|0|13|3.665|4.952|87.2|20|151.9|172.0|20.0|2022-03-04 21:12:51|America/Boise|43.6100653745234|-116.19176692329347 158 | 6771989782|Dub|6446.1|1781|1781|9.0|Run|0|United Kingdom|2|23|0|4|3.619|4.964|86.9|16|154.7|164.0|31.0|2022-03-04 15:33:35|America/Boise|43.59939822927117|-116.19167773984373 159 | 6769329173|A lovely little Micheal Doublé|8237.8|2118|2164|9.0|Run|0|United Kingdom|2|20|0|3|3.889|5.078|86.7|19|152.4|167.0|30.0|2022-03-03 22:10:54|America/Boise|43.61007710918784|-116.19179466739297 160 | 6767362813|Switchbacks first is the way to go |14381.4|3917|4055|202.0|Run|0|United Kingdom|3|37|0|3|3.672|5.78|87.0|14|156.9|177.0|78.0|2022-03-03 15:31:11|America/Boise|43.59931893646717|-116.19168335571885 161 | 6764316662|big boi season|18302.4|4949|5291|196.0|Run|0|United Kingdom|1|32|0|5|3.698|5.438|87.2|15|153.3|169.0|85.0|2022-03-02 15:19:10|America/Boise|43.599248360842466|-116.19162476621568 162 | 6759236430|Some dece hillage 
|13756.7|3600|3719|237.1|Run|0|United Kingdom|6|35|0|5|3.821|4.926|86.8|19|158.2|190.0|75.0|2022-03-01 20:45:28|America/Boise|43.61001290380955|-116.19169911369681 163 | 6757421857|Latest double in a while |7635.4|1873|2054|11.0|Run|0|United Kingdom|2|25|0|1|4.077|5.304|87.2|14|151.3|166.0|25.0|2022-03-01 17:18:37|America/Boise|43.59935129061341|-116.19103032164276 164 | 6753637557|Sprints |7035.7|1808|2704|25.0|Run|0|United Kingdom|1|23|0|6|3.891|8.432|87.2|20|140.0|167.0|14.0|2022-02-28 22:08:57|America/Boise|43.610093453899026|-116.1918690148741 165 | 6752165641|The council |13503.5|3757|4140|181.0|Run|0|United Kingdom|0|31|0|4|3.594|5.626|86.9|18|153.3|169.0|60.0|2022-02-28 16:01:21|America/Boise|43.599324468523264|-116.19154999963939 166 | 6744535172|Afternoon Run|3226.9|1005|1005|13.0|Run||United Kingdom|0|15|0|5|3.211|4.127|86.3|20|154.0|170.0|16.0|2022-02-26 21:16:30|America/Denver|35.087259812280536|-106.64875105954707 167 | 6743070301|Conference 3000|3000.0|510|510|0|Run|1|United Kingdom|0|47|0|1|5.882|0||||||2022-02-26 14:00:00|Europe/London|| 168 | 6742716343|WU|3266.0|928|930|9.0|Run|0|United Kingdom|0|8|0|1|3.519|5.174|86.3|22|141.6|154.0|5.0|2022-02-26 19:55:26|America/Denver|35.0870974548161|-106.64886245504022 169 | 6741274892|Shake oot |2606.3|722|722|19.0|Run|0|United Kingdom|0|9|0|3|3.61|5.018|87.2|21|131.2|139.0|2.0|2022-02-26 14:59:16|America/Denver|35.10462988168001|-106.5674932859838 170 | 6738209767|Conference 5000m - 10th|5000.0|879|879|0|Run|1|United Kingdom|0|61|1|1|5.688|0||||||2022-02-25 17:45:10|Europe/London|| 171 | 6738206130|WD|3186.9|1018|1159|4.0|Run|0|United Kingdom|0|8|1|6|3.131|5.757|86.3|19|149.5|166.0|12.0|2022-02-26 00:39:15|America/Denver|35.08724447339773|-106.64879875257611 172 | 6737955047|WU|3320.3|912|1162|3.0|Run|0|United Kingdom|0|11|0|5|3.641|5.572|87.2|23|148.7|161.0|10.0|2022-02-25 23:08:13|America/Denver|35.08696376346052|-106.64886010810733 173 | 6736745922|Shake oot|2734.6|747|747|25.0|Run|0|United Kingdom|0|32|0|5|3.661|4.338|87.9|21|143.4|157.0|5.0|2022-02-25 17:05:03|America/Denver|35.10455260053277|-106.56754952855408 174 | 6733573282|Pre meet|7729.0|2029|3713|5.0|Run|0|United Kingdom|0|20|0|8|3.809|7.845|87.1|22|147.5|166.0|21.0|2022-02-24 21:52:30|America/Denver|35.086323134601116|-106.64808738045394 175 | 6727077050|Shrinkage |10785.9|2910|2966|15.0|Run|0|United Kingdom|0|29|1|5|3.706|5.098|88.0|9|149.9|170.0|40.0|2022-02-23 15:33:56|America/Boise|43.599611883983016|-116.19162652641535 176 | 6723595094|WD|2386.1|655|655|29.0|Run|0|United Kingdom|0|9|0|7|3.643|4.344|87.1|6|149.6|164.0|7.0|2022-02-22 22:16:21|America/Boise|43.60957880504429|-116.1917500756681 177 | 6723558539|Premeet |6105.5|1177|1233|6.0|Run|3|United Kingdom|0|32|0|6|5.187|7.903|91.1|7|153.5|171.0|18.0|2022-02-22 21:49:23|America/Boise|43.6095854267478|-116.19167849421501 178 | 6723557799|WU|5122.0|1341|2113|38.0|Run|0|United Kingdom|0|13|0|7|3.82|7.493|87.3|16|140.7|153.0|8.0|2022-02-22 21:10:48|America/Boise|43.609968312084675|-116.19169944897294 179 | 6721644707|Morning Run|6447.6|1749|1814|6.0|Run||United Kingdom|0|23|0|3|3.686|5.423|87.9|11|150.2|166.0|25.0|2022-02-22 15:32:33|America/Boise|43.599335700273514|-116.19163365103304 180 | 6718542497|Sprints |6668.6|1686|2841|53.0|Run|0|United Kingdom|0|20|0|9|3.955|8.984|88.4|13|137.0|159.0|10.0|2022-02-21 22:09:09|America/Boise|43.61011457629502|-116.1917152069509 181 | 6716666732|Conference week baby |13116.3|3513|3892|17.0|Run|0|United Kingdom|2|35|0|4|3.734|5.292|86.7|19|126.2|144.0|12.0|2022-02-21 
15:32:44|America/Boise|43.59920100308955|-116.19158185087144 182 | 6712740175|Some dirt to round out the week|8058.1|2198|2249|12.0|Run|0|United Kingdom|0|20|0|4|3.666|6.429|87.2|15|151.2|163.0|31.0|2022-02-20 16:32:37|America/Boise|43.59933662228286|-116.19167363271117 183 | 6707559658|Table rock 🪨|18247.4|5420|5731|366.1|Run|2|United Kingdom|0|45|1|9|3.367|5.313||||||2022-02-19 16:08:10|America/Boise|43.610042|-116.191767 184 | 6703495509|WD|5325.6|1477|1487|12.0|Run|0|United Kingdom|0|15|0|6|3.606|4.582|87.0|20|147.5|157.0|13.0|2022-02-18 22:17:40|America/Boise|43.61018842086196|-116.19184730574489 185 | 6703308226|2 x 5 x 300m [60s] 🚁|4946.1|1259|1284|10.0|Run|3|United Kingdom|0|32|0|12|3.929|8.023|86.0|19|137.4|153.0|5.0|2022-02-18 21:47:37|America/Boise|43.60954402014613|-116.19160003960133 186 | 6703212767|WU|5166.5|1388|2107|39.0|Run|0|United Kingdom|0|15|2|13|3.722|7.732|86.5|20|141.6|156.0|8.0|2022-02-18 21:09:55|America/Boise|43.610213566571474|-116.19191377423704 187 | 6701940241|The perfect 4 mile loop|6473.4|1722|1722|8.0|Run|0|United Kingdom|3|23|0|2|3.759|5.014|86.8|18|130.6|140.0|6.0|2022-02-18 15:32:01|America/Boise|43.59927233308554|-116.191564835608 188 | 6699116424|A little Micheal Doublé|6444.9|1728|1787|13.0|Run|0|United Kingdom|2|28|0|9|3.73|5.332|86.7|17|149.6|172.0|22.0|2022-02-17 22:10:34|America/Boise|43.610217589884996|-116.1917685996741 189 | 6697399238|5757|13129.2|3477|3728|24.0|Run|0|United Kingdom|0|24|0|5|3.776|4.898|87.1|14|134.6|153.0|14.0|2022-02-17 15:32:34|America/Boise|43.59923302195966|-116.19158126413822 190 | 6692595896|Hump day |16567.2|4593|4795|24.0|Run|0|United Kingdom|0|39|0|3|3.607|5.73|86.8|11|147.7|162.0|46.0|2022-02-16 15:31:29|America/Boise|43.59918926842511|-116.19159249588847 191 | 6689166839|WD|2096.4|601|601|0.0|Run|0|United Kingdom|0|10|0|4|3.488|4.48|86.4|14|145.4|155.0|4.0|2022-02-15 22:37:05|America/Boise|43.60959330573678|-116.19188292883337 192 | 6689159161|Fartlek 🦞|10749.8|2323|2589|35.0|Run|3|United Kingdom|3|43|0|5|4.628|8.438|90.5|12|153.1|168.0|38.0|2022-02-15 21:45:58|America/Boise|43.60379671677947|-116.19244669564068 193 | 6689044753|WU|4539.2|1221|2006|9.0|Run|0|United Kingdom|0|11|0|3|3.718|7.418|87.2|20|149.7|165.0|15.0|2022-02-15 21:10:47|America/Boise|43.61010493710637|-116.19187555275857 194 | 6687182155|Slick |6465.0|1741|1741|5.0|Run|0|United Kingdom|2|23|0|3|3.713|6.102|87.6|18|151.5|170.0|24.0|2022-02-15 15:32:53|America/Boise|43.59928716905415|-116.19160305708647 195 | 6684651132|Monday sprints |7015.0|1859|2888|49.0|Run|0|United Kingdom|1|23|0|6|3.774|8.234|87.3|20|142.6|166.0|16.0|2022-02-14 22:10:52|America/Boise|43.610177440568805|-116.1918676737696 196 | 6682581803|He’s back!|13156.0|3600|3923|17.0|Run|0|United Kingdom|2|41|0|4|3.654|5.994|87.0|19|133.0|149.0|14.0|2022-02-14 15:27:23|America/Boise|43.599325558170676|-116.19164329022169 197 | 6678860495|Good to be back in the footies|21656.8|6008|6426|345.2|Run|2|United Kingdom|6|50|1|6|3.605|5.745|87.3|17|130.9|169.0|26.0|2022-02-13 16:28:37|America/Boise|43.59944055788219|-116.1915893945843 198 | 6673316755|Bloody lovely spot for some jogging |14019.2|3845|4087|46.0|Run|0|United Kingdom|12|47|0|4|3.646|6.148|87.1|18|126.6|139.0|13.0|2022-02-12 18:08:50|America/Los_Angeles|47.661557011306286|-122.31443454511464 199 | 6672771562|Husky invite 5000|5000.0|860|860|0|Run|1|United Kingdom|0|57|3|1|5.814|0||||||2022-02-11 18:00:00|Europe/London|| 200 | 6669070859|WD|4035.5|1289|5674|20.0|Run|0|United 
Kingdom|0|15|0|2|3.131|9.94|85.8|16|149.7|171.0|16.0|2022-02-12 01:55:06|America/Los_Angeles|47.652390310540795|-122.3007254395634 201 | 6669070450|WU|4306.3|1183|1225|11.0|Run|0|United Kingdom|6|13|0|2|3.64|4.373|85.8|18|148.0|166.0|15.0|2022-02-12 01:20:03|America/Los_Angeles|47.652899427339435|-122.3002291470766 202 | 6669070235|Shake oot |2101.9|631|659|18.0|Run|0|United Kingdom|0|10|0|3|3.331|5.54|86.9|20|124.1|141.0|2.0|2022-02-11 18:02:50|America/Los_Angeles|47.66133799217641|-122.31390371918678 203 | 6663973088|Shake oot|3238.7|913|931|34.0|Run|0|United Kingdom|0|17|0|1|3.547|5.152|86.1|20|148.0|162.0|9.0|2022-02-11 00:20:05|America/Los_Angeles|47.66140622086823|-122.31380841694772 204 | 6661810862|Pre meet |7562.3|2162|3048|23.0|Run|0|United Kingdom|0|32|0|3|3.498|8.078|87.0|12|140.5|164.0|15.0|2022-02-10 15:47:01|America/Boise|43.5994519572705|-116.19161152280867 205 | 6656350990|Morning dirt |11274.0|3044|3155|16.0|Run|0|United Kingdom|0|26|0|3|3.704|6.122|87.2|15|133.9|155.0|15.0|2022-02-09 15:01:21|America/Boise|43.599386997520924|-116.19161487556994 206 | 6653010269|WD|3219.5|959|1143|8.0|Run|0|United Kingdom|0|9|0|6|3.357|4.062|87.4|14|148.0|158.0|10.0|2022-02-08 22:21:22|America/Boise|43.61010879278183|-116.19191888719797 207 | 6652987506|Premeet 😸|7274.0|1590|1601|12.0|Run|3|United Kingdom|0|35|0|12|4.575|8.588|90.6|15|143.0|167.0|10.0|2022-02-08 21:50:01|America/Boise|43.60958752222359|-116.19163943454623 208 | 6652986593|WU|5010.4|1319|2215|50.0|Run|0|United Kingdom|0|9|0|11|3.799|7.05|87.3|15|135.9|155.0|6.0|2022-02-08 21:11:05|America/Boise|43.610179955139756|-116.19186323136091 209 | 6650892190|bridge2bridge |6489.9|1722|1805|17.0|Run|0|United Kingdom|2|27|0|3|3.769|5.458|87.9|15|148.4|167.0|18.0|2022-02-08 15:34:15|America/Boise|43.59932287596166|-116.19100836105645 210 | 6647594278|Sprintos |6717.1|1724|2839|73.0|Run|0|United Kingdom|0|24|0|10|3.896|8.602|88.8|19|143.0|160.0|13.0|2022-02-07 22:08:40|America/Boise|43.61009621992707|-116.19184898212552 211 | 6647308263|Optional practical training |13526.3|3509|3622|27.0|Run|0|United Kingdom|0|25|0|1|3.855|6.424|86.9|17|130.8|142.0|13.0|2022-02-07 16:36:50|America/Boise|43.59921106137335|-116.191625604406 212 | 6641861419|It’s been a while |13513.7|3491|3764|33.0|Run|0|United Kingdom|2|51|0|4|3.871|5.344|87.5|13|152.7|166.0|59.0|2022-02-06 16:57:12|America/Boise|43.59945765696466|-116.19157279841602 213 | 6637075082|False alarm 🚨|13560.4|3727|4056|167.0|Run|0|United Kingdom|0|68|1|4|3.638|4.837|86.4|19|149.1|163.0|42.0|2022-02-05 19:41:07|America/Boise|43.59931642189622|-116.19157330133021 214 | 6620789399|Lovely stuff |16630.0|4418|4804|30.0|Run|0|United Kingdom|0|37|2|2|3.764|5.087|87.5|8|144.1|179.0|34.0|2022-02-02 15:14:20|America/Boise|43.599331425502896|-116.19160414673388 215 | 6617407952|WD|3007.0|810|810|35.0|Run|0|United Kingdom|0|8|0|3|3.712|4.546|87.6|7|152.8|167.0|12.0|2022-02-01 22:40:48|America/Boise|43.60962901264429|-116.19181687943637 216 | 6617407584|6 x K, 2 x 400 🛸|9805.8|2206|2323|17.0|Run|3|United Kingdom|1|46|2|6|4.445|8.408|91.0|10|150.8|166.0|27.0|2022-02-01 21:56:17|America/Boise|43.60958743840456|-116.19163792580366 217 | 6617406332|WU|5108.7|1353|2370|49.0|Run|0|United Kingdom|0|12|0|11|3.776|7.558|87.5|16|142.7|158.0|9.0|2022-02-01 21:13:57|America/Boise|43.610164197161794|-116.19187630712986 218 | 6615267091|Morning Run|6503.6|1737|1753|9.0|Run||United Kingdom|3|20|1|3|3.744|5.894|87.6|11|154.5|167.0|30.0|2022-02-01 15:31:07|America/Boise|43.599367970600724|-116.19159894995391 219 | 
6612695515|Sprints |6781.4|1813|2792|69.0|Run|0|United Kingdom|0|31|0|7|3.74|8.532|86.8|20|132.1|164.0|7.0|2022-01-31 22:08:53|America/Boise|43.610064033418894|-116.1917862854898 220 | 6610386459|8mile|13139.9|3523|3658|18.0|Run|0|United Kingdom|3|42|0|5|3.73|4.756|87.4|7|159.6|195.0|79.0|2022-01-31 15:33:19|America/Boise|43.59925255179405|-116.19163499213755 221 | 6606532964|Weekend debrief |22301.2|5786|6052|41.0|Run|2|United Kingdom|2|55|1|7|3.854|6.09|88.2|7|133.4|153.0|24.0|2022-01-30 16:02:31|America/Boise|43.599188178777695|-116.19155611842871 222 | 6601180340|UW Invite 3000 - 8:13|3000.0|493|493|0|Run|1|United Kingdom|0|87|2|1|6.085|0||||||2022-01-29 11:47:06|Europe/London|| 223 | 6601190230|WD|8049.2|1979|3612|21.0|Run|0|United Kingdom|0|20|0|5|4.067|6.198|89.3|15|148.6|168.0|23.0|2022-01-29 18:52:59|America/Los_Angeles|47.65257672406733|-122.30015002191067 224 | 6601164700|WU|4408.6|1169|2459|5.0|Run|0|United Kingdom|0|8|0|3|3.771|5.612|87.2|18|150.8|167.0|16.0|2022-01-29 17:24:55|America/Los_Angeles|47.65226734802127|-122.29977074079216 225 | 6601163799|Shake oot|2062.1|604|607|21.0|Run|0|United Kingdom|0|10|0|1|3.414|9.006|87.0|19|153.4|166.0|9.0|2022-01-29 14:31:31|America/Los_Angeles|47.662016758695245|-122.31446648016572 226 | 6595778007|Premeet |8243.4|2322|3880|85.0|Run|0|United Kingdom|15|34|1|4|3.55|7.806|86.8|18|136.6|155.0|11.0|2022-01-28 18:03:24|America/Los_Angeles|47.6613022852689|-122.31407127343118 227 | 6592367479|per diem 🤑|5848.4|1669|1776|66.0|Run|0|United Kingdom|2|29|1|4|3.504|5.473|86.4|18|135.3|148.0|6.0|2022-01-28 00:11:13|America/Los_Angeles|47.661504708230495|-122.31416238471866 228 | 6590204949|Turf laps |11181.9|3007|3393|27.0|Run|0|United Kingdom|0|39|0|4|3.719|5.521|87.3|9|150.3|163.0|38.0|2022-01-27 15:32:39|America/Boise|43.59943754039705|-116.19160238653421 229 | 6589860219|A classic Wednesday 10|16416.7|4449|4865|39.0|Run|0|United Kingdom|0|30|0|3|3.69|7.637|88.1|3|154.9|181.0|82.0|2022-01-26 15:14:02|America/Boise|43.59947333112359|-116.19166952557862 230 | 6581824842|WD|2403.4|662|662|41.0|Run|0|United Kingdom|0|10|0|3|3.63|4.616|87.3|7|156.5|165.0|13.0|2022-01-25 22:21:33|America/Boise|43.609556341543794|-116.19183716364205 231 | 6581824472|Standard premeet seshwoin|6956.3|1365|1588|10.0|Run|3|United Kingdom|0|48|1|10|5.096|7.826|90.2|9|147.6|162.0|14.0|2022-01-25 21:49:24|America/Boise|43.609586264938116|-116.19162476621568 232 | 6581823569|WU|5001.6|1335|2331|43.0|Run|0|United Kingdom|0|14|0|10|3.746|6.836|87.6|17|146.9|163.0|14.0|2022-01-25 21:09:42|America/Boise|43.61017291434109|-116.19187236763537 233 | 6579649037|Golf loopage |6677.4|1778|1801|15.0|Run|0|United Kingdom|0|27|0|2|3.756|5.222|88.0|8|152.5|165.0|28.0|2022-01-25 15:02:23|America/Boise|43.59928876161575|-116.19158998131752 234 | 6577130025|Turf and sprints |7426.8|2101|2982|79.0|Run|0|United Kingdom|0|21|0|5|3.535|8.746|83.3|16|127.6|143.0|7.0|2022-01-24 22:06:58|America/Boise|43.609512923285365|-116.19181419722736 235 | 6574643384|Race week 🤑|13534.1|3600|3732|20.0|Run|0|United Kingdom|3|41|0|2|3.759|5.76|87.6|18|127.1|146.0|13.0|2022-01-24 15:00:49|America/Boise|43.59935657121241|-116.19165921583772 236 | 6570757611|Good stuff|10318.2|2726|2852|19.0|Run|0|United Kingdom|0|45|0|5|3.785|5.818|88.1|16|153.8|168.0|46.0|2022-01-23 17:01:58|America/Boise|43.59950099140406|-116.19161537848413 237 | 6565080204|A very pleasant loops with some very pleasant boys |20149.3|5405|5557|81.0|Run|2|United Kingdom|0|54|0|8|3.728|5.454||16||||2022-01-22 
16:08:28|America/Boise|43.61015799455345|-116.19181151501834 238 | 6560360803|Afternoon Run|2699.6|766|766|39.0|Run||United Kingdom|0|8|0|3|3.524|4.528|87.2|11|140.0|150.0|3.0|2022-01-21 22:33:22|America/Boise|43.609511414542794|-116.19165234267712 239 | 6560360373|Winter speed 🤗|7893.8|1775|2058|28.0|Run|3|United Kingdom|0|40|4|6|4.447|9.16|89.4|11|143.1|156.0|11.0|2022-01-21 21:50:02|America/Boise|43.60959556885064|-116.19167069904506 240 | 6560359096|Wu|4095.8|1169|1425|6.0|Run|0|United Kingdom|0|9|0|10|3.504|6.31|86.7|18|130.7|145.0|4.0|2022-01-21 21:14:36|America/Boise|43.610064117237926|-116.19182417169213 241 | 6558846633|icy and spicy |6454.5|1839|1967|4.0|Run|0|United Kingdom|0|23|0|4|3.51|4.953|87.3|12|127.6|142.0|6.0|2022-01-21 15:32:43|America/Boise|43.59933813102543|-116.19101498275995 242 | 6555828716|Snowy turf and track laps with Milarvie |6535.0|1684|1684|12.0|Run|0|United Kingdom|0|37|0|6|3.881|4.796|87.1|17|152.0|168.0|24.0|2022-01-20 22:05:47|America/Boise|43.609468499198556|-116.19182651862502 243 | 6555249925|Not 8 or 10 miles, wow|10112.5|2768|2827|20.0|Run|0|United Kingdom|0|34|0|3|3.653|5.237|87.0|7|156.7|168.0|55.0|2022-01-20 15:32:55|America/Boise|43.599617164582014|-116.19157740846276 244 | 6549109618|Mid week hump|16711.1|4500|4611|37.0|Run|0|United Kingdom|2|28|0|3|3.714|5.398|87.3|18|131.1|159.0|18.0|2022-01-19 15:02:35|America/Boise|43.599247774109244|-116.19162082672119 245 | 6545807012|WD|2323.5|685|690|10.0|Run|0|United Kingdom|0|11|0|9|3.392|4.063|87.5|11|144.9|153.0|4.0|2022-01-18 22:49:08|America/Boise|43.6096425075084|-116.19186390191317 246 | 6545806591|Daaaaamage|13919.8|2636|2896|23.0|Run|3|United Kingdom|2|61|2|5|5.281|7.482|92.4|10|141.2|158.0|15.0|2022-01-18 21:49:54|America/Boise|43.60374349169433|-116.19234384968877 247 | 6545804793|WU|4521.1|1236|1721|7.0|Run|0|United Kingdom|0|7|0|8|3.658|4.527|86.9|16|119.3|134.0|3.0|2022-01-18 21:11:36|America/Boise|43.610068056732416|-116.19186474010348 248 | 6543486900|Morning Run|6681.8|1881|1881|12.0|Run||United Kingdom|0|27|0|3|3.552|4.267|87.7|10|125.5|154.0|7.0|2022-01-18 15:02:22|America/Boise|43.5993258934468|-116.19097206741571 249 | 6540835756|mileage, sprints, and rugby|6666.7|1924|3906|25.0|Run|0|United Kingdom|0|29|0|3|3.465|9.937|87.8|20|129.3|152.0|7.0|2022-01-17 23:42:43|America/Boise|43.599526304751635|-116.19166675955057 250 | 6538562347|monday morning milage |16102.6|4082|4277|28.0|Run|0|United Kingdom|4|39|0|4|3.945|5.312|88.0|14|135.9|152.0|17.0|2022-01-17 15:34:42|America/Boise|43.59933402389288|-116.19161487556994 251 | 6534340850|Morning Run|21055.0|5528|5827|51.0|Run||United Kingdom|4|53|1|5|3.809|6.464|87.6|11|142.4|169.0|43.0|2022-01-16 15:31:36|America/Boise|43.59907837584615|-116.19141538627446 252 | 6529936325|WD|8591.5|2108|2151|14.0|Run|0|United Kingdom|1|17|0|3|4.076|6.842|88.5|14|138.9|161.0|13.0|2022-01-15 23:35:29|America/Los_Angeles|47.652371786534786|-122.29983410798013 253 | 6530038801|UW preview - 3000m|3000.0|499|499|0|Run|1|United Kingdom|0|73|2|1|6.012|0||||||2022-01-15 15:00:56|Europe/London|| 254 | 6529935604|WU|4939.1|1324|2602|7.0|Run|0|United Kingdom|0|12|1|3|3.73|7.194|86.1|19|131.1|151.0|5.0|2022-01-15 22:05:45|America/Los_Angeles|47.65243548899889|-122.30012563057244 255 | 6528173205|Morning Run|2216.0|739|751|23.0|Run||United Kingdom|0|11|0|3|2.999|4.49|86.8|22|124.6|141.0|2.0|2022-01-15 16:04:34|America/Los_Angeles|47.66132265329361|-122.31436539441347 256 | 6524510569|This campus is legit |4145.1|1220|1523|60.0|Run|0|United 
Kingdom|1|29|1|4|3.398|5.016|85.8|21|122.1|145.0|4.0|2022-01-15 00:08:00|America/Los_Angeles|47.661415692418814|-122.31437302194536 257 | 6522920941|Premeet |8008.9|2086|3035|11.0|Run|0|United Kingdom|0|31|0|5|3.839|7.77|87.7|19|128.1|157.0|8.0|2022-01-14 16:01:26|America/Boise|43.59930778853595|-116.19168536737561 258 | 6519775914|Doublé|6537.6|1801|1813|8.0|Run|0|United Kingdom|1|24|1|9|3.63|4.434|86.6|19|150.3|166.0|25.0|2022-01-13 22:13:13|America/Boise|43.61004299484193|-116.19175099767745 259 | 6517626574|Good stuff |12867.5|3513|3819|23.0|Run|0|United Kingdom|0|26|0|3|3.663|7.428|87.1|15|134.3|158.0|14.0|2022-01-13 15:14:10|America/Boise|43.59950275160372|-116.19150205515325 260 | 6512447961|Have a great day champions |16315.3|4410|4461|24.0|Run|0|United Kingdom|1|42|0|2|3.7|5.346|87.8|4|155.3|170.0|81.0|2022-01-12 15:03:20|America/Boise|43.59931013546884|-116.19163859635592 261 | 6509631159|WD|3197.9|962|1026|22.0|Run|0|United Kingdom|0|8|0|5|3.324|4.366|87.9|17|146.8|161.0|8.0|2022-01-11 22:38:42|America/Boise|43.610023045912385|-116.19175803847611 262 | 6509630736|20 x 200 on off 😎|8674.6|1805|1805|18.0|Run|3|United Kingdom|0|44|0|6|4.806|9.094|92.8|12|131.5|157.0|8.0|2022-01-11 21:54:46|America/Boise|43.60958852805197|-116.19163708761334 263 | 6509629759|WU|5016.4|1353|2432|44.0|Run|0|United Kingdom|0|13|0|5|3.708|6.992|87.3|18|135.7|152.0|6.0|2022-01-11 21:09:04|America/Boise|43.61016796901822|-116.19178310036659 264 | 6507006406|A nice little golf course double |7279.0|1998|2098|13.0|Run|0|United Kingdom|1|25|0|1|3.643|5.422|87.9|18|126.5|137.0|7.0|2022-01-11 15:16:10|America/Boise|43.599358247593045|-116.1915928311646 265 | 6504220471|Sprinteroonies |6542.7|1824|2737|13.0|Run|0|United Kingdom|0|29|0|3|3.587|7.852|87.1|18|130.3|152.0|7.0|2022-01-10 23:31:22|America/Boise|43.59929898753762|-116.19172660633922 266 | 6501991083|Mileage with the Leadville house |13122.3|3653|4330|29.0|Run|0|United Kingdom|4|43|0|4|3.592|5.158|87.3|10|148.7|164.0|45.0|2022-01-10 15:01:46|America/Boise|43.59935321845114|-116.19160808622837 267 | 6498364308|Back in boi |12952.2|3654|3935|22.0|Run|0|United Kingdom|0|41|2|3|3.545|5.087|87.0|10|148.4|165.0|45.0|2022-01-09 17:32:27|America/Boise|43.599203852936625|-116.19161252863705 268 | 6491469170|Still a lot of snow |23081.2|5400|5528|0|Run|2|United Kingdom|0|33|1|1|4.274|5.262|85.5|22|145.6|168.0|48.0|2022-01-08 13:32:57|Europe/London|| 269 | 6486598744|Snow day |20100.0|4502|4502|0|Run|3|United Kingdom|0|41|0|1|4.465|0||||||2022-01-07 08:30:27|Europe/London|| 270 | 6483599953|Coffee solves jet lag |6816.7|1711|1768|43.0|Run|0|United Kingdom|2|32|0|1|3.984|6.24|86.8|15|151.3|168.0|23.0|2022-01-06 21:09:52|America/New_York|42.311413530260324|-71.24006076715887 271 | 6481660525|omg lagging so bad, 100 ping |18259.4|4432|4514|94.0|Run|0|United Kingdom|1|50|0|1|4.12|6.53|88.3|12|148.5|165.0|45.0|2022-01-06 13:21:14|America/New_York|42.31138067319989|-71.24019336886704 272 | 6472680464|Shipping off to Boston |6473.8|1570|1762|30.0|Run|0|United Kingdom|1|37|1|1|4.123|5.566|87.9|16|140.1|153.0|8.0|2022-01-04 18:11:35|Europe/London|50.844204956665635|-0.3920463379472494 273 | 6470459370|WD|4897.0|1289|1611|24.0|Run|0|United Kingdom|0|18|0|1|3.799|7.064|88.5|18|147.1|164.0|11.0|2022-01-04 10:40:31|Europe/London|50.84778092801571|-0.3912328742444515 274 | 6470456719|Brisky Morning Munchy - Jar Jar 🦦|13482.0|2702|2702|39.0|Run|3|United Kingdom|1|61|1|2|4.99|6.894|91.4|15|166.9|183.0|78.0|2022-01-04 09:53:42|Europe/London|50.84899513050914|-0.3913896158337593 
275 | 6470375268|WU|4037.9|1092|1527|37.0|Run|0|United Kingdom|0|14|0|2|3.698|4.408|86.8|19|137.3|150.0|4.0|2022-01-04 09:26:59|Europe/London|50.844578789547086|-0.39208196103572845 276 | 6467167671|Another brick in the wall|6478.2|1636|1641|30.0|Run|0|United Kingdom|1|33|0|1|3.96|5.038|87.7|18|140.2|153.0|7.0|2022-01-03 16:34:23|Europe/London|50.84454333409667|-0.39218371734023094 277 | 6465396864|Now THIS is podracing |16281.2|4071|4589|75.0|Run|0|United Kingdom|2|35|1|1|3.999|7.173|87.6|21|140.3|161.0|23.0|2022-01-03 10:01:03|Europe/London|50.844507459551096|-0.3921133931726217 278 | 6459162151|Sunday morning meadows mileage |8076.8|2234|2348|16.0|Run|0|United Kingdom|2|30|0|1|3.615|6.746|87.0|22|134.6|146.0|9.0|2022-01-02 08:16:58|Europe/London|55.934689482674|-3.167901998385787 279 | 6456103708|Chewy bodd |24166.6|6635|7406|133.0|Run|2|United Kingdom|8|57|0|2|3.642|7.646|86.6|17|144.9|170.0|57.0|2022-01-01 12:47:57|Europe/London|55.93467405997217|-3.1681371107697487 280 | --------------------------------------------------------------------------------