├── src └── fitly │ ├── __version__.py │ ├── prod_settings.py │ ├── assets │ ├── favicon.ico │ ├── images │ │ ├── hrv0.png │ │ ├── hrv1.png │ │ ├── hrv21.png │ │ ├── hrv22.png │ │ ├── hrv23.png │ │ ├── hrv3.png │ │ ├── hrv4.png │ │ ├── hrv5.png │ │ └── hrv6.png │ ├── font-awesome │ │ └── webfonts │ │ │ ├── fa-brands-400.eot │ │ │ ├── fa-brands-400.ttf │ │ │ ├── fa-solid-900.eot │ │ │ ├── fa-solid-900.ttf │ │ │ ├── fa-solid-900.woff │ │ │ ├── fa-brands-400.woff │ │ │ ├── fa-brands-400.woff2 │ │ │ ├── fa-regular-400.eot │ │ │ ├── fa-regular-400.ttf │ │ │ ├── fa-regular-400.woff │ │ │ ├── fa-regular-400.woff2 │ │ │ └── fa-solid-900.woff2 │ └── fitly.css │ ├── exceptions.py │ ├── pages │ ├── __init__.py │ ├── music.py │ └── lifting.py │ ├── api │ ├── database.py │ ├── notifications.py │ ├── api_withings.py │ ├── fitbodAPI.py │ ├── stravaApi.py │ ├── strydAPI.py │ ├── datapull.py │ ├── ouraAPI.py │ ├── sqlalchemy_declarative.py │ └── spotifyAPI.py │ ├── wsgi.py │ ├── dev_cli.py │ ├── components.py │ ├── settings.py │ ├── layouts.py │ ├── index.py │ ├── app.py │ ├── __init__.py │ └── utils.py ├── .dockerignore ├── Dockerfile ├── requirements.txt ├── LICENSE ├── gunicorn_conf.py ├── .github └── workflows │ └── main.yml ├── config └── config.ini.example ├── .gitignore ├── setup.py ├── bin └── run-fitly-prod └── README.md /src/fitly/__version__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.1.0" 2 | -------------------------------------------------------------------------------- /src/fitly/prod_settings.py: -------------------------------------------------------------------------------- 1 | # let mod_wsgi do this instead 2 | COMPRESS = False 3 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | Dockerfile 3 | .dockerignore 4 | node_modules 5 | npm-debug.log -------------------------------------------------------------------------------- /src/fitly/assets/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/favicon.ico -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv0.png -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv1.png -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv21.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv21.png -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv22.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv22.png -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv23.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv23.png -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv3.png -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv4.png -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv5.png -------------------------------------------------------------------------------- /src/fitly/assets/images/hrv6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/images/hrv6.png -------------------------------------------------------------------------------- /src/fitly/exceptions.py: -------------------------------------------------------------------------------- 1 | class FitlyBaseException(Exception): 2 | pass 3 | 4 | 5 | class InvalidLayoutError(FitlyBaseException): 6 | pass 7 | -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-brands-400.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-brands-400.eot -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-brands-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-brands-400.ttf -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-solid-900.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-solid-900.eot -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-solid-900.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-solid-900.ttf -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-solid-900.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-solid-900.woff -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-brands-400.woff: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-brands-400.woff -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-brands-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-brands-400.woff2 -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-regular-400.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-regular-400.eot -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-regular-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-regular-400.ttf -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-regular-400.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-regular-400.woff -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-regular-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-regular-400.woff2 -------------------------------------------------------------------------------- /src/fitly/assets/font-awesome/webfonts/fa-solid-900.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ethanopp/fitly/HEAD/src/fitly/assets/font-awesome/webfonts/fa-solid-900.woff2 -------------------------------------------------------------------------------- /src/fitly/pages/__init__.py: -------------------------------------------------------------------------------- 1 | import dash_html_components as _html 2 | 3 | 4 | def page_not_found(pathname): 5 | return _html.P("No page '{}'".format(pathname)) 6 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM tiangolo/meinheld-gunicorn:python3.7 2 | LABEL maintainer="ethanopp" 3 | 4 | COPY . . 
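# Everything not excluded by .dockerignore (.git, node_modules, etc.) is copied into the image before dependencies are installed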
5 | 6 | RUN pip install -U pip && pip install -r ./requirements.txt 7 | 8 | ENV NGINX_WORKER_PROCESSES auto 9 | -------------------------------------------------------------------------------- /src/fitly/api/database.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import create_engine 2 | from sqlalchemy.ext.declarative import declarative_base 3 | from sqlalchemy.orm import sessionmaker 4 | 5 | SQLALCHEMY_DATABASE_URL = 'sqlite:///./config/fitness.db' 6 | 7 | engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}) 8 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) 9 | 10 | Base = declarative_base() 11 | -------------------------------------------------------------------------------- /src/fitly/wsgi.py: -------------------------------------------------------------------------------- 1 | from .app import server as application 2 | from .app import app 3 | 4 | 5 | # When using a WSGI server or running with Flask, the dev tools need to be 6 | # manually enabled. By default debug mode is off. To enable dev mode, set the 7 | # environment variable `DASH_DEBUG` to `true`. You can also turn individual dev 8 | # tools features on using this method. See https://dash.plot.ly/devtools 9 | app.enable_dev_tools(debug=False) 10 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | APScheduler==3.9.1 2 | click==8.0.4 3 | configparser==5.2.0 4 | dash==1.19.0 5 | dash-bootstrap-components==0.11.3 6 | dash-core-components==1.15.0 7 | dash-daq==0.5.0 8 | dash-html-components==1.1.2 9 | dash-renderer==1.9.0 10 | dash-table==4.11.2 11 | numpy==1.21.5 12 | oura==1.1.4 13 | pandas==1.3.5 14 | PyMySQL==1.0.2 15 | pyocclient==0.6 16 | scikit-learn==1.0.2 17 | SQLAlchemy==1.4.32 18 | stravalib==0.10.4 19 | sweat==0.4.0 20 | tekore==4.3.0 21 | withings-api==2.4.0 22 | greenlet<0.5,>=0.4.5 -------------------------------------------------------------------------------- /src/fitly/dev_cli.py: -------------------------------------------------------------------------------- 1 | """Click command line script for running the development webserver.""" 2 | 3 | import click 4 | 5 | from .app import app 6 | 7 | 8 | @click.command() 9 | @click.option( 10 | "-p", 11 | "--port", 12 | default=8050, 13 | metavar="PORT", 14 | type=int, 15 | help="Port to run the development webserver on. Defaults to 8050.", 16 | ) 17 | @click.option( 18 | "-h", 19 | "--host", 20 | default="127.0.0.1", 21 | metavar="HOST", 22 | help=( 23 | "The hostname to listen on. Set this to '0.0.0.0' to have the server " 24 | "available externally as well. Defaults to '127.0.0.1'." 25 | ), 26 | ) 27 | @click.option( 28 | "--debug/--no-debug", 29 | default=True, 30 | help="Toggles whether the Dash app is run in debug mode. 
Defaults to True", 31 | ) 32 | def main(port, host, debug): 33 | app.run_server(port=port, debug=debug, host=host) 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020, Ethan Oppenheim 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /gunicorn_conf.py: -------------------------------------------------------------------------------- 1 | import json 2 | import multiprocessing 3 | import os 4 | 5 | workers_per_core_str = os.getenv("WORKERS_PER_CORE", "2") 6 | web_concurrency_str = os.getenv("WEB_CONCURRENCY", None) 7 | host = os.getenv("HOST", "0.0.0.0") 8 | port = os.getenv("PORT", "80") 9 | bind_env = os.getenv("BIND", None) 10 | use_loglevel = os.getenv("LOG_LEVEL", "info") 11 | if bind_env: 12 | use_bind = bind_env 13 | else: 14 | use_bind = f"{host}:{port}" 15 | 16 | cores = multiprocessing.cpu_count() 17 | workers_per_core = float(workers_per_core_str) 18 | default_web_concurrency = workers_per_core * cores 19 | if web_concurrency_str: 20 | web_concurrency = int(web_concurrency_str) 21 | assert web_concurrency > 0 22 | else: 23 | web_concurrency = int(default_web_concurrency) 24 | 25 | # Gunicorn config variables 26 | loglevel = use_loglevel 27 | workers = web_concurrency 28 | bind = use_bind 29 | keepalive = 120 30 | errorlog = "-" 31 | 32 | # Preload the app so scheduler does not duplicate 33 | preload_app = True 34 | # Set timeout for callbacks 35 | timeout = os.getenv("TIMEOUT", 1200) 36 | 37 | # For debugging and testing 38 | log_data = { 39 | "loglevel": loglevel, 40 | "workers": workers, 41 | "bind": bind, 42 | # Additional, non-gunicorn variables 43 | "workers_per_core": workers_per_core, 44 | "host": host, 45 | "port": port, 46 | } 47 | print(json.dumps(log_data)) 48 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: CI 4 | 5 | # Controls when the workflow will run 6 | on: 7 | # Triggers the workflow on push or pull request events but only for the master branch 8 | push: 9 | branches: [ master ] 10 | 11 | # Allows you to run this workflow manually from the Actions tab 
12 | workflow_dispatch: 13 | 14 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 15 | jobs: 16 | push_to_registry: 17 | name: Push Docker image to Docker Hub 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Check out the repo 21 | uses: actions/checkout@v2 22 | 23 | - name: Log in to Docker Hub 24 | uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 25 | with: 26 | username: ${{ secrets.DOCKER_USERNAME }} 27 | password: ${{ secrets.DOCKER_PASSWORD }} 28 | 29 | - name: Extract metadata (tags, labels) for Docker 30 | id: meta 31 | uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 32 | with: 33 | images: ethanopp/fitly 34 | 35 | - name: Build and push Docker image 36 | uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc 37 | with: 38 | context: . 39 | push: true 40 | tags: ethanopp/fitly:latest 41 | labels: ${{ steps.meta.outputs.labels }} 42 | -------------------------------------------------------------------------------- /config/config.ini.example: -------------------------------------------------------------------------------- 1 | [logger] 2 | level = DEBUG 3 | 4 | [cron] 5 | hourly_pull = False 6 | 7 | [settings] 8 | password = 9 | 10 | # Restart is required after connecting to spotify via settings page for stream to start working 11 | [spotify] 12 | client_id = 13 | client_secret = 14 | redirect_uri = 15 | # Range of % complete song needs to be in when changed to be considered a 'Skip'. Used to train model for 'liked' songs 16 | skip_min_threshold = .05 17 | skip_max_threshold = .80 18 | # Min seconds song has to be on player for it to be inserted into db (ignore quick skips) 19 | min_secs_listened = 15 20 | # interval at which spotify 'currently_playing' will be polled, .5 = 2 requests per second 21 | poll_interval_seconds = .5 22 | 23 | [peloton] 24 | username = 25 | password = 26 | 27 | [stryd] 28 | username = 29 | password = 30 | compare_against_age=1 31 | compare_against_gender=1 32 | compare_against_race_event=1 33 | 34 | [strava] 35 | activities_after_date = 2018-01-01T00:00:00Z 36 | client_id = 37 | client_secret = 38 | redirect_uri = http://127.0.0.1:8050/settings?strava 39 | 40 | [oura] 41 | redirect_uri = http://127.0.0.1:8050/settings?oura 42 | client_id = 43 | client_secret = 44 | days_back = 7 45 | white = rgb(220, 220, 220) 46 | teal = rgb(134, 201, 250) 47 | light_blue = rgb(85, 139, 189) 48 | dark_blue = rgb(43, 70, 119) 49 | orange = rgb(234, 109, 95) 50 | 51 | [withings] 52 | redirect_uri = http://127.0.0.1:8050/settings?withings 53 | client_id = 54 | client_secret = 55 | 56 | [nextcloud] 57 | url = 58 | username = 59 | password = 60 | fitbod_path = 61 | 62 | [timezone] 63 | timezone = America/New_York 64 | 65 | [dashboard] 66 | transition=2000 -------------------------------------------------------------------------------- /src/fitly/components.py: -------------------------------------------------------------------------------- 1 | import dash_core_components as dcc 2 | import dash_html_components as html 3 | import dash_bootstrap_components as dbc 4 | from flask import current_app as server 5 | 6 | from .utils import get_url, component 7 | 8 | 9 | def fa(className): 10 | """A convenience component for adding Font Awesome icons""" 11 | return html.I(className=className) 12 | 13 | 14 | @component 15 | def make_brand(**kwargs): 16 | return html.Header( 17 | className="brand", 18 | children=dcc.Link( 19 | href=get_url(""), 20 | children=html.H1([fa("far fa-chart-bar"), 
server.config["TITLE"]]), 21 | ), 22 | **kwargs, 23 | ) 24 | 25 | 26 | @component 27 | def make_header(**kwargs): 28 | navbar_items = dbc.Row( 29 | html.Ul( 30 | id=server.config["NAVBAR_CONTAINER_ID"], className="navbar-nav" 31 | ), 32 | no_gutters=True, 33 | className="ml-auto flex-nowrap mt-0", 34 | align="center", 35 | ) 36 | 37 | return dbc.Navbar( 38 | id="header", 39 | className="sticky-top", 40 | color="primary", 41 | dark=True, 42 | children=[ 43 | make_brand(), 44 | dbc.NavbarToggler(id="navbar-toggler"), 45 | dbc.Collapse(navbar_items, id="navbar-collapse", navbar=True), 46 | ], 47 | **kwargs, 48 | ) 49 | 50 | 51 | @component 52 | def make_sidebar(**kwargs): 53 | return html.Nav( 54 | id=f"sidebar", 55 | className="nav navbar-dark bg-dark flex-column align-items-start", 56 | children=[make_brand(), html.Div(id=server.config["NAVBAR_CONTAINER_ID"])], 57 | **kwargs, 58 | ) 59 | -------------------------------------------------------------------------------- /src/fitly/api/notifications.py: -------------------------------------------------------------------------------- 1 | from ..api.sqlalchemy_declarative import withings, stravaSummary, athlete 2 | from sqlalchemy import func 3 | from datetime import datetime, timedelta 4 | import dash_bootstrap_components as dbc 5 | from ..app import app 6 | 7 | 8 | def last_body_measurement_notification(): 9 | last_measurement_date = app.session.query(func.max(withings.date_utc))[0][0] 10 | 11 | app.session.remove() 12 | 13 | if last_measurement_date: 14 | days_since_last_measurement = datetime.utcnow().date() - last_measurement_date.date() 15 | 16 | if days_since_last_measurement >= timedelta(days=7): 17 | return dbc.Alert( 18 | "It's been {:.0f} days since your last body measurement".format(days_since_last_measurement.days), 19 | color='primary', 20 | style={'borderRadius': '4px'}) 21 | 22 | 23 | def last_ftp_test_notification(ftp_type): 24 | last_ftp_test_date = \ 25 | app.session.query(func.max(stravaSummary.start_date_utc)).filter( 26 | (stravaSummary.name.ilike('%ftp test%')) & (stravaSummary.type.ilike(ftp_type)) 27 | )[0][0] 28 | ftp_week_threshold = app.session.query(athlete).filter( 29 | athlete.athlete_id == 1).first().ftp_test_notification_week_threshold 30 | 31 | app.session.remove() 32 | 33 | if last_ftp_test_date: 34 | weeks_since_ftp_test = ((datetime.utcnow() - last_ftp_test_date).days) / 7.0 35 | if weeks_since_ftp_test >= ftp_week_threshold: 36 | return dbc.Alert( 37 | "It's been {:.1f} weeks since your last {} FTP test".format(weeks_since_ftp_test, ftp_type), 38 | color='primary', 39 | style={'borderRadius': '4px'}) 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | /config/fitness.db 103 | /config/config.ini 104 | /config/peloton_class_dict.json 105 | 106 | /peloton-cache.csv 107 | /stryd-cache.csv -------------------------------------------------------------------------------- /src/fitly/settings.py: -------------------------------------------------------------------------------- 1 | # Variables defined in this file will be passed to the 'config' attribute of the 2 | # Flask instance used by the Dash app. Any values corresponding to Dash 3 | # keyword arguments will be passed through to the Dash instance. They must be in UPPER CASE in order to take effect. For more information see 4 | # http://flask.pocoo.org/docs/config. 5 | 6 | # Your App's title. The value of this parameter will be propagated into 7 | # `app.title` 8 | TITLE = "Fit.ly" 9 | 10 | # The value of this parameter will be propagated into both 11 | # `app.scripts.config.serve_locally` and `app.css.config.serve_locally` 12 | SERVE_LOCALLY = False 13 | 14 | # 15 | # Dash.__init__ keyword arguments 16 | # 17 | 18 | # URL prefix for client-side requests. If not None, 19 | # must begin and end with a '/'. 20 | REQUESTS_PATHNAME_PREFIX = None 21 | 22 | # URL prefix for server-side routes. If not None, must begin and end with a 23 | # '/'. 24 | ROUTES_PATHNAME_PREFIX = None 25 | 26 | # Externally hosted CSS files go in here. If you want to use Bootstrap from a 27 | # CDN, Dash Bootstrap Components contains links to bootstrapcdn: 28 | # 29 | # import dash_bootstrap_components as dbc 30 | # EXTERNAL_STYLESHEETS = [dbc.themes.BOOTSTRAP] 31 | # 32 | # or if you want to use a Bootswatch theme: 33 | # 34 | import dash_bootstrap_components as dbc 35 | 36 | EXTERNAL_STYLESHEETS = [dbc.themes.SLATE] 37 | 38 | META_TAGS = [{"name": "viewport", "content": "width=device-width, initial-scale=1"}] 39 | 40 | # Externally hosted Javascript files go in here.
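# For example, to load an extra script from a CDN (hypothetical URL):
# EXTERNAL_SCRIPTS = ["https://cdn.example.com/extra-lib.min.js"]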
41 | EXTERNAL_SCRIPTS = [] 42 | 43 | # 44 | # Layout config 45 | # 46 | 47 | # The ID of the dcc.Location component used for multi-page apps 48 | LOCATION_COMPONENT_ID = "dash-location" 49 | 50 | # The ID of the element used to inject each page of the multi-page app into 51 | CONTENT_CONTAINER_ID = "page-content" 52 | 53 | # The ID of the element used to inject the navbar items into 54 | NAVBAR_CONTAINER_ID = "navbar-items" 55 | -------------------------------------------------------------------------------- /src/fitly/layouts.py: -------------------------------------------------------------------------------- 1 | """Contains layouts suitable for being the value of the 'layout' attribute of 2 | Dash app instances. 3 | """ 4 | 5 | from flask import current_app as server 6 | import dash_core_components as dcc 7 | import dash_html_components as html 8 | import dash_bootstrap_components as dbc 9 | 10 | from .components import make_header, make_sidebar 11 | 12 | 13 | def main_layout_header(): 14 | """Dash layout with a top-header""" 15 | return html.Div( 16 | [ 17 | make_header(), 18 | dbc.Container( 19 | dbc.Row(dbc.Col(id=server.config["CONTENT_CONTAINER_ID"])), fluid=True 20 | ), 21 | dcc.Location(id=server.config["LOCATION_COMPONENT_ID"], refresh=False), 22 | 23 | dbc.Toast( 24 | id="db-refresh-toast", 25 | header="Fit.ly", 26 | is_open=False, 27 | dismissable=False, 28 | icon="danger", 29 | # top: 66 positions the toast below the navbar 30 | style={"position": "fixed", "top": 66, "right": 10, "width": 350}, 31 | children=[ 32 | dbc.Row(className='align-items-center text-center', children=[ 33 | dbc.Col(className='col-2', children=[dbc.Spinner(size='md', color="danger")]), 34 | dbc.Col(className='col-8 text-center', children=['Database Refresh in Progress']) 35 | ]) 36 | ], 37 | ), 38 | dcc.Interval(id='db-refresh-toast-interval', interval=3 * 1000, n_intervals=0), 39 | ] 40 | ) 41 | 42 | 43 | def main_layout_sidebar(): 44 | """Dash layout with a sidebar""" 45 | return html.Div( 46 | [ 47 | dbc.Container( 48 | fluid=True, 49 | children=dbc.Row( 50 | [ 51 | dbc.Col( 52 | make_sidebar(className="px-2"), width=2, className="px-0" 53 | ), 54 | dbc.Col(id=server.config["CONTENT_CONTAINER_ID"], width=10), 55 | ] 56 | ), 57 | ), 58 | dcc.Location(id=server.config["LOCATION_COMPONENT_ID"], refresh=False), 59 | ] 60 | ) 61 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from setuptools import setup, find_packages 4 | 5 | 6 | # Package meta-data. 7 | NAME = "Fit.ly" 8 | DESCRIPTION = "Web analytics for endurance athletes" 9 | 10 | # These can be set to None if you want to omit them 11 | URL = "https://github.com/ethanopp/fitly" 12 | AUTHOR = "Ethan Oppenheim" 13 | AUTHOR_EMAIL = "ethanopp@gmail.com" 14 | LICENSE = "MIT license" 15 | REQUIRES_PYTHON = ">=3.6.0" 16 | VERSION = None # get this from __version__.py 17 | 18 | 19 | # What packages are required for this module to be executed? 20 | REQUIRED = [line.rstrip('\n') for line in open('requirements.txt')] 21 | 22 | # What packages are optional? 23 | EXTRAS = {"prod": ["mod_wsgi"]} 24 | 25 | # get the absolute path to this file 26 | here = os.path.abspath(os.path.dirname(__file__)) 27 | 28 | 29 | # Import the README and use it as the long-description. 30 | # Note: this will only work if "README.md" is present in your MANIFEST.in file! 
31 | try: 32 | with open(os.path.join(here, "README.md"), encoding="utf-8") as f: 33 | long_description = "\n" + f.read() 34 | except FileNotFoundError: 35 | long_description = DESCRIPTION 36 | 37 | 38 | # If VERSION not specified above, load the package's __version__.py module as a 39 | # dictionary. 40 | about = {} 41 | if not VERSION: 42 | with open( 43 | os.path.join(here, "src", "fitly", "__version__.py") 44 | ) as f: 45 | exec(f.read(), about) 46 | else: 47 | about["__version__"] = VERSION 48 | 49 | 50 | setup( 51 | name=NAME, 52 | author=AUTHOR, 53 | author_email=AUTHOR_EMAIL, 54 | url=URL, 55 | license=LICENSE, 56 | python_requires=REQUIRES_PYTHON, 57 | install_requires=REQUIRED, 58 | extras_require=EXTRAS, 59 | description=DESCRIPTION, 60 | long_description=long_description, 61 | long_description_content_type="text/markdown", 62 | version=about["__version__"], 63 | packages=find_packages("src"), 64 | package_dir={"": "src"}, 65 | package_data={ 66 | "": [ 67 | "assets/favicon.ico", 68 | "assets/*.css", 69 | "assets/*.js", 70 | "assets/font-awesome/css/*.css", 71 | "assets/font-awesome/webfonts/*.eot", 72 | "assets/font-awesome/webfonts/*.svg", 73 | "assets/font-awesome/webfonts/*.ttf", 74 | "assets/font-awesome/webfonts/*.woff", 75 | "assets/font-awesome/webfonts/*.woff2", 76 | ] 77 | }, 78 | scripts=["bin/run-fitly-prod"], 79 | entry_points={ 80 | "console_scripts": [ 81 | "run-fitly-dev=fitly.dev_cli:main" 82 | ] 83 | }, 84 | ) 85 | -------------------------------------------------------------------------------- /src/fitly/index.py: -------------------------------------------------------------------------------- 1 | import dash_html_components as html 2 | 3 | from .app import app 4 | from .utils import DashRouter, DashNavBar 5 | from .pages import home, lifting, performance, power, music, settings 6 | from .components import fa 7 | from dash.dependencies import Input, Output, State 8 | from .api.sqlalchemy_declarative import dbRefreshStatus, athlete 9 | 10 | athlete_info = app.session.query(athlete).filter(athlete.athlete_id == 1).first() 11 | use_power = bool(athlete_info.use_run_power or athlete_info.use_cycle_power) 12 | app.session.remove() 13 | 14 | # Ordered iterable of routes: tuples of (route, layout), where 'route' is a 15 | # string corresponding to the path of the route (will be prefixed with Dash's 16 | # 'routes_pathname_prefix') and 'layout' is a Dash Component. 17 | urls = ( 18 | ("", home.get_layout), 19 | ("home", home.get_layout), 20 | ("performance", performance.get_layout), 21 | ("power", power.get_layout), 22 | ("lifting", lifting.get_layout), 23 | ("music", music.get_layout), 24 | ("settings", settings.get_layout), 25 | 26 | ) 27 | 28 | # Ordered iterable of navbar items: tuples of `(route, display)`, where `route` 29 | # is a string corresponding to the path of the route (will be prefixed with 30 | # 'routes_pathname_prefix') and 'display' is a valid value for the `children` 31 | # keyword argument for a Dash component (i.e. a Dash Component or a string).
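# e.g. a hypothetical extra entry would look like: ("sleep", html.Div([fa("fas fa-bed"), "Sleep"]))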
32 | nav_items = ( 33 | ("home", html.Div([fa("fas fa-home"), "Home"])), 34 | ("performance", html.Div([fa("fas fa-seedling"), "Performance"])), 35 | ("power", html.Div([fa("fas fa-bolt"), "Power"])), 36 | ("lifting", html.Div([fa("fas fa-dumbbell"), "Lifting"])), 37 | ("music", html.Div([fa("fas fa-music"), "Music"])), 38 | ("settings", html.Div([fa("fa fa-sliders-h"), "Settings"])), 39 | ) 40 | 41 | router = DashRouter(app, urls) 42 | navbar = DashNavBar(app, nav_items) 43 | 44 | 45 | # add callback for toggling the collapse on small screens 46 | @app.callback( 47 | Output("navbar-collapse", "is_open"), 48 | [Input("navbar-toggler", "n_clicks")], 49 | [State("navbar-collapse", "is_open")], 50 | ) 51 | def toggle_navbar_collapse(n, is_open): 52 | if n: 53 | return not is_open 54 | return is_open 55 | 56 | 57 | @app.callback( 58 | Output('db-refresh-toast', 'is_open'), 59 | [Input('db-refresh-toast-interval', 'n_intervals')] 60 | ) 61 | def truncate_and_refresh(interval): 62 | processing = app.session.query(dbRefreshStatus).filter(dbRefreshStatus.refresh_method == 'processing').first() 63 | app.session.remove() 64 | if processing: 65 | return True 66 | else: 67 | return False 68 | -------------------------------------------------------------------------------- /src/fitly/app.py: -------------------------------------------------------------------------------- 1 | from . import create_flask, create_dash, db_startup 2 | from .layouts import main_layout_header, main_layout_sidebar 3 | from apscheduler.schedulers.background import BackgroundScheduler 4 | from .utils import spotify_credentials_supplied 5 | 6 | # The Flask instance 7 | server = create_flask() 8 | 9 | # The Dash instance 10 | app = create_dash(server) 11 | 12 | # New DB startup tasks 13 | db_startup(app) 14 | 15 | # Logging 16 | import logging 17 | from logging.handlers import RotatingFileHandler 18 | from .utils import config 19 | from .api.sqlalchemy_declarative import dbRefreshStatus 20 | 21 | # Can also use %(pathname)s for full pathname for file instead of %(module)s 22 | handler = RotatingFileHandler('./config/log.log', maxBytes=10000000, backupCount=5) 23 | formatter = logging.Formatter("[%(asctime)s] %(levelname)s from %(module)s line %(lineno)d - %(message)s") 24 | handler.setFormatter(formatter) 25 | app.server.logger.setLevel(config.get('logger', 'level')) 26 | app.server.logger.addHandler(handler) 27 | # Suppress WSGI info logs 28 | logging.getLogger('werkzeug').setLevel(logging.ERROR) 29 | 30 | # Push an application context so we can use Flask's 'current_app' 31 | with server.app_context(): 32 | # load the rest of our Dash app 33 | from . import index 34 | 35 | # Enable refresh cron 36 | if config.get('cron', 'hourly_pull').lower() == 'true': 37 | try: 38 | from .api.datapull import refresh_database 39 | 40 | scheduler = BackgroundScheduler() 41 | scheduler.add_job(func=refresh_database, trigger="cron", hour='*') 42 | 43 | # Add spotify job on 20 min schedule since API only allows grabbing the last 50 songs 44 | if spotify_credentials_supplied: 45 | from .api.spotifyAPI import stream, get_spotify_client, spotify_connected 46 | 47 | if spotify_connected(): 48 | app.server.logger.debug("Listening to Spotify stream...") 49 | # Use this job to pull 'last 50' songs from spotify every 20 mins 50 | # scheduler.add_job(func=save_spotify_play_history, trigger="cron", minute='*/20') 51 | 52 | # Use this job for polling every second (much more precise data with this method can detect skips, etc.) 
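# (poll_interval_seconds is read from the [spotify] section of config.ini; .5 = 2 requests per second)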
53 | scheduler.add_job(stream, "interval", seconds=float(config.get('spotify', 'poll_interval_seconds')), 54 | max_instances=2) 55 | else: 56 | app.server.logger.debug('Spotify not connected. Not listening to stream.') 57 | app.server.logger.info('Starting cron jobs') 58 | scheduler.start() 59 | except BaseException as e: 60 | app.server.logger.error(f'Error starting cron jobs: {e}') 61 | 62 | # Delete any audit logs for running processes, since restarting the server would stop any processes 63 | app.session.query(dbRefreshStatus).filter(dbRefreshStatus.refresh_method == 'processing').delete() 64 | app.session.commit() 65 | app.session.remove() 66 | # configure the Dash instance's layout 67 | app.layout = main_layout_header() 68 | # app.layout = main_layout_sidebar() 69 | -------------------------------------------------------------------------------- /bin/run-fitly-prod: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | 4 | function usage (){ 5 | cat << EOF 6 | This script runs your Dash app with mod_wsgi-express. Script options are: 7 | 8 | --port PORT Runs the app on port PORT. 9 | 10 | --processes PROCESSES Run the app with the specified number of worker processes. 11 | 12 | --settings PATH Use the supplied PATH as a settings file that will take 13 | precedence over any settings within the internal 14 | settings.py file. Useful for providing production 15 | configuration. The path can be absolute or relative to the 16 | working directory. 17 | 18 | --debug Run mod_wsgi-express with the --debug-mode, --enable-debugger, 19 | and --reload-on-changes flags enabled. 20 | 21 | --echo Don't run the app, just echo the underlying mod_wsgi-express 22 | command that would have been run with the configuration 23 | specified. 24 | 25 | --help Show this message. 26 | EOF 27 | } 28 | 29 | APP_MODULE=fitly 30 | SCRIPTPATH=$(cd $(dirname $0); pwd -P) 31 | echo $SCRIPTPATH 32 | PORT=8000 33 | PROCESSES=2 34 | SETTINGS_PATH=$(python -c "from ${APP_MODULE} import prod_settings; print(prod_settings.__file__)") 35 | 36 | 37 | # loop over all flags starting with '--', processing them along with any 38 | # arguments if they have them 39 | while [[ ${1} == --* ]]; do 40 | case ${1} in 41 | --port) 42 | PORT=${2} 43 | shift 2 44 | ;; 45 | --debug) 46 | export DASH_DEBUG=true 47 | DEBUGFLAGS="--debug-mode --enable-debugger --reload-on-changes" 48 | shift 1 49 | ;; 50 | --processes) 51 | PROCESSES=${2} 52 | shift 2 53 | ;; 54 | --settings) 55 | if [[ $2 = /* ]]; then 56 | # Use the absolute path supplied 57 | SETTINGS_PATH=${2} 58 | else 59 | # A relative path was supplied; append to the current working directory 60 | SETTINGS_PATH=${PWD}/${2} 61 | fi 62 | 63 | shift 2 64 | ;; 65 | --echo) 66 | ECHO=echo 67 | shift 1 68 | ;; 69 | --help) 70 | usage 71 | exit 72 | ;; 73 | --*) 74 | echo "Do not know option ${1}. Use --help for usage." 75 | exit 76 | ;; 77 | esac 78 | done 79 | 80 | 81 | # Export location of the production settings file 82 | export SLAPDASH_SETTINGS=${SETTINGS_PATH} 83 | 84 | # Add any additional environment variables to be exported here 85 | 86 | 87 | # Run Apache!
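# Example invocation from the repo root (the settings path below is hypothetical): ./bin/run-fitly-prod --port 8000 --processes 4 --settings config/prod_overrides.py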
88 | ${ECHO} mod_wsgi-express start-server \ 89 | --application-type module \ 90 | --entry-point ${APP_MODULE}.wsgi \ 91 | --port ${PORT} \ 92 | ${DEBUGFLAGS} \ 93 | --url-alias assets ${SCRIPTPATH}/src/${APP_MODULE}/assets \ 94 | --processes ${PROCESSES} \ 95 | --compress-responses \ 96 | --isatty \ 97 | 98 | # TODO get the value of the assets folder name from the Dash config 99 | 100 | -------------------------------------------------------------------------------- /src/fitly/api/api_withings.py: -------------------------------------------------------------------------------- 1 | from withings_api import WithingsApi 2 | from withings_api.common import get_measure_value, MeasureType, Credentials, CredentialsType 3 | from ..api.sqlalchemy_declarative import apiTokens, withings 4 | from ..api.database import engine 5 | from sqlalchemy import func, delete 6 | from datetime import datetime 7 | import ast 8 | import pandas as pd 9 | import numpy as np 10 | from ..app import app 11 | from ..utils import config 12 | import pickle 13 | from typing import cast 14 | 15 | client_id = config.get('withings', 'client_id') 16 | client_secret = config.get('withings', 'client_secret') 17 | redirect_uri = config.get('withings', 'redirect_uri') 18 | 19 | 20 | def save_withings_token(credentials: CredentialsType) -> None: 21 | app.server.logger.debug('***** ATTEMPTING TO SAVE TOKENS *****') 22 | # Delete current tokens 23 | app.session.execute(delete(apiTokens).where(apiTokens.service == 'Withings')) 24 | # Insert new tokens 25 | app.session.add(apiTokens(date_utc=datetime.utcnow(), service='Withings', tokens=pickle.dumps(credentials))) 26 | app.session.commit() 27 | 28 | app.session.remove() 29 | app.server.logger.debug('***** SAVED TOKENS *****') 30 | 31 | 32 | def load_credentials() -> CredentialsType: 33 | try: 34 | token_pickle = app.session.query(apiTokens.tokens).filter(apiTokens.service == 'Withings').first().tokens 35 | creds = cast(CredentialsType, pickle.loads(token_pickle)) 36 | app.session.remove() 37 | except BaseException as e: 38 | app.server.logger.error(e) 39 | creds = None 40 | 41 | return creds 42 | 43 | 44 | def withings_connected(): 45 | try: 46 | client = WithingsApi(credentials=load_credentials(), refresh_cb=save_withings_token) 47 | measures = client.measure_get_meas() 48 | app.server.logger.debug('Withings Connected') 49 | return True 50 | except BaseException as e: 51 | app.server.logger.error('Withings not connected') 52 | app.server.logger.error(e) 53 | return False 54 | 55 | 56 | ## Provide link for button on settings page 57 | def connect_withings_link(auth_client): 58 | url = auth_client.get_authorize_url() 59 | return url 60 | 61 | 62 | def pull_withings_data(): 63 | # UTC dates will get sampled into daily 64 | if withings_connected(): 65 | client = WithingsApi(load_credentials(), refresh_cb=save_withings_token) 66 | df = pd.DataFrame(columns=['date_utc', 'weight', 'fat_ratio', 'hydration']) 67 | meas_result = client.measure_get_meas(startdate=None, enddate=None, lastupdate=None) 68 | for x in meas_result.measuregrps: 69 | date = pd.to_datetime(str(x.date)) 70 | weight = get_measure_value(x, with_measure_type=MeasureType.WEIGHT) 71 | fat_ratio = get_measure_value(x, with_measure_type=MeasureType.FAT_RATIO) 72 | hydration = get_measure_value(x, with_measure_type=MeasureType.HYDRATION) 73 | 74 | if weight and fat_ratio: 75 | df = df.append({'date_utc': date, 'weight': weight, 'fat_ratio': fat_ratio, 'hydration': hydration}, 76 | ignore_index=True) 77 | 78 | df = 
df.set_index(df['date_utc'].apply(lambda x: x.replace(tzinfo=None))) 79 | 80 | df = df[['weight', 'fat_ratio', 'hydration']] 81 | # Convert to lbs 82 | df['weight'] *= 2.20462 83 | 84 | # Filter to days later than what is already in db 85 | withings_max_date = app.session.query(func.max(withings.date_utc)).first()[0] 86 | withings_max_date = datetime.strptime('1991-08-30 00:00:00', 87 | '%Y-%m-%d %H:%M:%S') if not withings_max_date else withings_max_date 88 | 89 | app.session.remove() 90 | 91 | df = df[(df.index > withings_max_date) & (~np.isnan(df['weight'])) & (~np.isnan(df['fat_ratio']))] 92 | if len(df) > 0: 93 | app.server.logger.info('New withings measurements found!') 94 | df.to_sql('withings', engine, if_exists='append', index=True) 95 | -------------------------------------------------------------------------------- /src/fitly/api/fitbodAPI.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | import owncloud 3 | import os 4 | from ..api.sqlalchemy_declarative import fitbod 5 | from ..api.database import engine 6 | from sqlalchemy import func 7 | import pandas as pd 8 | from ..app import app 9 | from ..utils import config 10 | 11 | 12 | def pull_fitbod_data(): 13 | app.server.logger.debug('Logging into Nextcloud') 14 | oc = owncloud.Client(config.get('nextcloud', 'url')) 15 | # Login to NextCloud 16 | oc.login(config.get('nextcloud', 'username'), config.get('nextcloud', 'password')) 17 | # Get filename 18 | try: 19 | filepath = oc.list(config.get('nextcloud', 'fitbod_path'))[0].path 20 | app.server.logger.debug('Fitbod file found') 21 | except: 22 | app.server.logger.debug('No fitbod file found on nextcloud') 23 | filepath = None 24 | if filepath: 25 | filename = filepath.split('/')[-1] 26 | # Download file 27 | oc.get_file(filepath) 28 | # Convert file into df 29 | df = pd.read_csv(filename) 30 | 31 | # Remove non-lifting exercises 32 | df = df[df['Distance(m)'] == 0] 33 | 34 | df = df[ 35 | (~df['Exercise'].str.contains('Running')) & 36 | (~df['Exercise'].str.contains('Cycling')) & 37 | (~df['Exercise'].str.contains('Hiking')) & 38 | (~df['Exercise'].str.contains('Rowing')) & 39 | (~df['Exercise'].str.contains('Elliptical')) & 40 | (~df['Exercise'].str.contains('Stair Stepper')) & 41 | (~df['Exercise'].str.contains('Foam')) & 42 | (~df['Exercise'].str.contains('Cat Cow')) & 43 | (~df['Exercise'].str.contains("Child's Pose")) & 44 | (~df['Exercise'].str.contains("Downward Dog")) & 45 | (~df['Exercise'].str.contains("Up Dog")) & 46 | (~df['Exercise'].str.contains("Stretch")) & 47 | (~df['Exercise'].str.contains("Butt Kick")) & 48 | (~df['Exercise'].str.contains("Chest Expansion")) & 49 | (~df['Exercise'].str.contains("Chin Drop")) & 50 | (~df['Exercise'].str.contains("Crab Pose")) & 51 | (~df['Exercise'].str.contains("Dead Hang")) & 52 | (~df['Exercise'].str.contains("Head Tilt")) & 53 | (~df['Exercise'].str.contains("Pigeon Pose")) & 54 | (~df['Exercise'].str.contains("Reach Behind and Open")) & 55 | (~df['Exercise'].str.contains("Seated Figure Four")) & 56 | (~df['Exercise'].str.contains("Seated Forward Bend")) & 57 | (~df['Exercise'].str.contains("Standing Forward Bend")) & 58 | (~df['Exercise'].str.contains("Shin Box Hip Flexor")) & 59 | (~df['Exercise'].str.contains("Shin Box Quad")) & 60 | (~df['Exercise'].str.contains("Single Leg Straight Forward Bend")) & 61 | (~df['Exercise'].str.contains("Standing Hip Circle")) & 62 | (~df['Exercise'].str.contains("Walkout")) & 63 | (~df['Exercise'].str.contains("Walkout to 
Push Up")) 64 | ] 65 | 66 | # Create lbs column 67 | df['Weight'] = df['Weight(kg)'] * 2.20462 68 | # Modify columns in df as needed 69 | df['Date_UTC'] = pd.to_datetime(df['Date']).dt.tz_localize(None) 70 | # Rename duration 71 | df = df.rename(columns={'Duration(s)': 'Duration'}) 72 | # Remove unecessary columns 73 | df = df[['Date_UTC', 'Exercise', 'Reps', 'Weight', 'Duration', 'isWarmup', 'Note']] 74 | # Date currently is not unique to exercise set - only unique to workout so should not be used as index 75 | # Autogenerate index for now until (if?) fitbod updates their export to data to have a PK (timestamp) 76 | # df = df.set_index('Date_UTC') 77 | 78 | # DB Operations 79 | 80 | max_date = app.session.query(func.max(fitbod.date_utc)).first()[0] 81 | if max_date: 82 | max_date = pd.to_datetime(max_date) 83 | # Filter df to new workouts only for appending table 84 | df = df[df['Date_UTC'] > max_date] 85 | # Insert fitbod table into DB 86 | df.to_sql('fitbod', engine, if_exists='append', index=False) 87 | app.session.commit() 88 | 89 | app.session.remove() 90 | # Delete file in local folder 91 | os.remove(filename) 92 | # Empty the dir on nextcloud 93 | oc.delete(filepath) 94 | -------------------------------------------------------------------------------- /src/fitly/api/stravaApi.py: -------------------------------------------------------------------------------- 1 | from stravalib.client import Client 2 | import datetime 3 | from datetime import datetime 4 | from sqlalchemy import delete 5 | from ..api.sqlalchemy_declarative import apiTokens 6 | from ..utils import config 7 | from ..app import app 8 | import time 9 | import pickle 10 | 11 | client_id = config.get('strava', 'client_id') 12 | client_secret = config.get('strava', 'client_secret') 13 | redirect_uri = config.get('strava', 'redirect_uri') 14 | 15 | 16 | # Retrieve current tokens from db 17 | def current_token_dict(): 18 | try: 19 | token_dict = app.session.query(apiTokens.tokens).filter(apiTokens.service == 'Strava').first().tokens 20 | token_pickle = pickle.loads(token_dict) 21 | app.session.remove() 22 | except BaseException as e: 23 | app.server.logger.error(e) 24 | token_pickle = {} 25 | return token_pickle 26 | 27 | 28 | # Function for auto saving strava token_dict to db 29 | def save_strava_token(token_dict): 30 | # Delete current key 31 | app.server.logger.debug('Deleting current strava tokens') 32 | app.session.execute(delete(apiTokens).where(apiTokens.service == 'Strava')) 33 | # Insert new key 34 | app.server.logger.debug('Inserting new strava tokens') 35 | app.session.add(apiTokens(date_utc=datetime.utcnow(), service='Strava', tokens=pickle.dumps(token_dict))) 36 | app.session.commit() 37 | app.session.remove() 38 | 39 | 40 | def get_strava_client(): 41 | token_dict = current_token_dict() 42 | if token_dict: 43 | client = Client() 44 | client.access_token = token_dict['access_token'] 45 | client.refresh_token = token_dict['refresh_token'] 46 | # If token is old, refresh it 47 | if time.time() > token_dict['expires_at']: 48 | app.server.logger.debug('Strava tokens expired, refreshing...') 49 | refresh_response = client.refresh_access_token(client_id=client_id, client_secret=client_secret, 50 | refresh_token=client.refresh_token) 51 | # Save to db 52 | save_strava_token(refresh_response) 53 | # Update client 54 | client.access_token = refresh_response['access_token'] 55 | client.refresh_token = refresh_response['refresh_token'] 56 | else: 57 | client = Client() 58 | 59 | return client 60 | 61 | 62 | # Refreshes 
tokens with refresh token if available in db 63 | def strava_connected(): 64 | try: 65 | client = get_strava_client() 66 | test = client.get_athlete() 67 | app.server.logger.debug('Strava connected') 68 | return True 69 | except BaseException as e: 70 | app.server.logger.error('Strava not connected') 71 | app.server.logger.error(e) 72 | return False 73 | 74 | 75 | ## Provide link for button on settings page 76 | def connect_strava_link(auth_client): 77 | url = auth_client.authorization_url(client_id=client_id, redirect_uri=redirect_uri, 78 | scope=['read', 'read_all', 'profile:read_all', 'profile:write', 'activity:read', 79 | 'activity:read_all', 'activity:write']) 80 | return url 81 | 82 | # def check_data_insert(): 83 | # If data found in db later than data that is being insert, delete all data after earliest date being insert 84 | 85 | # NOTE: Strava removes stopped periods in calculations, TrainingPeaks and Garmin Connect does not. Leaving non-moving periods in calculations 86 | # https://github.com/mtraver/python-fitanalysis 87 | 88 | 89 | # TODO: Look into ways to auto import LTHR similar to oura rest heart rate 90 | 91 | # def test_strava(): 92 | # # Parsing for Strava_Samples 93 | # client = get_strava_client() 94 | # streams = GetStreams(client, 3110471562, types) 95 | # df_samples = pd.DataFrame() 96 | # 97 | # # # Write each row to a dataframe 98 | # for item in types: 99 | # if item in streams.keys(): 100 | # df_samples[item] = pd.Series(streams[item].data, index=None) 101 | # 102 | # df_samples.to_csv('test_samples.csv') 103 | # 104 | # # Testing Summary 105 | # activities = GetActivities(client, '2019-09-20T00:00:00Z', limit) 106 | # for act in activities: 107 | # ParseActivitySummary(get_strava_client(), act).to_csv('df.csv',sep=',') 108 | -------------------------------------------------------------------------------- /src/fitly/assets/fitly.css: -------------------------------------------------------------------------------- 1 | /* Default link colours */ 2 | a { 3 | color: #0275d8 !important 4 | } 5 | 6 | a:hover { 7 | color: #014c8c !important 8 | } 9 | 10 | 11 | #header a, #sidebar a { 12 | color: white !important; 13 | } 14 | 15 | #header, #sidebar { 16 | color: white 17 | } 18 | 19 | #header h1 { 20 | margin-bottom: 0; 21 | line-height: inherit; 22 | } 23 | 24 | #sidebar { 25 | height: 100vh 26 | } 27 | 28 | .nav-item { 29 | font-size: 1.1em 30 | } 31 | 32 | .nav-link.active, .nav-link:hover { 33 | text-decoration: underline 34 | } 35 | 36 | .brand i, .nav-link i { 37 | padding: 0 0.5rem 38 | } 39 | 40 | .brand a:hover { 41 | text-decoration: none 42 | } 43 | 44 | 45 | /* 46 | * loading spinners 47 | */ 48 | 49 | 50 | /* This loader will hide the component and add a spinner after 1 second */ 51 | *[data-dash-is-loading="true"].loader { 52 | transition-delay: 1s; 53 | transition-property: visibility; 54 | visibility: hidden; 55 | } 56 | 57 | 58 | /* This loader will fade the component and add a spinner after 1 second */ 59 | *[data-dash-is-loading="true"].loader-fade { 60 | transition-delay: 1s; 61 | transition-property: opacity; 62 | transition-duration: 1s; 63 | opacity: 0.6; 64 | } 65 | 66 | 67 | /* the spinner animation */ 68 | @keyframes spinner { 69 | to { 70 | transform: rotate(360deg); 71 | } 72 | } 73 | 74 | 75 | /* Animation that makes elements visible sharply at end of animation */ 76 | @keyframes offset { 77 | from { 78 | visibility: hidden; 79 | } 80 | 99% { 81 | visibility: hidden; 82 | } 83 | to { 84 | visibility: visible; 85 | } 86 | } 87 | 88 | 89 | 
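/* The ::before pseudo-elements below draw the spinner itself, combining the 'offset' and 'spinner' animations */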
*[data-dash-is-loading="true"].loader::before, 90 | *[data-dash-is-loading="true"].loader-fade::before { 91 | z-index: 9999; 92 | visibility: visible; 93 | content: ''; 94 | box-sizing: border-box; 95 | position: absolute; 96 | top: 50%; 97 | left: 50%; 98 | width: 30px; 99 | height: 30px; 100 | margin-top: -10px; 101 | margin-left: -10px; 102 | border-radius: 50%; 103 | border: 2px solid #999; 104 | border-top-color: #222; 105 | /* Wait for 1s before displaying the spinner using the 'offset' animation. This 106 | avoids flickering when wait times are low. */ 107 | animation: offset 1s, spinner .6s linear infinite; 108 | } 109 | 110 | 111 | /* Font Awesome Icons */ 112 | .fa, .fas { 113 | color: #aaa; 114 | background: rgba(0, 0, 0, 0) !important; 115 | } 116 | /* Dash Charts */ 117 | .main-svg { 118 | background: rgba(0, 0, 0, 0) !important; 119 | } 120 | 121 | /* Dash Components */ 122 | .fsmTNM { 123 | width: 0 !important; 124 | } 125 | .eMfdXL { 126 | width: 0 !important; 127 | } 128 | .gJuplL, .cmSQpo { 129 | padding-left: 0% !important; 130 | padding-right: 0% !important; 131 | margin-right: 0% !important; 132 | margin-left: 0% !important; 133 | } 134 | .selector-rect{ 135 | fill: rgb(66,66,66) !important; 136 | } 137 | .rangeslider-bg, .rangeslider-mask-min, .rangeslider-mask-max{ 138 | fill-opacity: .25 !important; 139 | } 140 | 141 | .DateInput_input { 142 | height: calc(1.5em + 0.5rem + 2px); 143 | padding: 0.25rem 0.5rem; 144 | font-size: 0.8203125rem; 145 | line-height: 1.5; 146 | border-radius: 0.2rem; 147 | text-align: center; 148 | font-weight: 400; 149 | } 150 | 151 | /* Dash Table */ 152 | 153 | .dash-cell.column-0.cell--selected.focused { 154 | border-bottom-color: #3fcbeb !important; 155 | } 156 | 157 | .dash-delete-cell{ 158 | width: 3vw !important; 159 | } 160 | 161 | .dash-table-container .dash-spreadsheet-container .dash-spreadsheet-inner table{ 162 | --hover: rgb(66,66,66) !important; 163 | } 164 | 165 | .dash-table-container .dash-spreadsheet-container .dash-spreadsheet-inner input:not([type=radio]):not([type=checkbox]){ 166 | text-align: center !important; 167 | color: rgb(255,255,255) !important; 168 | } 169 | 170 | /* Webkit 171 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 172 | ::-webkit-scrollbar { 173 | -webkit-appearance: none; 174 | width: .4rem; 175 | height: .4rem; 176 | } 177 | 178 | ::-webkit-scrollbar-track { 179 | background-color: rgba(0, 0, 0, 0); 180 | } 181 | 182 | ::-webkit-scrollbar-thumb { 183 | border-radius: 0.3rem; 184 | background-color: rgba(0, 0, 0, .5); 185 | -webkit-box-shadow: 0 0 1px rgba(255, 255, 255, .5); 186 | } 187 | 188 | /* Other */ 189 | .contributorright { 190 | float: right; 191 | margin-bottom: 0; 192 | font-size: .75rem; 193 | } 194 | 195 | .contributorleft { 196 | float: left; 197 | margin-bottom: 0; 198 | font-size: .75rem; 199 | } 200 | 201 | .contentbutton { 202 | padding-top: 0%; 203 | padding-bottom: 0%; 204 | padding-left: 0%; 205 | padding-right: 0%; 206 | } 207 | -------------------------------------------------------------------------------- /src/fitly/api/strydAPI.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import datetime 3 | import pandas as pd 4 | from ..app import app 5 | from ..utils import config 6 | from .sqlalchemy_declarative import strydSummary 7 | from ..api.database import engine 8 | from sqlalchemy import func 9 | 10 | 11 | def auth_stryd_session(): 12 | requestJSON = {"email": config.get('stryd', 'username'), "password":
config.get('stryd', 'password')} 13 | responseData = requests.post("https://www.stryd.com/b/email/signin", json=requestJSON) 14 | if responseData.status_code != 200: 15 | app.server.logger.debug("Stryd could not authenticate") 16 | authenticated = False 17 | raise Exception("failed password authentication") 18 | else: 19 | app.server.logger.debug("Stryd authenticated") 20 | authenticated = True 21 | tempData = responseData.json() 22 | userID = tempData['id'] 23 | sessionID = tempData['token'] 24 | return sessionID 25 | 26 | 27 | ############################## 28 | ## get the list of workouts ## 29 | ############################## 30 | def pull_stryd_data(): 31 | sessionID = auth_stryd_session() 32 | today = datetime.datetime.now() + datetime.timedelta( 33 | days=1) # Pass tomorrow's date to ensure no issues with timezones 34 | start = today - datetime.timedelta(days=9999) 35 | headers = {'Authorization': 'Bearer: {}'.format(sessionID)} 36 | url = "https://www.stryd.com/b/api/v1/activities/calendar?srtDate={start}&endDate={today}&sortBy=StartDate".format( 37 | start=start.strftime("%m-%d-%Y"), today=today.strftime("%m-%d-%Y")) 38 | jsonData = {'srtDate': start.strftime("%m-%d-%Y"), 'endDate': today.strftime("%m-%d-%Y"), 'sortBy': 'StartDate'} 39 | 40 | responseData = requests.get(url, headers=headers, params=jsonData) 41 | df = pd.DataFrame(responseData.json()['activities']) # returns summary data for each workout 42 | df.rename(columns={ 43 | "timestamp": "start_date_local", 44 | "ftp": "stryd_ftp", 45 | "stress": "rss"}, 46 | inplace=True) 47 | 48 | df['start_date_local'] = df['start_date_local'].apply(datetime.datetime.fromtimestamp) 49 | df.set_index(pd.to_datetime(df['start_date_local']), inplace=True) 50 | 51 | # Specify which columns from stryd we want to bring over 52 | df = df[['stryd_ftp', 53 | 'total_elevation_gain', 54 | 'total_elevation_loss', 55 | 'max_elevation', 56 | 'min_elevation', 57 | 'average_cadence', 58 | 'max_cadence', 59 | 'min_cadence', 60 | 'average_stride_length', 61 | 'max_stride_length', 62 | 'min_stride_length', 63 | 'average_ground_time', 64 | 'max_ground_time', 65 | 'min_ground_time', 66 | 'average_oscillation', 67 | 'max_oscillation', 68 | 'min_oscillation', 69 | 'average_leg_spring', 70 | 'rss', 71 | 'max_vertical_stiffness', 72 | 'stryds', 73 | 'elevation', 74 | 'temperature', 75 | 'humidity', 76 | 'windBearing', 77 | 'windSpeed', 78 | 'windGust', 79 | 'dewPoint']] 80 | 81 | # Filter df for only new records not yet in DB 82 | last_styrd_date = app.session.query(func.max(strydSummary.start_date_local))[0][0] 83 | if last_styrd_date: 84 | df = df[df.index > last_styrd_date] 85 | if len(df) > 0: 86 | app.server.logger.info('New stryd workouts found!') 87 | # Insert into db 88 | df.to_sql('stryd_summary', engine, if_exists='append', index=True) 89 | app.session.commit() 90 | app.session.remove() 91 | 92 | return df 93 | 94 | 95 | def get_training_distribution(race=1, gender=1, age=1): 96 | sessionID = auth_stryd_session() 97 | headers = {'Authorization': 'Bearer: {}'.format(sessionID)} 98 | url = f"https://www.stryd.com/b/api/v1/users/runner-attribute?race={config.get('stryd', 'compare_against_race_event')}&gender={config.get('stryd', 'compare_against_gender')}&age={config.get('stryd', 'compare_against_age')}" 99 | responseData = requests.get(url, headers=headers) 100 | return responseData.json() 101 | 102 | # '''{'attr': {'age': 28, 103 | # 'endurance': 1835, 104 | # 'fatigue_resistance': 1272, 105 | # 'fitness': 3.0895057604261837, 106 | # 'gender': 'male', 
107 | # 'muscle_power': 5.38494805940594, 108 | # 'race': '5k', 109 | # 'timestamp': 1594587608, 110 | # 'user_key': 'Eg8KBHVzZXIQgIDkup6d3Ak'}, 111 | # 'fatigue_resistance_threshold': 1, 112 | # 'percentile': {'endurance': 0.05242718446601946, 113 | # 'fatigue_resistance': 0.4, 114 | # 'fitness': 0.1475728155339806, 115 | # 'median_endurance': 5361, 116 | # 'median_fatigue_resistance': 1445, 117 | # 'median_fitness': 3.9397466897464706, 118 | # 'median_muscle_power': 6.089743589743589, 119 | # 'muscle_power': 0.31456310679611654}} 120 | # ''' 121 | -------------------------------------------------------------------------------- /src/fitly/__init__.py: -------------------------------------------------------------------------------- 1 | from flask import Flask 2 | from dash import Dash 3 | 4 | from .__version__ import __version__ 5 | from .utils import get_dash_args_from_flask_config 6 | from sqlalchemy.orm import scoped_session 7 | from .api.database import SessionLocal, engine 8 | from .api.sqlalchemy_declarative import * 9 | from datetime import datetime 10 | 11 | 12 | def create_flask(config_object=f"{__package__}.settings"): 13 | """Create the Flask instance for this application""" 14 | server = Flask(__package__) 15 | 16 | # load default settings 17 | server.config.from_object(config_object) 18 | 19 | # load additional settings that will override the defaults in settings.py. eg 20 | # $ export FITLY_SETTINGS=/some/path/prod_settings.py 21 | server.config.from_envvar( 22 | "FITLY_SETTINGS", silent=True 23 | ) 24 | 25 | return server 26 | 27 | 28 | def create_dash(server): 29 | Base.metadata.create_all(bind=engine) 30 | 31 | """Create the Dash instance for this application""" 32 | app = Dash( 33 | name=__package__, 34 | server=server, 35 | suppress_callback_exceptions=True, 36 | **get_dash_args_from_flask_config(server.config), 37 | ) 38 | 39 | # Update the Flask config a default "TITLE" and then with any new Dash 40 | # configuration parameters that might have been updated so that we can 41 | # access Dash config easily from anywhere in the project with Flask's 42 | # 'current_app' 43 | server.config.setdefault("TITLE", "Dash") 44 | server.config.update({key.upper(): val for key, val in app.config.items()}) 45 | 46 | app.title = server.config["TITLE"] 47 | 48 | app.session = scoped_session(SessionLocal) 49 | 50 | if "SERVE_LOCALLY" in server.config: 51 | app.scripts.config.serve_locally = server.config["SERVE_LOCALLY"] 52 | app.css.config.serve_locally = server.config["SERVE_LOCALLY"] 53 | 54 | return app 55 | 56 | 57 | def db_startup(app): 58 | athlete_exists = True if len(app.session.query(athlete).all()) > 0 else False 59 | # If no athlete created in db, create one 60 | if not athlete_exists: 61 | dummy_athlete = athlete(name='New User') 62 | app.session.add(dummy_athlete) 63 | app.session.commit() 64 | 65 | db_refresh_record = True if len(app.session.query(dbRefreshStatus).all()) > 0 else False 66 | # Insert initial system load refresh record 67 | if not db_refresh_record: 68 | dummy_db_refresh_record = dbRefreshStatus( 69 | timestamp_utc=datetime.utcnow(), 70 | refresh_method='system', 71 | oura_status='System Startup', 72 | strava_status='System Startup', 73 | withings_status='System Startup', 74 | fitbod_status='System Startup') 75 | app.session.add(dummy_db_refresh_record) 76 | app.session.commit() 77 | 78 | # If fitbod_muslces table not populated create 79 | fitbod_muscles_table = True if len(app.session.query(fitbod_muscles).all()) > 0 else False 80 | if not 
fitbod_muscles_table: 81 | for exercise, muscle in [ 82 | # Abs 83 | ('Crunch', 'Abs'), 84 | ('Russian Twist', 'Abs'), 85 | ('Leg Raise', 'Abs'), 86 | ('Flutter Kicks', 'Abs'), 87 | ('Sit-Up', 'Abs'), 88 | ('Side Bridge', 'Abs'), 89 | ('Scissor Kick', 'Abs'), 90 | ('Toe Touchers', 'Abs'), 91 | ('Pallof Press', 'Abs'), 92 | ('Cable Wood Chop', 'Abs'), 93 | ('Scissor Crossover Kick', 'Abs'), 94 | ('Plank', 'Abs'), 95 | ('Leg Pull-In', 'Abs'), 96 | ('Knee Raise', 'Abs'), 97 | ('Bird Dog', 'Abs'), 98 | ('Dead Bug', 'Abs'), 99 | ('Dip', 'Abs'), 100 | ('Abs', 'Abs'), 101 | 102 | # Arms 103 | ('Tricep', 'Triceps'), 104 | ('Bench Dips', 'Triceps'), 105 | ('Dumbbell Floor Press', 'Triceps'), 106 | ('Dumbbell Kickback', 'Triceps'), 107 | ('Skullcrusher', 'Triceps'), 108 | ('Skull Crusher', 'Triceps'), 109 | ('Tate', 'Triceps'), 110 | ('bell Curl', 'Biceps'), 111 | ('EZ-Bar Curl', 'Biceps'), 112 | ('Hammer Curl', 'Biceps'), 113 | ('Bicep', 'Biceps'), 114 | ('Preacher Curl', 'Biceps'), 115 | ('No Money', 'Biceps'), 116 | ('Concentration Curls', 'Biceps'), 117 | ('Zottman', 'Biceps'), 118 | ('bell Wrist Curl', 'Forearms'), 119 | 120 | # Chest 121 | ('Cable Crossover Fly', 'Chest'), 122 | ('Chest', 'Chest'), 123 | ('Bench Press', 'Chest'), 124 | ('Machine Fly', 'Chest'), 125 | ('Decline Fly', 'Chest'), 126 | ('Dumbbell Fly', 'Chest'), 127 | ('Push Up', 'Chest'), 128 | ('Pullover', 'Chest'), 129 | ('Floor Press', 'Chest'), 130 | ('Smith Machine Press', 'Chest'), 131 | ('Svend', 'Chest'), 132 | 133 | # Back 134 | ('Pulldown', 'Back'), 135 | ('Pull Down', 'Back'), 136 | ('Cable Row', 'Back'), 137 | ('Machine Row', 'Back'), 138 | ('Bent Over Row', 'Back'), 139 | ('bell Row', 'Back'), 140 | ('Pull Up', 'Back'), 141 | ('Pull-Up', 'Back'), 142 | ('Pullup', 'Back'), 143 | ('Chin Up', 'Back'), 144 | ('Renegade', 'Back'), 145 | ('Smith Machine Row', 'Back'), 146 | ('Shotgun Row', 'Back'), 147 | ('Landmine Row', 'Back'), 148 | ('Ball Slam', 'Back'), 149 | ('T-Bar', 'Back'), 150 | ('Back Extension', 'Lower Back'), 151 | ('Superman', 'Lower Back'), 152 | ('Leg Crossover', 'Lower Back'), 153 | ('Hyperextension', 'Lower Back'), 154 | 155 | ('Stiff-Legged Barbell Good Morning', 'Lower Back'), 156 | ('Hip', 'Glutes'), 157 | ('Step Up', 'Glutes'), 158 | ('Leg Lift', 'Glutes'), 159 | ('Glute', 'Glutes'), 160 | ('Rack Pulls', 'Glutes'), 161 | ('Pull Through', 'Glutes'), 162 | ('Leg Kickback', 'Glutes'), 163 | ('Balance Trainer Reverse Hyperextension', 'Glutes'), 164 | 165 | # Soulders 166 | ('Shoulder', 'Shoulders'), 167 | ('Lateral', 'Shoulders'), 168 | ('Face Pull', 'Shoulders'), 169 | ('Delt', 'Shoulders'), 170 | ('Elbows Out', 'Shoulders'), 171 | ('Back Fly', 'Shoulders'), 172 | ('One-Arm Upright Row', 'Shoulders'), 173 | ('Dumbbell Raise', 'Shoulders'), 174 | ('Plate Raise', 'Shoulders'), 175 | ('Arnold', 'Shoulders'), 176 | ('Iron Cross', 'Shoulders'), 177 | ('Push Press', 'Shoulders'), 178 | ('Landmine Press', 'Shoulders'), 179 | ('Overhead Press', 'Shoulders'), 180 | 181 | # Neck 182 | ('Upright Row', 'Traps'), 183 | ('Barbell Shrug', 'Traps'), 184 | ('Neck', 'Traps'), 185 | 186 | # Legs 187 | ('Leg Press', 'Quads'), 188 | ('Leg Extension', 'Quads'), 189 | ('Lunge', 'Quads'), 190 | ('Squat', 'Quads'), 191 | ('Tuck Jump', 'Quads'), 192 | ('Mountain Climber', 'Quads'), 193 | ('Burpee', 'Quads'), 194 | ('Power Clean', 'Quads'), 195 | ('Wall Sit', 'Quads'), 196 | ('bell Clean', 'Hamstrings'), 197 | ('Leg Curl', 'Hamstrings'), 198 | ('Deadlift', 'Hamstrings'), 199 | ('Dumbbell Snatch', 'Hamstrings'), 200 | ('Swing', 
'Hamstrings'), 201 | ('Morning', 'Hamstrings'), 202 | ('Calf Raise', 'Calves'), 203 | ('Heel Press', 'Calves'), 204 | ('Thigh Abductor', 'Abductors'), 205 | ('Clam', 'Abductors'), 206 | ('Thigh Adductor', 'Adductors') 207 | ]: 208 | app.session.add(fitbod_muscles(exercise=exercise, muscle=muscle)) 209 | app.session.commit() 210 | app.session.remove() 211 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Fit.ly 2 | Web analytics for endurance athletes 3 | ![Image description](https://i.imgur.com/Z3mfOMR.jpeg) 4 | ![Image description](https://i.imgur.com/A5rJNff.png) 5 | ![Image description](https://i.imgur.com/PewZiKt.png) 6 | ![Image description](https://i.imgur.com/hsSPvyn.png) 7 | ![Image description](https://i.imgur.com/26Bglbc.jpg) 8 | ![Image description](https://i.imgur.com/tbx5YmT.png) 9 | ![Image description](https://i.imgur.com/zeNnCvn.jpg) 10 | ![Image description](https://i.imgur.com/7j6Ez9K.jpg) 11 | ![Image description](https://i.imgur.com/uafoBFI.jpg) 12 | 13 | Special thanks to Slapdash for helping organize! 14 | https://github.com/ned2/slapdash 15 | # Installation Methods 16 | ## Docker (Recommended) 17 | docker create --name=fitly \ 18 | --restart unless-stopped \ 19 | -e MODULE_NAME=src.fitly.app \ 20 | -e VARIABLE_NAME=server \ 21 | -e TZ=America/New_York \ 22 | -e TIMEOUT=1200 \ 23 | -e DASH_DEBUG=true \ 24 | -p 8050:80 \ 25 | -v :/app/config \ 26 | ethanopp/fitly:latest 27 | 28 | ## Python IDE 29 | After cloning/downloading the repository, install Fit.ly into your environment: 30 | 31 | $ pip install -e PATH_TO_fitly 32 | 33 | # Configuring Your App 34 | Edit the `config.ini.example` file on your local mount path with your settings (more information below) and rename the file to `config.ini`. 35 | 36 | ## Required Data Sources 37 | 38 | ### Strava 39 | Copy your client key and secret into your config.ini file. 40 | 41 | In your strava settings (https://www.strava.com/settings/api) set the authorization callback to **127.0.0.1:8050?strava**. All other fields can be set as you'd like. 42 | 43 | ## Optional data sources 44 | Some charts will not work unless these data sources are provided, or until new data sources are added that can pull similar data. 45 | 46 | ### Oura 47 | The Oura connection is currently required to generate the home page. 48 | 49 | In addition to the home page, data points from oura will be used to make performance analytics more accurate. If oura data is not provided, performance analytics will rely on statically defined metrics in the athlete table (e.g. resting heart rate). 50 | 51 | Create a developer account at https://cloud.ouraring.com/oauth/applications 52 | 53 | Copy your client key and secret into your config.ini file. 54 | 55 | Set the redirect URI to: http://127.0.0.1:8050/settings?oura 56 | 57 | ### Withings 58 | Sign up for a withings developer account here: https://account.withings.com/partner/dashboard_oauth2 59 | 60 | In addition to the home page, data points from withings will be used to make performance analytics more accurate. If withings data is not provided, performance analytics will rely on statically defined metrics in the athlete table (e.g. weight). 61 | 62 | Set the redirect URI to: http://127.0.0.1:8050/settings?withings 63 | 64 | Copy your client key and secret into your config.ini file. 65 | 66 | ### Stryd 67 | Pull critical power (ftp) from Stryd.
Since Stryd does not share their proprietary formula for calculating CP, we just pull the number rather than trying to recalculate it ourselves. 68 | 69 | Enter your username and password into the config.ini file. 70 | 71 | ### Peloton 72 | Fitly does not pull workout data directly from Peloton; Strava is the main hub for our workout data (so sync Peloton directly to Strava). 73 | 74 | For those working out to Peloton classes but not necessarily recording their data via the Peloton device (using a Stryd pod on the tread, using a Wahoo fitness trainer with the Peloton digital app, etc.), Fitly will match workouts started around the same time to workouts published to Strava, and update the title of the Strava workout with the Peloton class name. 75 | 76 | If using Oura, HRV recommendations can be used to auto-bookmark new classes on your Peloton device daily. Class types to be bookmarked can be configured on the settings page (e.g. on days where the HRV recommendation is "Low" effort, auto-bookmark some new "Running" workouts of the class types "Fun Run", "Endurance Run", "Outdoor Fun Run", and "Outdoor Endurance Run"). 77 | 78 | ![Image description](https://i.imgur.com/q654WHY.png) 79 | 80 | Enter your username and password into the config.ini file. 81 | 82 | ### Fitbod & Nextcloud 83 | Fitbod allows exporting your data via the mobile app (Log > Settings icon > Export workout data). 84 | 85 | Export your Fitbod file to a Nextcloud location, and provide that Nextcloud location in your config.ini for Fit.ly to incorporate into the dashboards. 86 | 87 | ### Spotify 88 | The Spotify connection is currently required to generate the music page. 89 | 90 | Fitly can keep a history of every song you listen to on Spotify and analyze your listening behavior (skipped, fast-forwarded, rewound, etc.) to determine song likability. Listening behavior can then be analyzed by activity type and intensity (e.g. what music do you listen to during high-intensity runs), clustered into music types (K-means clustering on Spotify audio features), and playlists can be automatically generated with recommended music for your next recommended workout. 91 | 92 | Create a developer account here: https://developer.spotify.com/dashboard/ 93 | 94 | Set the redirect URI to: http://127.0.0.1:8050/settings?spotify 95 | 96 | Copy your client ID and secret into your config.ini file. 97 | 98 | # Dashboard startup 99 | Navigate to http://127.0.0.1:8050/pages/settings 100 | 101 | Enter the password from the [settings] section of your `config.ini` 102 | 103 | Use the connect account buttons on the top left of the screen. Each successful authentication should save your tokens to the api_tokens table in your database. 104 | 105 | Click the 'Refresh' button to pull data. 106 | 107 | ### Dashboard startup tips for python IDE users 108 | Installing this package into your virtualenv will result in the development 109 | executable being installed on your path when the virtualenv is activated. This 110 | command invokes your Dash app's `run_server` method, which in turn uses the 111 | Flask development server to run your app. The command is invoked as follows: 112 | 113 | $ run-fitly-dev 114 | 115 | The script takes a couple of optional parameters, which you can 116 | discover with the `--help` flag. You may need to set the port using the `--port` 117 | parameter. If you need to expose your app outside your local machine, you will 118 | want to set `--host 0.0.0.0`.
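Whichever startup method you use, all of the credentials above are read from `./config/config.ini`. As a quick reference, here is a minimal sketch of the sections the code reads — the section and key names below are taken from what the source references (e.g. `utils.py`, `datapull.py`), the values are placeholders, and `config/config.ini.example` remains the authoritative template:

    [timezone]
    timezone = America/New_York

    [settings]
    password = <dashboard password>

    [strava]
    client_id = <client id>
    client_secret = <client secret>
    activities_after_date = <earliest activity date to pull>

    [oura]
    client_id = <client id>
    client_secret = <client secret>

    [withings]
    client_id = <client id>
    client_secret = <client secret>

    [stryd]
    username = <email>
    password = <password>

    [peloton]
    username = <email>
    password = <password>

    [nextcloud]
    username = <username>
    password = <password>
    fitbod_path = <path to the exported Fitbod file>

    [spotify]
    client_id = <client id>
    client_secret = <client secret>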
119 | 120 | # Hosting your application externally (docker compose with nginx) 121 | version: '3' 122 | services: 123 | letsencrypt: 124 | image: linuxserver/letsencrypt 125 | container_name: letsencrypt 126 | cap_add: 127 | - NET_ADMIN 128 | restart: always 129 | ports: 130 | - "80:80" 131 | - "443:443" 132 | environment: 133 | - TZ=America/New_York 134 | - EMAIL= 135 | - URL= 136 | - SUBDOMAINS=fit # this would give a website like fit.website.com 137 | volumes: 138 | - :/config 139 | fitly: 140 | image: ethanopp/fitly:latest 141 | container_name: fitly 142 | restart: always 143 | depends_on: 144 | - letsencrypt 145 | ports: 146 | - "8050:80" 147 | environment: 148 | - MODULE_NAME=src.fitly.app 149 | - VARIABLE_NAME=server 150 | - TZ=America/New_York 151 | - TIMEOUT=1200 152 | - DASH_DEBUG=true 153 | volumes: 154 | - :/app/config 155 | - /keys:/app/keys 156 | 157 | ### NGINX (subdomain example) 158 | server { 159 | listen 443 ssl; 160 | listen [::]:443 ssl; 161 | 162 | server_name fit.*; 163 | 164 | include /config/nginx/ssl.conf; 165 | 166 | client_max_body_size 0; 167 | 168 | # enable for ldap auth, fill in ldap details in ldap.conf 169 | #include /config/nginx/ldap.conf; 170 | 171 | location / { 172 | # enable the next two lines for http auth 173 | #auth_basic "Restricted"; 174 | #auth_basic_user_file /config/nginx/.htpasswd; 175 | 176 | # enable the next two lines for ldap auth 177 | #auth_request /auth; 178 | #error_page 401 =200 /login; 179 | 180 | include /config/nginx/proxy.conf; 181 | resolver 127.0.0.11 valid=30s; 182 | set $upstream_fitly fitly; 183 | proxy_pass http://$upstream_fitly:80; 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /src/fitly/utils.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from functools import wraps 3 | from urllib.parse import parse_qs 4 | 5 | import dash 6 | import dash_html_components as html 7 | import dash_bootstrap_components as dbc 8 | from dash.dependencies import Output, Input 9 | from dash.exceptions import PreventUpdate 10 | from dash.development.base_component import Component 11 | from flask import current_app as server 12 | from werkzeug.datastructures import MultiDict 13 | 14 | from .pages import page_not_found 15 | from .exceptions import InvalidLayoutError 16 | 17 | 18 | def component(func): 19 | """Decorator to help vanilla functions as pseudo Dash Components""" 20 | 21 | @wraps(func) 22 | def function_wrapper(*args, **kwargs): 23 | # remove className and style args from input kwargs so the component 24 | # function does not have to worry about clobbering them. 25 | className = kwargs.pop("className", None) 26 | style = kwargs.pop("style", None) 27 | 28 | # call the component function and get the result 29 | result = func(*args, **kwargs) 30 | 31 | # now restore the initial classes and styles by adding them 32 | # to any values the component introduced 33 | 34 | if className is not None: 35 | if hasattr(result, "className"): 36 | result.className = f"{className} {result.className}" 37 | else: 38 | result.className = className 39 | 40 | if style is not None: 41 | if hasattr(result, "style"): 42 | result.style = style.update(result.style) 43 | else: 44 | result.style = style 45 | 46 | return result 47 | 48 | return function_wrapper 49 | 50 | 51 | class DashRouter: 52 | """A URL Router for Dash multipage apps""" 53 | 54 | def __init__(self, app, urls): 55 | """Initialise the router. 
56 | 57 | Params: 58 | app: A Dash instance to associate the router with. 59 | urls: Ordered iterable of routes: tuples of (route, layout). 'route' is a 60 | string corresponding to the URL path of the route (will be prefixed 61 | with Dash's 'routes_pathname_prefix' and 'layout' is a Dash Component 62 | or callable that returns a Dash Component. The callable will also have 63 | any URL query parameters passed in as keyword arguments. 64 | """ 65 | self.routes = {get_url(route): layout for route, layout in urls} 66 | 67 | @app.callback( 68 | Output(app.server.config["CONTENT_CONTAINER_ID"], "children"), 69 | [ 70 | Input(server.config["LOCATION_COMPONENT_ID"], "pathname"), 71 | Input(server.config["LOCATION_COMPONENT_ID"], "search"), 72 | ], 73 | ) 74 | def router_callback(pathname, search): 75 | """The router""" 76 | if pathname is None: 77 | raise PreventUpdate("Ignoring first Location.pathname callback") 78 | 79 | page = self.routes.get(pathname, None) 80 | 81 | if page is None: 82 | layout = page_not_found(pathname) 83 | elif isinstance(page, Component): 84 | layout = page 85 | elif callable(page): 86 | kwargs = MultiDict(parse_qs(search.lstrip("?"))) 87 | layout = page(**kwargs) 88 | if not isinstance(layout, Component): 89 | msg = ( 90 | "Layout function must return a Dash Component.\n\n" 91 | f"Function {page.__name__} from module {page.__module__} " 92 | f"returned value of type {type(layout)} instead." 93 | ) 94 | raise InvalidLayoutError(msg) 95 | else: 96 | msg = ( 97 | "Page layouts must be a Dash Component or a callable that " 98 | f"returns a Dash Component. Received value of type {type(page)}." 99 | ) 100 | raise InvalidLayoutError(msg) 101 | return layout 102 | 103 | 104 | class DashNavBar: 105 | """A Dash navbar for multipage apps""" 106 | 107 | def __init__(self, app, nav_items): 108 | """Initialise the navbar. 109 | 110 | Params: 111 | app: A Dash instance to associate the router with. 112 | 113 | nav_items: Ordered iterable of navbar items: tuples of `(route, display)`, 114 | where `route` is a string corresponding to path of the route 115 | (will be prefixed with Dash's 'routes_pathname_prefix') and 116 | 'display' is a valid value for the `children` keyword argument 117 | for a Dash component (ie a Dash Component or a string). 
118 | """ 119 | self.nav_items = nav_items 120 | 121 | @app.callback( 122 | Output(server.config["NAVBAR_CONTAINER_ID"], "children"), 123 | [Input(server.config["LOCATION_COMPONENT_ID"], "pathname")], 124 | ) 125 | def update_nav_callback(pathname): 126 | """Create the navbar with the current page set to active""" 127 | if pathname is None: 128 | # pathname is None on the first load of the app; ignore this 129 | raise PreventUpdate("Ignoring first Location.pathname callback") 130 | return self.make_nav(pathname) 131 | 132 | @component 133 | def make_nav(self, current_path, **kwargs): 134 | nav_items = [] 135 | route_prefix = server.config["ROUTES_PATHNAME_PREFIX"] 136 | for i, (path, text) in enumerate(self.nav_items): 137 | href = get_url(path) 138 | active = (current_path == href) or (i == 0 and current_path == route_prefix) 139 | nav_item = dbc.NavItem(dbc.NavLink(text, href=href, active=active)) 140 | nav_items.append(nav_item) 141 | return html.Ul(nav_items, className="navbar-nav", **kwargs) 142 | 143 | 144 | def get_dash_args_from_flask_config(config): 145 | """Get a dict of Dash params that were specified """ 146 | # all arg names less 'self' 147 | dash_args = set(inspect.getfullargspec(dash.Dash.__init__).args[1:]) 148 | return {key.lower(): val for key, val in config.items() if key.lower() in dash_args} 149 | 150 | 151 | def get_url(path): 152 | """Expands an internal URL to include prefix the app is mounted at""" 153 | return f"{server.config['ROUTES_PATHNAME_PREFIX']}{path}" 154 | 155 | 156 | ## Fitly specific Util ## 157 | 158 | 159 | import json 160 | from datetime import timedelta 161 | import configparser 162 | import pytz 163 | 164 | config = configparser.ConfigParser() 165 | config.read('./config/config.ini') 166 | 167 | local_tz = pytz.timezone(config.get('timezone', 'timezone')) 168 | 169 | oura_credentials_supplied = True if config.get('oura', 'client_id').strip() and config.get('oura', 170 | 'client_secret').strip() else False 171 | peloton_credentials_supplied = True if config.get('peloton', 'username').strip() and config.get('peloton', 172 | 'password').strip() else False 173 | withings_credentials_supplied = True if config.get('withings', 'client_id').strip() and config.get('withings', 174 | 'client_secret').strip() else False 175 | 176 | stryd_credentials_supplied = True if config.get('stryd', 'username').strip() and config.get('stryd', 177 | 'password').strip() else False 178 | 179 | nextcloud_credentials_supplied = True if config.get('nextcloud', 'username').strip() and config.get('nextcloud', 180 | 'password') and config.get( 181 | 'nextcloud', 'fitbod_path').strip() else False 182 | 183 | spotify_credentials_supplied = True if config.get('spotify', 'client_id').strip() and config.get('spotify', 184 | 'client_secret').strip() else False 185 | 186 | A_OK_HTTP_CODES = [ 187 | 200, 188 | 207 189 | ] 190 | 191 | A_ERROR_HTTP_CODES = { 192 | 400: "Request was invalid", 193 | 401: "Invalid API key", 194 | 403: "Bad OAuth scope", 195 | 404: "Selector did not match any lights", 196 | 422: "Missing or malformed parameters", 197 | 426: "HTTP is required to perform transaction", 198 | # see http://api.developer.lifx.com/v1/docs/rate-limits 199 | 429: "Rate limit exceeded", 200 | 500: "API currently unavailable", 201 | 502: "API currently unavailable", 202 | 503: "API currently unavailable", 203 | 523: "API currently unavailable" 204 | } 205 | 206 | 207 | ############################## 208 | # Main 209 | ############################## 210 | def parse_response(response): 211 
| """Parse JSON API response, return object.""" 212 | parsed_response = json.loads(response.text) 213 | return parsed_response 214 | 215 | 216 | def handle_error(response): 217 | """Raise appropriate exceptions if necessary.""" 218 | status_code = response.status_code 219 | 220 | if status_code not in A_OK_HTTP_CODES: 221 | logError(response) 222 | error_explanation = A_ERROR_HTTP_CODES.get(status_code) 223 | raise_error = "{}: {}".format(status_code, error_explanation) 224 | raise Exception(raise_error) 225 | else: 226 | return True 227 | 228 | 229 | def full_url(base, suffix): 230 | return base + suffix 231 | 232 | 233 | def getResponse(session, url, payload, cookieDict): 234 | response = session.get(url, json=payload, cookies=cookieDict) 235 | parsed_response = parse_response(response) 236 | handle_error(response) 237 | 238 | return parsed_response 239 | 240 | 241 | def logError(response): 242 | request = response.request 243 | url = request.url 244 | headers = request.headers 245 | 246 | 247 | def calc_next_saturday(d): 248 | return d.date() + timedelta((12 - d.weekday()) % 7) 249 | 250 | 251 | def calc_prev_sunday(d): 252 | return calc_next_saturday(d) - timedelta(days=6) 253 | 254 | 255 | def update_config(section, parameter, value): 256 | config.set(section, parameter, value) 257 | with open('./config/config.ini', 'w') as configfile: 258 | config.write(configfile) 259 | 260 | 261 | def utc_to_local(utc_dt): 262 | local_dt = utc_dt.replace(tzinfo=pytz.utc).astimezone(local_tz) 263 | return local_tz.normalize(local_dt).replace(tzinfo=None) # .tz_localize(None) # .normalize might be unnecessary 264 | -------------------------------------------------------------------------------- /src/fitly/api/datapull.py: -------------------------------------------------------------------------------- 1 | from ..api.stravaApi import get_strava_client, strava_connected 2 | from ..api.ouraAPI import pull_oura_data 3 | from ..api.api_withings import pull_withings_data 4 | from ..api.fitbodAPI import pull_fitbod_data 5 | from ..api.pelotonApi import get_peloton_class_names 6 | from ..api.strydAPI import pull_stryd_data 7 | from ..api.sqlalchemy_declarative import * 8 | from sqlalchemy import func, delete 9 | import datetime 10 | from ..api.fitlyAPI import * 11 | import pandas as pd 12 | from ..app import app 13 | from ..utils import config, withings_credentials_supplied, oura_credentials_supplied, nextcloud_credentials_supplied 14 | 15 | 16 | def latest_refresh(): 17 | latest_date = app.session.query(func.max(dbRefreshStatus.timestamp_utc))[0][0] 18 | 19 | app.session.remove() 20 | return latest_date 21 | 22 | 23 | def refresh_database(refresh_method='system', truncate=False, truncateDate=None): 24 | run_time = datetime.utcnow() 25 | athlete_info = app.session.query(athlete).filter(athlete.athlete_id == 1).first() 26 | processing = app.session.query(dbRefreshStatus).filter(dbRefreshStatus.refresh_method == 'processing').first() 27 | # Add record for refresh audit trail 28 | refresh_record = dbRefreshStatus(timestamp_utc=run_time, refresh_method=refresh_method, 29 | truncate=True if truncate or truncateDate else False) 30 | app.session.add(refresh_record) 31 | app.session.commit() 32 | 33 | if not processing: 34 | try: 35 | # If athlete settings are defined 36 | if athlete_info.name and athlete_info.birthday and athlete_info.sex and athlete_info.weight_lbs and athlete_info.resting_hr and athlete_info.run_ftp and athlete_info.ride_ftp: 37 | # Insert record into table for 'processing' 38 | 
db_process_flag(flag=True) 39 | 40 | # If either truncate parameter is passed 41 | if truncate or truncateDate: 42 | 43 | # If only truncating past a certain date 44 | if truncateDate: 45 | try: 46 | app.server.logger.debug('Truncating strava_summary') 47 | app.session.execute( 48 | delete(stravaSummary).where(stravaSummary.start_date_utc >= truncateDate)) 49 | app.server.logger.debug('Truncating strava_samples') 50 | app.session.execute( 51 | delete(stravaSamples).where(stravaSamples.timestamp_local >= truncateDate)) 52 | app.server.logger.debug('Truncating strava_best_samples') 53 | app.session.execute( 54 | delete(stravaBestSamples).where(stravaBestSamples.timestamp_local >= truncateDate)) 55 | app.server.logger.debug('Truncating stryd_summary') 56 | app.session.execute( 57 | delete(strydSummary).where(strydSummary.start_date_local >= truncateDate)) 58 | app.server.logger.debug('Truncating oura_readiness_summary') 59 | app.session.execute( 60 | delete(ouraReadinessSummary).where(ouraReadinessSummary.report_date >= truncateDate)) 61 | app.server.logger.debug('Truncating oura_sleep_summary') 62 | app.session.execute( 63 | delete(ouraSleepSummary).where(ouraSleepSummary.report_date >= truncateDate)) 64 | app.server.logger.debug('Truncating oura_sleep_samples') 65 | app.session.execute( 66 | delete(ouraSleepSamples).where(ouraSleepSamples.report_date >= truncateDate)) 67 | app.server.logger.debug('Truncating oura_activity_summary') 68 | app.session.execute( 69 | delete(ouraActivitySummary).where(ouraActivitySummary.summary_date >= truncateDate)) 70 | app.server.logger.debug('Truncating oura_activity_samples') 71 | app.session.execute( 72 | delete(ouraActivitySamples).where(ouraActivitySamples.timestamp_local >= truncateDate)) 73 | app.server.logger.debug('Truncating hrv_workout_step_log') 74 | # Delete extra day back so hrv workflow can recalculate the 'completed_yesterday' flag 75 | app.session.execute(delete(workoutStepLog).where( 76 | workoutStepLog.date >= (truncateDate - timedelta(days=1)))) 77 | app.server.logger.debug('Truncating withings') 78 | app.session.execute(delete(withings).where(withings.date_utc >= truncateDate)) 79 | app.session.commit() 80 | except BaseException as e: 81 | app.session.rollback() 82 | app.server.logger.error(e) 83 | else: 84 | try: 85 | app.server.logger.debug('Truncating strava_summary') 86 | app.session.execute(delete(stravaSummary)) 87 | app.server.logger.debug('Truncating strava_samples') 88 | app.session.execute(delete(stravaSamples)) 89 | app.server.logger.debug('Truncating strava_best_samples') 90 | app.session.execute(delete(stravaBestSamples)) 91 | app.server.logger.debug('Truncating oura_readiness_summary') 92 | app.session.execute(delete(ouraReadinessSummary)) 93 | app.server.logger.debug('Truncating oura_sleep_summary') 94 | app.session.execute(delete(ouraSleepSummary)) 95 | app.server.logger.debug('Truncating oura_sleep_samples') 96 | app.session.execute(delete(ouraSleepSamples)) 97 | app.server.logger.debug('Truncating oura_activity_summary') 98 | app.session.execute(delete(ouraActivitySummary)) 99 | app.server.logger.debug('Truncating oura_activity_samples') 100 | app.session.execute(delete(ouraActivitySamples)) 101 | app.server.logger.debug('Truncating hrv_workout_step_log') 102 | app.session.execute(delete(workoutStepLog)) 103 | app.server.logger.debug('Truncating withings') 104 | app.session.execute(delete(withings)) 105 | app.server.logger.debug('Truncating fitbod') 106 | app.session.execute(delete(fitbod)) 107 | app.session.commit() 108 
| except BaseException as e: 109 | app.session.rollback() 110 | app.server.logger.error(e) 111 | 112 | app.session.remove() 113 | 114 | ### Pull Weight Data ### 115 | 116 | # If withings credentials in config.ini, populate withings table 117 | if withings_credentials_supplied: 118 | try: 119 | app.server.logger.info('Pulling withings data...') 120 | pull_withings_data() 121 | withings_status = 'Successful' 122 | except BaseException as e: 123 | app.server.logger.error('Error pulling withings data: {}'.format(e)) 124 | withings_status = str(e) 125 | else: 126 | withings_status = 'No Credentials' 127 | 128 | ### Pull Fitbod Data ### 129 | 130 | # If nextcloud credentials in config.ini, pull fitbod data from nextcloud location 131 | if nextcloud_credentials_supplied: 132 | try: 133 | app.server.logger.info('Pulling fitbod data...') 134 | pull_fitbod_data() 135 | fitbod_status = 'Successful' 136 | except BaseException as e: 137 | app.server.logger.error('Error pulling fitbod data: {}'.format(e)) 138 | fitbod_status = str(e) 139 | else: 140 | fitbod_status = 'No Credentials' 141 | 142 | ### Pull Oura Data ### 143 | 144 | if oura_credentials_supplied: 145 | # Pull Oura Data before strava because resting heart rate used in strava sample heart rate zones 146 | try: 147 | app.server.logger.info('Pulling oura data...') 148 | oura_status = pull_oura_data() 149 | oura_status = 'Successful' if oura_status else 'Oura cloud not yet updated' 150 | except BaseException as e: 151 | app.server.logger.error('Error pulling oura data: {}'.format(e)) 152 | oura_status = str(e) 153 | else: 154 | oura_status = 'No Credentials' 155 | 156 | ### Pull Stryd Data ### 157 | if stryd_credentials_supplied: 158 | try: 159 | app.server.logger.info('Pulling stryd data...') 160 | pull_stryd_data() 161 | except Exception as e: 162 | app.server.logger.error(f'Error puling stryd data {e}') 163 | 164 | ### This has been moved to crontab as spotify refresh is required more frequently than hourly ### 165 | # ### Pull Spotify Data ### 166 | # if spotify_credentials_supplied: 167 | # app.server.logger.info('Pulling spotify play history...') 168 | # save_spotify_play_history() 169 | 170 | ### Pull Strava Data ### 171 | 172 | # Only pull strava data if oura cloud has been updated with latest day, or no oura credentials so strava will use athlete static resting hr 173 | if oura_status == 'Successful' or oura_status == 'No Credentials': 174 | try: 175 | app.server.logger.info('Pulling strava data...') 176 | 177 | if strava_connected(): 178 | athlete_id = 1 # TODO: Make this dynamic if ever expanding to more users 179 | client = get_strava_client() 180 | after = config.get('strava', 'activities_after_date') 181 | activities = client.get_activities(after=after, 182 | limit=0) # Use after to sort from oldest to newest 183 | 184 | athlete_info = app.session.query(athlete).filter(athlete.athlete_id == athlete_id).first() 185 | min_non_warmup_workout_time = athlete_info.min_non_warmup_workout_time 186 | # Loop through the activities, and create a dict of the dataframe stream data of each activity 187 | db_activities = pd.read_sql( 188 | sql=app.session.query(stravaSummary.activity_id).filter( 189 | stravaSummary.athlete_id == athlete_id).distinct( 190 | stravaSummary.activity_id).statement, 191 | con=engine) 192 | 193 | app.session.remove() 194 | new_activities = [] 195 | for act in activities: 196 | # If not already in db, parse and insert 197 | if act.id not in db_activities['activity_id'].unique(): 198 | 
new_activities.append(FitlyActivity(act)) 199 | app.server.logger.info('New Workout found: "{}"'.format(act.name)) 200 | # If new workouts found, analyze and insert 201 | if len(new_activities) > 0: 202 | for fitly_act in new_activities: 203 | fitly_act.stravaScrape(athlete_id=athlete_id) 204 | # Only run hrv training workflow if oura connection available to use hrv data or readiness score 205 | if oura_status == 'Successful': 206 | training_workflow(min_non_warmup_workout_time=min_non_warmup_workout_time, 207 | metric=app.session.query(athlete).filter( 208 | athlete.athlete_id == 1).first().recovery_metric) 209 | 210 | app.server.logger.debug('stravaScrape() complete...') 211 | strava_status = 'Successful' 212 | except BaseException as e: 213 | app.server.logger.error('Error pulling strava data: {}'.format(e)) 214 | strava_status = str(e) 215 | else: 216 | app.server.logger.info('Oura cloud not yet updated. Waiting to pull Strava data') 217 | strava_status = 'Awaiting oura cloud update' 218 | 219 | app.server.logger.debug('Updating db refresh record with status...') 220 | refresh_record = app.session.query(dbRefreshStatus).filter( 221 | dbRefreshStatus.timestamp_utc == run_time).first() 222 | refresh_record.oura_status = oura_status 223 | refresh_record.fitbod_status = fitbod_status 224 | refresh_record.strava_status = strava_status 225 | refresh_record.withings_status = withings_status 226 | refresh_record.refresh_method = refresh_method 227 | app.session.commit() 228 | 229 | # Refresh peloton class types local json file 230 | if peloton_credentials_supplied: 231 | get_peloton_class_names() 232 | 233 | db_process_flag(flag=False) 234 | app.server.logger.info('Refresh Complete') 235 | app.session.remove() 236 | 237 | else: 238 | app.server.logger.info('Please define all athlete settings prior to refreshing data') 239 | except: 240 | # Just in case the job fails, remove any processing records that may have been added to audit log as to not lock the next job 241 | db_process_flag(flag=False) 242 | else: 243 | if refresh_method == 'manual': 244 | app.server.logger.info('Database is already running a refresh job') 245 | 246 | app.session.remove() 247 | -------------------------------------------------------------------------------- /src/fitly/pages/music.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import dash 3 | import dash_table 4 | from dash.dependencies import Input, Output, State 5 | import dash_bootstrap_components as dbc 6 | import dash_core_components as dcc 7 | import dash_html_components as html 8 | from ..app import app 9 | from ..api.spotifyAPI import get_played_tracks 10 | from ..api.database import engine 11 | from ..api.sqlalchemy_declarative import stravaSummary, spotifyPlayHistory 12 | import plotly.graph_objs as go 13 | from ..utils import config 14 | from sklearn.preprocessing import MinMaxScaler 15 | import operator 16 | import re 17 | from ..utils import spotify_credentials_supplied 18 | import os 19 | import pytz 20 | from tzlocal import get_localzone 21 | 22 | transition = int(config.get('dashboard', 'transition')) 23 | default_icon_color = 'rgb(220, 220, 220)' 24 | white = 'rgb(220, 220, 220)' 25 | teal = 'rgb(100, 217, 236)' 26 | light_blue = 'rgb(56, 128, 139)' 27 | dark_blue = 'rgb(39, 77, 86)' 28 | orange = 'rgb(217,100,43)' 29 | grey = 'rgb(50,50,50)' 30 | 31 | 32 | def get_layout(**kwargs): 33 | if not spotify_credentials_supplied: 34 | return html.H1('Spotify not connected', 
className='text-center') 35 | else: 36 | music_data_exists = app.session.query(spotifyPlayHistory).first() 37 | if not music_data_exists: 38 | return html.H1('No music history found', className='text-center') 39 | else: 40 | # Get sports that have music listened during the last PoP Ytd 41 | sports = [x for x in get_played_tracks(pop_time_period='ytd')['workout_type'].unique() if x != ''] 42 | sport_options = [{'label': 'All Sports', 'value': 'all'}] 43 | sport_options.extend([{'label': re.sub(r"(\w)([A-Z])", r"\1 \2", x), 'value': x} for x in sorted(sports)]) 44 | 45 | return html.Div([ 46 | html.Div(children=[ 47 | html.Div(id='music-filter-shelf', className='row align-items-center text-center mt-2 mb-2', 48 | children=[ 49 | html.Div(className='col-lg-4', children=[ 50 | dcc.Dropdown( 51 | id='music-time-selector', 52 | options=[ 53 | {'label': 'All History', 'value': 'all'}, 54 | {'label': 'Year to Date', 'value': 'ytd'}, 55 | {'label': 'Last 90 days', 'value': 'l90d'}, 56 | {'label': 'Last 6 weeks', 'value': 'l6w'}, 57 | {'label': 'Last 30 days', 'value': 'l30d'}], 58 | value='l90d', 59 | multi=False 60 | ), 61 | ]), 62 | html.Div(className='col-lg-4', children=[ 63 | dcc.Dropdown( 64 | id='music-intensity-selector', 65 | placeholder="Workout Intensity", 66 | options=[ 67 | {'label': 'All Listening', 'value': 'all'}, 68 | {'label': 'Non-Workout Listening', 'value': 'rest'}, 69 | {'label': 'All Workout Listening', 'value': 'workout'}, 70 | {'label': 'High Intensity Workout', 'value': 'high'}, 71 | {'label': 'Mod Intensity Workout', 'value': 'mod'}, 72 | {'label': 'Low Intensity Workout', 'value': 'low'}], 73 | 74 | value='workout', 75 | multi=False 76 | ), 77 | ]), 78 | html.Div(className='col-lg-4', children=[ 79 | dcc.Dropdown( 80 | id='music-sport-selector', 81 | options=sport_options, 82 | value='all', 83 | multi=False 84 | ), 85 | ]), 86 | ]), 87 | ]), 88 | 89 | html.Div(className='row mb-2', children=[ 90 | html.Div(className='col-lg-6', children=[ 91 | dbc.Card(children=[ 92 | dbc.CardHeader(html.H4('Music Profile', className='mb-0')), 93 | dbc.CardBody( 94 | style={'padding': '.5rem'}, 95 | children=[ 96 | dbc.Spinner(color='info', children=[ 97 | dcc.Graph( 98 | id='radar-chart', 99 | config={'displayModeBar': False}, 100 | style={'height': '100%'}, 101 | ) 102 | ]) 103 | ] 104 | ) 105 | ]) 106 | 107 | ]), 108 | html.Div(className='col-lg-6', children=[ 109 | html.H1('Placeholder') 110 | ]) 111 | ]), 112 | html.Div(className='row', children=[ 113 | html.Div(className='col-lg-8', children=[ 114 | dbc.Card([ 115 | dbc.CardBody([ 116 | html.Div(className='col-lg-12', style={'overflow': 'hidden'}, 117 | children=dash_table.DataTable( 118 | id='play-history-table', 119 | columns=[ 120 | # {'name': 'Played', 'id': 'timestamp'}, 121 | {'name': 'Track Name', 'id': 'track_name'}, 122 | {'name': 'Artist Name', 'id': 'artist_name'}, 123 | {'name': 'Album Name', 'id': 'album_name'}, 124 | {'name': '% Listened', 'id': 'percentage_listened'}, 125 | {'name': 'Liked', 'id': 'liked'} 126 | ], 127 | style_as_list_view=True, 128 | fixed_rows={'headers': True, 'data': 0}, 129 | style_table={'height': '100%'}, 130 | style_header={'backgroundColor': 'rgba(0,0,0,0)', 131 | 'borderBottom': '1px solid rgb(220, 220, 220)', 132 | 'borderTop': '0px', 133 | # 'textAlign': 'left', 134 | 'fontWeight': 'bold', 135 | 'fontFamily': '"Open Sans", "HelveticaNeue", "Helvetica Neue", Helvetica, Arial, sans-serif', 136 | # 'fontSize': '1.2rem' 137 | }, 138 | style_cell={ 139 | 'backgroundColor': 'rgba(0,0,0,0)', 140 
| 'color': 'rgb(220, 220, 220)', 141 | 'borderBottom': '1px solid rgb(73, 73, 73)', 142 | 'textAlign': 'center', 143 | # 'whiteSpace': 'no-wrap', 144 | # 'overflow': 'hidden', 145 | 'textOverflow': 'ellipsis', 146 | 'maxWidth': 175, 147 | 'minWidth': 50, 148 | # 'padding': '0px', 149 | 'fontFamily': '"Open Sans", "HelveticaNeue", "Helvetica Neue", Helvetica, Arial, sans-serif', 150 | # 'fontSize': '1.2rem' 151 | }, 152 | style_cell_conditional=[ 153 | { 154 | 'if': {'column_id': 'activity_id'}, 155 | 'display': 'none' 156 | } 157 | ], 158 | filter_action="native", 159 | page_action="none", 160 | # page_current=0, 161 | # page_size=10, 162 | ) 163 | 164 | ), 165 | ]), ]), 166 | ]), 167 | 168 | ]) 169 | ]) 170 | 171 | 172 | # TODO: Add graph for top artists/tracks 173 | # TODO: Add chart for PR tracks 174 | 175 | 176 | def get_radar_chart(workout_intensity, sport, pop_time_period): 177 | df_tracks = get_played_tracks(workout_intensity=workout_intensity, sport=sport, pop_time_period=pop_time_period) 178 | 179 | radar_features = ['danceability', # Mood 180 | 'energy', # Mood 181 | 'valence', # Mood 182 | 'tempo', # Mood 183 | 'loudness', # Properties 184 | 'speechiness', # Properties 185 | 'instrumentalness', # Properties 186 | 'acousticness', # Context 187 | 'liveness', # Context 188 | ] 189 | 190 | df_tracks_liked = df_tracks[df_tracks['skipped'] == 0] 191 | df_tracks_cur = df_tracks_liked[df_tracks_liked['Period'] == 'Current'][radar_features] 192 | df_tracks_prev = df_tracks_liked[df_tracks_liked['Period'] == 'Previous'][radar_features] 193 | df_tracks_disliked = df_tracks[df_tracks['skipped'] == 1] 194 | df_tracks_cur_disliked = df_tracks_disliked[df_tracks_disliked['Period'] == 'Current'][radar_features] 195 | df_tracks_prev_disliked = df_tracks_disliked[df_tracks_disliked['Period'] == 'Previous'][radar_features] 196 | data = [] 197 | 198 | if len(df_tracks_prev) > 0: 199 | # Scale all audio features from 0-1 so they can be compared on radar chart 200 | df_tracks_prev_disliked.loc[:] = MinMaxScaler().fit_transform(df_tracks_prev_disliked.loc[:]) 201 | data.append( 202 | go.Scatterpolar( 203 | r=df_tracks_prev_disliked.mean() * 100, 204 | theta=[x.title() for x in df_tracks_prev_disliked.columns], 205 | text=['{}: {:.2f}%'.format(y, x) for x, y in 206 | zip(df_tracks_prev_disliked.mean() * 100, [x.title() for x in df_tracks_prev_disliked.columns])], 207 | 208 | hoverinfo='text', 209 | fill='toself', 210 | name='❌ Prev. YTD' if pop_time_period == 'ytd' else pop_time_period.upper().replace('L', '❌ Prev. '), 211 | line=dict(color='rgba(0,0,0,0)'), 212 | marker=dict(color='rgba(0,0,0,0)'), 213 | fillcolor='rgba(217,100,43,.6)', 214 | visible='legendonly' 215 | ) 216 | ) 217 | df_tracks_prev.loc[:] = MinMaxScaler().fit_transform(df_tracks_prev.loc[:]) 218 | data.append( 219 | go.Scatterpolar( 220 | r=df_tracks_prev.mean() * 100, 221 | theta=[x.title() for x in df_tracks_prev.columns], 222 | text=['{}: {:.2f}%'.format(y, x) for x, y in 223 | zip(df_tracks_prev.mean() * 100, [x.title() for x in df_tracks_prev.columns])], 224 | 225 | hoverinfo='text', 226 | fill='toself', 227 | name='👍🏼 Prev. YTD' if pop_time_period == 'ytd' else pop_time_period.upper().replace('L', 228 | '👍🏼 Prev. 
'), 229 | line=dict(color='rgba(0,0,0,0)'), 230 | marker=dict(color='rgba(0,0,0,0)'), 231 | fillcolor='rgba(220,220,220,.6)' 232 | ) 233 | ) 234 | if len(df_tracks_cur) > 0: 235 | # Scale all audio features from 0-1 so they can be compared on radar chart 236 | df_tracks_cur_disliked.loc[:] = MinMaxScaler().fit_transform(df_tracks_cur_disliked.loc[:]) 237 | data.append( 238 | go.Scatterpolar( 239 | r=df_tracks_cur_disliked.mean() * 100, 240 | theta=[x.title() for x in df_tracks_cur_disliked.columns], 241 | text=['{}: {:.2f}%'.format(y, x) for x, y in 242 | zip(df_tracks_cur_disliked.mean() * 100, [x.title() for x in df_tracks_cur_disliked.columns])], 243 | hoverinfo='text', 244 | fill='toself', 245 | name='❌ All Time' if pop_time_period == 'all' else '❌ YTD' if pop_time_period == 'ytd' else pop_time_period.upper().replace( 246 | 'L', '❌ Last '), 247 | # color=teal, 248 | line=dict(color='rgba(0,0,0,0)'), 249 | marker=dict(color='rgba(0,0,0,0)'), 250 | fillcolor='rgba(217,100,43,.6)', 251 | visible='legendonly' 252 | ) 253 | ) 254 | df_tracks_cur.loc[:] = MinMaxScaler().fit_transform(df_tracks_cur.loc[:]) 255 | data.append( 256 | go.Scatterpolar( 257 | r=df_tracks_cur.mean() * 100, 258 | theta=[x.title() for x in df_tracks_cur.columns], 259 | text=['{}: {:.2f}%'.format(y, x) for x, y in 260 | zip(df_tracks_cur.mean() * 100, [x.title() for x in df_tracks_cur.columns])], 261 | hoverinfo='text', 262 | fill='toself', 263 | name='👍🏼 All Time' if pop_time_period == 'all' else '👍🏼 YTD' if pop_time_period == 'ytd' else pop_time_period.upper().replace( 264 | 'L', '👍🏼 Last '), 265 | # color=teal, 266 | line=dict(color='rgba(0,0,0,0)'), 267 | marker=dict(color='rgba(0,0,0,0)'), 268 | fillcolor='rgba(100, 217, 236,.6)', 269 | ) 270 | ) 271 | figure = { 272 | 'data': data, 273 | 'layout': go.Layout( 274 | # transition=dict(duration=transition), 275 | font=dict( 276 | size=10, 277 | color=white 278 | ), 279 | 280 | height=400, 281 | polar=dict( 282 | bgcolor='rgba(0,0,0,0)', 283 | radialaxis=dict( 284 | visible=True, 285 | range=[0, 100], 286 | showticklabels=False, 287 | ticks='', 288 | showline=False, 289 | 290 | )), 291 | showlegend=True, 292 | legend=dict(bgcolor='rgba(127, 127, 127, 0)'), 293 | margin={'l': 50, 'b': 25, 't': 25, 'r': 50}, 294 | 295 | ) 296 | } 297 | 298 | return figure 299 | 300 | 301 | # Create Radar Chart 302 | # Zone and distribution callback for sport/date fitlers. 
Also update date label/card header with callback here 303 | @app.callback( 304 | Output('radar-chart', 'figure'), 305 | [Input('music-intensity-selector', 'value'), 306 | Input('music-time-selector', 'value'), 307 | Input('music-sport-selector', 'value')], 308 | [State('music-intensity-selector', 'value'), 309 | State('music-time-selector', 'value'), 310 | State('music-sport-selector', 'value'), 311 | ] 312 | ) 313 | def update_radar_chart(*args): 314 | ctx = dash.callback_context 315 | pop_time_period = ctx.states['music-time-selector.value'] 316 | workout_intensity = ctx.states['music-intensity-selector.value'] 317 | sport = ctx.states['music-sport-selector.value'] 318 | 319 | figure = get_radar_chart(workout_intensity=workout_intensity, sport=sport, pop_time_period=pop_time_period) 320 | return figure 321 | 322 | 323 | @app.callback( 324 | Output('play-history-table', 'data'), 325 | [Input('music-intensity-selector', 'value'), 326 | Input('music-time-selector', 'value'), 327 | Input('music-sport-selector', 'value')], 328 | [State('music-intensity-selector', 'value'), 329 | State('music-time-selector', 'value'), 330 | State('music-sport-selector', 'value'), 331 | ] 332 | ) 333 | def populate_history_table(*args): 334 | ctx = dash.callback_context 335 | tracks_df = get_played_tracks(workout_intensity=ctx.states['music-intensity-selector.value'], 336 | sport=ctx.states['music-sport-selector.value'], 337 | pop_time_period=ctx.states['music-time-selector.value']) 338 | 339 | tracks_df['timestamp'] = tracks_df.index.tz_localize('UTC').tz_convert(get_localzone()).strftime( 340 | '%Y-%m-%d %I:%M %p') 341 | tracks_df['liked'] = tracks_df['skipped'].astype('str').apply( 342 | lambda x: '👍🏼' if x.lower() == 'false' else '❌') 343 | tracks_df['percentage_listened'] = tracks_df['percentage_listened'].apply(lambda x: '{:.0f}%'.format(x * 100)) 344 | 345 | return tracks_df[[ 346 | # 'timestamp', 347 | 'track_name', 348 | 'artist_name', 349 | 'album_name', 350 | 'percentage_listened', 351 | 'liked' 352 | ]].sort_index( 353 | ascending=False).to_dict( 354 | 'records') 355 | -------------------------------------------------------------------------------- /src/fitly/pages/lifting.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import dash 3 | import dash_core_components as dcc 4 | import dash_html_components as html 5 | import plotly.graph_objs as go 6 | from ..app import app 7 | from dash.dependencies import Input, Output, State 8 | from ..api.sqlalchemy_declarative import fitbod, fitbod_muscles 9 | from ..api.database import engine 10 | import math 11 | from datetime import datetime, timedelta, date 12 | from dateutil.relativedelta import * 13 | import dash_bootstrap_components as dbc 14 | from ..utils import config, nextcloud_credentials_supplied 15 | from difflib import SequenceMatcher 16 | 17 | 18 | def get_layout(**kwargs): 19 | muscles = sorted(pd.read_sql(sql=app.session.query(fitbod_muscles).statement, con=engine)['Muscle'].unique()) 20 | muscles.append('Unmapped') 21 | muscle_options = [{'label': x, 'value': x} for x in muscles] 22 | app.session.remove() 23 | # Nextcloud credentials (fitbod data) required for the lifting page 24 | if not nextcloud_credentials_supplied: 25 | return html.H1('Please provide nextcloud credentials in config', className='text-center') 26 | else: 27 | return html.Div([ 28 | html.Div(className='row align-items-center text-center', children=[ 29 | html.Div(className='col-lg-12 mt-2 mb-2', children=[ 30 | html.Div(id='lifting-date-buttons',
children=[ 31 | dbc.Button('All Time', id='all-button', color='primary', size='sm', 32 | style={'marginRight': '1vw'}), 33 | dbc.Button('Year to Date', id='ytd-button', color='primary', size='sm', 34 | style={'marginRight': '1vw'}), 35 | dbc.Button('Last 6 Months', id='l6m-button', color='primary', size='sm', 36 | style={'marginRight': '1vw'}), 37 | dbc.Button('Last 90 Days', id='l90d-button', color='primary', size='sm', 38 | style={'marginRight': '1vw'}), 39 | dbc.Button('Last 6 Weeks', id='l6w-button', color='primary', size='sm', 40 | style={'marginRight': '1vw'}), 41 | ]), 42 | ]), 43 | ]), 44 | html.Div(id='lifting-header', className='row align-items-center text-center', children=[ 45 | html.Div(className='col-lg-6 offset-md-3 mt-2 mb-2', children=[ 46 | 47 | dcc.Dropdown(id='muscle-options', className='bg-light', 48 | style={'backgroundColor': 'rgba(0,0,0,0)'}, 49 | options=muscle_options, 50 | value=muscles, 51 | multi=True, 52 | placeholder='Select Muscle(s)...' 53 | ) 54 | ]), 55 | ]), 56 | 57 | html.Div(id='workout-charts', className='col-12', children=[ 58 | dbc.Spinner(color='info', children=[ 59 | html.Div(className='row', children=[ 60 | html.Div(id='exercise-containers', className='col-lg-12') 61 | ]) 62 | ]), 63 | ]) 64 | ]) 65 | 66 | 67 | white = config.get('oura', 'white') 68 | teal = config.get('oura', 'teal') 69 | light_blue = config.get('oura', 'light_blue') 70 | dark_blue = config.get('oura', 'dark_blue') 71 | orange = config.get('oura', 'orange') 72 | ftp_color = 'rgb(100, 217, 236)' 73 | 74 | 75 | def find_muscle(name, muscles): 76 | ''' 77 | Map an exercise name to a muscle group (exact substring match first, then fuzzy match). 78 | :param name: name of exercise 79 | :param muscles: dictionary of exercise/muscle mapping 80 | :return: mapped muscle for the exercise 81 | ''' 82 | results = [] 83 | 84 | for key, muscle in muscles.items(): 85 | if key in name: 86 | return muscle 87 | 88 | for key, muscle in muscles.items(): 89 | matcher = SequenceMatcher(None, key, name) 90 | ratio = matcher.ratio() 91 | if ratio >= 0.75: 92 | results.append((ratio, muscle)) 93 | 94 | if not results: 95 | app.server.logger.error(f'No matching muscles for: {name}') 96 | return 'Unmapped' 97 | # Return the muscle from the closest (highest-ratio) match 98 | return sorted(results, reverse=True)[0][1] 99 | 100 | 101 | def generate_exercise_charts(timeframe, muscle_options, sort_ascending=True): 102 | df = pd.read_sql(sql=app.session.query(fitbod).statement, con=engine) 103 | 104 | # Merge 'muscle' into exercise table for mapping 105 | muscle_dict = \ 106 | pd.read_sql(sql=app.session.query(fitbod_muscles).statement, con=engine).set_index('Exercise').to_dict()[ 107 | 'Muscle'] 108 | 109 | df['Muscle'] = df['Exercise'].apply(lambda x: find_muscle(x, muscle_dict)) 110 | 111 | app.session.remove() 112 | 113 | # Filter on selected muscles 114 | df = df[df['Muscle'].isin(muscle_options)] 115 | 116 | # Filter on selected date range 117 | if timeframe == 'ytd': 118 | # df = df[df['date_UTC'].dt.date >= date(datetime.today().year, 1, 1)] 119 | daterange = [date(datetime.today().year, 1, 1), datetime.today().date()] 120 | elif timeframe == 'l6w': 121 | # df = df[df['date_UTC'].dt.date >= (datetime.now().date() - timedelta(days=42))] 122 | daterange = [datetime.now().date() - timedelta(days=42), datetime.today().date()] 123 | elif timeframe == 'l6m': 124 | # df = df[df['date_UTC'].dt.date >= (datetime.now().date() - timedelta(months=6))] 125 | daterange = [datetime.now().date() - relativedelta(months=6), datetime.today().date()] 126 | elif timeframe == 'l90d': 127 | daterange = [datetime.now().date() - relativedelta(days=90), datetime.today().date()] 128 | else:
101 | def generate_exercise_charts(timeframe, muscle_options, sort_ascending=True):
102 |     df = pd.read_sql(sql=app.session.query(fitbod).statement, con=engine)
103 | 
104 |     # Merge 'muscle' into exercise table for mapping
105 |     muscle_dict = \
106 |         pd.read_sql(sql=app.session.query(fitbod_muscles).statement, con=engine).set_index('Exercise').to_dict()[
107 |             'Muscle']
108 | 
109 |     df['Muscle'] = df['Exercise'].apply(lambda x: find_muscle(x, muscle_dict))
110 | 
111 |     app.session.remove()
112 | 
113 |     # Filter on selected muscles
114 |     df = df[df['Muscle'].isin(muscle_options)]
115 | 
116 |     # Filter on selected date range
117 |     if timeframe == 'ytd':
118 |         # df = df[df['date_UTC'].dt.date >= date(datetime.today().year, 1, 1)]
119 |         daterange = [date(datetime.today().year, 1, 1), datetime.today().date()]
120 |     elif timeframe == 'l6w':
121 |         # df = df[df['date_UTC'].dt.date >= (datetime.now().date() - timedelta(days=42))]
122 |         daterange = [datetime.now().date() - timedelta(days=42), datetime.today().date()]
123 |     elif timeframe == 'l6m':
124 |         # df = df[df['date_UTC'].dt.date >= (datetime.now().date() - timedelta(months=6))]
125 |         daterange = [datetime.now().date() - relativedelta(months=6), datetime.today().date()]
126 |     elif timeframe == 'l90d':
127 |         daterange = [datetime.now().date() - relativedelta(days=90), datetime.today().date()]
128 |     else:
129 |         # Dummy start date for 'All'
130 |         daterange = [date(1980, 1, 1)]
131 | 
132 |     if len(df) > 0:
133 |         # Calculate 1RM for exercises that have both weight and reps
134 |         df_1rm = df[(df['Weight'] > 0) & (df['Reps'] > 0)]
135 |         # Brzycki estimated 1RM: weight * (36 / (37 - reps))
136 |         df_1rm['1RM'] = (df_1rm['Weight'] * (36 / (37 - df_1rm['Reps'])))
137 |         df_1rm['1RM_Type'] = '1RM (lbs)'
138 | 
139 |         # Show total Reps for exercises with no weight (where 1RM can't be calculated)
140 |         df_reps = df[(df['Weight'] == 0) & (df['Reps'] != 0) & (df['Duration'] == 0)]
141 |         df_reps['1RM'] = df_reps['Reps']
142 |         df_reps['1RM_Type'] = 'Reps'
143 |         # Remove exercises which have sets both with and without weight to avoid skewing % increases
144 |         df_reps = df_reps[~df_reps['Exercise'].isin(df_1rm['Exercise'].unique())]
145 | 
146 |         # Show total volume (duration * weight) for time-based exercises (no reps, so 1RM can't be calculated)
147 |         df_duration = df[(df['Weight'] == 0) & (df['Reps'] == 0) & (df['Duration'] != 0)]
148 |         df_duration['1RM'] = df_duration['Duration'] * df_duration['Weight'].replace(0, 1)
149 |         df_duration['1RM_Type'] = 'Volume'
150 | 
151 |         # Consolidate dfs
152 |         df = pd.concat([df_1rm, df_reps, df_duration], ignore_index=True)
153 |         # Get max from each set
154 |         df = df.groupby(['date_UTC', 'Exercise', '1RM_Type'])['1RM'].max().reset_index()
155 | 
156 |         # Sort by % change
157 |         for exercise in df['Exercise'].sort_values().unique():
158 |             df_temp = df[(df['Exercise'] == exercise) & (df['date_UTC'].dt.date >= daterange[0])]
159 |             try:
160 |                 percent_change = ((df_temp['1RM'].tail(1).values[0] -
161 |                                    df_temp['1RM'].head(1).values[0]) /
162 |                                   df_temp['1RM'].head(1).values[0]) * 100
163 |             except IndexError:  # no sets for this exercise in the selected window
164 |                 percent_change = 0
165 |             df.loc[df['Exercise'] == exercise, '% Change'] = percent_change
166 | 
167 |         # Change sort of 'no change' so they show up at bottom
168 |         df.loc[df['% Change'] == 0, '% Change'] = 9123456789
169 |         # Sort exercises by areas which have least improvement on a % basis
170 |         df = df.sort_values(by='% Change', ascending=sort_ascending)
171 |         # Change back so correct % Change shows
172 |         df.loc[df['% Change'] == 9123456789, '% Change'] = 0
173 | 
174 |         widgets = []
175 |         for exercise in df['Exercise'].unique():
176 |             df_temp = df[df['Exercise'] == exercise]
177 |             # Only plot exercise if at least 2 different dates with that exercise
178 |             if len(df_temp['date_UTC'].unique()) > 1:
179 |                 try:
180 |                     backgroundColor = 'border-danger' if df_temp['% Change'].values[0] < 0 else 'border-success' if \
181 |                         df_temp['% Change'].values[0] > 0 else ''
182 |                 except IndexError:
183 |                     backgroundColor = ''
184 | 
185 |                 # Sort by date ascending
186 |                 df_temp = df_temp.sort_values(by=['date_UTC'])
187 |                 tooltip = [df_temp['1RM_Type'].iloc[0] + ':{:.0f}'.format(x) for x in df_temp['1RM']]
188 | 
189 |                 widgets.append([exercise, backgroundColor,
190 |                                 dcc.Graph(id=exercise + '-trend',
191 |                                           style={'height': '100%'},
192 |                                           config={'displayModeBar': False, },
193 |                                           figure={
194 |                                               'data': [
195 |                                                   go.Scatter(
196 |                                                       x=df_temp['date_UTC'],
197 |                                                       # y=df_temp['% Change'],
198 |                                                       y=df_temp['1RM'],
199 |                                                       mode='lines+markers',
200 |                                                       text=tooltip,
201 |                                                       hoverinfo='x+text',
202 |                                                       opacity=0.7,
203 |                                                       line={'color': teal},
204 |                                                       line_shape='spline'
205 |                                                   ),
206 |                                               ],
207 |                                               'layout': go.Layout(
208 |                                                   height=150,
209 |                                                   font=dict(
210 |                                                       color='rgb(220,220,220)',
211 |                                                       size=10,
212 |                                                   ),
213 | 
214 |                                                   # hoverlabel={'font': {'size': 10}},
215 |                                                   xaxis=dict(
216 |                                                       showline=True,
217 |                                                       color='rgb(220,220,220)',
218 |                                                       showgrid=False,
219 |                                                       showticklabels=True,
220 |                                                       tickformat='%b %d',
221 |                                                       # Specify range to get rid of auto x-axis padding when using scatter markers
222 |                                                       range=[df_temp['date_UTC'].min(),
223 |                                                              df_temp[
224 |                                                                  'date_UTC'].max()] if timeframe == 'all' else daterange,
225 |                                                       rangeselector=dict(
226 |                                                           buttons=list([
227 |                                                               dict(step="all",
228 |                                                                    label="All"),
229 |                                                               dict(count=1,
230 |                                                                    label="YTD",
231 |                                                                    step="year",
232 |                                                                    stepmode="todate"),
233 |                                                               dict(count=6,
234 |                                                                    label="L6M",
235 |                                                                    step="month",
236 |                                                                    stepmode="backward"),
237 |                                                               dict(count=42,
238 |                                                                    label="L6W",
239 |                                                                    step="day",
240 |                                                                    stepmode="backward"),
241 |                                                           ]),
242 |                                                           xanchor='center',
243 |                                                           font=dict(
244 |                                                               size=10,
245 |                                                           ),
246 |                                                           x=.5,
247 |                                                           y=1,
248 |                                                       ),
249 |                                                       rangeslider=dict(
250 |                                                           visible=True
251 |                                                       ),
252 |                                                   ),
253 |                                                   yaxis=dict(
254 |                                                       showgrid=False,
255 |                                                       showticklabels=True,
256 |                                                       gridcolor='rgb(73, 73, 73)',
257 |                                                       gridwidth=.5,
258 |                                                       # tickformat='%',
259 | 
260 |                                                   ),
261 |                                                   margin={'l': 20, 'b': 0, 't': 20, 'r': 20},
262 |                                                   showlegend=False,
263 |                                                   annotations=[
264 |                                                       go.layout.Annotation(
265 |                                                           font={'size': 10},
266 |                                                           bgcolor='rgba(92,89,96,1)',
267 |                                                           x=df_temp.loc[df_temp['date_UTC'].idxmax()]['date_UTC'],
268 |                                                           y=df_temp.loc[df_temp['date_UTC'].idxmax()]['1RM'],
269 |                                                           text="{} {:.0f}%".format(timeframe.upper(),
270 |                                                                                    df_temp.loc[
271 |                                                                                        df_temp['date_UTC'].idxmax()][
272 |                                                                                        '% Change']),
273 |                                                           showarrow=True,
274 |                                                           arrowhead=0,
275 |                                                           arrowcolor=white,
276 |                                                           ax=-20,
277 |                                                           ay=-20,
278 |                                                       )
279 |                                                   ],
280 |                                                   hovermode='x',
281 |                                                   autosize=True,
282 |                                                   # title=exercise
283 |                                               )
284 |                                           })
285 |                                 ])
286 | 
287 |         widgets = [
288 |             html.Div(className='col-lg-2 mb-3', children=[
289 |                 dbc.Card(className=backgroundColor, children=[
290 |                     dbc.CardHeader(exercise),
291 |                     dbc.CardBody(
292 |                         style={'padding': '.5rem'},
293 |                         children=chart)
294 |                 ])]
295 |                      ) for exercise, backgroundColor, chart in widgets]
296 | 
297 |         # Lay the cards out in rows of up to 6 charts each
298 |         num_divs = math.ceil(len(widgets) / 6)
299 |         div_layout = []
300 |         for i in range(0, num_divs):
301 |             children = []
302 |             for widget in widgets[:6]:
303 |                 children.append(widget)
304 |                 widgets.remove(widget)
305 | 
306 |             div_layout.append(html.Div(className='row', children=children))
307 |             # div_layout.append(
308 |             #     html.Div(className='row'))
309 | 
310 |         return div_layout
311 | 
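# Worked example of the math above (numbers are illustrative): a 200 lb set of 5 reps
# gives a Brzycki estimate of 200 * (36 / (37 - 5)) = 225 lbs. '% Change' then compares
# the first and last estimates inside the selected date range, so the least-improved
# exercises sort to the top of the grid.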
312 | 
313 | # Update the exercise charts and highlight the active timeframe button
314 | @app.callback([Output('exercise-containers', 'children'),
315 |                Output('all-button', 'style'),
316 |                Output('ytd-button', 'style'),
317 |                Output('l6m-button', 'style'),
318 |                Output('l90d-button', 'style'),
319 |                Output('l6w-button', 'style')],
320 |               [Input('muscle-options', 'value'),
321 |                Input('all-button', 'n_clicks'),
322 |                Input('ytd-button', 'n_clicks'),
323 |                Input('l6m-button', 'n_clicks'),
324 |                Input('l90d-button', 'n_clicks'),
325 |                Input('l6w-button', 'n_clicks')],
326 |               [State('all-button', 'style'),
327 |                State('ytd-button', 'style'),
328 |                State('l6m-button', 'style'),
329 |                State('l90d-button', 'style'),
330 |                State('l6w-button', 'style')]
331 |               )
332 | def update_exercise_charts(muscle_options, all_n_clicks, ytd_n_clicks, l6m_n_clicks, l90d_n_clicks, l6w_n_clicks,
333 |                            all_style, ytd_style,
334 |                            l6m_style, l90d_style, l6w_style):
335 |     latest_dict = {'all-button': 'all', 'ytd-button': 'ytd', 'l6m-button': 'l6m', 'l90d-button': 'l90d',
336 |                    'l6w-button': 'l6w'}
337 |     style = {'all': {'marginRight': '1vw'}, 'ytd': {'marginRight': '1vw'}, 'l6m': {'marginRight': '1vw'},
338 |              'l90d': {'marginRight': '1vw'}, 'l6w': {'marginRight': '1vw'}}
339 |     ctx = dash.callback_context
340 |     latest = 'ytd'  # default timeframe, and the fallback if no button is highlighted yet
341 |     if ctx.triggered and ctx.triggered[0]['prop_id'] == 'muscle-options.value':
342 |         # Muscle filter changed: keep whichever timeframe button is currently highlighted
343 |         for key, value in {'all': all_style, 'ytd': ytd_style, 'l6m': l6m_style, 'l90d': l90d_style,
344 |                            'l6w': l6w_style}.items():
345 |             if value == {'marginRight': '1vw', 'color': '#64D9EC', 'borderColor': '#64D9EC'}:
346 |                 latest = key
347 |     elif ctx.triggered:
348 |         latest = latest_dict[ctx.triggered[0]['prop_id'].split('.')[0]]
349 | 
350 |     style[latest] = {'marginRight': '1vw', 'color': '#64D9EC', 'borderColor': '#64D9EC'}
351 | 
352 |     return generate_exercise_charts(timeframe=latest, muscle_options=muscle_options), style['all'], style['ytd'], style[
353 |         'l6m'], style['l90d'], style['l6w']
354 | 
--------------------------------------------------------------------------------
/src/fitly/api/ouraAPI.py:
--------------------------------------------------------------------------------
1 | from oura import OuraClient
2 | from ..api.sqlalchemy_declarative import ouraReadinessSummary, ouraActivitySummary, \
3 |     ouraActivitySamples, ouraSleepSamples, ouraSleepSummary, apiTokens
4 | from ..api.database import engine
5 | from sqlalchemy import func, delete
6 | from datetime import datetime, timedelta
7 | import pandas as pd
8 | import numpy as np
9 | from ..app import app
10 | import ast
11 | from ..utils import config
12 | from functools import reduce
13 | import pickle
14 | 
15 | client_id = config.get('oura', 'client_id')
16 | client_secret = config.get('oura', 'client_secret')
17 | redirect_uri = config.get('oura', 'redirect_uri')
18 | 
19 | 
20 | def current_token_dict():
21 |     try:
22 |         token_dict = app.session.query(apiTokens.tokens).filter(apiTokens.service == 'Oura').first().tokens
23 |         token_pickle = pickle.loads(token_dict)
24 |         app.session.remove()
25 |     except BaseException as e:
26 |         app.server.logger.error(e)
27 |         token_pickle = {}
28 | 
29 |     return token_pickle
30 | 
31 | 
32 | # Function for auto saving oura token_dict to db
33 | def save_oura_token(token_dict):
34 |     # Delete current key
35 |     app.session.execute(delete(apiTokens).where(apiTokens.service == 'Oura'))
36 |     # Insert new key
37 |     try:
38 |         app.session.add(apiTokens(date_utc=datetime.utcnow(), service='Oura', tokens=pickle.dumps(token_dict)))
39 |         app.session.commit()
40 |     except:
41 |         app.session.rollback()
42 | 
43 |     app.session.remove()
44 | 
45 | 
46 | def oura_connected():
47 |     token_dict = current_token_dict()
48 |     try:
49 |         if token_dict:
50 |             oura = OuraClient(client_id=client_id, client_secret=client_secret, access_token=token_dict['access_token'],
51 |                               refresh_token=token_dict['refresh_token'], refresh_callback=save_oura_token)
52 |             app.server.logger.debug('Oura Connected')
53 |             return True
54 |     except BaseException as e:
55 |         app.server.logger.error('Oura not connected')
56 |         app.server.logger.error(e)
57 |     return False
58 | 
59 | 
60 | ## For manual pulling of tokens
61 | # def connect_oura():
62 | #     # IF refresh tokens required ##
63 | #     auth_client = OuraOAuth2Client(client_id=client_id, client_secret=client_secret)
64 | #     url = auth_client.authorize_endpoint(scope=["email", "personal", "daily"],
65 | #                                          redirect_uri=redirect_uri)
66 | #     print(url)
67 | #     auth_code = input('code')
68 | #     ###
69 | #     auth_client.fetch_access_token(auth_code)
70 | #
71 | #     ## Take code and manually test with OuraClient()
72 | #     save_oura_token(auth_client.app.session.token)
73 | 
74 | 
75 | ## Provide link for button on settings page
76 | def connect_oura_link(auth_client):
77 |     url = 
auth_client.authorize_endpoint(scope=["email", "personal", "daily"], 78 | redirect_uri=redirect_uri) 79 | return url[0] 80 | 81 | 82 | def pull_readiness_data(oura, days_back=7): 83 | # Get latest date in db and pull everything after 84 | start = app.session.query(func.max(ouraReadinessSummary.report_date)) 85 | 86 | app.session.remove() 87 | start = '1999-01-01' if start[0][0] is None else datetime.strftime(start[0][0] - timedelta(days=days_back), 88 | '%Y-%m-%d') 89 | 90 | app.server.logger.debug('Pulling readiness from max date in oura_readiness_summary {}'.format(start)) 91 | oura_data = oura.readiness_summary(start=start)['readiness'] 92 | 93 | if len(oura_data) > 0: 94 | df_readiness_summary = pd.DataFrame.from_dict(oura_data) 95 | # Readiness shows the 'summary' of the previous day. 96 | # To align with charts when filtering on date use readiness summary_date + 1 day 97 | df_readiness_summary['report_date'] = ( 98 | pd.to_datetime(df_readiness_summary['summary_date']) + timedelta(days=1)).dt.date 99 | 100 | # Only take max period_id from readiness data (don't want naps in our readiness data screwing up main daily scores) 101 | df_readiness_summary = df_readiness_summary.loc[ 102 | df_readiness_summary.reset_index().groupby(['summary_date'])['period_id'].idxmax()] 103 | 104 | df_readiness_summary.set_index('report_date', inplace=True) 105 | 106 | return df_readiness_summary 107 | else: 108 | return [] 109 | 110 | 111 | def insert_readiness_data(df_readiness_summary, days_back=7): 112 | start = app.session.query(func.max(ouraReadinessSummary.report_date)) 113 | start = '1999-01-01' if start[0][0] is None else datetime.strftime(start[0][0] - timedelta(days=days_back), 114 | '%Y-%m-%d') 115 | # Delete latest dates records from db to ensure values are being overridden from api pull 116 | try: 117 | app.server.logger.debug('Deleting >= {} records from oura_readiness_summary'.format(start)) 118 | app.session.execute(delete(ouraReadinessSummary).where(ouraReadinessSummary.summary_date >= start)) 119 | app.session.commit() 120 | except BaseException as e: 121 | app.server.logger.error(e) 122 | 123 | app.session.remove() 124 | 125 | app.server.logger.debug('Inserting oura readiness summary') 126 | df_readiness_summary.to_sql('oura_readiness_summary', engine, if_exists='append', index=True) 127 | 128 | 129 | def pull_activity_data(oura, days_back=7): 130 | # Activity data updates throughout day and score is generated based off current day (in data) 131 | # Do not need to generate 'report date' 132 | 133 | # Get latest date in db and pull everything after 134 | start = app.session.query(func.max(ouraActivitySummary.summary_date))[0][0] 135 | 136 | app.session.remove() 137 | 138 | start = '1999-01-01' if start is None else datetime.strftime(start - timedelta(days=days_back), '%Y-%m-%d') 139 | 140 | app.server.logger.debug('Pulling activity from max date in oura_activity_summary {}'.format(start)) 141 | oura_data = oura.activity_summary(start=start)['activity'] 142 | 143 | if len(oura_data) > 0: 144 | df_activity_summary = pd.DataFrame.from_dict(oura_data) 145 | df_activity_summary['summary_date'] = pd.to_datetime(df_activity_summary['summary_date']).dt.date 146 | df_activity_summary.set_index('summary_date', inplace=True) 147 | df_activity_summary['day_end_local'] = pd.to_datetime( 148 | df_activity_summary['day_end']).apply(lambda x: x.replace(tzinfo=None)) 149 | df_activity_summary['day_start_local'] = pd.to_datetime( 150 | df_activity_summary['day_start']).apply(lambda x: 
x.replace(tzinfo=None)) 151 | df_activity_summary = df_activity_summary.drop(columns=['met_1min', 'day_end', 'day_start'], axis=1) 152 | 153 | # Generate Activity Samples 154 | df_1min_list, df_5min_list = [], [] 155 | for x in oura_data: 156 | # build 1 min metrics df 157 | df_1min = pd.Series(x.get('met_1min'), name='met_1min').to_frame() 158 | df_1min['timestamp_local'] = pd.to_datetime(x.get('day_start')) + pd.to_timedelta(df_1min.index, unit='m') 159 | df_1min = df_1min.set_index('timestamp_local') 160 | # Remove timezone info from date, we are just storing whatever the local time was, where the person was 161 | df_1min.index = df_1min.index.tz_localize(None) 162 | df_1min['summary_date'] = pd.to_datetime(x.get('summary_date')).date() 163 | df_1min_list.append(df_1min) 164 | 165 | # build 5 min metrics df 166 | df_5min = pd.Series([int(y) for y in x.get('class_5min')], name='class_5min').to_frame() 167 | df_5min['class_5min_desc'] = df_5min['class_5min'].fillna('5').astype('str').map( 168 | {'0': 'Rest', '1': 'Inactive', '2': 'Low', '3': 'Medium', '4': 'High', '5': 'Non-Wear'}) 169 | df_5min.index += 1 170 | df_5min['timestamp_local'] = (pd.to_datetime(x.get('day_start')) + pd.to_timedelta(df_5min.index * 5, 171 | unit='m')) - pd.to_timedelta( 172 | 5, unit='m') 173 | df_5min = df_5min.set_index('timestamp_local') 174 | # Remove timezone info from date, we are just storing whatever the local time was, where the person was 175 | df_5min.index = df_5min.index.tz_localize(None) 176 | df_5min['summary_date'] = pd.to_datetime(x.get('summary_date')).date() 177 | df_5min_list.append(df_5min) 178 | 179 | df_1min = pd.concat(df_1min_list) 180 | df_5min = pd.concat(df_5min_list) 181 | 182 | df_activity_samples = df_1min.merge(df_5min, how='left', left_index=True, right_index=True) 183 | df_activity_samples['summary_date'] = df_activity_samples['summary_date_x'] 184 | df_activity_samples = df_activity_samples.drop(columns=['summary_date_x', 'summary_date_y'], axis=1) 185 | 186 | return df_activity_summary, df_activity_samples 187 | else: 188 | return [], [] 189 | 190 | 191 | def insert_activity_data(df_activity_summary, df_activity_samples, days_back=7): 192 | start = app.session.query(func.max(ouraActivitySummary.summary_date))[0][0] 193 | start = '1999-01-01' if start is None else datetime.strftime(start - timedelta(days=days_back), '%Y-%m-%d') 194 | 195 | # Delete latest dates records from db to ensure values are being overridden from api pull 196 | try: 197 | app.server.logger.debug('Deleting >= {} records from oura_activity_summary'.format(start)) 198 | app.session.execute(delete(ouraActivitySummary).where(ouraActivitySummary.summary_date >= start)) 199 | app.server.logger.debug('Deleting >= {} records from oura_activity_samples'.format(start)) 200 | app.session.execute(delete(ouraActivitySamples).where(ouraActivitySamples.timestamp_local >= start)) 201 | app.session.commit() 202 | except BaseException as e: 203 | app.server.logger.error(e) 204 | 205 | app.session.remove() 206 | 207 | # Insert Activity Summary 208 | app.server.logger.debug('Inserting oura activity summary') 209 | try: 210 | df_activity_summary.to_sql('oura_activity_summary', engine, if_exists='append', index=True) 211 | 212 | 213 | except BaseException as e: 214 | app.server.logger.error(e) 215 | 216 | # Insert Activity Samples 217 | app.server.logger.debug('Inserting oura activity samples') 218 | try: 219 | df_activity_samples.to_sql('oura_activity_samples', engine, if_exists='append', index=True) 220 | 221 | except 
BaseException as e: 222 | app.server.logger.error(e) 223 | 224 | 225 | def pull_sleep_data(oura, days_back=7): 226 | # Get latest date in db and pull everything after 227 | start = app.session.query(func.max(ouraSleepSummary.report_date))[0][0] 228 | 229 | app.session.remove() 230 | start = '1999-01-01' if start is None else datetime.strftime(start - timedelta(days=days_back), '%Y-%m-%d') 231 | 232 | app.server.logger.debug('Pulling sleep from max date in oura_sleep_summary {}'.format(start)) 233 | oura_data = oura.sleep_summary(start=start)['sleep'] 234 | 235 | if len(oura_data) > 0: 236 | # Sleep Summary 237 | df_sleep_summary = pd.DataFrame.from_dict(oura_data) 238 | # Sleep shows the 'summary' of the previous day. 239 | # To align with charts when filtering on date use readiness summary_date + 1 day 240 | df_sleep_summary['report_date'] = (pd.to_datetime(df_sleep_summary['summary_date']) + timedelta(days=1)).dt.date 241 | df_sleep_summary = df_sleep_summary.set_index('report_date') 242 | # Remove timestamps from bedtimes as we want whatever the time was locally 243 | df_sleep_summary['bedtime_end_local'] = pd.to_datetime( 244 | df_sleep_summary['bedtime_end']).apply(lambda x: x.replace(tzinfo=None)) 245 | df_sleep_summary['bedtime_start_local'] = pd.to_datetime( 246 | df_sleep_summary['bedtime_start']).apply(lambda x: x.replace(tzinfo=None)) 247 | 248 | df_sleep_summary = df_sleep_summary.drop(columns=['rmssd_5min', 'hr_5min', 'bedtime_end', 'bedtime_start'], 249 | axis=1) 250 | 251 | # Sleep Samples 252 | df_samples_list = [] 253 | for x in oura_data: 254 | df = pd.concat( 255 | [pd.Series(x.get('hr_5min'), name='hr_5min'), pd.Series(x.get('rmssd_5min'), name='rmssd_5min'), 256 | pd.Series([int(y) for y in x.get('hypnogram_5min')], name='hypnogram_5min')], 257 | axis=1) 258 | df['hypnogram_5min_desc'] = df['hypnogram_5min'].map( 259 | {1: 'Deep', 2: 'Light', 3: 'REM', 4: 'Awake'}) 260 | 261 | df.index += 1 262 | df['timestamp_local'] = (pd.to_datetime(x.get('bedtime_start')) + pd.to_timedelta(df.index * 5, 263 | unit='m')) - pd.to_timedelta( 264 | 5, unit='m') 265 | 266 | df['summary_date'] = pd.to_datetime(x.get('summary_date')).date() 267 | df['report_date'] = df['summary_date'] + timedelta(days=1) 268 | df = df.set_index('timestamp_local') 269 | # Remove timezone info from date, we are just storing whatever the local time was, where the person was 270 | df.index = df.index.tz_localize(None) 271 | df_samples_list.append(df) 272 | 273 | df_sleep_samples = pd.concat(df_samples_list) 274 | 275 | return df_sleep_summary, df_sleep_samples 276 | else: 277 | return [], [] 278 | 279 | 280 | def insert_sleep_data(df_sleep_summary, df_sleep_samples, days_back=7): 281 | start = app.session.query(func.max(ouraSleepSummary.report_date))[0][0] 282 | start = '1999-01-01' if start is None else datetime.strftime(start - timedelta(days=days_back), '%Y-%m-%d') 283 | 284 | # Delete latest dates records from db to ensure values are being overridden from api pull 285 | try: 286 | app.server.logger.debug('Deleting >= {} records from oura_sleep_summary'.format(start)) 287 | app.session.execute(delete(ouraSleepSummary).where(ouraSleepSummary.summary_date >= start)) 288 | app.server.logger.debug('Deleting >= {} records from oura_sleep_samples'.format(start)) 289 | app.session.execute(delete(ouraSleepSamples).where(ouraSleepSamples.summary_date >= start)) 290 | app.session.commit() 291 | except BaseException as e: 292 | app.server.logger.error(e) 293 | 294 | app.session.remove() 295 | 296 | # Insert Sleep 
Summary
297 |     app.server.logger.debug('Inserting oura sleep summary')
298 |     try:
299 |         df_sleep_summary.to_sql('oura_sleep_summary', engine, if_exists='append', index=True)
300 | 
301 |     except BaseException as e:
302 |         app.server.logger.error(e)
303 | 
304 |     # Insert Sleep Samples
305 |     app.server.logger.debug('Inserting oura sleep samples')
306 |     try:
307 |         df_sleep_samples.to_sql('oura_sleep_samples', engine, if_exists='append', index=True)
308 | 
309 |     except BaseException as e:
310 |         app.server.logger.error(e)
311 | 
312 | 
313 | def generate_oura_correlations(lookback_days=180):
314 |     '''
315 |     Generates correlations of oura metrics
316 | 
317 |     '''
318 |     lookback = pd.to_datetime(datetime.today() - timedelta(days=lookback_days)).date()
319 | 
320 |     activity = pd.read_sql(
321 |         sql=app.session.query(ouraActivitySummary).filter(ouraActivitySummary.summary_date >= lookback).statement,
322 |         con=engine,
323 |         index_col='summary_date').sort_index(
324 |         ascending=True)
325 |     # Drop columns we don't want to correlate over
326 |     activity.drop(
327 |         columns=['inactivity_alerts', 'met_min_high', 'met_min_inactive', 'met_min_low', 'met_min_medium',
328 |                  'rest_mode_state', 'score_meet_daily_targets', 'score_move_every_hour', 'score_recovery_time',
329 |                  'score_stay_active', 'score_training_frequency', 'score_training_volume', 'target_calories',
330 |                  'target_km', 'target_miles', 'to_target_km', 'to_target_miles', 'timezone'], inplace=True)
331 |     activity = activity.add_prefix('Activity_')
332 | 
333 |     readiness = pd.read_sql(
334 |         sql=app.session.query(ouraReadinessSummary).filter(ouraReadinessSummary.summary_date >= lookback).statement,
335 |         con=engine,
336 |         index_col='summary_date').sort_index(
337 |         ascending=True)
338 |     readiness.drop(
339 |         columns=[
340 |             'period_id', 'score_activity_balance', 'score_previous_day', 'score_previous_night', 'score_recovery_index',
341 |             'score_resting_hr', 'score_sleep_balance', 'score_temperature', 'score_hrv_balance', 'rest_mode_state'],
342 |         inplace=True)
343 |     readiness = readiness.add_prefix('Readiness_')
344 | 
345 |     sleep = pd.read_sql(
346 |         sql=app.session.query(ouraSleepSummary).filter(ouraSleepSummary.summary_date >= lookback).statement,
347 |         con=engine,
348 |         index_col='summary_date').sort_index(
349 |         ascending=True)
350 | 
351 |     sleep.drop(
352 |         columns=['bedtime_end_delta', 'is_longest', 'midpoint_at_delta', 'period_id', 'score_alignment', 'score_deep',
353 |                  'score_disturbances', 'score_efficiency', 'score_latency', 'score_rem', 'score_total',
354 |                  'temperature_delta', 'temperature_trend_deviation',
355 |                  'timezone'],
356 |         inplace=True)
357 | 
358 |     sleep = sleep.add_prefix('Sleep_')
359 | 
360 |     friendly_names = {'Activity_average_met': 'Average METs',
361 |                       'Activity_cal_active': 'Activity burn',
362 |                       'Activity_cal_total': 'Total burn',
363 |                       'Activity_daily_movement': 'Walking equivalent',
364 |                       'Activity_high': 'High activity time',
365 |                       'Activity_inactive': 'Inactive time',
366 |                       'Activity_low': 'Low activity time',
367 |                       'Activity_medium': 'Med activity time',
368 |                       'Activity_non_wear': 'Non-wear time',
369 |                       'Activity_rest': 'Rest time',
370 |                       'Activity_score': 'Activity score',
371 |                       'Activity_steps': 'Steps',
372 |                       'Activity_total': 'Total activity time',
373 |                       'Readiness_score': 'Readiness score',
374 |                       'Sleep_awake': 'Time awake in bed',
375 |                       'Sleep_bedtime_start_delta': 'Late to bedtime',
376 |                       'Sleep_breath_average': 'Respiratory rate',
377 |                       'Sleep_deep': 'Deep sleep',
378 |                       'Sleep_duration': 'Time in bed',
379 |                       'Sleep_efficiency': 'Sleep efficiency',
380 |                       'Sleep_hr_average': 'Average HR',
381 |                       'Sleep_hr_lowest': 'Lowest HR',
382 |                       'Sleep_light': 'Light sleep',
383 |                       'Sleep_midpoint_time': 'Sleep midpoint',
384 |                       'Sleep_onset_latency': 'Sleep latency',
385 |                       'Sleep_rem': 'REM sleep',
386 |                       'Sleep_restless': 'Restlessness',
387 |                       'Sleep_rmssd': 'Average HRV',
388 |                       'Sleep_score': 'Sleep score',
389 |                       'Sleep_temperature_deviation': 'Temp. deviation',
390 |                       'Sleep_total': 'Total sleep'}
391 | 
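# The three summaries are inner-joined on summary_date below, and every metric then gets
# shifted '(prev)'/'(next)' copies, so a single correlation matrix can relate, e.g.,
# yesterday's deep sleep to today's readiness score.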
392 |     dfs = [sleep, readiness, activity]
393 |     df = reduce(lambda left, right: pd.merge(left, right, left_index=True, right_index=True), dfs)
394 | 
395 |     df.columns = df.columns.to_series().map(friendly_names)
396 | 
397 |     # Create Prev/Next day copies of all columns
398 |     for col in friendly_names.values():
399 |         df[col + ' (prev)'] = df[col].shift(1)  # yesterday's value on today's row
400 |         df[col + ' (next)'] = df[col].shift(-1)  # tomorrow's value on today's row
401 | 
402 |     df = df.corr().replace(1, np.nan)
403 |     # Store lookback days that was used for filtering historic data to run correlation on
404 |     df['rolling_days'] = lookback_days
405 |     df.index.name = 'Metric'
406 | 
407 |     # df.to_sql('correlations', engine, if_exists='replace', index=True)
408 | 
409 |     app.session.remove()
410 |     return df
411 | 
412 | 
413 | def top_n_correlations(n, column, days=180):
414 |     df = generate_oura_correlations(lookback_days=days)
415 |     positive = df[column].nlargest(n).reset_index()
416 |     positive.columns = ['Positive', 'Pos Corr Coef.']
417 | 
418 |     negative = df[column].nsmallest(n).reset_index()
419 |     negative.columns = ['Negative', 'Neg Corr Coef.']
420 | 
421 |     return pd.merge(positive, negative, left_index=True, right_index=True)
422 | 
423 | 
424 | def pull_oura_data():
425 |     if oura_connected():
426 |         days_back = int(config.get('oura', 'days_back'))
427 | 
428 |         token_dict = current_token_dict()
429 |         oura = OuraClient(client_id=client_id, client_secret=client_secret, access_token=token_dict['access_token'],
430 |                           refresh_token=token_dict['refresh_token'], refresh_callback=save_oura_token)
431 |         df_readiness_summary = pull_readiness_data(oura, days_back)
432 | 
433 |         df_activity_summary, df_activity_samples = pull_activity_data(oura, days_back)
434 |         df_sleep_summary, df_sleep_samples = pull_sleep_data(oura, days_back)
435 | 
436 |         insert_readiness_data(df_readiness_summary, days_back)
437 |         insert_activity_data(df_activity_summary, df_activity_samples, days_back)
438 |         insert_sleep_data(df_sleep_summary, df_sleep_samples, days_back)
439 | 
440 |         # # Generate correlation table - Deprecated, no longer storing in table
441 |         # generate_oura_correlations(lookback_days=9999)
442 | 
443 |         return df_sleep_summary.index.max() == df_readiness_summary.index.max()  # == df_activity_summary.index.max()
444 | 
445 | # The Oura API returns times (bedtime_start, bedtime_end, etc.) in the timezone of the location where you went to sleep.
446 | # We do not need to convert to UTC, because we want the local time we went to sleep wherever that was, not necessarily EST.
447 | # The API also returns what the timezone was, so storing a naive datetime in MySQL is not an issue: we can always convert
448 | # back to UTC by adding [timezone] minutes to the stored datetime, and from there to anywhere else if necessary.
449 | 
--------------------------------------------------------------------------------
/src/fitly/api/sqlalchemy_declarative.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import Column, Integer, String, Boolean, DateTime, Date, Float, BigInteger, PickleType
2 | from .database import Base
3 | 
4 | 
5 | ##### Athlete Table #####
6 | 
7 | class athlete(Base):
8 |     __tablename__ = 'athlete'
9 |     athlete_id = Column('athlete_id', Integer(), index=True, primary_key=True, autoincrement=True)
10 |     name = Column('name', String(255))
11 |     birthday = Column('birthday', Date())
12 |     weight_lbs = Column('weight_lbs', Integer())
13 |     resting_hr = Column('resting_hr', Integer())
14 |     run_ftp = Column('run_ftp', Integer())
15 |     ride_ftp = Column('ride_ftp', Integer())
16 |     sex = Column('sex', String(1))
17 |     min_non_warmup_workout_time = Column('min_non_warmup_workout_time',
18 |                                          Integer(),
19 |                                          default=900)  # threshold in seconds for when we start counting workouts towards stress scores (don't want to include warm-ups)
20 |     weekly_tss_goal = Column('weekly_tss_goal', Integer(), default=150)
21 |     rr_max_goal = Column('rr_max_goal', Integer(),
22 |                          default=8)  # Max ramp rate threshold used for calculating injury risk
23 |     rr_min_goal = Column('rr_min_goal', Integer(),
24 |                          default=5)  # Min ramp rate threshold used for calculating injury risk
25 |     weekly_workout_goal = Column('weekly_workout_goal', Integer(), default=3)  # weekly workout minute goal
26 |     weekly_sleep_score_goal = Column('weekly_sleep_score_goal', Integer(),
27 |                                      default=3)  # Oura sleep scores >= 85 to achieve weekly
28 |     weekly_readiness_score_goal = Column('weekly_readiness_score_goal',
29 |                                          Integer(), default=3)  # Oura readiness scores >= 85 to achieve weekly
30 |     weekly_activity_score_goal = Column('weekly_activity_score_goal',
31 |                                         Integer(), default=3)  # Oura activity scores >= 85 to achieve weekly
32 |     daily_sleep_hr_target = Column('daily_sleep_hr_target', Integer(), default=8)  # Daily sleep hour target
33 |     ftp_test_notification_week_threshold = Column('ftp_test_notification_week_threshold',
34 |                                                   Integer(), default=6)  # Num weeks to retest ftp
35 |     cycle_power_zone_threshold_1 = Column('cycle_power_zone_threshold_1', Float(), default=.55)
36 |     cycle_power_zone_threshold_2 = Column('cycle_power_zone_threshold_2', Float(), default=.75)
37 |     cycle_power_zone_threshold_3 = Column('cycle_power_zone_threshold_3', Float(), default=.9)
38 |     cycle_power_zone_threshold_4 = Column('cycle_power_zone_threshold_4', Float(), default=1.05)
39 |     cycle_power_zone_threshold_5 = Column('cycle_power_zone_threshold_5', Float(), default=1.2)
40 |     cycle_power_zone_threshold_6 = Column('cycle_power_zone_threshold_6', Float(), default=1.5)
41 |     run_power_zone_threshold_1 = Column('run_power_zone_threshold_1', Float(), default=.8)
42 |     run_power_zone_threshold_2 = Column('run_power_zone_threshold_2', Float(), default=.9)
43 |     run_power_zone_threshold_3 = Column('run_power_zone_threshold_3', Float(), default=1)
44 |     run_power_zone_threshold_4 = Column('run_power_zone_threshold_4', Float(), default=1.15)
45 | 
hr_zone_threshold_1 = Column('hr_zone_threshold_1', Float(), default=.6) 46 | hr_zone_threshold_2 = Column('hr_zone_threshold_2', Float(), default=.7) 47 | hr_zone_threshold_3 = Column('hr_zone_threshold_3', Float(), default=.8) 48 | hr_zone_threshold_4 = Column('hr_zone_threshold_4', Float(), default=.9) 49 | pmc_switch_settings = Column('pmc_switch_settings', String(9999), 50 | default='{"ride_status": true, "run_status": true, "all_status": true, "power_status": true, "hr_status": true, "atl_status": false}') 51 | recovery_metric = Column('recovery_metric', String(10), default='readiness') 52 | peloton_auto_bookmark_ids = Column('peloton_auto_bookmark_ids', String(9999), default='{}') 53 | use_run_power = Column('use_run_power', Boolean, default=True) 54 | use_cycle_power = Column('use_cycle_power', Boolean, default=True) 55 | spotify_playlists_switch = Column('spotify_playlists_switch', Boolean, default=False) 56 | spotify_use_rec_intensity = Column('spotify_use_rec_intensity', Boolean(), default=True) 57 | spotify_time_period = Column('spotify_time_period', String(20), default='all') 58 | spotify_num_playlists = Column('spotify_num_playlists', Integer(), default=3) 59 | 60 | 61 | class workoutStepLog(Base): 62 | __tablename__ = 'workout_step_log' 63 | id = Column('id', Integer(), index=True, primary_key=True, autoincrement=True) 64 | athlete_id = Column('athlete_id', Integer()) 65 | date = Column('date', Date()) 66 | workout_step = Column('workout_step', Integer()) 67 | workout_step_desc = Column('workout_step_desc', String(20)) 68 | completed = Column('completed', Boolean, default=False) 69 | rationale = Column('rationale', String(255)) 70 | 71 | 72 | class annotations(Base): 73 | __tablename__ = 'annotations' 74 | id = Column('id', Integer(), index=True, primary_key=True, autoincrement=True) 75 | athlete_id = Column('athlete_id', Integer()) 76 | date = Column('date', Date()) 77 | annotation = Column('annotation', String(255)) 78 | 79 | 80 | ##### Strava Tables ##### 81 | 82 | class stravaSamples(Base): 83 | __tablename__ = 'strava_samples' 84 | timestamp_local = Column('timestamp_local', DateTime(), index=True, primary_key=True) 85 | timestamp_utc = Column('timestamp_utc', DateTime()) 86 | time_interval = Column('time_interval', DateTime()) 87 | activity_id = Column('activity_id', BigInteger()) 88 | date = Column('date', Date()) 89 | type = Column('type', String(255)) 90 | act_name = Column('act_name', String(255)) 91 | athlete_id = Column('athlete_id', BigInteger()) 92 | distance = Column('distance', Float()) 93 | velocity_smooth = Column('velocity_smooth', Float()) 94 | temp = Column('temp', Float()) 95 | altitude = Column('altitude', Float()) 96 | latitude = Column('latitude', Float()) 97 | longitude = Column('longitude', Float()) 98 | heartrate = Column('heartrate', Integer()) 99 | cadence = Column('cadence', Integer()) 100 | watts = Column('watts', Integer()) 101 | moving = Column('moving', Integer()) 102 | grade_smooth = Column('grade_smooth', Float()) 103 | ftp = Column('ftp', Float()) 104 | time = Column('time', Integer()) 105 | power_zone = Column('power_zone', Integer()) 106 | hr_zone = Column('hr_zone', Integer()) 107 | hr_lowest = Column('hr_lowest', Integer()) 108 | 109 | 110 | class stravaBestSamples(Base): 111 | __tablename__ = 'strava_best_samples' 112 | activity_id = Column('activity_id', BigInteger(), index=True, primary_key=True) 113 | interval = Column('interval', Integer, index=True, primary_key=True) 114 | mmp = Column('mmp', Float()) 115 | ftp = Column('ftp', 
Float()) 116 | watts_per_kg = Column('watts_per_kg', Float()) 117 | timestamp_local = Column('timestamp_local', DateTime()) 118 | timestamp_utc = Column('timestamp_utc', DateTime()) 119 | time_interval = Column('time_interval', DateTime()) 120 | type = Column('type', String(255)) 121 | date = Column('date', Date()) 122 | act_name = Column('act_name', String(255)) 123 | athlete_id = Column('athlete_id', BigInteger()) 124 | 125 | 126 | class stravaSummary(Base): 127 | __tablename__ = 'strava_summary' 128 | start_date_utc = Column('start_date_utc', DateTime(), index=True, primary_key=True) 129 | activity_id = Column('activity_id', BigInteger()) 130 | athlete_id = Column('athlete_id', BigInteger()) 131 | name = Column('name', String(255)) 132 | distance = Column('distance', Float()) 133 | moving_time = Column('moving_time', BigInteger()) 134 | elapsed_time = Column('elapsed_time', BigInteger()) 135 | total_elevation_gain = Column('total_elevation_gain', Integer()) 136 | type = Column('type', String(255)) 137 | start_date_local = Column('start_date_local', DateTime()) 138 | start_day_local = Column('start_day_local', Date()) 139 | timezone = Column('timezone', String(255)) 140 | start_lat = Column('start_lat', String(255)) 141 | start_lon = Column('start_lon', String(255)) 142 | end_lat = Column('end_lat', String(255)) 143 | end_lon = Column('end_lon', String(255)) 144 | location_city = Column('location_city', String(255)) 145 | location_state = Column('location_state', String(255)) 146 | location_country = Column('location_country', String(255)) 147 | average_speed = Column('average_speed', Float()) 148 | max_speed = Column('max_speed', Float()) 149 | average_watts = Column('average_watts', Float()) 150 | max_watts = Column('max_watts', Float()) 151 | average_heartrate = Column('average_heartrate', Float()) 152 | max_heartrate = Column('max_heartrate', Float()) 153 | calories = Column('calories', Float()) 154 | device_name = Column('device_name', String(255)) 155 | description = Column('description', String(255)) 156 | pr_count = Column('pr_count', Integer()) 157 | achievement_count = Column('achievement_count', Integer()) 158 | commute = Column('commute', Integer()) 159 | trainer = Column('trainer', Integer()) 160 | gear_id = Column('gear_id', String(255)) 161 | ftp = Column('ftp', Float()) 162 | weighted_average_power = Column('weighted_average_power', Float()) 163 | relative_intensity = Column('relative_intensity', Float()) 164 | efficiency_factor = Column('efficiency_factor', Float()) 165 | tss = Column('tss', Float()) 166 | hrss = Column('hrss', Float()) 167 | variability_index = Column('variability_index', Float()) 168 | trimp = Column('trimp', Float()) 169 | low_intensity_seconds = Column('low_intensity_seconds', Integer()) 170 | mod_intensity_seconds = Column('mod_intensity_seconds', Integer()) 171 | high_intensity_seconds = Column('high_intensity_seconds', Integer()) 172 | workout_intensity = Column('workout_intensity', String(4)) 173 | weight = Column('weight', Float()) 174 | 175 | 176 | class strydSummary(Base): 177 | __tablename__ = 'stryd_summary' 178 | start_date_local = Column('start_date_local', DateTime(), index=True, primary_key=True) 179 | strava_activity_id = Column('strava_activity_id', BigInteger()) 180 | stryd_ftp = Column('stryd_ftp', Float()) 181 | total_elevation_gain = Column('total_elevation_gain', Float()) 182 | total_elevation_loss = Column('total_elevation_loss', Float()) 183 | max_elevation = Column('max_elevation', Float()) 184 | min_elevation = 
Column('min_elevation', Float()) 185 | average_cadence = Column('average_cadence', Integer()) 186 | max_cadence = Column('max_cadence', Integer()) 187 | min_cadence = Column('min_cadence', Integer()) 188 | average_stride_length = Column('average_stride_length', Float()) 189 | max_stride_length = Column('max_stride_length', Float()) 190 | min_stride_length = Column('min_stride_length', Float()) 191 | average_ground_time = Column('average_ground_time', Float()) 192 | max_ground_time = Column('max_ground_time', Integer()) 193 | min_ground_time = Column('min_ground_time', Integer()) 194 | average_oscillation = Column('average_oscillation', Float()) 195 | max_oscillation = Column('max_oscillation', Float()) 196 | min_oscillation = Column('min_oscillation', Float()) 197 | average_leg_spring = Column('average_leg_spring', Float()) 198 | max_vertical_stiffness = Column('max_vertical_stiffness', Float()) 199 | rss = Column('rss', Float()) 200 | stryds = Column('stryds', Float()) 201 | elevation = Column('elevation', Float()) 202 | temperature = Column('temperature', Float()) 203 | humidity = Column('humidity', Integer()) 204 | windBearing = Column('windBearing', Integer()) 205 | windSpeed = Column('windSpeed', Float()) 206 | windGust = Column('windGust', Float()) 207 | dewPoint = Column('dewPoint', Float()) 208 | 209 | 210 | ##### Oura Tables ##### 211 | class ouraReadinessSummary(Base): 212 | __tablename__ = 'oura_readiness_summary' 213 | report_date = Column('report_date', Date(), index=True, primary_key=True) 214 | summary_date = Column('summary_date', Date()) 215 | score = Column('score', Integer()) 216 | period_id = Column('period_id', Integer()) 217 | score_activity_balance = Column('score_activity_balance', Integer()) 218 | score_previous_day = Column('score_previous_day', Integer()) 219 | score_previous_night = Column('score_previous_night', Integer()) 220 | score_recovery_index = Column('score_recovery_index', Integer()) 221 | score_resting_hr = Column('score_resting_hr', Integer()) 222 | score_sleep_balance = Column('score_sleep_balance', Integer()) 223 | score_temperature = Column('score_temperature', Integer()) 224 | score_hrv_balance = Column('score_hrv_balance', Integer()) 225 | rest_mode_state = Column('rest_mode_state', Integer()) 226 | 227 | 228 | class ouraActivitySummary(Base): 229 | __tablename__ = 'oura_activity_summary' 230 | summary_date = Column('summary_date', Date(), index=True, primary_key=True) 231 | average_met = Column('average_met', Float()) 232 | cal_active = Column('cal_active', Integer()) 233 | cal_total = Column('cal_total', Integer()) 234 | class_5min = Column('class_5min', String(300)) 235 | daily_movement = Column('daily_movement', Integer()) 236 | day_end_local = Column('day_end_local', DateTime()) 237 | day_start_local = Column('day_start_local', DateTime()) 238 | high = Column('high', Integer()) 239 | inactive = Column('inactive', Integer()) 240 | inactivity_alerts = Column('inactivity_alerts', Integer()) 241 | low = Column('low', Integer()) 242 | medium = Column('medium', Integer()) 243 | met_min_high = Column('met_min_high', Integer()) 244 | met_min_inactive = Column('met_min_inactive', Integer()) 245 | met_min_low = Column('met_min_low', Integer()) 246 | met_min_medium = Column('met_min_medium', Integer()) 247 | non_wear = Column('non_wear', Integer()) 248 | rest = Column('rest', Integer()) 249 | score = Column('score', Integer()) 250 | score_meet_daily_targets = Column('score_meet_daily_targets', Integer()) 251 | score_move_every_hour = 
Column('score_move_every_hour', Integer()) 252 | score_recovery_time = Column('score_recovery_time', Integer()) 253 | score_stay_active = Column('score_stay_active', Integer()) 254 | score_training_frequency = Column('score_training_frequency', Integer()) 255 | score_training_volume = Column('score_training_volume', Integer()) 256 | steps = Column('steps', Integer()) 257 | target_calories = Column('target_calories', Integer()) 258 | timezone = Column('timezone', Integer()) 259 | target_km = Column('target_km', Float()) 260 | target_miles = Column('target_miles', Float()) 261 | to_target_km = Column('to_target_km', Float()) 262 | to_target_miles = Column('to_target_miles', Float()) 263 | total = Column('total', Integer()) 264 | rest_mode_state = Column('rest_mode_state', Integer()) 265 | 266 | 267 | class ouraActivitySamples(Base): 268 | __tablename__ = 'oura_activity_samples' 269 | timestamp_local = Column('timestamp_local', DateTime(), index=True, primary_key=True) 270 | summary_date = Column('summary_date', Date()) 271 | met_1min = Column('met_1min', Float()) 272 | class_5min = Column('class_5min', Integer()) 273 | class_5min_desc = Column('class_5min_desc', String(10)) 274 | 275 | 276 | class ouraSleepSummary(Base): 277 | __tablename__ = 'oura_sleep_summary' 278 | report_date = Column('report_date', Date(), index=True, primary_key=True) 279 | summary_date = Column('summary_date', Date()) 280 | awake = Column('awake', Integer()) 281 | bedtime_end_local = Column('bedtime_end_local', DateTime()) 282 | bedtime_end_delta = Column('bedtime_end_delta', Integer()) 283 | bedtime_start_local = Column('bedtime_start_local', DateTime()) 284 | bedtime_start_delta = Column('bedtime_start_delta', Integer()) 285 | breath_average = Column('breath_average', Float()) 286 | deep = Column('deep', Integer()) 287 | duration = Column('duration', Integer()) 288 | efficiency = Column('efficiency', Integer()) 289 | hr_average = Column('hr_average', Float()) 290 | hr_lowest = Column('hr_lowest', Integer()) 291 | hypnogram_5min = Column('hypnogram_5min', String(255)) 292 | is_longest = Column('is_longest', Integer()) 293 | light = Column('light', Integer()) 294 | midpoint_at_delta = Column('midpoint_at_delta', Integer()) 295 | midpoint_time = Column('midpoint_time', Integer()) 296 | onset_latency = Column('onset_latency', Integer()) 297 | period_id = Column('period_id', Integer()) 298 | rem = Column('rem', Integer()) 299 | restless = Column('restless', Integer()) 300 | rmssd = Column('rmssd', Integer()) 301 | score = Column('score', Integer()) 302 | score_alignment = Column('score_alignment', Integer()) 303 | score_deep = Column('score_deep', Integer()) 304 | score_disturbances = Column('score_disturbances', Integer()) 305 | score_efficiency = Column('score_efficiency', Integer()) 306 | score_latency = Column('score_latency', Integer()) 307 | score_rem = Column('score_rem', Integer()) 308 | score_total = Column('score_total', Integer()) 309 | temperature_delta = Column('temperature_delta', Float()) 310 | temperature_deviation = Column('temperature_deviation', Float()) 311 | temperature_trend_deviation = Column('temperature_trend_deviation', Float()) 312 | timezone = Column('timezone', Integer()) 313 | total = Column('total', Integer()) 314 | 315 | 316 | class ouraSleepSamples(Base): 317 | __tablename__ = 'oura_sleep_samples' 318 | timestamp_local = Column('timestamp_local', DateTime(), index=True, primary_key=True) 319 | summary_date = Column('summary_date', Date()) 320 | report_date = Column('report_date', Date()) 
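# One row per 5-minute interval of a sleep period, built from the hr_5min,
# rmssd_5min and hypnogram_5min arrays in ouraAPI.pull_sleep_data()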
321 | rmssd_5min = Column('rmssd_5min', Integer()) 322 | hr_5min = Column('hr_5min', Integer()) 323 | hypnogram_5min = Column('hypnogram_5min', Integer()) 324 | hypnogram_5min_desc = Column('hypnogram_5min_desc', String(8)) 325 | 326 | 327 | # class correlations(Base): 328 | # __tablename__ = 'correlations' 329 | # Metric = Column('Metric', String(), index=True, primary_key=True) 330 | # 331 | # Average_METs_prev = Column('Average METs (prev)', Float()) 332 | # Average_METs = Column('Average METs', Float()) 333 | # Average_METs_next = Column('Average METs (next)', Float()) 334 | # 335 | # Activity_burn_prev = Column('Activity burn (prev)', Float()) 336 | # Activity_burn = Column('Activity burn', Float()) 337 | # Activity_burn_next = Column('Activity burn (next)', Float()) 338 | # 339 | # Total_burn_prev = Column('Total burn (prev)', Float()) 340 | # Total_burn = Column('Total burn', Float()) 341 | # Total_burn_next = Column('Total burn (next)', Float()) 342 | # 343 | # Walking_equivalent_prev = Column('Walking equivalent (prev)', Float()) 344 | # Walking_equivalent = Column('Walking equivalent', Float()) 345 | # Walking_equivalent_next = Column('Walking equivalent (next)', Float()) 346 | # 347 | # High_activity_time_prev = Column('High activity time (prev)', Float()) 348 | # High_activity_time = Column('High activity time', Float()) 349 | # High_activity_time_next = Column('High activity time (next)', Float()) 350 | # 351 | # Inactive_time_prev = Column('Inactive time (prev)', Float()) 352 | # Inactive_time = Column('Inactive time', Float()) 353 | # Inactive_time_next = Column('Inactive time (next)', Float()) 354 | # 355 | # Low_activity_time_prev = Column('Low activity time (prev)', Float()) 356 | # Low_activity_time = Column('Low activity time', Float()) 357 | # Low_activity_time_next = Column('Low activity time (next)', Float()) 358 | # 359 | # Med_activity_time_prev = Column('Med activity time (prev)', Float()) 360 | # Med_activity_time = Column('Med activity time', Float()) 361 | # Med_activity_time_next = Column('Med activity time (next)', Float()) 362 | # 363 | # Non_wear_time_prev = Column('Non-wear time (prev)', Float()) 364 | # Non_wear_time = Column('Non-wear time', Float()) 365 | # Non_wear_time_next = Column('Non-wear time (next)', Float()) 366 | # 367 | # Rest_time_prev = Column('Rest time (prev)', Float()) 368 | # Rest_time = Column('Rest time', Float()) 369 | # Rest_time_next = Column('Rest time (next)', Float()) 370 | # 371 | # Activity_score_prev = Column('Activity score (prev)', Float()) 372 | # Activity_score = Column('Activity score', Float()) 373 | # Activity_score_next = Column('Activity score (next)', Float()) 374 | # 375 | # Steps_prev = Column('Steps (prev)', Float()) 376 | # Steps = Column('Steps', Float()) 377 | # Steps_next = Column('Steps (next)', Float()) 378 | # 379 | # Total_activity_time_prev = Column('Total activity time (prev)', Float()) 380 | # Total_activity_time = Column('Total activity time', Float()) 381 | # Total_activity_time_next = Column('Total activity time (next)', Float()) 382 | # 383 | # Readiness_score_prev = Column('Readiness score (prev)', Float()) 384 | # Readiness_score = Column('Readiness score', Float()) 385 | # Readiness_score_next = Column('Readiness score (next)', Float()) 386 | # 387 | # Time_awake_in_bed_prev = Column('Time awake in bed (prev)', Float()) 388 | # Time_awake_in_bed = Column('Time awake in bed', Float()) 389 | # Time_awake_in_bed_next = Column('Time awake in bed (next)', Float()) 390 | # 391 | # Late_to_bedtime_prev 
= Column('Late to bedtime (prev)', Float()) 392 | # Late_to_bedtime = Column('Late to bedtime', Float()) 393 | # Late_to_bedtime_next = Column('Late to bedtime (next)', Float()) 394 | # 395 | # Respiratory_rate_prev = Column('Respiratory rate (prev)', Float()) 396 | # Respiratory_rate = Column('Respiratory rate', Float()) 397 | # Respiratory_rate_next = Column('Respiratory rate (next)', Float()) 398 | # 399 | # Deep_sleep_prev = Column('Deep sleep (prev)', Float()) 400 | # Deep_sleep = Column('Deep sleep', Float()) 401 | # Deep_sleep_next = Column('Deep sleep (next)', Float()) 402 | # 403 | # Time_in_bed_prev = Column('Time in bed (prev)', Float()) 404 | # Time_in_bed = Column('Time in bed', Float()) 405 | # Time_in_bed_next = Column('Time in bed (next)', Float()) 406 | # 407 | # Sleep_efficiency_prev = Column('Sleep efficiency (prev)', Float()) 408 | # Sleep_efficiency = Column('Sleep efficiency', Float()) 409 | # Sleep_efficiency_next = Column('Sleep efficiency (next)', Float()) 410 | # 411 | # Average_HR_prev = Column('Average HR (prev)', Float()) 412 | # Average_HR = Column('Average HR', Float()) 413 | # Average_HR_next = Column('Average HR (next)', Float()) 414 | # 415 | # Lowest_HR_prev = Column('Lowest HR (prev)', Float()) 416 | # Lowest_HR = Column('Lowest HR', Float()) 417 | # Lowest_HR_next = Column('Lowest HR (next)', Float()) 418 | # 419 | # Light_sleep_prev = Column('Light sleep (prev)', Float()) 420 | # Light_sleep = Column('Light sleep', Float()) 421 | # Light_sleep_next = Column('Light sleep (next)', Float()) 422 | # 423 | # Sleep_midpoint_prev = Column('Sleep midpoint (prev)', Float()) 424 | # Sleep_midpoint = Column('Sleep midpoint', Float()) 425 | # Sleep_midpoint_next = Column('Sleep midpoint (next)', Float()) 426 | # 427 | # Sleep_latency_prev = Column('Sleep latency (prev)', Float()) 428 | # Sleep_latency = Column('Sleep latency', Float()) 429 | # Sleep_latency_next = Column('Sleep latency (next)', Float()) 430 | # 431 | # REM_sleep_prev = Column('REM sleep (prev)', Float()) 432 | # REM_sleep = Column('REM sleep', Float()) 433 | # REM_sleep_next = Column('REM sleep (next)', Float()) 434 | # 435 | # Restlessness_prev = Column('Restlessness (prev)', Float()) 436 | # Restlessness = Column('Restlessness', Float()) 437 | # Restlessness_next = Column('Restlessness (next)', Float()) 438 | # 439 | # Average_HRV_prev = Column('Average HRV (prev)', Float()) 440 | # Average_HRV = Column('Average HRV', Float()) 441 | # Average_HRV_next = Column('Average HRV (next)', Float()) 442 | # 443 | # Sleep_score_prev = Column('Sleep score (prev)', Float()) 444 | # Sleep_score = Column('Sleep score', Float()) 445 | # Sleep_score_next = Column('Sleep score (next)', Float()) 446 | # 447 | # Temp_deviation_prev = Column('Temp. deviation (prev)', Float()) 448 | # Temp_deviation = Column('Temp. deviation', Float()) 449 | # Temp_deviation_next = Column('Temp. 
deviation (next)', Float()) 450 | # 451 | # Total_sleep_prev = Column('Total sleep (prev)', Float()) 452 | # Total_sleep = Column('Total sleep', Float()) 453 | # Total_sleep_next = Column('Total sleep (next)', Float()) 454 | 455 | 456 | class spotifyPlayHistory(Base): 457 | __tablename__ = 'spotify_play_history' 458 | timestamp_utc = Column('timestamp_utc', DateTime(), index=True, primary_key=True) 459 | track_id = Column('track_id', String(255)) 460 | track_name = Column('track_name', String(255)) 461 | track_url = Column('track_url', String(255)) 462 | explicit = Column('explicit', Boolean()) 463 | artist_id = Column('artist_id', String(255)) 464 | artist_name = Column('artist_name', String(255)) 465 | album_id = Column('album_id', String(255)) 466 | album_name = Column('album_name', String(255)) 467 | analysis_url = Column('analysis_url', String(255)) 468 | duration_ms = Column('duration_ms', Integer()) 469 | acousticness = Column('acousticness', Float()) 470 | danceability = Column('danceability', Float()) 471 | energy = Column('energy', Float()) 472 | instrumentalness = Column('instrumentalness', Float()) 473 | key = Column('key', Integer()) 474 | liveness = Column('liveness', Float()) 475 | loudness = Column('loudness', Float()) 476 | mode = Column('mode', Integer()) 477 | speechiness = Column('speechiness', Float()) 478 | tempo = Column('tempo', Float()) 479 | time_signature = Column('time_signature', Integer()) 480 | valence = Column('valence', Float()) 481 | percentage_listened = Column('percentage_listened', Float()) 482 | skipped = Column('skipped', Boolean()) 483 | rewound = Column('rewound', Boolean()) 484 | fast_forwarded = Column('fast_forwarded', Boolean()) 485 | secs_playing = Column('secs_playing', Integer()) 486 | secs_paused = Column('secs_paused', Integer()) 487 | 488 | 489 | class apiTokens(Base): 490 | __tablename__ = 'api_tokens' 491 | date_utc = Column('date_utc', DateTime(), index=True, primary_key=True) 492 | service = Column('service', String(255)) 493 | tokens = Column('tokens', PickleType) 494 | 495 | 496 | class dbRefreshStatus(Base): 497 | __tablename__ = 'db_refresh' 498 | timestamp_utc = Column('timestamp_utc', DateTime(), index=True, primary_key=True) 499 | refresh_method = Column('refresh_method', String(255)) 500 | truncate = Column('truncate', Boolean(), default=False) 501 | oura_status = Column('oura_status', String(255)) 502 | strava_status = Column('strava_status', String(255)) 503 | withings_status = Column('withings_status', String(255)) 504 | fitbod_status = Column('fitbod_status', String(255)) 505 | 506 | 507 | class withings(Base): 508 | __tablename__ = 'withings' 509 | date_utc = Column('date_utc', DateTime(), index=True, primary_key=True) 510 | weight = Column('weight', Float()) 511 | fat_ratio = Column('fat_ratio', Float()) 512 | hydration = Column('hydration', Float()) 513 | 514 | 515 | class fitbod(Base): 516 | __tablename__ = 'fitbod' 517 | id = Column('id', Integer(), index=True, primary_key=True, autoincrement=True) 518 | date_utc = Column('date_UTC', DateTime()) 519 | exercise = Column('Exercise', String(255)) 520 | reps = Column('Reps', Integer()) 521 | weight = Column('Weight', Integer()) 522 | duration = Column('Duration', Integer()) 523 | iswarmup = Column('isWarmup', Boolean()) 524 | note = Column('Note', String(255)) 525 | 526 | 527 | class fitbod_muscles(Base): 528 | __tablename__ = 'fitbod_muscles' 529 | exercise = Column('Exercise', String(255), index=True, primary_key=True) 530 | muscle = Column('Muscle', String(255)) 531 | 
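# Example (illustrative sketch only, not used by the app): querying one of these models
# with a plain SQLAlchemy session. The running app goes through the scoped session on
# app.session instead; the standalone names below are assumptions for the sketch.
#
#     from sqlalchemy.orm import sessionmaker
#     from .database import engine
#
#     Session = sessionmaker(bind=engine)
#     session = Session()
#     latest = (session.query(ouraSleepSummary)
#               .order_by(ouraSleepSummary.report_date.desc())
#               .first())
#     if latest is not None:
#         print(latest.report_date, latest.score, latest.rmssd)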
--------------------------------------------------------------------------------
/src/fitly/api/spotifyAPI.py:
--------------------------------------------------------------------------------
1 | import tekore as tk
2 | import pandas as pd
3 | import json
4 | from ..utils import config
5 | from ..app import app
6 | from ..api.database import engine
7 | from ..api.sqlalchemy_declarative import apiTokens, spotifyPlayHistory, stravaSummary
8 | from sqlalchemy import delete, func, extract
9 | from datetime import datetime, timedelta
10 | import ast
11 | import time
12 | import math
13 | from sklearn.preprocessing import StandardScaler
14 | from sklearn.cluster import KMeans
15 | from sklearn.linear_model import SGDClassifier
16 | from sklearn.model_selection import GridSearchCV
17 | from sklearn import metrics
18 | from sklearn.neighbors import KNeighborsClassifier
19 | from sklearn.model_selection import train_test_split, cross_val_score
20 | from sklearn.linear_model import LogisticRegression
21 | from sklearn.metrics import (accuracy_score, precision_score, recall_score,
22 |                              f1_score, classification_report, confusion_matrix,
23 |                              roc_auc_score, roc_curve, matthews_corrcoef)
24 | from sklearn.utils import resample
25 | from sklearn.ensemble import RandomForestClassifier
26 | from sklearn.neural_network import MLPClassifier
27 | import numpy as np
28 | import random
29 | import threading
30 | import queue
31 | import pickle
32 | 
33 | client_id = config.get('spotify', 'client_id')
34 | client_secret = config.get('spotify', 'client_secret')
35 | redirect_uri = config.get('spotify', 'redirect_uri')
36 | min_secs_listened = int(config.get('spotify', 'min_secs_listened'))
37 | skip_min_threshold = float(config.get('spotify', 'skip_min_threshold'))
38 | skip_max_threshold = float(config.get('spotify', 'skip_max_threshold'))
39 | poll_interval_seconds = float(config.get('spotify', 'poll_interval_seconds'))
40 | 
41 | # Main queue that stream will add playback feeds to
42 | q = queue.Queue(maxsize=0)
43 | current_state = None
44 | last_state = None
45 | playback_feed = []
46 | 
47 | 
48 | # Retrieve current tokens from db
49 | def current_token_dict():
50 |     try:
51 |         token_dict = app.session.query(apiTokens.tokens).filter(apiTokens.service == 'Spotify').first().tokens
52 |         token_pickle = pickle.loads(token_dict)
53 |         app.session.remove()
54 |     except BaseException as e:
55 |         app.server.logger.error(e)
56 |         token_pickle = {}
57 |     return token_pickle
58 | 
59 | 
60 | # Function for auto saving spotify token_dict to db
61 | def save_spotify_token(token_dict):
62 |     token_dict = {
63 |         'access_token': token_dict.access_token,
64 |         'expires_at': token_dict.expires_at,
65 |         'refresh_token': token_dict.refresh_token,
66 |         'token_type': token_dict.token_type
67 |     }
68 |     # Delete current key
69 |     app.server.logger.debug('Deleting current spotify tokens')
70 |     app.session.execute(delete(apiTokens).where(apiTokens.service == 'Spotify'))
71 |     # Insert new key
72 |     app.server.logger.debug('Inserting new spotify tokens')
73 |     app.session.add(apiTokens(date_utc=datetime.utcnow(), service='Spotify', tokens=pickle.dumps(token_dict)))
74 |     app.session.commit()
75 |     app.session.remove()
76 | 
77 | 
78 | def spotify_connected():
79 |     try:
80 |         client = get_spotify_client()
81 |         client.current_user_top_tracks(limit=10)  # simple call to verify the tokens actually work
82 |         app.server.logger.debug('Spotify connected')
83 |         return True
84 |     except BaseException as e:
85 |         app.server.logger.error('Spotify not connected')
86 |         app.server.logger.error(e)
87 |         return False
88 | 
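# Example (sketch) of the one-time tekore auth flow that seeds the tokens used above;
# the exact scope is an assumption -- include whatever playback scopes the app needs:
#
#     cred = tk.Credentials(client_id, client_secret, redirect_uri)
#     auth = tk.UserAuth(cred, scope=tk.scope.user_read_recently_played)
#     print(auth.url)  # open in a browser, authorize, copy the 'code' param back
#     token = auth.request_token(code=input('code: '), state=input('state: '))
#     save_spotify_token(token)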
89 | 
90 | ## Provide link for button on settings page
91 | def connect_spotify_link(auth_client):
92 |     return auth_client.url
93 | 
94 | 
95 | def get_spotify_client():
96 |     token_dict = current_token_dict()
97 |     if token_dict:
98 |         # If token has expired, refresh it
99 |         if int(time.time()) > token_dict['expires_at']:
100 |             app.server.logger.debug('Spotify tokens expired, refreshing...')
101 |             new_token = tk.Credentials(client_id=client_id, client_secret=client_secret,
102 |                                        redirect_uri=redirect_uri).refresh_user_token(token_dict['refresh_token'])
103 |             # Save to DB
104 |             save_spotify_token(new_token)
105 |             # Query the new tokens into a dict
106 |             token_dict = current_token_dict()
107 | 
108 |         client = tk.Spotify(token_dict['access_token'])
109 |     else:
110 |         client = tk.Spotify()
111 | 
112 |     return client
113 | 
114 | 
115 | # def save_spotify_play_history():
116 | ### Replaced with live stream() function
117 | #     '''
118 | #     Function that would be polled every refresh to populate the spotify_play_history table
119 | #     :return:
120 | #     '''
121 | #     if spotify_connected():
122 | #         app.server.logger.info('Pulling spotify play history...')
123 | #         spotify = get_spotify_client()
124 | #         # Get latest tracks
125 | #         tracks = spotify.playback_recently_played(limit=50).items
126 | #         # Get features of all the tracks to join in with track information
127 | #         track_features = pd.DataFrame(json.loads(spotify.tracks_audio_features([x.track.id for x in tracks]).json()))
128 | #         # Get track information
129 | #         track_information = []
130 | #         for x in tracks:
131 | #             x = json.loads(x.track.json())
132 | #             if x['type'] == 'track':  # Only add tracks (no podcasts, etc.)
133 | #                 track_information.append({
134 | #                     "timestamp_utc": datetime.utcfromtimestamp(float(x.timestamp_utc) / 1000),
135 | #                     "track_id": x["id"],
136 | #                     "track_name": x["name"],
137 | #                     "explicit": x["explicit"],
138 | #                     "artist_id": ', '.join([y["id"] for y in x["artists"]]),
139 | #                     "artist_name": ', '.join([y["name"] for y in x["artists"]]),
140 | #                     # URLs do not need to be stored, can be generated with
141 | #                     # # https://open.spotify.com/track/
142 | #                     # https://open.spotify.com/artist/
143 | #                     # https://open.spotify.com/album/
144 | #                     "album_id": x["album"]["id"],
145 | #                     "album_name": x["album"]["name"]
146 | #                 })
147 | #
148 | #         # Convert trackinfo into df
149 | #         track_info_df = pd.DataFrame(track_information)
150 | #         # Merge trackinfo with track features
151 | #         track_table = pd.merge(track_info_df, track_features, how='left', left_on='track_id', right_on='id').set_index(
152 | #             'timestamp_utc')
153 | #         track_table.drop_duplicates(inplace=True)
154 | #         track_table = track_table.drop(columns=['id', 'type', 'uri', 'track_href'])
155 | #
156 | #         # Filter to only new records and insert into DB
157 | #         latest = app.session.query(func.max(spotifyPlayHistory.timestamp_utc)).first()[0]
158 | #         app.session.remove()
159 | #         if latest:
160 | #             track_table = track_table[track_table.index > latest]
161 | #
162 | #         if len(track_table) > 0:
163 | #             app.server.logger.debug(f'{len(track_table)} new songs found!')
164 | #             track_table.to_sql('spotify_play_history', engine, if_exists='append', index=True)
165 | 
166 | 
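# --- Illustrative sketch (not part of the original module): the interval-match pattern.
# get_played_tracks() below cross-joins every play against every workout on a dummy
# key, then keeps rows whose timestamp falls inside the workout window. The same idea
# on made-up data:
#
#   plays = pd.DataFrame({'timestamp_utc': pd.to_datetime(['2021-01-01 10:05', '2021-01-01 18:00'])})
#   workouts = pd.DataFrame({'start_date_utc': pd.to_datetime(['2021-01-01 10:00']),
#                            'end_date_utc': pd.to_datetime(['2021-01-01 11:00'])})
#   merged = plays.assign(join_key=1).merge(workouts.assign(join_key=1), on='join_key')
#   merged = merged.query('timestamp_utc >= start_date_utc and timestamp_utc <= end_date_utc')
#   # only the 10:05 play survives; the 18:00 play happened outside any workout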
167 | def get_played_tracks(workout_intensity='all', sport='all', pop_time_period='all'):
168 |     '''
169 |     :param workout_intensity: (Optional) filters the spotify tracks by the intensity of the workout that was done
170 |     :param sport: (Optional) filters the spotify tracks by workout type
171 |     :param pop_time_period: (Optional) 'all', 'ytd', 'l90d', 'l6w' or 'l30d'; non-'all' windows are split into 'Current' and 'Previous' halves for period-over-period comparison
172 |     :return: df of spotify tracks that were played during a workout
173 |     '''
174 |     # Query tracks
175 |     if pop_time_period == 'all':
176 |         df_tracks = pd.read_sql(sql=app.session.query(spotifyPlayHistory).statement, con=engine)
177 |         df_tracks['Period'] = 'Current'
178 | 
179 |     elif pop_time_period == 'ytd':
180 |         df_tracks = pd.read_sql(sql=app.session.query(spotifyPlayHistory).filter(
181 |             extract('year', spotifyPlayHistory.timestamp_utc) >= (datetime.utcnow().year - 1)).statement, con=engine)
182 | 
183 |         df_tracks['Period'] = 'Current'
184 |         df_tracks.loc[df_tracks['timestamp_utc'].dt.year == (datetime.utcnow().date().year - 1), 'Period'] = 'Previous'
185 | 
186 |     elif pop_time_period in ['l90d', 'l6w', 'l30d']:
187 |         days = {'l90d': 180, 'l6w': 84, 'l30d': 60}  # each window is doubled so the older half can be tagged 'Previous'
188 |         df_tracks = pd.read_sql(sql=app.session.query(spotifyPlayHistory).filter(
189 |             spotifyPlayHistory.timestamp_utc >= (
190 |                     datetime.utcnow().date() - timedelta(days=days[pop_time_period]))).statement,
191 |             con=engine)
192 |         df_tracks['Period'] = 'Current'
193 |         df_tracks.loc[
194 |             df_tracks['timestamp_utc'].dt.date <= (
195 |                     datetime.utcnow().date() - timedelta(days=days[pop_time_period] / 2)), 'Period'] = 'Previous'
196 | 
197 |     # Query workouts
198 |     df_summary = pd.read_sql(
199 |         sql=app.session.query(stravaSummary.start_date_utc, stravaSummary.activity_id, stravaSummary.name,
200 |                               stravaSummary.elapsed_time, stravaSummary.type,
201 |                               stravaSummary.workout_intensity).statement, con=engine)
202 |     df_summary['end_date_utc'] = df_summary['start_date_utc'] + pd.to_timedelta(df_summary['elapsed_time'], 's')
203 |     df_summary.drop(columns=['elapsed_time'], inplace=True)
204 | 
205 |     # Full Cross Join
206 |     df_tracks = df_tracks.assign(join_key=1)
207 |     df_summary = df_summary.assign(join_key=1)
208 |     df_merge = pd.merge(df_summary, df_tracks, on='join_key').drop('join_key', axis=1)
209 |     # Filter only on tracks performed during workout times
210 |     df_merge = df_merge.query('timestamp_utc >= start_date_utc and timestamp_utc <= end_date_utc')
211 |     # Join back to the original track list and drop the cross-join key column
212 |     df = df_tracks.merge(df_merge, on=['timestamp_utc'], how='left').fillna('').drop('join_key', axis=1)
213 |     # Tracks with no matching workout are rest-day listens
214 |     df.loc[df['start_date_utc'] == '', 'workout_intensity'] = 'rest'
215 |     # Clean up the resulting df
216 |     df = df[[c for c in df.columns if '_y' not in c]]
217 |     df.columns = [c.replace('_x', '') for c in df.columns]
218 |     df = df.rename(columns={'type': 'workout_type', 'name': 'workout_name'})
219 |     # Filter on workout intensity/rest day
220 |     if workout_intensity == 'workout':
221 |         df = df[df['workout_intensity'] != 'rest']
222 |     elif workout_intensity != 'all':
223 |         df = df[df['workout_intensity'] == workout_intensity]
224 |     # Filter on workout type
225 |     if sport != 'all':
226 |         df = df[df['workout_type'] == sport]
227 | 
228 |     df.drop(columns=['start_date_utc', 'end_date_utc'], inplace=True)
229 | 
230 |     df.set_index('timestamp_utc', inplace=True)
231 | 
232 |     return df
233 | 
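# --- Illustrative usage (hypothetical arguments, not part of the original module).
# Note that 'l90d' actually reads 180 days of history so the older half can be
# labeled 'Previous':
#
#   df = get_played_tracks(workout_intensity='workout', sport='Run', pop_time_period='l90d')
#   df = df[df['Period'] == 'Current']  # keep only the most recent 90 days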
234 | 
235 | ### Predictive Modeling ###
236 | def generate_recommendation_playlists(workout_intensity='all', sport='all', normalize=True,
237 |                                       num_clusters=8,  # TODO: Create dynamic approach to calculating the best num_clusters
238 |                                       num_playlists=3, time_period='l90d'):
239 |     '''
240 |     KMeans to cluster types of music detected in history
241 |     Filter music in each cluster that was 'liked' (not skipped per thresholds in config.ini)
242 |     Uses the resulting tracks as seeds for Spotify's recommendations API
243 | 
244 |     :param workout_intensity: specify intensity for querying seeds
245 |     :param sport: specify workout type for querying seeds
246 |     :param normalize: boolean for normalizing audio features
247 |     :param num_clusters: number of clusters K-Means will use
248 |     :param num_playlists: number of spotify playlists to be generated
249 |     :param time_period: time period for querying seeds
250 |     :return: None; generates spotify playlists
251 |     '''
252 |     # Inspired by http://ben-tanen.com/notebooks/kmeans-music.html
253 | 
254 |     # Clear all Fitly playlists
255 |     spotify = get_spotify_client()
256 |     user_id = spotify.current_user().id
257 | 
258 |     playlists = {}
259 |     for x in spotify.playlists(user_id).items:
260 |         playlists[x.name] = x.id
261 |         if 'Fitly' in x.name:
262 |             spotify.playlist_clear(x.id)
263 | 
264 |     # Query tracks to use as seeds for generating recommendations
265 |     df = get_played_tracks(workout_intensity=workout_intensity, sport=sport, pop_time_period=time_period).reset_index()
266 | 
267 |     # Filter on correct dates
268 |     df = df[df['Period'] == 'Current'].reset_index(drop=True)  # reset the index so cluster labels align positionally below
269 | 
270 |     if len(df) >= num_clusters:
271 | 
272 |         _audiofeat_df = df[['track_id', 'time_signature', 'duration_ms', 'acousticness', 'danceability',
273 |                             'energy', 'instrumentalness', 'key', 'liveness', 'loudness', 'mode', 'speechiness',
274 |                             'tempo', 'valence']]
275 | 
276 |         # scale audio features (if desired)
277 |         if normalize:
278 |             scaler = StandardScaler()
279 |             audiofeat = scaler.fit_transform(_audiofeat_df.drop(['track_id'], axis=1))
280 |             audiofeat_df = pd.DataFrame(audiofeat, columns=_audiofeat_df.drop('track_id', axis=1).columns)
281 |             audiofeat_df['track_id'] = _audiofeat_df['track_id']
282 |         else:
283 |             audiofeat_df = _audiofeat_df.copy()  # copy so cluster labels are not written onto a slice of df
284 | 
285 |         # Run the K-Means to cluster all tracks
286 |         kmeans = KMeans(n_clusters=num_clusters).fit(audiofeat_df.drop(['track_id'], axis=1))
287 |         audiofeat_df['cluster'] = kmeans.labels_ + 1
288 | 
289 |         # Join cluster back to main track df
290 |         df = df.merge(audiofeat_df[['track_id', 'cluster']], how='left', on='track_id')
291 | 
292 |         # drop clusters that don't have both likes and dislikes
293 |         df = df[df['cluster'].isin(
294 |             df.groupby('cluster').filter(lambda x: x['skipped'].nunique() == 2)['cluster'].unique().tolist())]
295 | 
296 |         # Start with largest cluster and work through all until num_playlists have been generated
297 |         clusters_by_size = df.groupby('cluster').size().sort_values(ascending=False).index.tolist()
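        # --- Illustrative sketch (hypothetical helper, not part of the app): the TODO in the
        # signature above asks for a dynamic num_clusters. One common approach, assuming the
        # silhouette score is an acceptable criterion here, is to score a range of k values
        # and keep the best:
        #
        #   from sklearn.metrics import silhouette_score
        #
        #   def best_k(features, k_min=2, k_max=12):
        #       scores = {}
        #       for k in range(k_min, min(k_max, len(features) - 1) + 1):
        #           labels = KMeans(n_clusters=k).fit_predict(features)
        #           scores[k] = silhouette_score(features, labels)  # higher = tighter, better-separated clusters
        #       return max(scores, key=scores.get)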
298 |         # Create the playlists!
299 |         playlist_number = 1
300 |         for cluster in clusters_by_size:
301 |             if playlist_number <= num_playlists:
302 | 
303 |                 # Keep looping through the cluster, passing different seed tracks into the spotify recommendation api
304 |                 # Once enough tracks received from spotify pass our prediction model, move to next cluster
305 |                 # We want at least 50 tracks in each playlist
306 |                 # If we hit the recommendation api 20 times and still don't have enough good results, move on so as not to overload the spotify api
307 |                 attempts = 0
308 |                 predict_df = pd.DataFrame()
309 |                 # Only choose tracks that were 'liked' in the cluster for seeds
310 |                 track_uris = df[(df['cluster'] == cluster) & (df['skipped'] == 0)]['track_id'].unique().tolist()
311 |                 # artist_uris = df[df['cluster'] == cluster]['artist_id'].unique().tolist()
312 |                 while len(predict_df) < 50 and attempts < 20:
313 |                     # Get up to 5 random tracks from the cluster to use as seeds for recommendation (spotify api can only take up to 5 seeds)
314 |                     seed_tracks = random.sample(track_uris, min(5, len(track_uris)))
315 |                     # seed_artists = random.sample(artist_uris, min(5, len(artist_uris)))
316 | 
317 |                     # Get recommendations from spotify
318 |                     recommendations = spotify.recommendations(track_ids=seed_tracks, limit=50).tracks
319 |                     # recommendations = spotify.recommendations(artist_ids=artist_uris, limit=50).tracks
320 | 
321 |                     # Predict if you will like the songs spotify recommends, and if true add them to playlist
322 |                     try:
323 |                         rec_track_features = pd.DataFrame(
324 |                             json.loads(spotify.tracks_audio_features([x.id for x in recommendations]).json()))
325 |                         # Use all tracks in the same cluster (likes/dislikes) as train data set
326 |                         _predict_df = predict_songs(df_tracks=rec_track_features, df_train=df[df['cluster'] == cluster])
327 |                         if len(_predict_df) > 0:
328 |                             # Only take predictions that are positive
329 |                             _predict_df = _predict_df[_predict_df['predictions'] == 1]
330 |                             # add to predict dataframe and repeat loop until > 50 tracks to insert into the playlist
331 |                             predict_df = pd.concat([predict_df, _predict_df]).drop_duplicates()
332 |                     except Exception as e:
333 |                         app.server.logger.error(f'ERROR Creating Playlist: {e}')
334 | 
335 |                     # time.sleep(1)  # Avoid spotify api limit
336 |                     attempts += 1
337 | 
338 |                 if len(predict_df) > 0:
339 |                     # Grab playlist id if it already exists otherwise create the playlist
340 |                     playlist_id = playlists.get(f'Fitly Playlist {playlist_number}')
341 |                     if not playlist_id:
342 |                         playlist_id = spotify.playlist_create(user_id=user_id, name=f'Fitly Playlist {playlist_number}',
343 |                                                               public=False).id
344 |                     predict_df['track_uri'] = 'spotify:track:' + predict_df['id']
345 |                     # Add recommended tracks to the playlist
346 |                     app.server.logger.debug(f'Refreshing Fitly Playlist {playlist_number}...')
347 |                     spotify.playlist_add(playlist_id=playlist_id, uris=predict_df['track_uri'].tolist())
348 |                     app.server.logger.debug(f'Fitly Playlist {playlist_number} refreshed')
349 |                     playlist_number += 1
350 |                 else:
351 |                     continue
352 | 
353 |     else:
354 |         app.server.logger.debug(
355 |             f'Not enough tracks found for "{workout_intensity}" intensity and "{sport}" workout types to generate playlist recommendations. Skipping playlist generation.')
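# --- Illustrative sketch (synthetic data, not part of the app): exercising PModel
# (defined below) outside the Spotify pipeline. It fits four classifiers and keeps
# whichever scores the highest ROC AUC on the held-out split:
#
#   from sklearn.datasets import make_classification
#   X, y = make_classification(n_samples=200, n_features=5, random_state=0)
#   X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)
#   m = PModel(X_tr, X_te, y_tr, y_te)
#   m.get_best_model()
#   print(m.best_model_name, m.best_model_score)  # winning model key and its ROC AUC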
356 | 
357 | 
358 | #### Models to predict if you will actually like the recommended songs ####
359 | 
360 | class PModel(object):
361 |     def __init__(self, X_train, X_test, y_train, y_test):
362 |         self.X_train = X_train
363 |         self.X_test = X_test
364 |         self.y_train = y_train
365 |         self.y_test = y_test
366 | 
367 |     def get_best_model(self):
368 |         # Run all models and keep the best one
369 |         self.model_scores = {}
370 |         self.best_model = None
371 |         self.best_model_name = None
372 |         try:
373 |             self.knn()
374 |             self.model_scores['knn'] = self.knn_roc
375 |         except Exception as e:
376 |             app.server.logger.error('Error running kNN model: {}'.format(e))
377 |         try:
378 |             self.logreg()
379 |             self.model_scores['logreg'] = self.logreg_roc
380 |         except Exception as e:
381 |             app.server.logger.error('Error running logreg model: {}'.format(e))
382 |         try:
383 |             self.rf()
384 |             self.model_scores['rf'] = self.rf_roc
385 |         except Exception as e:
386 |             app.server.logger.error('Error running rf model: {}'.format(e))
387 |         try:
388 |             self.mlp()
389 |             self.model_scores['mlp'] = self.mlp_roc
390 |         except Exception as e:
391 |             app.server.logger.error('Error running mlp model: {}'.format(e))
392 | 
393 |         if len(self.model_scores) > 0:
394 |             self.best_model_name = max(self.model_scores, key=self.model_scores.get)  # highest ROC AUC wins
395 | 
396 |             if self.best_model_name == 'knn':
397 |                 self.best_model = self.knn
398 |                 self.best_model_score = self.knn_roc
399 | 
400 |             elif self.best_model_name == 'logreg':
401 |                 self.best_model = self.logreg
402 |                 self.best_model_score = self.logreg_roc
403 | 
404 |             elif self.best_model_name == 'rf':
405 |                 self.best_model = self.rf
406 |                 self.best_model_score = self.rf_roc
407 | 
408 |             elif self.best_model_name == 'mlp':
409 |                 self.best_model = self.mlp
410 |                 self.best_model_score = self.mlp_roc
411 | 
412 |     def knn(self):
413 |         self.knn = KNeighborsClassifier()  # note: this rebinds self.knn from the method to the fitted estimator
414 |         self.knn.fit(self.X_train, self.y_train)
415 |         y_pred_knn = self.knn.predict(self.X_test)
416 |         y_pred_prob_knn = self.knn.predict_proba(self.X_test)[:, 1]
417 |         self.knn_roc = roc_auc_score(self.y_test, y_pred_prob_knn)
418 | 
419 |     def logreg(self):
420 |         """
421 |         Implements Logistic Regression algorithm
422 |         """
423 |         self.logreg = LogisticRegression()
424 |         self.logreg.fit(self.X_train, self.y_train)
425 |         y_pred_logreg = self.logreg.predict(self.X_test)
426 |         y_pred_prob_logreg = self.logreg.predict_proba(self.X_test)[:, 1]
427 |         self.logreg_roc = roc_auc_score(self.y_test, y_pred_prob_logreg)
428 | 
429 |     def rf(self):
430 |         """
431 |         Implements Random Forest algorithm
432 |         """
433 |         self.rf = RandomForestClassifier()
434 |         self.rf.fit(self.X_train, self.y_train)
435 |         y_pred_rf = self.rf.predict(self.X_test)
436 |         y_pred_prob_rf = self.rf.predict_proba(self.X_test)[:, 1]
437 |         self.rf_roc = roc_auc_score(self.y_test, y_pred_prob_rf)
438 | 
439 |     def mlp(self):
440 |         """
441 |         Implements Multilayer Perceptron (Neural Net) algorithm
442 |         """
443 |         self.mlp = MLPClassifier()
444 |         self.mlp.fit(self.X_train, self.y_train)
445 |         y_pred_mlp = self.mlp.predict(self.X_test)
446 |         y_pred_prob_mlp = self.mlp.predict_proba(self.X_test)[:, 1]
447 |         self.mlp_roc = roc_auc_score(self.y_test, y_pred_prob_mlp)
448 | 
449 | 
450 | def predict_songs(df_tracks, df_train):
451 |     '''
452 |     Queries spotify_play_history to train models on what songs you 'like',
453 |     then predicts whether the user will like the songs passed in through 'df_tracks'
454 | 
455 |     :param df_tracks: dataframe of tracks to do predictions on
456 |     :param df_train: dataframe of tracks to train model on
457 |     :return: df_tracks with 'predictions'
458 |     '''
459 |     run_time = datetime.utcnow()
460 |     df_train = df_train.copy()  # work on a copy so the caller's dataframe is not mutated
461 |     # Preprocess data to get a "target" column. If song is not skipped (within thresholds), 'liked'
462 |     # df_train['explicit'] = df_train['explicit'].apply(lambda x: 1 if x == True else 0)
463 |     df_train['target'] = 0
464 |     df_train.loc[df_train['skipped'] == 0, 'target'] = 1
465 | 
466 |     # Separate data into feature (X) and label (y) sets
467 |     X = df_train[['energy', 'liveness', 'tempo', 'speechiness', 'acousticness', 'instrumentalness', 'time_signature',
468 |                   'danceability', 'key', 'duration_ms', 'loudness', 'valence', 'mode',
469 |                   # 'explicit'
470 |                   ]]
471 |     y = df_train[['target']]
472 | 
473 |     # Split data into training and test dataset
474 |     X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
475 | 
476 |     # Reshape dimensions of labels (y)
477 |     y_train = np.ravel(y_train)
478 |     y_test = np.ravel(y_test)
479 | 
480 |     # print(len(like_final))
481 |     # print(len(dislike_final))
482 |     # print(X_train.shape)
483 |     # print(X_test.shape)
484 | 
485 |     # Create a model object
486 |     model = PModel(X_train, X_test, y_train, y_test)
487 |     # Run all models and set best one
488 |     model.get_best_model()
489 | 
490 |     if model.best_model is not None:
491 |         app.server.logger.info('Predicting songs with the {} model ({} ROC AUC)'.format(model.best_model_name,
492 |                                                                                         model.best_model_score))
493 |         # Make predictions on df of songs
494 |         df_tracks['predictions'] = df_tracks[[
495 |             'energy', 'liveness', 'tempo', 'speechiness', 'acousticness', 'instrumentalness', 'time_signature',
496 |             'danceability', 'key', 'duration_ms', 'loudness', 'valence', 'mode',
497 |             # 'explicit'
498 |         ]].apply(
499 |             lambda x: model.best_model.predict([x])[0], axis=1)
500 | 
501 |         df_tracks['model_name'] = model.best_model_name
502 |         df_tracks['model_score'] = model.best_model_score
503 |         df_tracks['model_run_date'] = run_time
504 | 
505 |         return df_tracks
506 | 
507 |     else:
508 |         app.server.logger.info('Could not run prediction with any models')
509 |         return pd.DataFrame()
510 | 
511 | 
512 | ### Spotify Stream ###
513 | def stream():
514 |     '''
515 |     Captures live activity of the spotify web player and adds a list of states (playback feed) to the queue after each song finishes
516 |     Intended to be called once every poll_interval_seconds by the scheduled data refresh
517 |     :return: None
518 |     '''
519 |     global current_state
520 |     global last_state
521 |     global playback_feed
522 |     tekoreClient = get_spotify_client()
523 |     # current_state = tekoreClient.playback()
524 | 
525 |     try:
526 |         # Do not overwrite last_state until next state that is pulled below is not none
527 |         if hasattr(current_state, 'item'):
528 |             last_state = current_state
529 | 
530 |         current_state = tekoreClient.playback_currently_playing()
531 | 
532 |         if hasattr(current_state, 'item'):
533 |             try:
534 |                 last_song = last_state.item.id
535 |             except Exception:  # last_state is None on the first poll of a session
536 |                 last_song = 'No Stream Detected'
537 |             current_song = current_state.item.id
538 | 
539 |             # If song changed add feed to queue for parsing, otherwise continue appending feed
540 |             # If song is on repeat will show as 'rewind'
541 |             if last_song != current_song and len(playback_feed) > 0:
542 |                 # app.server.logger.debug(
543 |                 #     'Song changed from "{}" to "{}"'.format(last_state.item.name, current_state.item.name))
544 |                 # Add to queue for parsing
545 |                 q.put(playback_feed)
546 |                 # Clear out feed for next track
547 |                 playback_feed = []
548 |             elif current_state.item.type == 'track':
549 |                 playback_feed.append(current_state)
550 | 
551 |             # Each feed entry represents one poll taken poll_interval_seconds apart. If the song has not
552 |             # changed for longer than 5x its length, the listening session is probably over. Clear out current/last state
553 |             if len(playback_feed) * poll_interval_seconds > 5 * (current_state.item.duration_ms / 1000):
554 |                 current_state, last_state = None, None
555 |     except Exception as e:
556 |         app.server.logger.error(f'Error with spotify stream: {e}')
557 | 
558 | 
559 | class Parser(threading.Thread):
560 |     def __init__(self, queue):
561 |         app.server.logger.debug('Parser thread started')
562 |         threading.Thread.__init__(self)
563 |         self.q = queue
564 | 
565 |     def run(self):
566 |         while True:
567 |             playback_feed = self.q.get()
568 |             # Parse feed
569 |             parse_stream(playback_feed)
570 |             # Mark task complete
571 |             self.q.task_done()
572 | 
573 | 
574 | # Thread for parsing into db while stream is running
575 | parser = Parser(queue=q)
576 | parser.daemon = True
577 | parser.start()
578 | 
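# --- Illustrative sketch (hypothetical driver, not part of this module): stream() only
# takes one playback snapshot per call, so something must invoke it on a
# poll_interval_seconds cadence (in this app that is assumed to happen in the scheduled
# data-refresh job):
#
#   def run_stream_forever():
#       while True:
#           stream()                        # snapshot playback; queue finished songs
#           time.sleep(poll_interval_seconds)
#
#   threading.Thread(target=run_stream_forever, daemon=True).start()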
579 | 
580 | def parse_stream(playback_feed):
581 |     # Check that song was listened to for longer than threshold
582 |     secs_playing, secs_paused = 0, 0
583 |     for x in playback_feed:
584 |         if x.is_playing:
585 |             secs_playing += 1
586 |         else:
587 |             secs_paused += 1
588 | 
589 |     secs_playing *= poll_interval_seconds
590 |     secs_paused *= poll_interval_seconds
591 | 
592 |     # Check if song was listened to for longer than config threshold
593 |     if int(secs_playing) >= int(min_secs_listened):
594 |         track_name = playback_feed[0].item.name
595 |         app.server.logger.info(
596 |             '"{}" listened to for longer than {} seconds. Parsing stream...'.format(track_name, min_secs_listened))
597 | 
598 |         # Was song skipped?
599 |         track_last_state = playback_feed[-1]
600 |         progress = track_last_state.progress_ms
601 |         duration = track_last_state.item.duration_ms
602 | 
603 |         # This uses the true amount of time the song was playing. It may not be 100% accurate, since some poll requests don't go through.
604 |         percentage_listened = math.ceil(
605 |             (secs_playing / (duration / 1000)) * 100) / 100
606 | 
607 |         # percentage_listened = round(progress / duration, 2)  # this uses whenever the song ended
608 | 
609 |         # If song 'finished' because of crossfade, mark as 100% listened
610 |         # Spotify max crossfade is 12 seconds, so assume it is set to the max
611 |         if percentage_listened >= .9 and ((duration - progress) / 1000) <= 12:
612 |             percentage_listened = 1
613 |         # Song 'skipped' if only actually listened to for 5% - 80% (or overridden config values) of its total duration
614 |         skipped = (skip_min_threshold <= percentage_listened <= skip_max_threshold)
615 | 
616 |         # Was song rewound?
617 |         rewound = False
618 |         old_progress = 0
619 |         for state in playback_feed:
620 |             new_progress = state.progress_ms
621 |             if new_progress < old_progress:
622 |                 rewound = True
623 |                 break
624 |             else:
625 |                 old_progress = new_progress
626 | 
627 |         # Was song fast forwarded? (Skipped forward more than 3 seconds)
628 |         fast_forwarded = False
629 |         old_progress = 0
630 |         for state in playback_feed:
631 |             new_progress = state.progress_ms
632 |             if new_progress / 1000 >= (old_progress / 1000) + 3:  # assumes the poll interval is well under 3 seconds
633 |                 fast_forwarded = True
634 |                 break
635 |             else:
636 |                 old_progress = new_progress
637 | 
638 |         track_info_df = pd.DataFrame([{
639 |             "timestamp_utc": datetime.utcfromtimestamp(float(playback_feed[0].timestamp) / 1000),
640 |             "track_id": playback_feed[0].item.id,
641 |             "track_url": playback_feed[0].item.href,
642 |             "track_name": track_name,
643 |             "explicit": playback_feed[0].item.explicit,
644 |             "artist_id": ', '.join([y.id for y in playback_feed[0].item.artists]),
645 |             "artist_name": ', '.join([y.name for y in playback_feed[0].item.artists]),
646 |             # URLs do not need to be stored, can be generated with
647 |             # # https://open.spotify.com/track/
648 |             # https://open.spotify.com/artist/
649 |             # https://open.spotify.com/album/
650 |             "album_id": playback_feed[0].item.album.id,
651 |             "album_name": playback_feed[0].item.album.name,
652 |             "duration_ms": playback_feed[0].item.duration_ms,
653 |             "percentage_listened": percentage_listened,
654 |             "skipped": skipped,
655 |             "rewound": rewound,
656 |             "fast_forwarded": fast_forwarded,
657 |             "secs_playing": secs_playing,
658 |             "secs_paused": secs_paused
659 |         }])
660 | 
661 |         spotify = get_spotify_client()
662 |         track_features = pd.DataFrame(
663 |             json.loads(spotify.tracks_audio_features([playback_feed[0].item.id]).json())).drop(
664 |             columns=['duration_ms', 'type', 'analysis_url', 'uri', 'track_href'])
665 | 
666 |         # Merge trackinfo with track features
667 |         track_table = pd.merge(track_info_df, track_features, how='left', left_on='track_id', right_on='id').set_index(
668 |             'timestamp_utc').drop(columns=['id'])
669 |         # Insert into DB
670 |         track_table.to_sql('spotify_play_history', engine, if_exists='append', index=True)
671 | 
--------------------------------------------------------------------------------