├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── dev.pull.request.yml │ └── prod.push.yml ├── Dockerfile ├── Extra ├── Garmin-Grafana-Logo.svg └── garmin-fetch.ipynb ├── Grafana_Dashboard ├── Garmin-Grafana-Dashboard-Preview.png └── Garmin-Grafana-Dashboard.json ├── LICENSE ├── README.md ├── compose-example.yml ├── garmin-fetch.py └── requirements.txt /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: arpandesign 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: arpandesign 10 | issuehunt: # Replace with a single IssueHunt username 11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 12 | polar: # Replace with a single Polar username 13 | buy_me_a_coffee: arpandesign 14 | thanks_dev: # Replace with a single thanks.dev username 15 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 16 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help the project improve 4 | title: "[BUG] Please Edit this title explaining your issue" 5 | labels: bug 6 | assignees: arpanghosh8453 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **Logs** 14 | The log content that precedes the error. 
Skip this section if there is no error in the log. Please provide the revevant log as applicable instead of the full log. If you have very long error log, please consider attaching the log file to the issue instead. 15 | 16 | ```text 17 | your log content goes here ... replace this text with your relevant log 18 | ``` 19 | 20 | **Screenshots** 21 | If applicable, add screenshots to help explain your problem. 22 | 23 | **Are you using docker?** 24 | - Yes/No 25 | 26 | **Are you using a VPN?** 27 | - Yes/No - Please note that authentication might fail with `401` or `429` Client Errors if you are using a VPN network. 28 | 29 | **Did you read the README and tried to Troubleshoot?** 30 | - Explain if you have tried anything additional to resolve the issue. 31 | 32 | **Additional context** 33 | Add any other context about the problem here. 34 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "[FEATURE] Explain your proposed feature here" 5 | labels: enhancement 6 | assignees: arpanghosh8453 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /.github/workflows/dev.pull.request.yml: -------------------------------------------------------------------------------- 1 | name: Build and test docker container for development 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | paths: 8 | - 'garmin-fetch.py' 9 | - 'compose-example.yml' 10 | - 'requirements.txt' 11 | - 'Dockerfile' 12 | 13 | env: 14 | REGISTRY: docker.io 15 | IMAGE_NAME: thisisarpanghosh/garmin-fetch-data 16 | 17 | jobs: 18 | build: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v4 22 | 23 | - name: Set up QEMU 24 | uses: docker/setup-qemu-action@v3 25 | 26 | - name: Set up Docker Buildx 27 | uses: docker/setup-buildx-action@v2 28 | 29 | - name: Build the Docker image (multi-arch) 30 | run: docker buildx build --platform linux/amd64 --load -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:dev . 31 | 32 | test: 33 | runs-on: ubuntu-latest 34 | steps: 35 | - uses: actions/checkout@v4 36 | - name: Test the Docker image 37 | run: docker compose -f compose-example.yml up -d 38 | -------------------------------------------------------------------------------- /.github/workflows/prod.push.yml: -------------------------------------------------------------------------------- 1 | name: Build and Push Docker Image to Docker Hub 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - 'garmin-fetch.py' 9 | - 'compose-example.yml' 10 | - 'requirements.txt' 11 | - 'Dockerfile' 12 | 13 | env: 14 | REGISTRY: docker.io 15 | IMAGE_NAME: thisisarpanghosh/garmin-fetch-data 16 | 17 | jobs: 18 | build: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v4 22 | 23 | - name: Set up QEMU 24 | uses: docker/setup-qemu-action@v3 25 | 26 | - name: Set up Docker Buildx 27 | uses: docker/setup-buildx-action@v2 28 | 29 | - name: Build the Docker image (multi-arch) 30 | run: docker buildx build --platform linux/amd64 --load -t ${{ env.REGISTRY 
}}/${{ env.IMAGE_NAME }}:latest . 31 | 32 | test: 33 | runs-on: ubuntu-latest 34 | steps: 35 | - uses: actions/checkout@v4 36 | - name: Test the Docker image 37 | run: docker compose -f compose-example.yml up -d 38 | 39 | push_to_registry: 40 | name: Push Docker image to Docker Hub 41 | runs-on: ubuntu-latest 42 | steps: 43 | - name: Check out the repo 44 | uses: actions/checkout@v4 45 | 46 | - name: Set up QEMU 47 | uses: docker/setup-qemu-action@v3 48 | 49 | - name: Set up Docker Buildx 50 | uses: docker/setup-buildx-action@v2 51 | 52 | - name: Log in to Docker Hub 53 | uses: docker/login-action@v3 54 | with: 55 | username: ${{ secrets.DOCKERHUB_USERNAME }} 56 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 57 | 58 | - name: Build and push multi-arch Docker image 59 | uses: docker/build-push-action@v5 60 | with: 61 | context: . 62 | platforms: linux/amd64,linux/arm64 63 | push: true 64 | tags: | 65 | ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest 66 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12-slim 2 | 3 | ENV PYTHONDONTWRITEBYTECODE=1 4 | 5 | ENV PYTHONUNBUFFERED=1 6 | 7 | COPY requirements.txt . 
8 | RUN python -m pip install -r requirements.txt 9 | 10 | WORKDIR /app 11 | COPY ./garmin-fetch.py /app 12 | COPY ./requirements.txt /app 13 | 14 | RUN groupadd --gid 1000 appuser && useradd --uid 1000 --gid appuser --shell /bin/bash --create-home appuser && chown -R appuser:appuser /app 15 | USER appuser 16 | 17 | CMD ["python", "garmin-fetch.py"] -------------------------------------------------------------------------------- /Extra/Garmin-Grafana-Logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 13 | 18 | 19 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /Extra/garmin-fetch.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import base64, requests, time, pytz, logging, os, sys, dotenv\n", 10 | "from datetime import datetime, timedelta\n", 11 | "from influxdb import InfluxDBClient\n", 12 | "from influxdb.exceptions import InfluxDBClientError\n", 13 | "import xml.etree.ElementTree as ET\n", 14 | "from garth.exc import GarthHTTPError\n", 15 | "from garminconnect import (\n", 16 | " Garmin,\n", 17 | " GarminConnectAuthenticationError,\n", 18 | " GarminConnectConnectionError,\n", 19 | " GarminConnectTooManyRequestsError,\n", 20 | ")\n", 21 | "garmin_obj = None" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "banner_text = \"\"\"\n", 31 | "\n", 32 | "***** █▀▀ ▄▀█ █▀█ █▀▄▀█ █ █▄ █   █▀▀ █▀█ ▄▀█ █▀▀ ▄▀█ █▄ █ ▄▀█ *****\n", 33 | "***** █▄█ █▀█ █▀▄ █ ▀ █ █ █ ▀█   █▄█ █▀▄ █▀█ █▀  █▀█ █ ▀█ █▀█ *****\n", 34 | "\n", 35 | "______________________________________________________________________\n", 36 | "\n", 37 | "By Arpan Ghosh | Please 
consider supporting the project if you love it\n", 38 | "______________________________________________________________________\n", 39 | "\n", 40 | "\"\"\"\n", 41 | "print(banner_text)\n", 42 | "env_override = dotenv.load_dotenv(\"override-default-vars.env\", override=True)\n", 43 | "if env_override:\n", 44 | " logging.warning(\"System ENV variables are overriden with override-default-vars.env\")" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 79, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "INFLUXDB_HOST = os.getenv(\"INFLUXDB_HOST\",'your.influxdb.hostname') # Required\n", 54 | "INFLUXDB_PORT = int(os.getenv(\"INFLUXDB_PORT\", 8086)) # Required\n", 55 | "INFLUXDB_USERNAME = os.getenv(\"INFLUXDB_USERNAME\", 'influxdb_username') # Required\n", 56 | "INFLUXDB_PASSWORD = os.getenv(\"INFLUXDB_PASSWORD\", 'influxdb_access_password') # Required\n", 57 | "INFLUXDB_DATABASE = os.getenv(\"INFLUXDB_DATABASE\", 'GarminStats') # Required\n", 58 | "TOKEN_DIR = os.getenv(\"TOKEN_DIR\", \"~/.garminconnect\") # optional\n", 59 | "GARMINCONNECT_EMAIL = os.environ.get(\"GARMINCONNECT_EMAIL\", None) # optional, asks in prompt on run if not provided\n", 60 | "GARMINCONNECT_PASSWORD = base64.b64decode(os.getenv(\"GARMINCONNECT_BASE64_PASSWORD\")).decode(\"utf-8\") if os.getenv(\"GARMINCONNECT_BASE64_PASSWORD\") != None else None # optional, asks in prompt on run if not provided\n", 61 | "GARMIN_DEVICENAME = os.getenv(\"GARMIN_DEVICENAME\", \"Unknown\") # optional, attepmts to set the same automatically if not given\n", 62 | "AUTO_DATE_RANGE = False if os.getenv(\"AUTO_DATE_RANGE\") in ['False','false','FALSE','f','F','no','No','NO','0'] else True # optional\n", 63 | "MANUAL_START_DATE = os.getenv(\"MANUAL_START_DATE\", None) # optional, in YYYY-MM-DD format, if you want to bulk update only from specific date\n", 64 | "MANUAL_END_DATE = os.getenv(\"MANUAL_END_DATE\", datetime.today().strftime('%Y-%m-%d')) # optional, in YYYY-MM-DD format, if 
you want to bulk update until a specific date\n", 65 | "LOG_LEVEL = os.getenv(\"LOG_LEVEL\", \"INFO\") # optional\n", 66 | "FETCH_FAILED_WAIT_SECONDS = int(os.getenv(\"FETCH_FAILED_WAIT_SECONDS\", 1800)) # optional\n", 67 | "RATE_LIMIT_CALLS_SECONDS = int(os.getenv(\"RATE_LIMIT_CALLS_SECONDS\", 5)) # optional\n", 68 | "INFLUXDB_ENDPOINT_IS_HTTP = False if os.getenv(\"INFLUXDB_ENDPOINT_IS_HTTP\") in ['False','false','FALSE','f','F','no','No','NO','0'] else True # optional\n", 69 | "GARMIN_DEVICENAME_AUTOMATIC = False if GARMIN_DEVICENAME != \"Unknown\" else True # optional\n", 70 | "UPDATE_INTERVAL_SECONDS = int(os.getenv(\"UPDATE_INTERVAL_SECONDS\", 300)) # optional" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 6, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "for handler in logging.root.handlers[:]:\n", 80 | " logging.root.removeHandler(handler)\n", 81 | "\n", 82 | "logging.basicConfig(\n", 83 | " level=getattr(logging, LOG_LEVEL, logging.INFO),\n", 84 | " format=\"%(asctime)s - %(levelname)s - %(message)s\",\n", 85 | " handlers=[\n", 86 | " logging.StreamHandler(sys.stdout)\n", 87 | " ]\n", 88 | ")" 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "metadata": {}, 95 | "outputs": [], 96 | "source": [ 97 | "try:\n", 98 | " if INFLUXDB_ENDPOINT_IS_HTTP:\n", 99 | " influxdbclient = InfluxDBClient(host=INFLUXDB_HOST, port=INFLUXDB_PORT, username=INFLUXDB_USERNAME, password=INFLUXDB_PASSWORD)\n", 100 | " else:\n", 101 | " influxdbclient = InfluxDBClient(host=INFLUXDB_HOST, port=INFLUXDB_PORT, username=INFLUXDB_USERNAME, password=INFLUXDB_PASSWORD, ssl=True, verify_ssl=True)\n", 102 | " influxdbclient.switch_database(INFLUXDB_DATABASE)\n", 103 | " influxdbclient.ping()\n", 104 | "except InfluxDBClientError as err:\n", 105 | " logging.error(\"Unable to connect with influxdb database! 
Aborted\")\n", 106 | " raise InfluxDBClientError(\"InfluxDB connection failed:\" + str(err))" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": null, 112 | "metadata": {}, 113 | "outputs": [], 114 | "source": [ 115 | "def iter_days(start_date: str, end_date: str):\n", 116 | " start = datetime.strptime(start_date, '%Y-%m-%d')\n", 117 | " end = datetime.strptime(end_date, '%Y-%m-%d')\n", 118 | " current = end\n", 119 | "\n", 120 | " while current >= start:\n", 121 | " yield current.strftime('%Y-%m-%d')\n", 122 | " current -= timedelta(days=1)\n" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": {}, 129 | "outputs": [], 130 | "source": [ 131 | "def garmin_login():\n", 132 | " try:\n", 133 | " logging.info(f\"Trying to login to Garmin Connect using token data from directory '{TOKEN_DIR}'...\")\n", 134 | " garmin = Garmin()\n", 135 | " garmin.login(TOKEN_DIR)\n", 136 | " logging.info(\"login to Garmin Connect successful using stored session tokens.\")\n", 137 | "\n", 138 | " except (FileNotFoundError, GarthHTTPError, GarminConnectAuthenticationError):\n", 139 | " logging.warning(\"Session is expired or login information not present/incorrect. 
You'll need to log in again...login with your Garmin Connect credentials to generate them.\")\n", 140 | " try:\n", 141 | " user_email = GARMINCONNECT_EMAIL or input(\"Enter Garminconnect Login e-mail: \")\n", 142 | " user_password = GARMINCONNECT_PASSWORD or input(\"Enter Garminconnect password (characters will be visible): \")\n", 143 | " garmin = Garmin(\n", 144 | " email=user_email, password=user_password, is_cn=False, return_on_mfa=True\n", 145 | " )\n", 146 | " result1, result2 = garmin.login()\n", 147 | " if result1 == \"needs_mfa\": # MFA is required\n", 148 | " mfa_code = input(\"MFA one-time code: \")\n", 149 | " garmin.resume_login(result2, mfa_code)\n", 150 | "\n", 151 | " garmin.garth.dump(TOKEN_DIR)\n", 152 | " logging.info(f\"Oauth tokens stored in '{TOKEN_DIR}' directory for future use\")\n", 153 | "\n", 154 | " garmin.login(TOKEN_DIR)\n", 155 | " logging.info(\"login to Garmin Connect successful using stored session tokens.\")\n", 156 | "\n", 157 | " except (\n", 158 | " FileNotFoundError,\n", 159 | " GarthHTTPError,\n", 160 | " GarminConnectAuthenticationError,\n", 161 | " requests.exceptions.HTTPError,\n", 162 | " ) as err:\n", 163 | " logging.error(str(err))\n", 164 | " raise Exception(\"Session is expired : please login again and restart the script\")\n", 165 | "\n", 166 | " return garmin" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 10, 172 | "metadata": {}, 173 | "outputs": [], 174 | "source": [ 175 | "def write_points_to_influxdb(points):\n", 176 | " try:\n", 177 | " if len(points) != 0:\n", 178 | " influxdbclient.write_points(points)\n", 179 | " logging.info(\"Successfully updated influxdb database with new points\")\n", 180 | " except InfluxDBClientError as err:\n", 181 | " logging.error(\"Unable to connect with database! 
\" + str(err))" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 27, 187 | "metadata": {}, 188 | "outputs": [], 189 | "source": [ 190 | "def get_daily_stats(date_str):\n", 191 | " points_list = []\n", 192 | " stats_json = garmin_obj.get_stats(date_str)\n", 193 | " if stats_json['wellnessStartTimeGmt'] and datetime.strptime(date_str, \"%Y-%m-%d\") < datetime.today():\n", 194 | " points_list.append({\n", 195 | " \"measurement\": \"DailyStats\",\n", 196 | " \"time\": pytz.timezone(\"UTC\").localize(datetime.strptime(stats_json['wellnessStartTimeGmt'], \"%Y-%m-%dT%H:%M:%S.%f\")).isoformat(),\n", 197 | " \"tags\": {\n", 198 | " \"Device\": GARMIN_DEVICENAME\n", 199 | " },\n", 200 | " \"fields\": {\n", 201 | " \"activeKilocalories\": stats_json.get('activeKilocalories'),\n", 202 | " \"bmrKilocalories\": stats_json.get('bmrKilocalories'),\n", 203 | "\n", 204 | " 'totalSteps': stats_json.get('totalSteps'),\n", 205 | " 'totalDistanceMeters': stats_json.get('totalDistanceMeters'),\n", 206 | "\n", 207 | " \"highlyActiveSeconds\": stats_json.get(\"highlyActiveSeconds\"),\n", 208 | " \"activeSeconds\": stats_json.get(\"activeSeconds\"),\n", 209 | " \"sedentarySeconds\": stats_json.get(\"sedentarySeconds\"),\n", 210 | " \"sleepingSeconds\": stats_json.get(\"sleepingSeconds\"),\n", 211 | " \"moderateIntensityMinutes\": stats_json.get(\"moderateIntensityMinutes\"),\n", 212 | " \"vigorousIntensityMinutes\": stats_json.get(\"vigorousIntensityMinutes\"),\n", 213 | "\n", 214 | " \"floorsAscendedInMeters\": stats_json.get(\"floorsAscendedInMeters\"),\n", 215 | " \"floorsDescendedInMeters\": stats_json.get(\"floorsDescendedInMeters\"),\n", 216 | " \"floorsAscended\": stats_json.get(\"floorsAscended\"),\n", 217 | " \"floorsDescended\": stats_json.get(\"floorsDescended\"),\n", 218 | "\n", 219 | " \"minHeartRate\": stats_json.get(\"minHeartRate\"),\n", 220 | " \"maxHeartRate\": stats_json.get(\"maxHeartRate\"),\n", 221 | " \"restingHeartRate\": 
stats_json.get(\"restingHeartRate\"),\n", 222 | " \"minAvgHeartRate\": stats_json.get(\"minAvgHeartRate\"),\n", 223 | " \"maxAvgHeartRate\": stats_json.get(\"maxAvgHeartRate\"),\n", 224 | " \n", 225 | " \"stressDuration\": stats_json.get(\"stressDuration\"),\n", 226 | " \"restStressDuration\": stats_json.get(\"restStressDuration\"),\n", 227 | " \"activityStressDuration\": stats_json.get(\"activityStressDuration\"),\n", 228 | " \"uncategorizedStressDuration\": stats_json.get(\"uncategorizedStressDuration\"),\n", 229 | " \"totalStressDuration\": stats_json.get(\"totalStressDuration\"),\n", 230 | " \"lowStressDuration\": stats_json.get(\"lowStressDuration\"),\n", 231 | " \"mediumStressDuration\": stats_json.get(\"mediumStressDuration\"),\n", 232 | " \"highStressDuration\": stats_json.get(\"highStressDuration\"),\n", 233 | " \n", 234 | " \"stressPercentage\": stats_json.get(\"stressPercentage\"),\n", 235 | " \"restStressPercentage\": stats_json.get(\"restStressPercentage\"),\n", 236 | " \"activityStressPercentage\": stats_json.get(\"activityStressPercentage\"),\n", 237 | " \"uncategorizedStressPercentage\": stats_json.get(\"uncategorizedStressPercentage\"),\n", 238 | " \"lowStressPercentage\": stats_json.get(\"lowStressPercentage\"),\n", 239 | " \"mediumStressPercentage\": stats_json.get(\"mediumStressPercentage\"),\n", 240 | " \"highStressPercentage\": stats_json.get(\"highStressPercentage\"),\n", 241 | " \n", 242 | " \"bodyBatteryChargedValue\": stats_json.get(\"bodyBatteryChargedValue\"),\n", 243 | " \"bodyBatteryDrainedValue\": stats_json.get(\"bodyBatteryDrainedValue\"),\n", 244 | " \"bodyBatteryHighestValue\": stats_json.get(\"bodyBatteryHighestValue\"),\n", 245 | " \"bodyBatteryLowestValue\": stats_json.get(\"bodyBatteryLowestValue\"),\n", 246 | " \"bodyBatteryDuringSleep\": stats_json.get(\"bodyBatteryDuringSleep\"),\n", 247 | " \"bodyBatteryAtWakeTime\": stats_json.get(\"bodyBatteryAtWakeTime\"),\n", 248 | " \n", 249 | " \"averageSpo2\": 
stats_json.get(\"averageSpo2\"),\n", 250 | " \"lowestSpo2\": stats_json.get(\"lowestSpo2\"),\n", 251 | " }\n", 252 | " })\n", 253 | " if points_list:\n", 254 | " logging.info(f\"Success : Fetching daily matrices for date {date_str}\")\n", 255 | " return points_list\n", 256 | " else:\n", 257 | " logging.debug(\"No daily stat data available for the give date \" + date_str)\n", 258 | " return []\n", 259 | " " 260 | ] 261 | }, 262 | { 263 | "cell_type": "code", 264 | "execution_count": 12, 265 | "metadata": {}, 266 | "outputs": [], 267 | "source": [ 268 | "def get_last_sync():\n", 269 | " global GARMIN_DEVICENAME\n", 270 | " points_list = []\n", 271 | " sync_data = garmin_obj.get_device_last_used()\n", 272 | " if GARMIN_DEVICENAME_AUTOMATIC:\n", 273 | " GARMIN_DEVICENAME = sync_data.get('lastUsedDeviceName') or \"Unknown\"\n", 274 | " points_list.append({\n", 275 | " \"measurement\": \"DeviceSync\",\n", 276 | " \"time\": datetime.fromtimestamp(sync_data['lastUsedDeviceUploadTime']/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 277 | " \"tags\": {\n", 278 | " \"Device\": GARMIN_DEVICENAME\n", 279 | " },\n", 280 | " \"fields\": {\n", 281 | " \"imageUrl\": sync_data.get('imageUrl'),\n", 282 | " \"Device\": GARMIN_DEVICENAME\n", 283 | " }\n", 284 | " })\n", 285 | " if points_list:\n", 286 | " logging.info(f\"Success : Updated device last sync time\")\n", 287 | " else:\n", 288 | " logging.warning(\"No associated/synced Garmin device found with your account\")\n", 289 | " return points_list" 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": 35, 295 | "metadata": {}, 296 | "outputs": [], 297 | "source": [ 298 | "def get_sleep_data(date_str):\n", 299 | " points_list = []\n", 300 | " all_sleep_data = garmin_obj.get_sleep_data(date_str)\n", 301 | " sleep_json = all_sleep_data.get(\"dailySleepDTO\", None)\n", 302 | " if sleep_json[\"sleepEndTimestampGMT\"]:\n", 303 | " points_list.append({\n", 304 | " \"measurement\": \"SleepSummary\",\n", 305 | " 
\"time\": datetime.fromtimestamp(sleep_json[\"sleepEndTimestampGMT\"]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 306 | " \"tags\": {\n", 307 | " \"Device\": GARMIN_DEVICENAME\n", 308 | " },\n", 309 | " \"fields\": {\n", 310 | " \"sleepTimeSeconds\": sleep_json.get(\"sleepTimeSeconds\"),\n", 311 | " \"deepSleepSeconds\": sleep_json.get(\"deepSleepSeconds\"),\n", 312 | " \"lightSleepSeconds\": sleep_json.get(\"lightSleepSeconds\"),\n", 313 | " \"remSleepSeconds\": sleep_json.get(\"remSleepSeconds\"),\n", 314 | " \"awakeSleepSeconds\": sleep_json.get(\"awakeSleepSeconds\"),\n", 315 | " \"averageSpO2Value\": sleep_json.get(\"averageSpO2Value\"),\n", 316 | " \"lowestSpO2Value\": sleep_json.get(\"lowestSpO2Value\"),\n", 317 | " \"highestSpO2Value\": sleep_json.get(\"highestSpO2Value\"),\n", 318 | " \"averageRespirationValue\": sleep_json.get(\"averageRespirationValue\"),\n", 319 | " \"lowestRespirationValue\": sleep_json.get(\"lowestRespirationValue\"),\n", 320 | " \"highestRespirationValue\": sleep_json.get(\"highestRespirationValue\"),\n", 321 | " \"awakeCount\": sleep_json.get(\"awakeCount\"),\n", 322 | " \"avgSleepStress\": sleep_json.get(\"avgSleepStress\"),\n", 323 | " \"sleepScore\": sleep_json.get(\"sleepScores\", {}).get(\"overall\", {}).get(\"value\"),\n", 324 | " \"restlessMomentsCount\": all_sleep_data.get(\"restlessMomentsCount\"),\n", 325 | " \"avgOvernightHrv\": all_sleep_data.get(\"avgOvernightHrv\"),\n", 326 | " \"bodyBatteryChange\": all_sleep_data.get(\"bodyBatteryChange\"),\n", 327 | " \"restingHeartRate\": all_sleep_data.get(\"restingHeartRate\")\n", 328 | " }\n", 329 | " })\n", 330 | " sleep_movement_intraday = all_sleep_data.get(\"sleepMovement\")\n", 331 | " if sleep_movement_intraday:\n", 332 | " for entry in sleep_movement_intraday:\n", 333 | " points_list.append({\n", 334 | " \"measurement\": \"SleepIntraday\",\n", 335 | " \"time\": pytz.timezone(\"UTC\").localize(datetime.strptime(entry[\"startGMT\"], 
\"%Y-%m-%dT%H:%M:%S.%f\")).isoformat(),\n", 336 | " \"tags\": {\n", 337 | " \"Device\": GARMIN_DEVICENAME\n", 338 | " },\n", 339 | " \"fields\": {\n", 340 | " \"SleepMovementActivityLevel\": entry.get(\"activityLevel\",-1),\n", 341 | " \"SleepMovementActivitySeconds\": int((datetime.strptime(entry[\"endGMT\"], \"%Y-%m-%dT%H:%M:%S.%f\") - datetime.strptime(entry[\"startGMT\"], \"%Y-%m-%dT%H:%M:%S.%f\")).total_seconds())\n", 342 | " }\n", 343 | " })\n", 344 | " sleep_levels_intraday = all_sleep_data.get(\"sleepLevels\")\n", 345 | " if sleep_levels_intraday:\n", 346 | " for entry in sleep_levels_intraday:\n", 347 | " if entry.get(\"activityLevel\"):\n", 348 | " points_list.append({\n", 349 | " \"measurement\": \"SleepIntraday\",\n", 350 | " \"time\": pytz.timezone(\"UTC\").localize(datetime.strptime(entry[\"startGMT\"], \"%Y-%m-%dT%H:%M:%S.%f\")).isoformat(),\n", 351 | " \"tags\": {\n", 352 | " \"Device\": GARMIN_DEVICENAME\n", 353 | " },\n", 354 | " \"fields\": {\n", 355 | " \"SleepStageLevel\": entry.get(\"activityLevel\"),\n", 356 | " \"SleepStageSeconds\": int((datetime.strptime(entry[\"endGMT\"], \"%Y-%m-%dT%H:%M:%S.%f\") - datetime.strptime(entry[\"startGMT\"], \"%Y-%m-%dT%H:%M:%S.%f\")).total_seconds())\n", 357 | " }\n", 358 | " })\n", 359 | " sleep_restlessness_intraday = all_sleep_data.get(\"sleepRestlessMoments\")\n", 360 | " if sleep_restlessness_intraday:\n", 361 | " for entry in sleep_restlessness_intraday:\n", 362 | " if entry.get(\"value\"):\n", 363 | " points_list.append({\n", 364 | " \"measurement\": \"SleepIntraday\",\n", 365 | " \"time\": datetime.fromtimestamp(entry[\"startGMT\"]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 366 | " \"tags\": {\n", 367 | " \"Device\": GARMIN_DEVICENAME\n", 368 | " },\n", 369 | " \"fields\": {\n", 370 | " \"sleepRestlessValue\": entry.get(\"value\")\n", 371 | " }\n", 372 | " })\n", 373 | " sleep_spo2_intraday = all_sleep_data.get(\"wellnessEpochSPO2DataDTOList\")\n", 374 | " if sleep_spo2_intraday:\n", 375 | " 
for entry in sleep_spo2_intraday:\n", 376 | " if entry.get(\"spo2Reading\"):\n", 377 | " points_list.append({\n", 378 | " \"measurement\": \"SleepIntraday\",\n", 379 | " \"time\": pytz.timezone(\"UTC\").localize(datetime.strptime(entry[\"epochTimestamp\"], \"%Y-%m-%dT%H:%M:%S.%f\")).isoformat(),\n", 380 | " \"tags\": {\n", 381 | " \"Device\": GARMIN_DEVICENAME\n", 382 | " },\n", 383 | " \"fields\": {\n", 384 | " \"spo2Reading\": entry.get(\"spo2Reading\")\n", 385 | " }\n", 386 | " })\n", 387 | " sleep_respiration_intraday = all_sleep_data.get(\"wellnessEpochRespirationDataDTOList\")\n", 388 | " if sleep_respiration_intraday:\n", 389 | " for entry in sleep_respiration_intraday:\n", 390 | " if entry.get(\"respirationValue\"):\n", 391 | " points_list.append({\n", 392 | " \"measurement\": \"SleepIntraday\",\n", 393 | " \"time\": datetime.fromtimestamp(entry[\"startTimeGMT\"]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 394 | " \"tags\": {\n", 395 | " \"Device\": GARMIN_DEVICENAME\n", 396 | " },\n", 397 | " \"fields\": {\n", 398 | " \"respirationValue\": entry.get(\"respirationValue\")\n", 399 | " }\n", 400 | " })\n", 401 | " sleep_heart_rate_intraday = all_sleep_data.get(\"sleepHeartRate\")\n", 402 | " if sleep_heart_rate_intraday:\n", 403 | " for entry in sleep_heart_rate_intraday:\n", 404 | " if entry.get(\"value\"):\n", 405 | " points_list.append({\n", 406 | " \"measurement\": \"SleepIntraday\",\n", 407 | " \"time\": datetime.fromtimestamp(entry[\"startGMT\"]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 408 | " \"tags\": {\n", 409 | " \"Device\": GARMIN_DEVICENAME\n", 410 | " },\n", 411 | " \"fields\": {\n", 412 | " \"heartRate\": entry.get(\"value\")\n", 413 | " }\n", 414 | " })\n", 415 | " sleep_stress_intraday = all_sleep_data.get(\"sleepStress\")\n", 416 | " if sleep_stress_intraday:\n", 417 | " for entry in sleep_stress_intraday:\n", 418 | " if entry.get(\"value\"):\n", 419 | " points_list.append({\n", 420 | " \"measurement\": \"SleepIntraday\",\n", 
421 | " \"time\": datetime.fromtimestamp(entry[\"startGMT\"]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 422 | " \"tags\": {\n", 423 | " \"Device\": GARMIN_DEVICENAME\n", 424 | " },\n", 425 | " \"fields\": {\n", 426 | " \"stressValue\": entry.get(\"value\")\n", 427 | " }\n", 428 | " })\n", 429 | " sleep_bb_intraday = all_sleep_data.get(\"sleepBodyBattery\")\n", 430 | " if sleep_bb_intraday:\n", 431 | " for entry in sleep_bb_intraday:\n", 432 | " if entry.get(\"value\"):\n", 433 | " points_list.append({\n", 434 | " \"measurement\": \"SleepIntraday\",\n", 435 | " \"time\": datetime.fromtimestamp(entry[\"startGMT\"]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 436 | " \"tags\": {\n", 437 | " \"Device\": GARMIN_DEVICENAME\n", 438 | " },\n", 439 | " \"fields\": {\n", 440 | " \"bodyBattery\": entry.get(\"value\")\n", 441 | " }\n", 442 | " })\n", 443 | " sleep_hrv_intraday = all_sleep_data.get(\"hrvData\")\n", 444 | " if sleep_hrv_intraday:\n", 445 | " for entry in sleep_hrv_intraday:\n", 446 | " if entry.get(\"value\"):\n", 447 | " points_list.append({\n", 448 | " \"measurement\": \"SleepIntraday\",\n", 449 | " \"time\": datetime.fromtimestamp(entry[\"startGMT\"]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 450 | " \"tags\": {\n", 451 | " \"Device\": GARMIN_DEVICENAME\n", 452 | " },\n", 453 | " \"fields\": {\n", 454 | " \"hrvData\": entry.get(\"value\")\n", 455 | " }\n", 456 | " })\n", 457 | " if points_list:\n", 458 | " logging.info(f\"Success : Fetching intraday sleep matrices for date {date_str}\")\n", 459 | " return points_list" 460 | ] 461 | }, 462 | { 463 | "cell_type": "code", 464 | "execution_count": 14, 465 | "metadata": {}, 466 | "outputs": [], 467 | "source": [ 468 | "def get_intraday_hr(date_str):\n", 469 | " points_list = []\n", 470 | " hr_list = garmin_obj.get_heart_rates(date_str).get(\"heartRateValues\") or []\n", 471 | " for entry in hr_list:\n", 472 | " if entry[1]:\n", 473 | " points_list.append({\n", 474 | " \"measurement\": 
\"HeartRateIntraday\",\n", 475 | " \"time\": datetime.fromtimestamp(entry[0]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 476 | " \"tags\": {\n", 477 | " \"Device\": GARMIN_DEVICENAME\n", 478 | " },\n", 479 | " \"fields\": {\n", 480 | " \"HeartRate\": entry[1]\n", 481 | " }\n", 482 | " })\n", 483 | " if points_list:\n", 484 | " logging.info(f\"Success : Fetching intraday Heart Rate for date {date_str}\")\n", 485 | " return points_list" 486 | ] 487 | }, 488 | { 489 | "cell_type": "code", 490 | "execution_count": 32, 491 | "metadata": {}, 492 | "outputs": [], 493 | "source": [ 494 | "def get_intraday_steps(date_str):\n", 495 | " points_list = []\n", 496 | " steps_list = garmin_obj.get_steps_data(date_str)\n", 497 | " for entry in steps_list:\n", 498 | " if entry[\"steps\"] or entry[\"steps\"] == 0:\n", 499 | " points_list.append({\n", 500 | " \"measurement\": \"StepsIntraday\",\n", 501 | " \"time\": pytz.timezone(\"UTC\").localize(datetime.strptime(entry['startGMT'], \"%Y-%m-%dT%H:%M:%S.%f\")).isoformat(),\n", 502 | " \"tags\": {\n", 503 | " \"Device\": GARMIN_DEVICENAME\n", 504 | " },\n", 505 | " \"fields\": {\n", 506 | " \"StepsCount\": entry[\"steps\"]\n", 507 | " }\n", 508 | " })\n", 509 | " if points_list:\n", 510 | " logging.info(f\"Success : Fetching intraday steps for date {date_str}\")\n", 511 | " return points_list" 512 | ] 513 | }, 514 | { 515 | "cell_type": "code", 516 | "execution_count": 16, 517 | "metadata": {}, 518 | "outputs": [], 519 | "source": [ 520 | "def get_intraday_stress(date_str):\n", 521 | " points_list = []\n", 522 | " stress_list = garmin_obj.get_stress_data(date_str).get('stressValuesArray') or []\n", 523 | " for entry in stress_list:\n", 524 | " if entry[1] or entry[1] == 0:\n", 525 | " points_list.append({\n", 526 | " \"measurement\": \"StressIntraday\",\n", 527 | " \"time\": datetime.fromtimestamp(entry[0]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 528 | " \"tags\": {\n", 529 | " \"Device\": GARMIN_DEVICENAME\n", 530 | " 
},\n", 531 | " \"fields\": {\n", 532 | " \"stressLevel\": entry[1]\n", 533 | " }\n", 534 | " })\n", 535 | " bb_list = garmin_obj.get_stress_data(date_str).get('bodyBatteryValuesArray') or []\n", 536 | " for entry in bb_list:\n", 537 | " if entry[2] or entry[2] == 0:\n", 538 | " points_list.append({\n", 539 | " \"measurement\": \"BodyBatteryIntraday\",\n", 540 | " \"time\": datetime.fromtimestamp(entry[0]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 541 | " \"tags\": {\n", 542 | " \"Device\": GARMIN_DEVICENAME\n", 543 | " },\n", 544 | " \"fields\": {\n", 545 | " \"BodyBatteryLevel\": entry[2]\n", 546 | " }\n", 547 | " })\n", 548 | " if points_list:\n", 549 | " logging.info(f\"Success : Fetching intraday stress and Body Battery values for date {date_str}\")\n", 550 | " return points_list" 551 | ] 552 | }, 553 | { 554 | "cell_type": "code", 555 | "execution_count": 17, 556 | "metadata": {}, 557 | "outputs": [], 558 | "source": [ 559 | "def get_intraday_br(date_str):\n", 560 | " points_list = []\n", 561 | " br_list = garmin_obj.get_respiration_data(date_str).get('respirationValuesArray') or []\n", 562 | " for entry in br_list:\n", 563 | " if entry[1]:\n", 564 | " points_list.append({\n", 565 | " \"measurement\": \"BreathingRateIntraday\",\n", 566 | " \"time\": datetime.fromtimestamp(entry[0]/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 567 | " \"tags\": {\n", 568 | " \"Device\": GARMIN_DEVICENAME\n", 569 | " },\n", 570 | " \"fields\": {\n", 571 | " \"BreathingRate\": entry[1]\n", 572 | " }\n", 573 | " })\n", 574 | " if points_list:\n", 575 | " logging.info(f\"Success : Fetching intraday Breathing Rate for date {date_str}\")\n", 576 | " return points_list" 577 | ] 578 | }, 579 | { 580 | "cell_type": "code", 581 | "execution_count": 33, 582 | "metadata": {}, 583 | "outputs": [], 584 | "source": [ 585 | "def get_intraday_hrv(date_str):\n", 586 | " points_list = []\n", 587 | " hrv_list = (garmin_obj.get_hrv_data(date_str) or {}).get('hrvReadings') or []\n", 588 | 
" for entry in hrv_list:\n", 589 | " if entry.get('hrvValue'):\n", 590 | " points_list.append({\n", 591 | " \"measurement\": \"HRV_Intraday\",\n", 592 | " \"time\": pytz.timezone(\"UTC\").localize(datetime.strptime(entry['readingTimeGMT'],\"%Y-%m-%dT%H:%M:%S.%f\")).isoformat(),\n", 593 | " \"tags\": {\n", 594 | " \"Device\": GARMIN_DEVICENAME\n", 595 | " },\n", 596 | " \"fields\": {\n", 597 | " \"hrvValue\": entry.get('hrvValue')\n", 598 | " }\n", 599 | " })\n", 600 | " if points_list:\n", 601 | " logging.info(f\"Success : Fetching intraday HRV for date {date_str}\")\n", 602 | " return points_list" 603 | ] 604 | }, 605 | { 606 | "cell_type": "code", 607 | "execution_count": 19, 608 | "metadata": {}, 609 | "outputs": [], 610 | "source": [ 611 | "def get_body_composition(date_str):\n", 612 | " points_list = []\n", 613 | " body_composition_json = garmin_obj.get_body_composition(date_str).get('totalAverage')\n", 614 | " if body_composition_json:\n", 615 | " data_fields = {\n", 616 | " \"weight\": body_composition_json[\"weight\"],\n", 617 | " \"bmi\": body_composition_json[\"bmi\"],\n", 618 | " \"bodyFat\": body_composition_json[\"bodyFat\"],\n", 619 | " \"bodyWater\": body_composition_json[\"bodyWater\"],\n", 620 | " }\n", 621 | " if not all(value is None for value in data_fields.values()):\n", 622 | " points_list.append({\n", 623 | " \"measurement\": \"BodyComposition\",\n", 624 | " \"time\": datetime.fromtimestamp(body_composition_json['from']/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 625 | " \"tags\": {\n", 626 | " \"Device\": GARMIN_DEVICENAME\n", 627 | " },\n", 628 | " \"fields\": data_fields\n", 629 | " })\n", 630 | " logging.info(f\"Success : Fetching intraday Body Composition for date {date_str}\")\n", 631 | " return points_list" 632 | ] 633 | }, 634 | { 635 | "cell_type": "code", 636 | "execution_count": 20, 637 | "metadata": {}, 638 | "outputs": [], 639 | "source": [ 640 | "def get_activity_summary(date_str):\n", 641 | " points_list = []\n", 642 | " 
activity_with_gps_id_dict = {}\n", 643 | " activity_list = garmin_obj.get_activities_by_date(date_str, date_str)\n", 644 | " for activity in activity_list:\n", 645 | " if activity.get('hasPolyline'):\n", 646 | " activity_with_gps_id_dict[activity.get('activityId')] = activity.get('activityType',{}).get('typeKey', \"Unknown\")\n", 647 | " points_list.append({\n", 648 | " \"measurement\": \"ActivitySummary\",\n", 649 | " \"time\": datetime.fromtimestamp(activity['beginTimestamp']/1000, tz=pytz.timezone(\"UTC\")).isoformat(),\n", 650 | " \"tags\": {\n", 651 | " \"Device\": GARMIN_DEVICENAME\n", 652 | " },\n", 653 | " \"fields\": {\n", 654 | " 'activityId': activity.get('activityId'),\n", 655 | " 'deviceId': activity.get('deviceId'),\n", 656 | " 'activityName': activity.get('activityName'),\n", 657 | " 'activityType': activity.get('activityType',{}).get('typeKey',None),\n", 658 | " 'distance': activity.get('distance'),\n", 659 | " 'elapsedDuration': activity.get('elapsedDuration'),\n", 660 | " 'movingDuration': activity.get('movingDuration'),\n", 661 | " 'averageSpeed': activity.get('averageSpeed'),\n", 662 | " 'maxSpeed': activity.get('maxSpeed'),\n", 663 | " 'calories': activity.get('calories'),\n", 664 | " 'bmrCalories': activity.get('bmrCalories'),\n", 665 | " 'averageHR': activity.get('averageHR'),\n", 666 | " 'maxHR': activity.get('maxHR'),\n", 667 | " 'locationName': activity.get('locationName'),\n", 668 | " 'lapCount': activity.get('lapCount'),\n", 669 | " 'hrTimeInZone_1': activity.get('hrTimeInZone_1'),\n", 670 | " 'hrTimeInZone_2': activity.get('hrTimeInZone_2'),\n", 671 | " 'hrTimeInZone_3': activity.get('hrTimeInZone_3'),\n", 672 | " 'hrTimeInZone_4': activity.get('hrTimeInZone_4'),\n", 673 | " 'hrTimeInZone_5': activity.get('hrTimeInZone_5'),\n", 674 | " }\n", 675 | " })\n", 676 | " points_list.append({\n", 677 | " \"measurement\": \"ActivitySummary\",\n", 678 | " \"time\": datetime.fromtimestamp((activity['beginTimestamp']/1000) + 
int(activity.get('elapsedDuration')), tz=pytz.timezone(\"UTC\")).isoformat(),\n", 679 | " \"tags\": {\n", 680 | " \"Device\": GARMIN_DEVICENAME\n", 681 | " },\n", 682 | " \"fields\": {\n", 683 | " 'activityId': activity.get('activityId'),\n", 684 | " 'deviceId': activity.get('deviceId'),\n", 685 | " 'activityName': \"END\",\n", 686 | " 'activityType': \"No Activity\",\n", 687 | " }\n", 688 | " })\n", 689 | " logging.info(f\"Success : Fetching Activity summary with id {activity.get('activityId')} for date {date_str}\")\n", 690 | " return points_list, activity_with_gps_id_dict" 691 | ] 692 | }, 693 | { 694 | "cell_type": "code", 695 | "execution_count": 37, 696 | "metadata": {}, 697 | "outputs": [], 698 | "source": [ 699 | "def fetch_activity_GPS(activityIDdict):\n", 700 | " points_list = []\n", 701 | " for activityID in activityIDdict.keys():\n", 702 | " root = ET.fromstring(garmin_obj.download_activity(activityID, dl_fmt=garmin_obj.ActivityDownloadFormat.TCX).decode(\"UTF-8\"))\n", 703 | " ns = {\"tcx\": \"http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2\", \"ns3\": \"http://www.garmin.com/xmlschemas/ActivityExtension/v2\"}\n", 704 | " for activity in root.findall(\"tcx:Activities/tcx:Activity\", ns):\n", 705 | " activity_type = activityIDdict[activityID]\n", 706 | " activity_start_time = datetime.fromisoformat(activity.find(\"tcx:Id\", ns).text.strip(\"Z\"))\n", 707 | " lap_index = 1\n", 708 | " for lap in activity.findall(\"tcx:Lap\", ns):\n", 709 | " lap_start_time = datetime.fromisoformat(lap.attrib.get(\"StartTime\").strip(\"Z\"))\n", 710 | " for tp in lap.findall(\".//tcx:Trackpoint\", ns):\n", 711 | " time_obj = datetime.fromisoformat(tp.findtext(\"tcx:Time\", default=None, namespaces=ns).strip(\"Z\"))\n", 712 | " lat = tp.findtext(\"tcx:Position/tcx:LatitudeDegrees\", default=None, namespaces=ns)\n", 713 | " lon = tp.findtext(\"tcx:Position/tcx:LongitudeDegrees\", default=None, namespaces=ns)\n", 714 | " alt = tp.findtext(\"tcx:AltitudeMeters\", 
default=None, namespaces=ns)\n", 715 | " dist = tp.findtext(\"tcx:DistanceMeters\", default=None, namespaces=ns)\n", 716 | " hr = tp.findtext(\"tcx:HeartRateBpm/tcx:Value\", default=None, namespaces=ns)\n", 717 | " speed = tp.findtext(\"tcx:Extensions/ns3:TPX/ns3:Speed\", default=None, namespaces=ns)\n", 718 | "\n", 719 | " try: lat = float(lat)\n", 720 | " except: lat = None\n", 721 | " try: lon = float(lon)\n", 722 | " except: lon = None\n", 723 | " try: alt = float(alt)\n", 724 | " except: alt = None\n", 725 | " try: dist = float(dist)\n", 726 | " except: dist = None\n", 727 | " try: hr = float(hr)\n", 728 | " except: hr = None\n", 729 | " try: speed = float(speed)\n", 730 | " except: speed = None\n", 731 | "\n", 732 | " point = {\n", 733 | " \"measurement\": \"ActivityGPS\",\n", 734 | " \"time\": time_obj.isoformat(), \n", 735 | " \"tags\": {\n", 736 | " \"Device\": GARMIN_DEVICENAME,\n", 737 | " \"ActivityID\": activityID,\n", 738 | " \"ActivitySelector\": activity_start_time.strftime('%Y%m%dT%H%M%SUTC-') + activity_type\n", 739 | " },\n", 740 | " \"fields\": {\n", 741 | " \"ActivityName\": activity_type,\n", 742 | " \"ActivityID\": activityID,\n", 743 | " \"Latitude\": lat,\n", 744 | " \"Longitude\": lon,\n", 745 | " \"Altitude\": alt,\n", 746 | " \"Distance\": dist,\n", 747 | " \"HeartRate\": hr,\n", 748 | " \"Speed\": speed,\n", 749 | " \"lap\": lap_index\n", 750 | " }\n", 751 | " }\n", 752 | " points_list.append(point)\n", 753 | " \n", 754 | " lap_index += 1\n", 755 | " logging.info(f\"Success : Fetching TCX details for activity with id {activityID}\")\n", 756 | " return points_list" 757 | ] 758 | }, 759 | { 760 | "cell_type": "code", 761 | "execution_count": 22, 762 | "metadata": {}, 763 | "outputs": [], 764 | "source": [ 765 | "def daily_fetch_write(date_str):\n", 766 | " write_points_to_influxdb(get_daily_stats(date_str))\n", 767 | " write_points_to_influxdb(get_sleep_data(date_str))\n", 768 | " write_points_to_influxdb(get_intraday_steps(date_str))\n", 
769 | " write_points_to_influxdb(get_intraday_hr(date_str))\n", 770 | " write_points_to_influxdb(get_intraday_stress(date_str))\n", 771 | " write_points_to_influxdb(get_intraday_br(date_str))\n", 772 | " write_points_to_influxdb(get_intraday_hrv(date_str))\n", 773 | " write_points_to_influxdb(get_body_composition(date_str))\n", 774 | " activity_summary_points_list, activity_with_gps_id_dict = get_activity_summary(date_str)\n", 775 | " write_points_to_influxdb(activity_summary_points_list)\n", 776 | " write_points_to_influxdb(fetch_activity_GPS(activity_with_gps_id_dict))\n", 777 | " " 778 | ] 779 | }, 780 | { 781 | "cell_type": "code", 782 | "execution_count": null, 783 | "metadata": {}, 784 | "outputs": [], 785 | "source": [ 786 | "def fetch_write_bulk(start_date_str, end_date_str):\n", 787 | " logging.info(\"Fetching data for the given period in reverse chronological order\")\n", 788 | " time.sleep(3)\n", 789 | " write_points_to_influxdb(get_last_sync())\n", 790 | " for current_date in iter_days(start_date_str, end_date_str):\n", 791 | " success = False\n", 792 | " while not success:\n", 793 | " try:\n", 794 | " daily_fetch_write(current_date)\n", 795 | " logging.info(f\"Success : Fatched all available health matries for date {current_date} (skipped any if unavailable)\")\n", 796 | " logging.info(f\"Waiting : for {RATE_LIMIT_CALLS_SECONDS} seconds\")\n", 797 | " time.sleep(RATE_LIMIT_CALLS_SECONDS)\n", 798 | " success = True\n", 799 | " except (\n", 800 | " GarminConnectConnectionError,\n", 801 | " GarminConnectAuthenticationError,\n", 802 | " GarminConnectTooManyRequestsError,\n", 803 | " requests.exceptions.HTTPError,\n", 804 | " requests.exceptions.ConnectionError,\n", 805 | " requests.exceptions.Timeout,\n", 806 | " GarthHTTPError) as err:\n", 807 | " logging.info(f\"Failed : Failed to fetch one or more matrices for date {current_date}\")\n", 808 | " logging.error(err)\n", 809 | " logging.info(f\"Waiting : for {FETCH_FAILED_WAIT_SECONDS} seconds\")\n", 810 | 
" time.sleep(FETCH_FAILED_WAIT_SECONDS)" 811 | ] 812 | }, 813 | { 814 | "cell_type": "code", 815 | "execution_count": null, 816 | "metadata": {}, 817 | "outputs": [], 818 | "source": [ 819 | "garmin_obj = garmin_login()" 820 | ] 821 | }, 822 | { 823 | "cell_type": "code", 824 | "execution_count": null, 825 | "metadata": {}, 826 | "outputs": [], 827 | "source": [ 828 | "if MANUAL_START_DATE:\n", 829 | " fetch_write_bulk(MANUAL_START_DATE, MANUAL_END_DATE)\n", 830 | " logging.info(f\"Bulk update success : Fetched all available health matries for date range {MANUAL_START_DATE} to {MANUAL_END_DATE}\")\n", 831 | " exit(0)\n", 832 | "else:\n", 833 | " try:\n", 834 | " last_influxdb_sync_time_UTC = pytz.utc.localize(datetime.strptime(list(influxdbclient.query(f\"SELECT * FROM HeartRateIntraday ORDER BY time DESC LIMIT 1\").get_points())[0]['time'],\"%Y-%m-%dT%H:%M:%SZ\"))\n", 835 | " except:\n", 836 | " logging.warning(\"No previously synced data found in local InfluxDB database, defaulting to 7 day initial fetching. 
Use specific start date ENV variable to bulk update past data\")\n", 837 | " last_influxdb_sync_time_UTC = (datetime.today() - timedelta(days=7)).astimezone(pytz.timezone(\"UTC\"))\n", 838 | " \n", 839 | " while True:\n", 840 | " last_watch_sync_time_UTC = datetime.fromtimestamp(int(garmin_obj.get_device_last_used().get('lastUsedDeviceUploadTime')/1000)).astimezone(pytz.timezone(\"UTC\"))\n", 841 | " if last_influxdb_sync_time_UTC < last_watch_sync_time_UTC:\n", 842 | " logging.info(f\"Update found : Current watch sync time is {last_watch_sync_time_UTC} UTC\")\n", 843 | " fetch_write_bulk(last_influxdb_sync_time_UTC.strftime('%Y-%m-%d'), last_watch_sync_time_UTC.strftime('%Y-%m-%d'))\n", 844 | " last_influxdb_sync_time_UTC = last_watch_sync_time_UTC\n", 845 | " else:\n", 846 | " logging.info(f\"No new data found : Current watch and influxdb sync time is {last_watch_sync_time_UTC} UTC\")\n", 847 | " logging.info(f\"waiting for {UPDATE_INTERVAL_SECONDS} seconds before next automatic update calls\")\n", 848 | " time.sleep(UPDATE_INTERVAL_SECONDS)\n", 849 | " \n", 850 | "\n" 851 | ] 852 | } 853 | ], 854 | "metadata": { 855 | "kernelspec": { 856 | "display_name": "iupred-venv", 857 | "language": "python", 858 | "name": "python3" 859 | }, 860 | "language_info": { 861 | "codemirror_mode": { 862 | "name": "ipython", 863 | "version": 3 864 | }, 865 | "file_extension": ".py", 866 | "mimetype": "text/x-python", 867 | "name": "python", 868 | "nbconvert_exporter": "python", 869 | "pygments_lexer": "ipython3", 870 | "version": "3.10.12" 871 | } 872 | }, 873 | "nbformat": 4, 874 | "nbformat_minor": 2 875 | } 876 | -------------------------------------------------------------------------------- /Grafana_Dashboard/Garmin-Grafana-Dashboard-Preview.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/arpanghosh8453/garmin-grafana/5d2f207d481332a272c168d586693478a7b1146a/Grafana_Dashboard/Garmin-Grafana-Dashboard-Preview.png -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2025, Arpan Ghosh 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright notice, 12 | this list of conditions and the following disclaimer in the documentation 13 | and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of the copyright holder nor the names of its 16 | contributors may be used to endorse or promote products derived from 17 | this software without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 |

4 | 5 |

6 | 7 | # Garmin Grafana 8 | 9 | A docker container to fetch data from Garmin servers and store the data in a local influxdb database for appealing visualization with Grafana. 10 | 11 | If you are a **Fitbit user**, please check out the [sister project](https://github.com/arpanghosh8453/fitbit-grafana) made for Fitbit 12 | 13 | ## Dashboard Example 14 | 15 | ![Dashboard](https://github.com/arpanghosh8453/garmin-grafana/blob/main/Grafana_Dashboard/Garmin-Grafana-Dashboard-Preview.png?raw=true) 16 | 17 | ## Features 18 | 19 | - Automatic data collection from Garmin 20 | - Collects comprehensive health metrics including: 21 | - Heart Rate Data 22 | - Hourly steps Heatmap 23 | - Daily Step Count 24 | - Sleep Data and patterns (SpO2, Breathing rate, Sleep movements, HRV) 25 | - Sleep regularity heatmap (Visualize sleep routine) 26 | - Stress Data 27 | - Body Battery data 28 | - Calories 29 | - Sleep Score 30 | - Activity Minutes and HR zones 31 | - Activity Timeline (workouts) 32 | - GPS data from workouts (track, pace, altitude, HR) 33 | - And more... 34 | - Automated data fetching in regular interval (set and forget) 35 | - Historical data backfilling 36 | 37 | ## Install with Docker (Recommended) 38 | 39 | 0. Install docker if you don't have it already. Docker is supported in all major platforms/OS. Please check the [docker installation guide](https://docs.docker.com/engine/install/). 40 | 41 | 1. Create a folder named `garmin-fetch-data`, cd into the folder. Then create a folder named `garminconnect-tokens` inside the current folder (`garmin-fetch-data`) with the command `mkdir garminconnect-tokens`. Run `chown -R 1000:1000 garminconnect-tokens` to change the ownership of the garminconnect-tokens folder (so the `garmin-fetch-data` container's internal user can use it to store the Authentication tokens) 42 | 43 | 2. 
Create a `compose.yml` file inside the current `garmin-fetch-data` folder with the content of the given [compose-example.yml](./compose-example.yml) ( Change the environment variables accordingly ) 44 | 45 | 3. You can use two additional environment variables `GARMINCONNECT_EMAIL` and `GARMINCONNECT_BASE64_PASSWORD` to add the login information directly. otherwise you will need to enter them in the initial setup phase when prompted. Please note that the password must be encoded with [Base64](http://base64encode.org/) when using the `GARMINCONNECT_BASE64_PASSWORD` ENV variable. This is to ensure your Garmin Connect password is not in plaintext in the compose file. The script will decode it and use it when required. If you set these two ENV variables and do not have two factor authentication (via SMS or email), you can directly jump to `step 5`. 46 | 47 | **Note:** If you are planning to use Influxdb V3, you need to enter the admin access token in `INFLUXDB_V3_ACCESS_TOKEN`. To generate the admin token you should run `docker exec influxdb influxdb3 create token --admin` command. This will give you the admin token which you must update to `INFLUXDB_V3_ACCESS_TOKEN` ENV variable. You can do this only once and the token can't be viewed or retrieved ever again (influxdb only stores a hash of it in the database for comparison). So please store this token carefully. 48 | 49 | 4. If you did not set up the email and password ENV variables or have 2FA enabled, you must run the following command first to get the Email, password and 2FA code prompt interactively: `docker pull thisisarpanghosh/garmin-fetch-data:latest && docker compose run --rm garmin-fetch-data`. Enter the Email, Password (the characters will be visible when you type to avoid confusion, so find some privacy. If you paste the password, make sure there is no trailing space or unwanted characters), and 2FA code (if you have that enabled). 
Once you see the successful authentication message followed by successful data fetching in the stdout log, exit out with `ctrl + c`. This will automatically remove this orphan container as this was started with the `--rm` flag. You need to login like this **only once**. The script will [save the session Authentication tokens](https://github.com/cyberjunky/python-garminconnect/issues/213#issuecomment-2213292471) in the container's internal `/home/appuser/.garminconnect` folder for future use. That token can be used for all the future requests as long as it's valid (expected session token lifetime is about [one year](https://github.com/cyberjunky/python-garminconnect/issues/213), as Garmin seems to use long term valid access tokens instead of short term valid {access token + refresh token} pairs). This helps in reusing the authentication without logging in every time when the container starts, as that leads to `429 Client Error`, when login is attempted repeatedly from the same IP address. If you run into `429 Client Error` during your first login attempt with this script, please refer to the troubleshooting section below. 50 | 51 | 5. Finally run : `docker compose up -d` ( to launch the full stack in detached mode ). Thereafter you should check the logs with `docker compose logs --follow` to see any potential error from the containers. This will help you debug the issue, if there is any (especially read/write permission issues). If you are using docker volumes, there is little chance of this happening as file permissions will be managed by docker. For bind mounts, if you are having permission issues, please check the troubleshooting section. 52 | 53 | 7. Now you can check out the `http://localhost:3000` to reach Grafana (by default), do the initial setup with the default username `admin` and password `admin`, add influxdb as the data source. 
Please note the influxdb hostname is set as `influxdb` with port `8086` so you should use `http://influxdb:8086` for the address during data source setup and not `http://localhost:8086` because influxdb is running as a separate container but part of the same docker network and stack. Here the database name should be `GarminStats` matching the influxdb DB name from the docker compose. The query language used for the dashboard is `influxql` which is supported by both InfluxDB 1.x and 3.x, so please select that from the language dropdown during setup. Use the same username and password you used for your influxdb container (check your docker compose config for influxdb container, here we used `influxdb_user` and `influxdb_secret_password` in default configuration). Test the connection to make sure the influxdb is up and reachable (you are good to go if it finds the measurements when you test the connection) 54 | 55 | 8. To use the Grafana dashboard, please use the [JSON file](https://github.com/arpanghosh8453/garmin-grafana/blob/main/Grafana_Dashboard/Garmin-Grafana-Dashboard.json) downloaded directly from GitHub or use the import code **23245** to pull them directly from the Grafana dashboard cloud. 56 | 57 | 9. In the Grafana dashboard, the heatmap panels require an additional plugin you must install. This can be done by using the `GF_PLUGINS_PREINSTALL=marcusolsson-hourly-heatmap-panel` environment variable like in the [compose-example.yml](./compose-example.yml) file, or after the creation of the container very easily with docker commands. Just run `docker exec -it grafana grafana cli plugins install marcusolsson-hourly-heatmap-panel` and then run `docker restart grafana` to apply that plugin update. Now, you should be able to see the Heatmap panels on the dashboard loading successfully. 58 | 59 | 10. If you are in mainland China and use Garmin-cn account you need to set `GARMINCONNECT_IS_CN=True` 60 | 61 | If you have come this far, everything should be working. 
If not, please check the **troubleshooting section** for known issues. If it is already working, CONGRATULATIONS! Enjoy your dashboard and keep exercising! If you like the dashboard and my sincere effort behind it, please **star this repository**. If you enjoy it a lot and want to show your appreciation and share the joy with me, feel free to [buy me a coffee](https://ko-fi.com/A0A84F3DP). Maintaining this project takes a lot of my free time and your support keeps me motivated to develop more features for the community and spend more time on similar projects. If you are having any trouble, feel free to open an issue here, I will try my best to help you! 62 | 63 | --- 64 | 65 | This project is made for InfluxDB 1.11, as Flux queries on influxDB 2.x can be problematic to use with Grafana at times. In fact, InfluxQL is being reintroduced in InfluxDB 3.0, reflecting user feedback. Grafana also has better compatibility/stability with InfluxQL from InfluxDB 1.11. Moreover, there is statistical evidence that Influxdb 1.11 queries run faster compared to influxdb 2.x. Since InfluxDB 2.x offers no clear benefits for this project, there are no plans for a migration. 66 | 67 | Support of current [Influxdb 3](https://docs.influxdata.com/influxdb3/core/) OSS is also available with this project [ `Experimental` ] 68 | 69 | > [!IMPORTANT] 70 | > Please note that InfluxDB 3.x OSS limits the query time limit to 72 hours. This can be extended more by setting `INFLUXDB3_QUERY_FILE_LIMIT` to a very high value with a potential risk of crashing the container (OOM Error). As we are interested in visualizing long term data trends, this limit defeats the purpose. Hence, we strongly recommend using InfluxDB 1.11.x (default settings) to our users as long as it's not discontinued from production. 71 | 72 | Example `compose.yml` file contents are given here for a quick start. 
73 | 74 | ```yaml 75 | services: 76 | garmin-fetch-data: 77 | restart: unless-stopped 78 | image: thisisarpanghosh/garmin-fetch-data:latest 79 | container_name: garmin-fetch-data 80 | depends_on: 81 | - influxdb 82 | volumes: 83 | - ./garminconnect-tokens:/home/appuser/.garminconnect # (persistent tokens storage - garminconnect-tokens folder must be owned by 1000:1000) 84 | environment: 85 | - INFLUXDB_HOST=influxdb 86 | - INFLUXDB_PORT=8086 # Influxdb V3 maps to 8181 instead of 8086 of V1 87 | - INFLUXDB_USERNAME=influxdb_user # user should have read/write access to INFLUXDB_DATABASE (Required for influxdb 1.x, ignore for influxdb 3.x - set the 3.x specific variables) 88 | - INFLUXDB_PASSWORD=influxdb_secret_password # (Required for influxdb 1.x, ignore for influxdb 3.x - set the 3.x specific variables) 89 | - INFLUXDB_DATABASE=GarminStats 90 | - GARMINCONNECT_EMAIL=your_garminconnect_email # optional, read the setup docs 91 | - GARMINCONNECT_BASE64_PASSWORD=your_base64_encoded_garminconnect_password # optional, must be Base64 encoded, read setup docs 92 | - GARMINCONNECT_IS_CN=False # Set this to True if you are in mainland China or use Garmin-cn (Default False) 93 | ##################################################################################### 94 | # The following ENV variables are required only if you are using influxdb V3 (You won't have to set the above ) 95 | ##################################################################################### 96 | # - INFLUXDB_VERSION=1 # Required for influxdb V3, Default is 1, must be overridden with 3 if using Influxdb V3 97 | # - INFLUXDB_V3_ACCESS_TOKEN=your_influxdb_admin_access_token # Required for influxdb V3 (ignored for V1), Set this to your admin access token (or a token that has database R/W access) - You can generate this by following step 3 notes in the README instructions 98 | ##################################################################################### 99 | # The following ENV variables will 
override some default settings. 100 | # Please read the README guide before using them as they may change how the script behaves 101 | ##################################################################################### 102 | # - LOG_LEVEL=INFO # change to DEBUG to get DEBUG logs 103 | # - UPDATE_INTERVAL_SECONDS=300 # Default update check interval is set to 5 minutes 104 | # - FETCH_ADVANCED_TRAINING_DATA=False # This enables fetching Training readiliness, Activity VO2Max, Race Pediction metrics etc when set to True 105 | # - KEEP_FIT_FILES=False # Stores the FIT files (downloads and saves them) when set to True - read docs for more details 106 | # - ALWAYS_PROCESS_FIT_FILES=False # Enables processing FIT files even if GPS data is not present in it when set to True, default False 107 | # - USER_TIMEZONE="" # Can hardcode user's timezone, fetches timezone automatically and dynamically on each run if set to empty (default) - Read docs 108 | # - INFLUXDB_ENDPOINT_IS_HTTP=True # Set this to False if you are using HTTPS for your influxdb connection (over the internet) 109 | # - FORCE_REPROCESS_ACTIVITIES=False # Enables re-processing of already processed FIT files on iterative updates when set to True 110 | 111 | influxdb: 112 | restart: unless-stopped 113 | container_name: influxdb 114 | hostname: influxdb 115 | environment: 116 | - INFLUXDB_DB=GarminStats 117 | - INFLUXDB_USER=influxdb_user 118 | - INFLUXDB_USER_PASSWORD=influxdb_secret_password 119 | - INFLUXDB_DATA_INDEX_VERSION=tsi1 120 | ############################################################# 121 | # The following ENV variables are applicable for InfluxDB V3 122 | ############################################################# 123 | # - INFLUXDB3_MAX_HTTP_REQUEST_SIZE=10485760 124 | # - INFLUXDB3_NODE_IDENTIFIER_PREFIX=Influxdb-node1 125 | # - INFLUXDB3_BUCKET=GarminStats 126 | # - INFLUXDB3_OBJECT_STORE=file 127 | # - INFLUXDB3_DB_DIR=/data 128 | # - INFLUXDB3_QUERY_FILE_LIMIT=5000 # this set to be a very 
high value if you want to view long term data 129 | ports: 130 | - '8086:8086' # Influxdb V3 should map as "8181:8181" (Change INFLUXDB_PORT on garmin-fetch-data appropriately for InfluxDB V3) 131 | volumes: 132 | - influxdb_data:/var/lib/influxdb # InfluxDB V3 bind mount should be set like - influxdb_data:/data if you set INFLUXDB3_DB_DIR=/data (instead of /var/lib/influxdb) 133 | image: 'influxdb:1.11' # You must change this to 'quay.io/influxdb/influxdb3-core:latest' for influxdb V3 134 | 135 | grafana: 136 | restart: unless-stopped 137 | container_name: grafana 138 | hostname: grafana 139 | environment: 140 | - GF_SECURITY_ADMIN_USER=admin 141 | - GF_SECURITY_ADMIN_PASSWORD=admin 142 | - GF_PLUGINS_PREINSTALL=marcusolsson-hourly-heatmap-panel 143 | volumes: 144 | - grafana_data:/var/lib/grafana 145 | ports: 146 | - '3000:3000' 147 | image: 'grafana/grafana:latest' 148 | 149 | volumes: 150 | influxdb_data: 151 | grafana_data: 152 | 153 | ``` 154 | ### Additional configuration and environment variables 155 | 156 | ✅ The Above compose file creates an open read/write access influxdb database with no authentication. Unless you expose this database to the open internet directly, this poses no threat. If you share your local network, you may enable authentication and grant appropriate read/write access to the influxdb_user on the GarminStats database manually if you want with `INFLUXDB_ADMIN_ENABLED`, `INFLUXDB_ADMIN_USER`, and `INFLUXDB_ADMIN_PASSWORD` ENV variables during the setup by following the [influxdb guide](https://github.com/docker-library/docs/blob/master/influxdb/README.md) but this won't be covered here for the sake of simplicity. 157 | 158 | ✅ You can also enable additional advanced training data fetching with `FETCH_ADVANCED_TRAINING_DATA=True` flag in the compose file. This will fetch and store data such as training readiness, hill score, VO2 max, and Race prediction if you have them available on Garmin connect. 
The implementations of this should work fine in theory but is not thoroughly tested. This is currently an experimental feature. There is no panel showing these data on the provided grafana dashboard. You must create your own to visualize these on Grafana. 159 | 160 | ✅ By default, the pulled FIT files are not stored as files to save storage space during import (an in-memory IO buffer is used instead). If you want to keep the FIT files downloaded during the import for future use in `Strava` or any other application where FIT files are supported for import, you can turn on `KEEP_FIT_FILES=True` under `garmin-fetch-data` environment variables in the compose file. To access the files from the host machine, you should create a folder named `fit_filestore` with `mkdir fit_filestore` inside the `garmin-fetch-data` folder (where your compose file is currently located) and change the ownership with `chown 1000:1000 fit_filestore`, and then must set up a volume bind mount like this `./fit_filestore:/home/appuser/fit_filestore` under the volumes section of `garmin-fetch-data`. This would map the container's internal `/home/appuser/fit_filestore` folder to the `fit_filestore` folder you created. You will see the FIT files for your activities appear inside this `fit_filestore` folder once the script starts running. 161 | 162 | ✅ By default indoor activities FIT files lacking GPS data are not processed (Activity summaries are processed for all activities, just not the detailed intra-activity HR, Pace etc. which are included only inside the FIT files and require additional processing power) to save resources and processing time per fetched activity. If you want to process all activities regardless of GPS data availability associated with the activity, you can set `ALWAYS_PROCESS_FIT_FILES=True` in the environment variables section of the `garmin-fetch-data` container as that will ensure all FIT files are processed irrespective of GPS data availability with the activities. 
163 | 164 | ✅ If you are having missing data on previous days till midnight (which are available on Garmin Connect but missing on dashboard) or sync issues when using the automatic periodic fetching, consider updating the container to the recent version and use the `USER_TIMEZONE` environment variable under the `garmin-fetch-data` service. This variable is optional and the script tries to determine the timezone and fetch the UTC offset automatically if this variable is set as empty. If you see the automatic identification is not working for you, this variable can be used to override that behaviour and ensure the script is using the hardcoded timezone for all data fetching related activities. The previous gaps won't be filled (you need to fetch them using the historic bulk update method), but moving forward, the script will keep everything in sync. 165 | 166 | ✅ Want this dashboard in **Imperial units** instead of **metric units**? I can't maintain two separate dashboards at the same time but here is an [excellent step-by-step guide](https://github.com/arpanghosh8453/garmin-grafana/issues/27#issuecomment-2817081738) on how you can do it yourself on your dashboard! 167 | 168 | ## Historical data fetching (bulk update) 169 | 170 | Please note that this process is intentionally rate limited with a 5 second wait period between each day update to ensure the Garmin servers are not overloaded with requests when using bulk update. You can update the value with `RATE_LIMIT_CALLS_SECONDS` ENV variable in the `garmin-fetch-data` container, but lowering it is not recommended. 171 | 172 | #### Procedure 173 | 174 | 1. Please run the above docker based installation steps `1` to `4` first (to set up the Garmin Connect login session tokens if not done already). 175 | 176 | 2. Stop the running container and remove it with `docker compose down` if running already 177 | 178 | 3. 
Run command `docker compose run --rm -e MANUAL_START_DATE=YYYY-MM-DD -e MANUAL_END_DATE=YYYY-MM-DD garmin-fetch-data` to update the data between the two dates. You need to replace the `YYYY-MM-DD` with the actual dates in that format, for example `docker compose run --rm -e MANUAL_START_DATE=2025-04-12 -e MANUAL_END_DATE=2025-04-14 garmin-fetch-data`. The `MANUAL_END_DATE` variable is optional, if not provided, the script assumes it to be the current date. `MANUAL_END_DATE` must be in future to the `MANUAL_START_DATE` variable passed, and in case they are same, data is still pulled for that specific date. 179 | 180 | 4. Please note that the bulk data fetching is done in **reverse chronological order**. So you will have recent data first and it will keep going back until it hits `MANUAL_START_DATE`. You can have this running in background. If this terminates after some time unexpectedly, you can check back the last successful update date from the container stdout logs and use that as the `MANUAL_END_DATE` when running bulk update again as it's done in reverse chronological order. 181 | 182 | 4. After successful bulk fetching, you will see a `Bulk update success` message and the container will exit and remove itself automatically. 183 | 184 | 5. Now you can run the regular periodic update with `docker compose up -d` 185 | 186 | ## Update to new versions 187 | 188 | Updating with docker is super simple. Just go to the folder where the `compose.yml` is and run `docker compose pull` and then `docker compose down && docker compose up -d`. Please verify if everything is running correctly by checking the logs with `docker compose logs --follow` 189 | 190 | ## Backup Database 191 | 192 | Whether you are using a bind mount or a docker volume, creating a restorable archival backup of your valuable health data is always advised. 
Assuming you named your database as `GarminStats` and influxdb container name is `influxdb`, you can use the following script to create a static archival backup of your data present in the influxdb database at that time point. These restore points can be used to re-create the influxdb database with the archived data without requesting them from Garmin's servers again, which is not only time consuming but also resource intensive. 193 | 194 | ```bash 195 | #!/bin/bash 196 | TIMESTAMP=$(date +%F_%H-%M) 197 | BACKUP_DIR="./influxdb_backups/$TIMESTAMP" 198 | mkdir -p "$BACKUP_DIR" 199 | docker exec influxdb influxd backup -portable -db GarminStats /tmp/influxdb_backup 200 | docker cp influxdb:/tmp/influxdb_backup "$BACKUP_DIR" 201 | docker exec influxdb rm -r /tmp/influxdb_backup 202 | ``` 203 | 204 | The above bash script would create a folder named `influxdb_backups` inside your current working directory and create a subfolder under it with the current date-time. Then it will create the backup for the `GarminStats` database and copy the backup files to that location. 205 | 206 | For restoring the data from a backup, you first need to make the files available inside the new influxdb docker container. You can use `docker cp` or volume bind mount for this. Once the backup data is available to the container internally, you can simply run `docker exec influxdb influxd restore -portable -db GarminStats /path/to/internal-backup-directory` to restore the backup. 207 | 208 | Please read the detailed guide on this from the [influxDB documentation for backup and restore](https://docs.influxdata.com/influxdb/v1/administration/backup_and_restore/) 209 | 210 | 211 | ## Troubleshooting 212 | 213 | - The issued session token is apparently [valid only for 1 year](https://github.com/cyberjunky/python-garminconnect/issues/213) or less. Therefore, the automatic fetch will fail after the token expires. 
If you have been using it for more than one year, you may need to stop, remove and redeploy the container (follow the same instructions for initial setup, you will be asked for the username and password + 2FA code again). If you are not using MFA/2FA (SMS or email one time code), you can use the `GARMINCONNECT_EMAIL` and `GARMINCONNECT_BASE64_PASSWORD` (remember, this is [base64 encoded](http://base64encode.org/) password, not plaintext) ENV variables in the compose file to give this info directly, so the script will be able to re-generate the tokens once they expire. Unfortunately, if you are using MFA/2FA, you need to enter the one time code manually after rebuilding the container every year when the tokens expire to keep the script running (Once the session token is valid again, the script will automatically back-fill the data you missed) 214 | 215 | - If you are getting `429 Client Error` after a few login tries during the initial setup, this is an indication that you are being rate limited based on your public IP. Garmin has a set limit for repeated login attempts from the same IP address to protect your account. You can wait for a few hours or a day, or switch to a different wifi network outside your home (will give you a new public IP) or just simply use mobile hotspot (will give you a new public IP as well) for the initial login attempt. This should work in theory as [discussed here](https://github.com/matin/garth/discussions/60). 216 | 217 | - Running into `401 Client Error` when trying to login for the first time? Make sure you are using the correct username and password for your account. If you enter it at runtime, it should be in plaintext but if you add it with environment variables in the docker compose stack, it must be [Base64 encoded](https://www.base64encode.org/). 
If you are 100% sure you are using the right credentials, and still get this error, it's probably due to the fact that you are connected to a VPN network which is preventing the login request (see issue [#20](https://github.com/arpanghosh8453/garmin-grafana/issues/20)). If you are not using a VPN, then please try running the container with mobile hotspot network or with a VPN exit tunnel (both give you a different public IP) - you need to try this from a different network somehow. 218 | 219 | - If you want to bind mount the docker volumes for the `garmin-fetch-data` container, please keep in mind that the script runs with the internal user `appuser` with uid and gid set as 1000. So please chown the bind mount folder accordingly as stated in the above instructions. Also, `grafana` container requires the bind mount folders to be owned by `472:472` and `influxdb:1.11` container requires the bind mount folders to be owned by `1500:1500`. If none of this solves the `Permission Denied` issue for you, you can change the bind mount folder permission as `777` with `chmod -R 777 garminconnect-tokens`. Another solution could be to add `user: root` in the container configuration to run it as root instead of the default `appuser` (this option has security considerations) 220 | 221 | - If the Activities details (GPS, Pace, HR, Altitude) are not appearing on the dashboard, make sure to select an Activity listed on the top left corner of the Dashboard (In the `Activity with GPS` variable dropdown). If you see no values are available there, but in the log you see the activities are being pulled successfully, then it's due to a Grafana Bug. Go to the dashboard variable settings, and please ensure the correct datasource is selected for the variable and the query is set to `SHOW TAG VALUES FROM "ActivityGPS" WITH KEY = "ActivitySelector" WHERE $timeFilter`. 
Once you set this properly after the dashboard import, the values should show up correctly in the dropdown and you will be able to select a specific Activity and view its stats on the dashboard. 222 | 223 | ## Credits 224 | 225 | This project is made possible by **generous community contribution** towards the [gofundme](https://gofund.me/0d53b8d1) advertised in [this post](https://www.reddit.com/r/Garmin/comments/1jucwhu/update_free_and_open_source_garmin_grafana/) on Reddit's [r/garmin](https://www.reddit.com/r/Garmin) community. I wanted to build this tool for a long time, but funds were never sufficient for me to get a Garmin, because they are pretty expensive. With the community donations, I was able to buy a `Garmin Vivoactive 6` and built this tool open to everyone. If you are using this tool and enjoy it, please remember what made this possible! Huge shoutout to the [r/garmin](https://www.reddit.com/r/Garmin) community for being generous, trusting me and actively supporting my idea! 226 | 227 | ## Dependencies 228 | 229 | - [python-garminconnect](https://github.com/cyberjunky/python-garminconnect) by [cyberjunky](https://github.com/cyberjunky) : Garmin Web API wrapper 230 | 231 | - [garth](https://github.com/matin/garth) by [martin](https://github.com/matin) : Used for Garmin SSO Authentication 232 | 233 | ## Love this project? 234 | 235 | I'm thrilled that you're using this dashboard. Your interest and engagement mean a lot to me! You can view and analyze more detailed health statistics with this setup than paying a connect+ subscription fee to Garmin. 236 | 237 | Maintaining and improving this project takes a significant amount of my free time. Your support helps keep me motivated to add new features and work on similar projects that benefit the community. 238 | 239 | If you find this project helpful, please consider: 240 | 241 | ⭐ Starring this repository to show your support and spread the news! 
242 | 243 | ☕ [Buying me a coffee](https://ko-fi.com/A0A84F3DP) if you'd like to contribute to its maintenance and future development. 244 | 245 | [![ko-fi](https://ko-fi.com/img/githubbutton_sm.svg)](https://ko-fi.com/A0A84F3DP) 246 | Buy me a coffee 247 | 248 | 249 | ## Need Help? 250 | 251 | If you're experiencing any issues with running this project or have questions, feel free to [open an issue](https://github.com/arpanghosh8453/garmin-grafana/issues/new/choose) on this repository. I'll do my best to assist you. 252 | 253 | ## Star History 254 | 255 | [![Star History Chart](https://api.star-history.com/svg?repos=arpanghosh8453/garmin-grafana&type=Date)](https://www.star-history.com/#arpanghosh8453/garmin-grafana&Date) 256 | -------------------------------------------------------------------------------- /compose-example.yml: -------------------------------------------------------------------------------- 1 | services: 2 | garmin-fetch-data: 3 | restart: unless-stopped 4 | image: thisisarpanghosh/garmin-fetch-data:latest 5 | container_name: garmin-fetch-data 6 | depends_on: 7 | - influxdb 8 | volumes: 9 | - ./garminconnect-tokens:/home/appuser/.garminconnect # (persistent tokens storage - garminconnect-tokens folder must be owned by 1000:1000) 10 | environment: 11 | - INFLUXDB_HOST=influxdb 12 | - INFLUXDB_PORT=8086 # Influxdb V3 maps to 8181 instead of 8086 of V1 13 | - INFLUXDB_USERNAME=influxdb_user # user should have read/write access to INFLUXDB_DATABASE (Required for influxdb 1.x, ignore for influxdb 3.x - set the 3.x specific variables) 14 | - INFLUXDB_PASSWORD=influxdb_secret_password # (Required for influxdb 1.x, ignore for influxdb 3.x - set the 3.x specific variables) 15 | - INFLUXDB_DATABASE=GarminStats 16 | - GARMINCONNECT_EMAIL=your_garminconnect_email # optional, read the setup docs 17 | - GARMINCONNECT_BASE64_PASSWORD=your_base64_encoded_garminconnect_password # optional, must be Base64 encoded, read setup docs 18 | - GARMINCONNECT_IS_CN=False # 
Set this to True if you are in mainland China or use Garmin-cn (Default False) 19 | ##################################################################################### 20 | # The following ENV variables are required only if you are using influxdb V3 (You won't have to set the above ) 21 | ##################################################################################### 22 | # - INFLUXDB_VERSION=1 # Required for influxdb V3, Default is 1, must be overridden with 3 if using Influxdb V3 23 | # - INFLUXDB_V3_ACCESS_TOKEN=your_influxdb_admin_access_token # Required for influxdb V3 (ignored for V1), Set this to your admin access token (or a token that has database R/W access) - You can generate this by following step 3 notes in the README installation 24 | ##################################################################################### 25 | # The following ENV variables will override some default settings. 26 | # Please read the README guide before using them as they may change how the script behaves 27 | ##################################################################################### 28 | # - LOG_LEVEL=INFO # change to DEBUG to get DEBUG logs 29 | # - UPDATE_INTERVAL_SECONDS=300 # Default update check interval is set to 5 minutes 30 | # - FETCH_ADVANCED_TRAINING_DATA=False # This enables fetching Training readiliness, Activity VO2Max, Race Pediction metrics etc when set to True 31 | # - KEEP_FIT_FILES=False # Stores the FIT files (downloads and saves them) when set to True - read docs for more details 32 | # - ALWAYS_PROCESS_FIT_FILES=False # Enables processing FIT files even if GPS data is not present in it when set to True, default False 33 | # - USER_TIMEZONE="" # Can hardcode user's timezone, fetches timezone automatically and dynamically on each run if set to empty (default) - Read docs 34 | # - INFLUXDB_ENDPOINT_IS_HTTP=True # Set this to False if you are using HTTPS for your influxdb connection (over the internet) 35 | # - 
FORCE_REPROCESS_ACTIVITIES=False # Enables re-processing of already processed FIT files on iterative updates when set to True 36 | 37 | influxdb: 38 | restart: unless-stopped 39 | container_name: influxdb 40 | hostname: influxdb 41 | environment: 42 | - INFLUXDB_DB=GarminStats 43 | - INFLUXDB_USER=influxdb_user 44 | - INFLUXDB_USER_PASSWORD=influxdb_secret_password 45 | - INFLUXDB_DATA_INDEX_VERSION=tsi1 46 | ############################################################# 47 | # The following ENV variables are applicable for InfluxDB V3 48 | ############################################################# 49 | # - INFLUXDB3_MAX_HTTP_REQUEST_SIZE=10485760 50 | # - INFLUXDB3_NODE_IDENTIFIER_PREFIX=Influxdb-node1 51 | # - INFLUXDB3_BUCKET=GarminStats 52 | # - INFLUXDB3_OBJECT_STORE=file 53 | # - INFLUXDB3_DB_DIR=/data 54 | # - INFLUXDB3_QUERY_FILE_LIMIT=5000 # this set to be a very high value if you want to view long term data 55 | ports: 56 | - '8086:8086' # Influxdb V3 should map as "8181:8181" (Change INFLUXDB_PORT on garmin-fetch-data appropriately for InfluxDB V3) 57 | volumes: 58 | - influxdb_data:/var/lib/influxdb # InfluxDB V3 bind mount should be set like - influxdb_data:/data if you set INFLUXDB3_DB_DIR=/data (instead of /var/lib/influxdb) 59 | image: 'influxdb:1.11' # You must change this to 'quay.io/influxdb/influxdb3-core:latest' for influxdb V3 60 | 61 | grafana: 62 | restart: unless-stopped 63 | container_name: grafana 64 | hostname: grafana 65 | environment: 66 | - GF_SECURITY_ADMIN_USER=admin 67 | - GF_SECURITY_ADMIN_PASSWORD=admin 68 | - GF_PLUGINS_PREINSTALL=marcusolsson-hourly-heatmap-panel 69 | volumes: 70 | - grafana_data:/var/lib/grafana 71 | ports: 72 | - '3000:3000' 73 | image: 'grafana/grafana:latest' 74 | 75 | volumes: 76 | influxdb_data: 77 | grafana_data: 78 | -------------------------------------------------------------------------------- /garmin-fetch.py: -------------------------------------------------------------------------------- 1 | # 
%% 2 | import base64, requests, time, pytz, logging, os, sys, dotenv, io, zipfile 3 | from fitparse import FitFile, FitParseError 4 | from datetime import datetime, timedelta 5 | from influxdb import InfluxDBClient 6 | from influxdb.exceptions import InfluxDBClientError 7 | from influxdb_client_3 import InfluxDBClient3, InfluxDBError 8 | import xml.etree.ElementTree as ET 9 | from garth.exc import GarthHTTPError 10 | from garminconnect import ( 11 | Garmin, 12 | GarminConnectAuthenticationError, 13 | GarminConnectConnectionError, 14 | GarminConnectTooManyRequestsError, 15 | ) 16 | garmin_obj = None 17 | banner_text = """ 18 | 19 | ***** █▀▀ ▄▀█ █▀█ █▀▄▀█ █ █▄ █   █▀▀ █▀█ ▄▀█ █▀▀ ▄▀█ █▄ █ ▄▀█ ***** 20 | ***** █▄█ █▀█ █▀▄ █ ▀ █ █ █ ▀█   █▄█ █▀▄ █▀█ █▀  █▀█ █ ▀█ █▀█ ***** 21 | 22 | ______________________________________________________________________ 23 | 24 | By Arpan Ghosh | Please consider supporting the project if you love it 25 | ______________________________________________________________________ 26 | 27 | """ 28 | print(banner_text) 29 | 30 | env_override = dotenv.load_dotenv("override-default-vars.env", override=True) 31 | if env_override: 32 | logging.warning("System ENV variables are overriden with override-default-vars.env") 33 | 34 | # %% 35 | INFLUXDB_VERSION = os.getenv("INFLUXDB_VERSION",'1') # Your influxdb database verion (accepted values are '1' or '3') 36 | assert INFLUXDB_VERSION in ['1','3'], "Only InfluxDB version 1 or 3 is allowed - please ensure to set this value to either 1 or 3" 37 | INFLUXDB_HOST = os.getenv("INFLUXDB_HOST",'your.influxdb.hostname') # Required 38 | INFLUXDB_PORT = int(os.getenv("INFLUXDB_PORT", 8086)) # Required 39 | INFLUXDB_USERNAME = os.getenv("INFLUXDB_USERNAME", 'influxdb_username') # Required 40 | INFLUXDB_PASSWORD = os.getenv("INFLUXDB_PASSWORD", 'influxdb_access_password') # Required 41 | INFLUXDB_DATABASE = os.getenv("INFLUXDB_DATABASE", 'GarminStats') # Required 42 | INFLUXDB_V3_ACCESS_TOKEN = 
os.getenv("INFLUXDB_V3_ACCESS_TOKEN",'') # InfluxDB V3 Access token, required only for InfluxDB V3 43 | TOKEN_DIR = os.getenv("TOKEN_DIR", "~/.garminconnect") # optional 44 | GARMINCONNECT_EMAIL = os.environ.get("GARMINCONNECT_EMAIL", None) # optional, asks in prompt on run if not provided 45 | GARMINCONNECT_PASSWORD = base64.b64decode(os.getenv("GARMINCONNECT_BASE64_PASSWORD")).decode("utf-8") if os.getenv("GARMINCONNECT_BASE64_PASSWORD") != None else None # optional, asks in prompt on run if not provided 46 | GARMINCONNECT_IS_CN = True if os.getenv("GARMINCONNECT_IS_CN") in ['True', 'true', 'TRUE','t', 'T', 'yes', 'Yes', 'YES', '1'] else False # optional if you are using a Chinese account 47 | GARMIN_DEVICENAME = os.getenv("GARMIN_DEVICENAME", "Unknown") # optional, attepmts to set the same automatically if not given 48 | AUTO_DATE_RANGE = False if os.getenv("AUTO_DATE_RANGE") in ['False','false','FALSE','f','F','no','No','NO','0'] else True # optional 49 | MANUAL_START_DATE = os.getenv("MANUAL_START_DATE", None) # optional, in YYYY-MM-DD format, if you want to bulk update only from specific date 50 | MANUAL_END_DATE = os.getenv("MANUAL_END_DATE", datetime.today().strftime('%Y-%m-%d')) # optional, in YYYY-MM-DD format, if you want to bulk update until a specific date 51 | LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO") # optional 52 | FETCH_FAILED_WAIT_SECONDS = int(os.getenv("FETCH_FAILED_WAIT_SECONDS", 1800)) # optional 53 | RATE_LIMIT_CALLS_SECONDS = int(os.getenv("RATE_LIMIT_CALLS_SECONDS", 5)) # optional 54 | INFLUXDB_ENDPOINT_IS_HTTP = False if os.getenv("INFLUXDB_ENDPOINT_IS_HTTP") in ['False','false','FALSE','f','F','no','No','NO','0'] else True # optional 55 | GARMIN_DEVICENAME_AUTOMATIC = False if GARMIN_DEVICENAME != "Unknown" else True # optional 56 | UPDATE_INTERVAL_SECONDS = int(os.getenv("UPDATE_INTERVAL_SECONDS", 300)) # optional 57 | FETCH_ADVANCED_TRAINING_DATA = True if os.getenv("FETCH_ADVANCED_TRAINING_DATA") in ['True', 'true', 'TRUE','t', 'T', 
'yes', 'Yes', 'YES', '1'] else False # optional 58 | KEEP_FIT_FILES = True if os.getenv("KEEP_FIT_FILES") in ['True', 'true', 'TRUE','t', 'T', 'yes', 'Yes', 'YES', '1'] else False # optional 59 | FIT_FILE_STORAGE_LOCATION = os.getenv("FIT_FILE_STORAGE_LOCATION", os.path.join(os.path.expanduser("~"), "fit_filestore")) 60 | ALWAYS_PROCESS_FIT_FILES = True if os.getenv("ALWAYS_PROCESS_FIT_FILES") in ['True', 'true', 'TRUE','t', 'T', 'yes', 'Yes', 'YES', '1'] else False # optional, will process all FIT files for all activities including indoor ones lacking GPS data 61 | FORCE_REPROCESS_ACTIVITIES = True if os.getenv("FORCE_REPROCESS_ACTIVITIES") in ['True', 'true', 'TRUE','t', 'T', 'yes', 'Yes', 'YES', '1'] else False # optional, will process all FIT files for all activities including indoor ones lacking GPS data 62 | USER_TIMEZONE = os.getenv("USER_TIMEZONE", "") # optional, fetches timezone info from last activity automatically if left blank 63 | PARSED_ACTIVITY_ID_LIST = [] 64 | 65 | # %% 66 | for handler in logging.root.handlers[:]: 67 | logging.root.removeHandler(handler) 68 | 69 | logging.basicConfig( 70 | level=getattr(logging, LOG_LEVEL, logging.INFO), 71 | format="%(asctime)s - %(levelname)s - %(message)s", 72 | handlers=[ 73 | logging.StreamHandler(sys.stdout) 74 | ] 75 | ) 76 | 77 | # %% 78 | try: 79 | if INFLUXDB_ENDPOINT_IS_HTTP: 80 | if INFLUXDB_VERSION == '1': 81 | influxdbclient = InfluxDBClient(host=INFLUXDB_HOST, port=INFLUXDB_PORT, username=INFLUXDB_USERNAME, password=INFLUXDB_PASSWORD) 82 | influxdbclient.switch_database(INFLUXDB_DATABASE) 83 | else: 84 | influxdbclient = InfluxDBClient3( 85 | host=f"http://{INFLUXDB_HOST}:{INFLUXDB_PORT}", 86 | token=INFLUXDB_V3_ACCESS_TOKEN, 87 | database=INFLUXDB_DATABASE 88 | ) 89 | else: 90 | if INFLUXDB_VERSION == '1': 91 | influxdbclient = InfluxDBClient(host=INFLUXDB_HOST, port=INFLUXDB_PORT, username=INFLUXDB_USERNAME, password=INFLUXDB_PASSWORD, ssl=True, verify_ssl=True) 92 | 
influxdbclient.switch_database(INFLUXDB_DATABASE) 93 | else: 94 | influxdbclient = InfluxDBClient3( 95 | host=f"https://{INFLUXDB_HOST}:{INFLUXDB_PORT}", 96 | token=INFLUXDB_V3_ACCESS_TOKEN, 97 | database=INFLUXDB_DATABASE 98 | ) 99 | demo_point = { 100 | 'measurement': 'DemoPoint', 101 | 'time': '1970-01-01T00:00:00+00:00', 102 | 'tags': {'DemoTag': 'DemoTagValue'}, 103 | 'fields': {'DemoField': 0} 104 | } 105 | # The following code block tests the connection by writing/overwriting a demo point. raises error and aborts if connection fails. 106 | if INFLUXDB_VERSION == '1': 107 | influxdbclient.write_points([demo_point]) 108 | else: 109 | influxdbclient.write(record=[demo_point]) 110 | except (InfluxDBClientError, InfluxDBError) as err: 111 | logging.error("Unable to connect with influxdb database! Aborted") 112 | raise InfluxDBClientError("InfluxDB connection failed:" + str(err)) 113 | 114 | # %% 115 | def iter_days(start_date: str, end_date: str): 116 | start = datetime.strptime(start_date, '%Y-%m-%d') 117 | end = datetime.strptime(end_date, '%Y-%m-%d') 118 | current = end 119 | 120 | while current >= start: 121 | yield current.strftime('%Y-%m-%d') 122 | current -= timedelta(days=1) 123 | 124 | 125 | # %% 126 | def garmin_login(): 127 | try: 128 | logging.info(f"Trying to login to Garmin Connect using token data from directory '{TOKEN_DIR}'...") 129 | garmin = Garmin() 130 | garmin.login(TOKEN_DIR) 131 | logging.info("login to Garmin Connect successful using stored session tokens.") 132 | 133 | except (FileNotFoundError, GarthHTTPError, GarminConnectAuthenticationError): 134 | logging.warning("Session is expired or login information not present/incorrect. 
You'll need to log in again...login with your Garmin Connect credentials to generate them.") 135 | try: 136 | user_email = GARMINCONNECT_EMAIL or input("Enter Garminconnect Login e-mail: ") 137 | user_password = GARMINCONNECT_PASSWORD or input("Enter Garminconnect password (characters will be visible): ") 138 | garmin = Garmin( 139 | email=user_email, password=user_password, is_cn=GARMINCONNECT_IS_CN, return_on_mfa=True 140 | ) 141 | result1, result2 = garmin.login() 142 | if result1 == "needs_mfa": # MFA is required 143 | mfa_code = input("MFA one-time code (via email or SMS): ") 144 | garmin.resume_login(result2, mfa_code) 145 | 146 | garmin.garth.dump(TOKEN_DIR) 147 | logging.info(f"Oauth tokens stored in '{TOKEN_DIR}' directory for future use") 148 | 149 | garmin.login(TOKEN_DIR) 150 | logging.info("login to Garmin Connect successful using stored session tokens.") 151 | 152 | except ( 153 | FileNotFoundError, 154 | GarthHTTPError, 155 | GarminConnectAuthenticationError, 156 | requests.exceptions.HTTPError, 157 | ) as err: 158 | logging.error(str(err)) 159 | raise Exception("Session is expired : please login again and restart the script") 160 | 161 | return garmin 162 | 163 | # %% 164 | def write_points_to_influxdb(points): 165 | try: 166 | if len(points) != 0: 167 | if INFLUXDB_VERSION == '1': 168 | influxdbclient.write_points(points) 169 | else: 170 | influxdbclient.write(record=points) 171 | logging.info("Success : updated influxDB database with new points") 172 | except (InfluxDBClientError, InfluxDBError) as err: 173 | logging.error("Write failed : Unable to connect with database! 
" + str(err)) 174 | 175 | # %% 176 | def get_daily_stats(date_str): 177 | points_list = [] 178 | stats_json = garmin_obj.get_stats(date_str) 179 | if stats_json['wellnessStartTimeGmt'] and datetime.strptime(date_str, "%Y-%m-%d") < datetime.today(): 180 | points_list.append({ 181 | "measurement": "DailyStats", 182 | "time": pytz.timezone("UTC").localize(datetime.strptime(stats_json['wellnessStartTimeGmt'], "%Y-%m-%dT%H:%M:%S.%f")).isoformat(), 183 | "tags": { 184 | "Device": GARMIN_DEVICENAME, 185 | "Database_Name": INFLUXDB_DATABASE 186 | }, 187 | "fields": { 188 | "activeKilocalories": stats_json.get('activeKilocalories'), 189 | "bmrKilocalories": stats_json.get('bmrKilocalories'), 190 | 191 | 'totalSteps': stats_json.get('totalSteps'), 192 | 'totalDistanceMeters': stats_json.get('totalDistanceMeters'), 193 | 194 | "highlyActiveSeconds": stats_json.get("highlyActiveSeconds"), 195 | "activeSeconds": stats_json.get("activeSeconds"), 196 | "sedentarySeconds": stats_json.get("sedentarySeconds"), 197 | "sleepingSeconds": stats_json.get("sleepingSeconds"), 198 | "moderateIntensityMinutes": stats_json.get("moderateIntensityMinutes"), 199 | "vigorousIntensityMinutes": stats_json.get("vigorousIntensityMinutes"), 200 | 201 | "floorsAscendedInMeters": stats_json.get("floorsAscendedInMeters"), 202 | "floorsDescendedInMeters": stats_json.get("floorsDescendedInMeters"), 203 | "floorsAscended": stats_json.get("floorsAscended"), 204 | "floorsDescended": stats_json.get("floorsDescended"), 205 | 206 | "minHeartRate": stats_json.get("minHeartRate"), 207 | "maxHeartRate": stats_json.get("maxHeartRate"), 208 | "restingHeartRate": stats_json.get("restingHeartRate"), 209 | "minAvgHeartRate": stats_json.get("minAvgHeartRate"), 210 | "maxAvgHeartRate": stats_json.get("maxAvgHeartRate"), 211 | 212 | "stressDuration": stats_json.get("stressDuration"), 213 | "restStressDuration": stats_json.get("restStressDuration"), 214 | "activityStressDuration": stats_json.get("activityStressDuration"), 
215 | "uncategorizedStressDuration": stats_json.get("uncategorizedStressDuration"), 216 | "totalStressDuration": stats_json.get("totalStressDuration"), 217 | "lowStressDuration": stats_json.get("lowStressDuration"), 218 | "mediumStressDuration": stats_json.get("mediumStressDuration"), 219 | "highStressDuration": stats_json.get("highStressDuration"), 220 | 221 | "stressPercentage": stats_json.get("stressPercentage"), 222 | "restStressPercentage": stats_json.get("restStressPercentage"), 223 | "activityStressPercentage": stats_json.get("activityStressPercentage"), 224 | "uncategorizedStressPercentage": stats_json.get("uncategorizedStressPercentage"), 225 | "lowStressPercentage": stats_json.get("lowStressPercentage"), 226 | "mediumStressPercentage": stats_json.get("mediumStressPercentage"), 227 | "highStressPercentage": stats_json.get("highStressPercentage"), 228 | 229 | "bodyBatteryChargedValue": stats_json.get("bodyBatteryChargedValue"), 230 | "bodyBatteryDrainedValue": stats_json.get("bodyBatteryDrainedValue"), 231 | "bodyBatteryHighestValue": stats_json.get("bodyBatteryHighestValue"), 232 | "bodyBatteryLowestValue": stats_json.get("bodyBatteryLowestValue"), 233 | "bodyBatteryDuringSleep": stats_json.get("bodyBatteryDuringSleep"), 234 | "bodyBatteryAtWakeTime": stats_json.get("bodyBatteryAtWakeTime"), 235 | 236 | "averageSpo2": stats_json.get("averageSpo2"), 237 | "lowestSpo2": stats_json.get("lowestSpo2"), 238 | } 239 | }) 240 | if points_list: 241 | logging.info(f"Success : Fetching daily metrics for date {date_str}") 242 | return points_list 243 | else: 244 | logging.debug("No daily stat data available for the give date " + date_str) 245 | return [] 246 | 247 | 248 | # %% 249 | def get_last_sync(): 250 | global GARMIN_DEVICENAME 251 | points_list = [] 252 | sync_data = garmin_obj.get_device_last_used() 253 | if GARMIN_DEVICENAME_AUTOMATIC: 254 | GARMIN_DEVICENAME = sync_data.get('lastUsedDeviceName') or "Unknown" 255 | points_list.append({ 256 | "measurement": 
"DeviceSync", 257 | "time": datetime.fromtimestamp(sync_data['lastUsedDeviceUploadTime']/1000, tz=pytz.timezone("UTC")).isoformat(), 258 | "tags": { 259 | "Device": GARMIN_DEVICENAME, 260 | "Database_Name": INFLUXDB_DATABASE 261 | }, 262 | "fields": { 263 | "imageUrl": sync_data.get('imageUrl'), 264 | "Device_Name": GARMIN_DEVICENAME 265 | } 266 | }) 267 | if points_list: 268 | logging.info(f"Success : Updated device last sync time") 269 | else: 270 | logging.warning("No associated/synced Garmin device found with your account") 271 | return points_list 272 | 273 | # %% 274 | def get_sleep_data(date_str): 275 | points_list = [] 276 | all_sleep_data = garmin_obj.get_sleep_data(date_str) 277 | sleep_json = all_sleep_data.get("dailySleepDTO", None) 278 | if sleep_json["sleepEndTimestampGMT"]: 279 | points_list.append({ 280 | "measurement": "SleepSummary", 281 | "time": datetime.fromtimestamp(sleep_json["sleepEndTimestampGMT"]/1000, tz=pytz.timezone("UTC")).isoformat(), 282 | "tags": { 283 | "Device": GARMIN_DEVICENAME, 284 | "Database_Name": INFLUXDB_DATABASE 285 | }, 286 | "fields": { 287 | "sleepTimeSeconds": sleep_json.get("sleepTimeSeconds"), 288 | "deepSleepSeconds": sleep_json.get("deepSleepSeconds"), 289 | "lightSleepSeconds": sleep_json.get("lightSleepSeconds"), 290 | "remSleepSeconds": sleep_json.get("remSleepSeconds"), 291 | "awakeSleepSeconds": sleep_json.get("awakeSleepSeconds"), 292 | "averageSpO2Value": sleep_json.get("averageSpO2Value"), 293 | "lowestSpO2Value": sleep_json.get("lowestSpO2Value"), 294 | "highestSpO2Value": sleep_json.get("highestSpO2Value"), 295 | "averageRespirationValue": sleep_json.get("averageRespirationValue"), 296 | "lowestRespirationValue": sleep_json.get("lowestRespirationValue"), 297 | "highestRespirationValue": sleep_json.get("highestRespirationValue"), 298 | "awakeCount": sleep_json.get("awakeCount"), 299 | "avgSleepStress": sleep_json.get("avgSleepStress"), 300 | "sleepScore": sleep_json.get("sleepScores", {}).get("overall", 
{}).get("value"), 301 | "restlessMomentsCount": all_sleep_data.get("restlessMomentsCount"), 302 | "avgOvernightHrv": all_sleep_data.get("avgOvernightHrv"), 303 | "bodyBatteryChange": all_sleep_data.get("bodyBatteryChange"), 304 | "restingHeartRate": all_sleep_data.get("restingHeartRate") 305 | } 306 | }) 307 | sleep_movement_intraday = all_sleep_data.get("sleepMovement") 308 | if sleep_movement_intraday: 309 | for entry in sleep_movement_intraday: 310 | points_list.append({ 311 | "measurement": "SleepIntraday", 312 | "time": pytz.timezone("UTC").localize(datetime.strptime(entry["startGMT"], "%Y-%m-%dT%H:%M:%S.%f")).isoformat(), 313 | "tags": { 314 | "Device": GARMIN_DEVICENAME, 315 | "Database_Name": INFLUXDB_DATABASE 316 | }, 317 | "fields": { 318 | "SleepMovementActivityLevel": entry.get("activityLevel",-1), 319 | "SleepMovementActivitySeconds": int((datetime.strptime(entry["endGMT"], "%Y-%m-%dT%H:%M:%S.%f") - datetime.strptime(entry["startGMT"], "%Y-%m-%dT%H:%M:%S.%f")).total_seconds()) 320 | } 321 | }) 322 | sleep_levels_intraday = all_sleep_data.get("sleepLevels") 323 | if sleep_levels_intraday: 324 | for entry in sleep_levels_intraday: 325 | if entry.get("activityLevel") or entry.get("activityLevel") == 0: # Include 0 for Deepsleep but not None - Refer to issue #43 326 | points_list.append({ 327 | "measurement": "SleepIntraday", 328 | "time": pytz.timezone("UTC").localize(datetime.strptime(entry["startGMT"], "%Y-%m-%dT%H:%M:%S.%f")).isoformat(), 329 | "tags": { 330 | "Device": GARMIN_DEVICENAME, 331 | "Database_Name": INFLUXDB_DATABASE 332 | }, 333 | "fields": { 334 | "SleepStageLevel": entry.get("activityLevel"), 335 | "SleepStageSeconds": int((datetime.strptime(entry["endGMT"], "%Y-%m-%dT%H:%M:%S.%f") - datetime.strptime(entry["startGMT"], "%Y-%m-%dT%H:%M:%S.%f")).total_seconds()) 336 | } 337 | }) 338 | sleep_restlessness_intraday = all_sleep_data.get("sleepRestlessMoments") 339 | if sleep_restlessness_intraday: 340 | for entry in 
sleep_restlessness_intraday: 341 | if entry.get("value"): 342 | points_list.append({ 343 | "measurement": "SleepIntraday", 344 | "time": datetime.fromtimestamp(entry["startGMT"]/1000, tz=pytz.timezone("UTC")).isoformat(), 345 | "tags": { 346 | "Device": GARMIN_DEVICENAME, 347 | "Database_Name": INFLUXDB_DATABASE 348 | }, 349 | "fields": { 350 | "sleepRestlessValue": entry.get("value") 351 | } 352 | }) 353 | sleep_spo2_intraday = all_sleep_data.get("wellnessEpochSPO2DataDTOList") 354 | if sleep_spo2_intraday: 355 | for entry in sleep_spo2_intraday: 356 | if entry.get("spo2Reading"): 357 | points_list.append({ 358 | "measurement": "SleepIntraday", 359 | "time": pytz.timezone("UTC").localize(datetime.strptime(entry["epochTimestamp"], "%Y-%m-%dT%H:%M:%S.%f")).isoformat(), 360 | "tags": { 361 | "Device": GARMIN_DEVICENAME, 362 | "Database_Name": INFLUXDB_DATABASE 363 | }, 364 | "fields": { 365 | "spo2Reading": entry.get("spo2Reading") 366 | } 367 | }) 368 | sleep_respiration_intraday = all_sleep_data.get("wellnessEpochRespirationDataDTOList") 369 | if sleep_respiration_intraday: 370 | for entry in sleep_respiration_intraday: 371 | if entry.get("respirationValue"): 372 | points_list.append({ 373 | "measurement": "SleepIntraday", 374 | "time": datetime.fromtimestamp(entry["startTimeGMT"]/1000, tz=pytz.timezone("UTC")).isoformat(), 375 | "tags": { 376 | "Device": GARMIN_DEVICENAME, 377 | "Database_Name": INFLUXDB_DATABASE 378 | }, 379 | "fields": { 380 | "respirationValue": entry.get("respirationValue") 381 | } 382 | }) 383 | sleep_heart_rate_intraday = all_sleep_data.get("sleepHeartRate") 384 | if sleep_heart_rate_intraday: 385 | for entry in sleep_heart_rate_intraday: 386 | if entry.get("value"): 387 | points_list.append({ 388 | "measurement": "SleepIntraday", 389 | "time": datetime.fromtimestamp(entry["startGMT"]/1000, tz=pytz.timezone("UTC")).isoformat(), 390 | "tags": { 391 | "Device": GARMIN_DEVICENAME, 392 | "Database_Name": INFLUXDB_DATABASE 393 | }, 394 | 
"fields": { 395 | "heartRate": entry.get("value") 396 | } 397 | }) 398 | sleep_stress_intraday = all_sleep_data.get("sleepStress") 399 | if sleep_stress_intraday: 400 | for entry in sleep_stress_intraday: 401 | if entry.get("value"): 402 | points_list.append({ 403 | "measurement": "SleepIntraday", 404 | "time": datetime.fromtimestamp(entry["startGMT"]/1000, tz=pytz.timezone("UTC")).isoformat(), 405 | "tags": { 406 | "Device": GARMIN_DEVICENAME, 407 | "Database_Name": INFLUXDB_DATABASE 408 | }, 409 | "fields": { 410 | "stressValue": entry.get("value") 411 | } 412 | }) 413 | sleep_bb_intraday = all_sleep_data.get("sleepBodyBattery") 414 | if sleep_bb_intraday: 415 | for entry in sleep_bb_intraday: 416 | if entry.get("value"): 417 | points_list.append({ 418 | "measurement": "SleepIntraday", 419 | "time": datetime.fromtimestamp(entry["startGMT"]/1000, tz=pytz.timezone("UTC")).isoformat(), 420 | "tags": { 421 | "Device": GARMIN_DEVICENAME, 422 | "Database_Name": INFLUXDB_DATABASE 423 | }, 424 | "fields": { 425 | "bodyBattery": entry.get("value") 426 | } 427 | }) 428 | sleep_hrv_intraday = all_sleep_data.get("hrvData") 429 | if sleep_hrv_intraday: 430 | for entry in sleep_hrv_intraday: 431 | if entry.get("value"): 432 | points_list.append({ 433 | "measurement": "SleepIntraday", 434 | "time": datetime.fromtimestamp(entry["startGMT"]/1000, tz=pytz.timezone("UTC")).isoformat(), 435 | "tags": { 436 | "Device": GARMIN_DEVICENAME, 437 | "Database_Name": INFLUXDB_DATABASE 438 | }, 439 | "fields": { 440 | "hrvData": entry.get("value") 441 | } 442 | }) 443 | if points_list: 444 | logging.info(f"Success : Fetching intraday sleep metrics for date {date_str}") 445 | return points_list 446 | 447 | # %% 448 | def get_intraday_hr(date_str): 449 | points_list = [] 450 | hr_list = garmin_obj.get_heart_rates(date_str).get("heartRateValues") or [] 451 | for entry in hr_list: 452 | if entry[1]: 453 | points_list.append({ 454 | "measurement": "HeartRateIntraday", 455 | "time": 
# %%
def get_intraday_steps(date_str):
    """Fetch intraday step counts (15-min buckets with GMT string timestamps)."""
    points_list = []
    steps_list = garmin_obj.get_steps_data(date_str)
    for entry in steps_list:
        if entry["steps"] or entry["steps"] == 0:  # keep explicit zero-step buckets, drop None
            points_list.append({
                "measurement": "StepsIntraday",
                "time": pytz.timezone("UTC").localize(datetime.strptime(entry['startGMT'], "%Y-%m-%dT%H:%M:%S.%f")).isoformat(),
                "tags": {
                    "Device": GARMIN_DEVICENAME,
                    "Database_Name": INFLUXDB_DATABASE
                },
                "fields": {
                    "StepsCount": entry["steps"]
                }
            })
    if points_list:
        logging.info(f"Success : Fetching intraday steps for date {date_str}")
    return points_list

# %%
def get_intraday_stress(date_str):
    """Fetch intraday stress and Body Battery values for date_str.

    Both series come from the same endpoint response.
    """
    points_list = []
    # BUGFIX/perf: the original called garmin_obj.get_stress_data(date_str) twice
    # (one extra network round-trip); fetch once and reuse for both series.
    stress_data = garmin_obj.get_stress_data(date_str)
    stress_list = stress_data.get('stressValuesArray') or []
    for entry in stress_list:
        if entry[1] or entry[1] == 0:  # stress level 0 is a valid reading
            points_list.append({
                "measurement": "StressIntraday",
                "time": datetime.fromtimestamp(entry[0]/1000, tz=pytz.timezone("UTC")).isoformat(),
                "tags": {
                    "Device": GARMIN_DEVICENAME,
                    "Database_Name": INFLUXDB_DATABASE
                },
                "fields": {
                    "stressLevel": entry[1]
                }
            })
    bb_list = stress_data.get('bodyBatteryValuesArray') or []
    for entry in bb_list:
        if entry[2] or entry[2] == 0:  # Body Battery value sits at index 2
            points_list.append({
                "measurement": "BodyBatteryIntraday",
                "time": datetime.fromtimestamp(entry[0]/1000, tz=pytz.timezone("UTC")).isoformat(),
                "tags": {
                    "Device": GARMIN_DEVICENAME,
                    "Database_Name": INFLUXDB_DATABASE
                },
                "fields": {
                    "BodyBatteryLevel": entry[2]
                }
            })
    if points_list:
        logging.info(f"Success : Fetching intraday stress and Body Battery values for date {date_str}")
    return points_list

# %%
def get_intraday_br(date_str):
    """Fetch intraday breathing-rate samples ([epoch-ms, value] pairs)."""
    points_list = []
    br_list = garmin_obj.get_respiration_data(date_str).get('respirationValuesArray') or []
    for entry in br_list:
        if entry[1]:
            points_list.append({
                "measurement": "BreathingRateIntraday",
                "time": datetime.fromtimestamp(entry[0]/1000, tz=pytz.timezone("UTC")).isoformat(),
                "tags": {
                    "Device": GARMIN_DEVICENAME,
                    "Database_Name": INFLUXDB_DATABASE
                },
                "fields": {
                    "BreathingRate": entry[1]
                }
            })
    if points_list:
        logging.info(f"Success : Fetching intraday Breathing Rate for date {date_str}")
    return points_list

# %%
def get_intraday_hrv(date_str):
    """Fetch intraday HRV readings; endpoint may return None entirely."""
    points_list = []
    hrv_list = (garmin_obj.get_hrv_data(date_str) or {}).get('hrvReadings') or []
    for entry in hrv_list:
        if entry.get('hrvValue'):
            points_list.append({
                "measurement": "HRV_Intraday",
                "time": pytz.timezone("UTC").localize(datetime.strptime(entry['readingTimeGMT'],"%Y-%m-%dT%H:%M:%S.%f")).isoformat(),
                "tags": {
                    "Device": GARMIN_DEVICENAME,
                    "Database_Name": INFLUXDB_DATABASE
                },
                "fields": {
                    "hrvValue": entry.get('hrvValue')
                }
            })
    if points_list:
        logging.info(f"Success : Fetching intraday HRV for date {date_str}")
    return points_list

# %%
def get_body_composition(date_str):
    """Fetch weigh-in entries (weight, BMI, body fat, body water) for date_str."""
    points_list = []
    weight_list_all = garmin_obj.get_weigh_ins(date_str, date_str).get('dailyWeightSummaries', [])
    if weight_list_all:
        weight_list = weight_list_all[0].get('allWeightMetrics', [])
        for weight_dict in weight_list:
            data_fields = {
                "weight": weight_dict.get("weight"),
                "bmi": weight_dict.get("bmi"),
                "bodyFat": weight_dict.get("bodyFat"),
                "bodyWater": weight_dict.get("bodyWater"),
            }
            # Skip entries where every metric is missing.
            if not all(value is None for value in data_fields.values()):
                points_list.append({
                    "measurement": "BodyComposition",
                    "time": datetime.fromtimestamp((weight_dict['timestampGMT']/1000) , tz=pytz.timezone("UTC")).isoformat() if weight_dict['timestampGMT'] else datetime.strptime(date_str, "%Y-%m-%d").replace(hour=0, tzinfo=pytz.UTC).isoformat(), # Use GMT 00:00 if timestamp is not available (issue #15)
                    "tags": {
                        "Device": GARMIN_DEVICENAME,
                        "Database_Name": INFLUXDB_DATABASE,
                        "Frequency" : "Intraday",
                        "SourceType" : weight_dict.get('sourceType', "Unknown")
                    },
                    "fields": data_fields
                })
        logging.info(f"Success : Fetching intraday Body Composition (Weight, BMI etc) for date {date_str}")
    return points_list
# %%
def get_activity_summary(date_str):
    """Fetch activity summaries for date_str.

    Returns a tuple (points_list, activity_with_gps_id_dict) where the dict
    maps activityId -> activityType typeKey for activities whose FIT/GPS data
    should be downloaded by fetch_activity_GPS.
    """
    points_list = []
    activity_with_gps_id_dict = {}
    activity_list = garmin_obj.get_activities_by_date(date_str, date_str)
    for activity in activity_list:
        if activity.get('hasPolyline') or ALWAYS_PROCESS_FIT_FILES: # will process FIT files lacking GPS data if ALWAYS_PROCESS_FIT_FILES is set to True
            if not activity.get('hasPolyline'):
                logging.warning(f"Activity ID {activity.get('activityId')} got no GPS data - yet, activity FIT file data will be processed as ALWAYS_PROCESS_FIT_FILES is on")
            activity_with_gps_id_dict[activity.get('activityId')] = activity.get('activityType',{}).get('typeKey', "Unknown")
        if "startTimeGMT" in activity: # "startTimeGMT" should be available for all activities (fix #13)
            # Hoist values the original recomputed three times per activity.
            start_time_utc = datetime.strptime(activity["startTimeGMT"], "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.UTC)
            activity_selector = start_time_utc.strftime('%Y%m%dT%H%M%SUTC-') + activity.get('activityType',{}).get('typeKey', "Unknown")
            points_list.append({
                "measurement": "ActivitySummary",
                "time": start_time_utc.isoformat(),
                "tags": {
                    "Device": GARMIN_DEVICENAME,
                    "Database_Name": INFLUXDB_DATABASE,
                    "ActivityID": activity.get('activityId'),
                    "ActivitySelector": activity_selector
                },
                "fields": {
                    "Activity_ID": activity.get('activityId'),
                    'Device_ID': activity.get('deviceId'),
                    'activityName': activity.get('activityName'),
                    'activityType': activity.get('activityType',{}).get('typeKey',None),
                    'distance': activity.get('distance'),
                    'elapsedDuration': activity.get('elapsedDuration'),
                    'movingDuration': activity.get('movingDuration'),
                    'averageSpeed': activity.get('averageSpeed'),
                    'maxSpeed': activity.get('maxSpeed'),
                    'calories': activity.get('calories'),
                    'bmrCalories': activity.get('bmrCalories'),
                    'averageHR': activity.get('averageHR'),
                    'maxHR': activity.get('maxHR'),
                    'locationName': activity.get('locationName'),
                    'lapCount': activity.get('lapCount'),
                    'hrTimeInZone_1': activity.get('hrTimeInZone_1'),
                    'hrTimeInZone_2': activity.get('hrTimeInZone_2'),
                    'hrTimeInZone_3': activity.get('hrTimeInZone_3'),
                    'hrTimeInZone_4': activity.get('hrTimeInZone_4'),
                    'hrTimeInZone_5': activity.get('hrTimeInZone_5'),
                }
            })
            # A second "END" marker point closes the activity window on the dashboard.
            # BUGFIX: activity.get('elapsedDuration', 0) returns None when the key is
            # present with a None value, making int() raise TypeError - use `or 0`.
            points_list.append({
                "measurement": "ActivitySummary",
                "time": (start_time_utc + timedelta(seconds=int(activity.get('elapsedDuration') or 0))).isoformat(),
                "tags": {
                    "Device": GARMIN_DEVICENAME,
                    "Database_Name": INFLUXDB_DATABASE,
                    "ActivityID": activity.get('activityId'),
                    "ActivitySelector": activity_selector
                },
                "fields": {
                    "Activity_ID": activity.get('activityId'),
                    'Device_ID': activity.get('deviceId'),
                    'activityName': "END",
                    'activityType': "No Activity",
                }
            })
            logging.info(f"Success : Fetching Activity summary with id {activity.get('activityId')} for date {date_str}")
        else:
            logging.warning(f"Skipped : Start Timestamp missing for activity id {activity.get('activityId')} for date {date_str}")
    return points_list, activity_with_gps_id_dict
"fields": { 647 | "Activity_ID": activity.get('activityId'), 648 | 'Device_ID': activity.get('deviceId'), 649 | 'activityName': "END", 650 | 'activityType': "No Activity", 651 | } 652 | }) 653 | logging.info(f"Success : Fetching Activity summary with id {activity.get('activityId')} for date {date_str}") 654 | else: 655 | logging.warning(f"Skipped : Start Timestamp missing for activity id {activity.get('activityId')} for date {date_str}") 656 | return points_list, activity_with_gps_id_dict 657 | 658 | # %% 659 | def fetch_activity_GPS(activityIDdict): # Uses FIT file by default, falls back to TCX 660 | points_list = [] 661 | for activityID in activityIDdict.keys(): 662 | activity_type = activityIDdict[activityID] 663 | if (activityID in PARSED_ACTIVITY_ID_LIST) and (not FORCE_REPROCESS_ACTIVITIES): 664 | logging.info(f"Skipping : Activity ID {activityID} has already been processed within current runtime") 665 | return [] 666 | if (activityID in PARSED_ACTIVITY_ID_LIST) and (FORCE_REPROCESS_ACTIVITIES): 667 | logging.info(f"Re-processing : Activity ID {activityID} (FORCE_REPROCESS_ACTIVITIES is on)") 668 | try: 669 | zip_data = garmin_obj.download_activity(activityID, dl_fmt=garmin_obj.ActivityDownloadFormat.ORIGINAL) 670 | logging.info(f"Processing : Activity ID {activityID} FIT file data - this may take a while...") 671 | zip_buffer = io.BytesIO(zip_data) 672 | with zipfile.ZipFile(zip_buffer) as zip_ref: 673 | fit_filename = next((f for f in zip_ref.namelist() if f.endswith('.fit')), None) 674 | if not fit_filename: 675 | raise FileNotFoundError(f"No FIT file found in the downloaded zip archive for Activity ID {activityID}") 676 | else: 677 | fit_data = zip_ref.read(fit_filename) 678 | fit_file_buffer = io.BytesIO(fit_data) 679 | fitfile = FitFile(fit_file_buffer) 680 | fitfile.parse() 681 | all_records_list = [record.get_values() for record in fitfile.get_messages('record')] 682 | if len(all_records_list) == 0: 683 | raise FileNotFoundError(f"No records found in 
FIT file for Activity ID {activityID} - Discarding FIT file") 684 | else: 685 | activity_start_time = all_records_list[0]['timestamp'].replace(tzinfo=pytz.UTC) 686 | for parsed_record in all_records_list: 687 | if parsed_record.get('timestamp'): 688 | point = { 689 | "measurement": "ActivityGPS", 690 | "time": parsed_record['timestamp'].replace(tzinfo=pytz.UTC).isoformat(), 691 | "tags": { 692 | "Device": GARMIN_DEVICENAME, 693 | "Database_Name": INFLUXDB_DATABASE, 694 | "ActivityID": activityID, 695 | "ActivitySelector": activity_start_time.strftime('%Y%m%dT%H%M%SUTC-') + activity_type 696 | }, 697 | "fields": { 698 | "ActivityName": activity_type, 699 | "Activity_ID": activityID, 700 | "Latitude": int(parsed_record['position_lat']) * ( 180 / 2**31 ) if parsed_record.get('position_lat') else None, 701 | "Longitude": int(parsed_record['position_long']) * ( 180 / 2**31 ) if parsed_record.get('position_long') else None, 702 | "Altitude": parsed_record.get('enhanced_altitude', None) or parsed_record.get('altitude', None), 703 | "Distance": parsed_record.get('distance', None), 704 | "HeartRate": float(parsed_record.get('heart_rate', None)) if parsed_record.get('heart_rate', None) else None, 705 | "Speed": parsed_record.get('enhanced_speed', None) or parsed_record.get('speed', None), 706 | "Cadence": parsed_record.get('cadence', None), 707 | "Fractional_Cadence": parsed_record.get('fractional_cadence', None), 708 | "Temperature": parsed_record.get('temperature', None), 709 | "Accumulated_Power": parsed_record.get('accumulated_power', None), 710 | "Power": parsed_record.get('power', None) 711 | } 712 | } 713 | points_list.append(point) 714 | if KEEP_FIT_FILES: 715 | os.makedirs(FIT_FILE_STORAGE_LOCATION, exist_ok=True) 716 | fit_path = os.path.join(FIT_FILE_STORAGE_LOCATION, activity_start_time.strftime('%Y%m%dT%H%M%SUTC-') + activity_type + ".fit") 717 | with open(fit_path, "wb") as f: 718 | f.write(fit_data) 719 | logging.info(f"Success : Activity ID {activityID} 
stored in output file {fit_path}") 720 | except (FileNotFoundError, FitParseError) as err: 721 | logging.error(err) 722 | logging.warning(f"Fallback : Failed to use FIT file for activityID {activityID} - Trying TCX file...") 723 | try: 724 | root = ET.fromstring(garmin_obj.download_activity(activityID, dl_fmt=garmin_obj.ActivityDownloadFormat.TCX).decode("UTF-8")) 725 | except requests.exceptions.Timeout as err: 726 | logging.warning(f"Request timeout for fetching large activity record {activityID} - skipping record") 727 | return [] 728 | ns = {"tcx": "http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2", "ns3": "http://www.garmin.com/xmlschemas/ActivityExtension/v2"} 729 | for activity in root.findall("tcx:Activities/tcx:Activity", ns): 730 | activity_start_time = datetime.fromisoformat(activity.find("tcx:Id", ns).text.strip("Z")) 731 | lap_index = 1 732 | for lap in activity.findall("tcx:Lap", ns): 733 | lap_start_time = datetime.fromisoformat(lap.attrib.get("StartTime").strip("Z")) 734 | for tp in lap.findall(".//tcx:Trackpoint", ns): 735 | time_obj = datetime.fromisoformat(tp.findtext("tcx:Time", default=None, namespaces=ns).strip("Z")) 736 | lat = tp.findtext("tcx:Position/tcx:LatitudeDegrees", default=None, namespaces=ns) 737 | lon = tp.findtext("tcx:Position/tcx:LongitudeDegrees", default=None, namespaces=ns) 738 | alt = tp.findtext("tcx:AltitudeMeters", default=None, namespaces=ns) 739 | dist = tp.findtext("tcx:DistanceMeters", default=None, namespaces=ns) 740 | hr = tp.findtext("tcx:HeartRateBpm/tcx:Value", default=None, namespaces=ns) 741 | speed = tp.findtext("tcx:Extensions/ns3:TPX/ns3:Speed", default=None, namespaces=ns) 742 | 743 | try: lat = float(lat) 744 | except: lat = None 745 | try: lon = float(lon) 746 | except: lon = None 747 | try: alt = float(alt) 748 | except: alt = None 749 | try: dist = float(dist) 750 | except: dist = None 751 | try: hr = float(hr) 752 | except: hr = None 753 | try: speed = float(speed) 754 | except: speed = None 
# Contribution from PR #17 by @arturgoms
def get_training_readiness(date_str):
    """Collect Training Readiness entries for date_str as InfluxDB points."""
    points_list = []
    readiness_entries = garmin_obj.get_training_readiness(date_str)
    if readiness_entries:
        field_keys = (
            "level", "score", "sleepScore", "sleepScoreFactorPercent",
            "recoveryTime", "recoveryTimeFactorPercent", "acwrFactorPercent",
            "acuteLoad", "stressHistoryFactorPercent", "hrvFactorPercent",
        )
        for entry in readiness_entries:
            data_fields = {key: entry.get(key) for key in field_keys}
            # Keep only entries that carry a timestamp and at least one value.
            if entry.get('timestamp') and any(v is not None for v in data_fields.values()):
                points_list.append({
                    "measurement": "TrainingReadiness",
                    "time": pytz.timezone("UTC").localize(datetime.strptime(entry['timestamp'],"%Y-%m-%dT%H:%M:%S.%f")).isoformat(),
                    "tags": {
                        "Device": GARMIN_DEVICENAME,
                        "Database_Name": INFLUXDB_DATABASE
                    },
                    "fields": data_fields
                })
        logging.info(f"Success : Fetching Training Readiness for date {date_str}")
    return points_list

# Contribution from PR #17 by @arturgoms
def get_hillscore(date_str):
    """Collect Hill Score entries for date_str (one daily point at GMT 00:00)."""
    points_list = []
    hill_response = garmin_obj.get_hill_score(date_str, date_str)
    if hill_response:
        for hill in hill_response.get("hillScoreDTOList",[]):
            data_fields = {key: hill.get(key) for key in (
                "strengthScore", "enduranceScore", "hillScoreClassificationId",
                "overallScore", "hillScoreFeedbackPhraseId",
            )}
            if any(v is not None for v in data_fields.values()):
                points_list.append({
                    "measurement": "HillScore",
                    "time": datetime.strptime(date_str,"%Y-%m-%d").replace(hour=0, tzinfo=pytz.UTC).isoformat(), # Use GMT 00:00 for daily record
                    "tags": {
                        "Device": GARMIN_DEVICENAME,
                        "Database_Name": INFLUXDB_DATABASE
                    },
                    "fields": data_fields
                })
        logging.info(f"Success : Fetching Hill Score for date {date_str}")
    return points_list

# Contribution from PR #17 by @arturgoms
def get_race_predictions(date_str):
    """Collect current race-time predictions, stamped on date_str at GMT 00:00."""
    points_list = []
    predictions = garmin_obj.get_race_predictions()
    if predictions:
        data_fields = {key: predictions.get(key) for key in (
            "time5K", "time10K", "timeHalfMarathon", "timeMarathon",
        )}
        if any(v is not None for v in data_fields.values()):
            points_list.append({
                "measurement": "RacePredictions",
                "time": datetime.strptime(date_str,"%Y-%m-%d").replace(hour=0, tzinfo=pytz.UTC).isoformat(), # Use GMT 00:00 for daily record
                "tags": {
                    "Device": GARMIN_DEVICENAME,
                    "Database_Name": INFLUXDB_DATABASE
                },
                "fields": data_fields
            })
        logging.info(f"Success : Fetching Race Predictions for date {date_str}")
    return points_list

def get_vo2_max(date_str):
    """Collect the precise VO2-max value for date_str, if reported."""
    points_list = []
    max_metrics = garmin_obj.get_max_metrics(date_str)
    try:
        if max_metrics:
            vo2_max_value = max_metrics[0].get("generic", {}).get("vo2MaxPreciseValue")
            if vo2_max_value:
                points_list.append({
                    "measurement": "VO2_Max",
                    "time": datetime.strptime(date_str,"%Y-%m-%d").replace(hour=0, tzinfo=pytz.UTC).isoformat(), # Use GMT 00:00 for daily record
                    "tags": {
                        "Device": GARMIN_DEVICENAME,
                        "Database_Name": INFLUXDB_DATABASE
                    },
                    "fields": {"VO2_max_value" : vo2_max_value}
                })
        logging.info(f"Success : Fetching VO2-max for date {date_str}")
        return points_list
    except AttributeError as err:
        # Response shape can vary ("generic" present but None); treat as no data.
        return []
# %%
def daily_fetch_write(date_str):
    """Fetch every supported metric for one date and write it to InfluxDB."""
    # Same call order as before, expressed as a loop over the fetchers.
    for fetcher in (get_daily_stats, get_sleep_data, get_intraday_steps,
                    get_intraday_hr, get_intraday_stress, get_intraday_br,
                    get_intraday_hrv, get_body_composition):
        write_points_to_influxdb(fetcher(date_str))
    summary_points, gps_activity_ids = get_activity_summary(date_str)
    write_points_to_influxdb(summary_points)
    write_points_to_influxdb(fetch_activity_GPS(gps_activity_ids))
    if FETCH_ADVANCED_TRAINING_DATA: # Contribution from PR #17 by @arturgoms
        for fetcher in (get_training_readiness, get_hillscore,
                        get_race_predictions, get_vo2_max):
            write_points_to_influxdb(fetcher(date_str))
# %%
def fetch_write_bulk(start_date_str, end_date_str):
    """Fetch and store all metrics for every date in the given range.

    Retries a date on HTTP 429, skips it on connection errors, and
    re-authenticates on authentication failures.
    """
    global garmin_obj
    logging.info("Fetching data for the given period in reverse chronological order")
    time.sleep(3)
    write_points_to_influxdb(get_last_sync())
    for current_date in iter_days(start_date_str, end_date_str):
        while True:  # repeat the same date until it succeeds or is skipped
            try:
                daily_fetch_write(current_date)
                logging.info(f"Success : Fetched all available health metrics for date {current_date} (skipped any if unavailable)")
                logging.info(f"Waiting : for {RATE_LIMIT_CALLS_SECONDS} seconds")
                time.sleep(RATE_LIMIT_CALLS_SECONDS)
                break
            except GarminConnectTooManyRequestsError as err:
                logging.error(err)
                logging.info(f"Too many requests (429) : Failed to fetch one or more metrics - will retry for date {current_date}")
                logging.info(f"Waiting : for {FETCH_FAILED_WAIT_SECONDS} seconds")
                time.sleep(FETCH_FAILED_WAIT_SECONDS)
                # fall through -> retry the same date
            except (
                GarminConnectConnectionError,
                requests.exceptions.HTTPError,
                requests.exceptions.ConnectionError,
                requests.exceptions.Timeout,
                GarthHTTPError
            ) as err:
                logging.error(err)
                logging.info(f"Connection Error : Failed to fetch one or more metrics - skipping date {current_date}")
                logging.info(f"Waiting : for {RATE_LIMIT_CALLS_SECONDS} seconds")
                time.sleep(RATE_LIMIT_CALLS_SECONDS)
                break
            except GarminConnectAuthenticationError as err:
                logging.error(err)
                logging.info(f"Authentication Failed : Retrying login with given credentials (won't work automatically for MFA/2FA enabled accounts)")
                garmin_obj = garmin_login()
                time.sleep(5)
                # fall through -> retry the same date with the fresh session


# %%
garmin_obj = garmin_login()

# %%
if MANUAL_START_DATE:
    # One-shot bulk import mode: fetch the requested range and exit.
    fetch_write_bulk(MANUAL_START_DATE, MANUAL_END_DATE)
    logging.info(f"Bulk update success : Fetched all available health metrics for date range {MANUAL_START_DATE} to {MANUAL_END_DATE}")
    exit(0)
else:
    # Continuous mode: resume from the newest HeartRateIntraday point in InfluxDB.
    try:
        if INFLUXDB_VERSION == "1":
            last_influxdb_sync_time_UTC = pytz.utc.localize(datetime.strptime(list(influxdbclient.query(f"SELECT * FROM HeartRateIntraday ORDER BY time DESC LIMIT 1").get_points())[0]['time'],"%Y-%m-%dT%H:%M:%SZ"))
        else:
            last_influxdb_sync_time_UTC = pytz.utc.localize(influxdbclient.query(query="SELECT * FROM HeartRateIntraday ORDER BY time DESC LIMIT 1", language="influxql").to_pylist()[0]['time'])
    except Exception as err:
        logging.error(err)
        logging.warning("No previously synced data found in local InfluxDB database, defaulting to 7 day initial fetching. Use specific start date ENV variable to bulk update past data")
        last_influxdb_sync_time_UTC = (datetime.today() - timedelta(days=7)).astimezone(pytz.timezone("UTC"))
    try:
        if USER_TIMEZONE: # Use the user-provided timezone when available.
            local_timediff = pytz.timezone(USER_TIMEZONE).localize(datetime.utcnow()).utcoffset()
        else: # Otherwise derive the UTC offset from the last activity's local vs GMT start times.
            last_activity_dict = garmin_obj.get_last_activity() # (very unlikely that this will be empty given Garmin's userbase, everyone should have at least one activity)
            local_timediff = datetime.strptime(last_activity_dict['startTimeLocal'], '%Y-%m-%d %H:%M:%S') - datetime.strptime(last_activity_dict['startTimeGMT'], '%Y-%m-%d %H:%M:%S')
            if local_timediff > timedelta(0):  # equivalent to comparing the two datetimes directly
                logging.info("Using user's local timezone as UTC+" + str(local_timediff))
            else:
                logging.info("Using user's local timezone as UTC-" + str(-local_timediff))
    except (KeyError, TypeError) as err:
        logging.warning(f"Unable to determine user's timezone - Defaulting to UTC. Consider providing TZ identifier with USER_TIMEZONE environment variable")
        local_timediff = timedelta(hours=0)

    while True:
        last_watch_sync_time_UTC = datetime.fromtimestamp(int(garmin_obj.get_device_last_used().get('lastUsedDeviceUploadTime')/1000)).astimezone(pytz.timezone("UTC"))
        if last_influxdb_sync_time_UTC < last_watch_sync_time_UTC:
            logging.info(f"Update found : Current watch sync time is {last_watch_sync_time_UTC} UTC")
            # Local dates decide which days to fetch in the current iteration (see issue #25)
            fetch_write_bulk((last_influxdb_sync_time_UTC + local_timediff).strftime('%Y-%m-%d'), (last_watch_sync_time_UTC + local_timediff).strftime('%Y-%m-%d'))
            last_influxdb_sync_time_UTC = last_watch_sync_time_UTC
        else:
            logging.info(f"No new data found : Current watch and influxdb sync time is {last_watch_sync_time_UTC} UTC")
        logging.info(f"waiting for {UPDATE_INTERVAL_SECONDS} seconds before next automatic update calls")
time.sleep(UPDATE_INTERVAL_SECONDS) 987 | 988 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | influxdb>=5.3 2 | pytz>=2025.1 3 | Requests>=2.32 4 | garth>=0.5.3 5 | garminconnect>=0.2.26 6 | dotenv>=0.9.9 7 | fitparse>=1.2.0 8 | influxdb3-python>=0.12.0 --------------------------------------------------------------------------------