├── .gitignore
├── LICENSE
├── README.md
├── Todoist.py
├── config.py
├── docker-example
├── Dockerfile-librelinkup
├── README.md
├── config.py
├── librelinkup.py
└── requirements.txt
├── edsm.py
├── exist.py
├── fitbit.py
├── foursquare.py
├── fshub.py
├── github.py
├── google-play.py
├── grafana
├── edsm.json
├── exist.json
├── fitbit.json
├── foursquare.json
├── fshub.json
├── gaming.json
├── github.json
├── instagram.json
├── rescuetime.json
├── todoist.json
└── trakt.json
├── instagram.py
├── k8s-example
├── README.md
├── cronjob-example.yaml
├── external-secret-example.yaml
└── secret-example.yaml
├── librelinkup.py
├── nintendo-switch.py
├── onetouchreveal.py
├── psn.py
├── requirements.txt
├── rescuetime-games.py
├── rescuetime.py
├── retroachievements.py
├── retroarch_emulationstation.py
├── retropie
├── influx-onend.sh
├── influx-onstart.sh
└── influx-retropie.py
├── screenshots
├── grafana-edsm.png
├── grafana-exist.png
├── grafana-fitbit.png
├── grafana-foursquare.png
├── grafana-fshub.png
├── grafana-gaming.png
├── grafana-github.png
├── grafana-instagram.png
├── grafana-rescuetime.png
├── grafana-todoist.png
└── grafana-trakt.png
├── stadia.py
├── steam.py
├── trakt-tv.py
└── xbox.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | .DS_Store
132 | .trakt.json
133 | .fitbit-refreshtoken
134 | *.sqlite
135 | *.code-workspace
136 | .vscode
137 | .librelinkup-authtoken
138 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Personal-InfluxDB
2 |
3 | Import personal data from various APIs into InfluxDB
4 |
5 | ## Configuration
6 |
7 | Open `config.py` and set your API credentials and InfluxDB server configuration at the top of the file
8 |
9 | * __RescueTime__: Register for an API key at https://www.rescuetime.com/anapi/manage
10 | * __Foursquare__: Register an app at https://foursquare.com/developers/ and generate an OAuth2 access token
11 | * __Fitbit__: Register a "Personal" app at https://dev.fitbit.com/ and generate an OAuth2 access token
12 | * __Steam__: Register for an API key at https://steamcommunity.com/dev/apikey and look up your SteamID at https://steamidfinder.com/ (use the `steamID64 (Dec)` value)
13 | * __Nintendo Switch__: You'll need to set up [mitmproxy](https://mitmproxy.org/) and intercept the Nintendo Switch Parent Controls app on an iOS or Android device to grab your authentication tokens and device IDs
14 | * __Xbox Live__: Register a profile at https://www.trueachievements.com/ and link it to your Xbox account. You can get your ID number by clicking your "TrueAchievement Points" score on your profile and looking at the leaderboard URL, it will be the `findgamerid` parameter.
15 | * __Google Play Games__: Download your Google Play Games archive from https://takeout.google.com/ and extract it in the same folder as the script
16 | * __Todoist__: *Access to the API requires a Todoist Premium subscription.* Create an app at https://developer.todoist.com/appconsole.html and generate a test token
17 | * __GitHub__: Create a personal access token at https://github.com/settings/tokens
18 | * __Trakt.tv__: Register for an API key at https://trakt.tv/oauth/applications and generate an OAuth2 access token, you'll also need to create an API key at https://www.themoviedb.org/settings/api to download movie / show posters
19 | * __EDSM__: Generate an API key at https://www.edsm.net/en/settings/api
20 | * __Exist__: Register an app at https://exist.io/account/apps/
21 | * __RetroPie__: Place the shell files and python script into user `pi`'s home directory. Create or edit `/opt/retropie/configs/all/runcommand-onstart.sh` and append the line `bash "/home/pi/influx-onstart.sh" "$@"`. Create or edit `/opt/retropie/configs/all/runcommand-onend.sh` and append the line `bash "/home/pi/influx-onend.sh" "$@"`
22 | * __FsHub.io__: Generate a personal access token at https://fshub.io/settings/integrations and set your pilot ID to the number in your "Personal Dashboard" URL
23 | * __Stadia__: Link your Stadia account to [Exophase](https://www.exophase.com/) and then set your Exophase username and Stadia nickname
24 | * __PSN__: Link your PSN account to [Exophase](https://www.exophase.com/) and then set your Exophase username and PSN nickname
25 | * __LibreLinkUp__: Open the Freestyle Libre app and choose `Connected Apps` from the menu, then send yourself an invite to `LibreLinkUp`. Install the `LibreLinkUp` app on your phone and accept the invitation, then set your username and password in the configuration file.
26 |
27 | ## Usage
28 |
29 | Check your Python version and make sure version 3.7 or newer is installed on your system:
30 |
31 | ```shell
32 | $ python3 --version
33 | ```
34 |
35 | Install required python3 modules:
36 |
37 | ```shell
38 | $ pip3 install pytz influxdb requests requests-cache instaloader trakt.py publicsuffix2 colorlog bs4
39 | ```
40 |
41 | Run each Python script from the terminal and it will insert the most recent data into InfluxDB.
42 |
43 | ## Notes
44 |
45 | * Each script is designed to write to its own InfluxDB database. Using the same database name between scripts can lead to data being unexpectedly overwritten or deleted.
46 | * RescueTime provides data each hour, so scheduling the script as an hourly cron job is recommended.
47 | * Steam provides the recent playtime over 2 weeks, so the first set of data inserted will contain 2 weeks of time. New data going forward will be more accurate as the script will calculate the time since the last run.
48 | * Google Play doesn't provide total play time, only achievements and last played timestamps
49 | * Instagram can take a very long time to download, so by default it will only fetch the 10 most recent posts. Set `MAX_POSTS` to `0` to download everything.
50 | * Access to the Todoist API requires a premium subscription
51 |
52 | ## Grafana Dashboards
53 |
54 | The [grafana](grafana/) folder contains json files for various example dashboards.
55 | Most dashboards require the `grafana-piechart-panel` plugin, and the Foursquare panel also requires the panodata `grafana-map-panel` plugin:
56 |
57 | ```shell
58 | $ grafana-cli plugins install grafana-piechart-panel
59 | $ grafana-cli --pluginUrl https://github.com/panodata/grafana-map-panel/releases/download/0.15.0/grafana-map-panel-0.15.0.zip plugins install grafana-map-panel
60 | ```
61 |
62 | ### RescueTime dashboard
63 |
64 | 
65 |
66 | ### Fitbit dashboard
67 |
68 | 
69 |
70 | ### Gaming dashboard
71 |
72 | 
73 |
74 | ### Foursquare dashboard
75 |
76 | 
77 |
78 | ### Instagram dashboard
79 |
80 | 
81 |
82 | ### Todoist dashboard
83 |
84 | 
85 |
86 | ### GitHub dashboard
87 |
88 | 
89 |
90 | ### Trakt.tv dashboard
91 |
92 | 
93 |
94 | ### EDSM dashboard
95 |
96 | 
97 |
98 | ### Exist dashboard
99 |
100 | 
101 |
102 | ### FsHub.io dashboard
103 |
104 | 
105 |
106 | # License
107 |
108 | Copyright (C) 2022 Sam Steele. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
109 |
110 | http://www.apache.org/licenses/LICENSE-2.0
111 |
112 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
113 |
--------------------------------------------------------------------------------
/Todoist.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys
17 | from config import *
18 |
# Abort immediately when no API token is configured; every request below
# requires it, so failing fast gives a clearer error than an HTTP 401 later.
if not TODOIST_ACCESS_TOKEN:
    logging.error("TODOIST_ACCESS_TOKEN not set in config.py")
    sys.exit(1)

# Accumulates InfluxDB point dicts; written in one batch at the end of the run.
points = []
24 |
def get_project(project_id):
    """Fetch one project from the Todoist Sync v9 API.

    Returns the decoded JSON payload, or None when the project ID does
    not exist (HTTP 404). Any other HTTP failure is fatal and exits.
    """
    url = 'https://api.todoist.com/sync/v9/projects/get'
    auth_header = {'Authorization': f'Bearer {TODOIST_ACCESS_TOKEN}'}
    try:
        response = requests.get(url, params={'project_id': project_id},
                                headers=auth_header)
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        # A missing project is recoverable; anything else is not.
        if err.response.status_code != 404:
            logging.error("HTTP request failed: %s", err)
            sys.exit(1)
        logging.warning("Project ID not found: %s", project_id)
        return None

    return response.json()
40 |
def get_activity(page):
    """Download all activity-log events for the given week (``page``).

    The Sync v9 activity endpoint returns at most 100 events per request,
    so keep requesting with an increasing ``offset`` until the total
    ``count`` reported by the server has been collected.

    Exits the process on any HTTP failure.
    """
    events = []
    count = -1  # unknown until the first response arrives
    offset = 0
    while count == -1 or len(events) < count:
        logging.debug("Fetching page %s offset %s", page, offset)
        try:
            response = requests.get('https://api.todoist.com/sync/v9/activity/get',
                params={'page': page, 'offset': offset, 'limit': 100},
                headers={'Authorization': f'Bearer {TODOIST_ACCESS_TOKEN}'})
            response.raise_for_status()
        except requests.exceptions.HTTPError as err:
            logging.error("HTTP request failed: %s", err)
            sys.exit(1)

        activity = response.json()
        if not activity['events']:
            # Defensive stop: an empty batch with a miscounted total would
            # otherwise loop forever.
            break
        events.extend(activity['events'])
        count = activity['count']
        # BUG FIX: offset was never advanced, so every iteration re-fetched
        # the same first 100 events, filling `events` with duplicates until
        # len(events) >= count. Advance past what we already hold.
        offset = len(events)

    logging.info("Got %s items from Todoist", len(events))

    return events
63 |
# Connect to InfluxDB and ingest the most recent week of Todoist activity.
connect(TODOIST_DATABASE)

page = 0  # page 0 is the current week of the activity log
activity = get_activity(page)
projects = {}  # cache: project_id -> project payload, avoids repeat API calls
for event in activity:
    if event['object_type'] == 'item':
        # Only task creations and completions are recorded as points.
        if event['event_type'] in ('added', 'completed'):
            project = None
            try:
                if event['parent_project_id'] in projects:
                    project = projects[event['parent_project_id']]
                else:
                    project = get_project(event['parent_project_id'])
                    projects[event['parent_project_id']] = project
            except AttributeError as err:
                logging.warning("Unable to fetch name for project ID %s", event['parent_project_id'])

            # project is None when the project was deleted (404) or the
            # lookup failed; skip the event rather than write a bad tag.
            if project is not None:
                points.append({
                    "measurement": event['event_type'],
                    "time": event['event_date'],
                    "tags": {
                        "item_id": event['id'],
                        "project_id": event['parent_project_id'],
                        "project_name": project['project']['name'],
                    },
                    "fields": {
                        "content": event['extra_data']['content']
                    }
                })

write_points(points)
97 |
--------------------------------------------------------------------------------
/config.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import sys, logging, colorlog, pytz
17 | from influxdb import InfluxDBClient
18 | from influxdb.exceptions import InfluxDBClientError
19 |
# Timezone used by the importer scripts when localizing timestamps.
LOCAL_TIMEZONE = pytz.timezone('America/New_York')

# InfluxDB Configuration
INFLUXDB_HOST = 'localhost'
INFLUXDB_PORT = 8086
INFLUXDB_USERNAME = 'root'
INFLUXDB_PASSWORD = 'root'
INFLUXDB_CHUNK_SIZE = 50 # How many points to send per request

# Shared gaming database
GAMING_DATABASE = 'gaming'

# EDSM configuration
EDSM_API_KEY = ''
EDSM_COMMANDER_NAME = ''
EDSM_DATABASE = 'edsm'

# Exist.io configuration
EXIST_ACCESS_TOKEN = ''
EXIST_USERNAME = ''
EXIST_DATABASE = 'exist'
EXIST_USE_FITBIT = True
EXIST_USE_TRAKT = True
EXIST_USE_GAMING = True
EXIST_USE_RESCUETIME = False

# Fitbit configuration
FITBIT_LANGUAGE = 'en_US'
FITBIT_CLIENT_ID = ''
FITBIT_CLIENT_SECRET = ''
FITBIT_ACCESS_TOKEN = ''
FITBIT_INITIAL_CODE = ''
FITBIT_REDIRECT_URI = 'https://www.example.net'
FITBIT_DATABASE = 'fitbit'

# Foursquare configuration
FOURSQUARE_ACCESS_TOKEN = ''
FOURSQUARE_DATABASE = 'foursquare'

# FSHub configuration
FSHUB_API_KEY = ''
FSHUB_PILOT_ID = ''
FSHUB_DATABASE = 'fshub'

# GitHub configuration
GITHUB_API_KEY = ''
GITHUB_USERNAME = ''
GITHUB_DATABASE = 'github'

# Instagram configuration
INSTAGRAM_PROFILE = ''
INSTAGRAM_DATABASE = 'instagram'
INSTAGRAM_MAX_POSTS = 10 #set to 0 to download all posts

# Freestyle LibreLinkUp configuration
LIBRELINKUP_USERNAME = ''
LIBRELINKUP_PASSWORD = ''
LIBRELINKUP_DATABASE = 'glucose'
LIBRELINKUP_URL = 'https://api-us.libreview.io'
LIBRELINKUP_VERSION = '4.7.0'
LIBRELINKUP_PRODUCT = 'llu.ios'

# Nintendo Switch configuration
NS_DEVICE_ID = ''
NS_SMART_DEVICE_ID = ''
NS_SESSION_TOKEN = ''
NS_CLIENT_ID = ''
# These occasionally need to be updated when Nintendo changes the minimum allowed version
NS_INTERNAL_VERSION = '321'
NS_DISPLAY_VERSION = '1.17.0'
NS_OS_VERSION = '15.2'
NS_DATABASE = GAMING_DATABASE

# OneTouch Reveal configuration
ONETOUCH_USERNAME = ''
ONETOUCH_PASSWORD = ''
ONETOUCH_URL = 'https://app.onetouchreveal.com'
ONETOUCH_DATABASE = 'glucose'

# RescueTime configuration
RESCUETIME_API_KEY = ''
RESCUETIME_DATABASE = 'rescuetime'

# RetroAchievements configuration
RA_API_KEY = ''
RA_USERNAME = ''
RA_DATABASE = GAMING_DATABASE

# RetroArch configuration
RETROARCH_LOGS = '/home/ark/.config/retroarch/playlists/logs/'
EMULATIONSTATION_ROMS = '/roms'
RETROARCH_IMAGE_WEB_PREFIX = 'https://example.net/retroarch_images/'

# Exophase configuration for Stadia and PSN
EXOPHASE_NAME = ''

# Stadia configuration
STADIA_NAME = ''
STADIA_DATABASE = GAMING_DATABASE

# PSN configuration
PSN_NAME = ''
PSN_DATABASE = GAMING_DATABASE

# Steam configuration
STEAM_API_KEY = ''
STEAM_ID = ''
STEAM_USERNAME = ''
STEAM_LANGUAGE = 'en'
STEAM_DATABASE = GAMING_DATABASE

# Todoist configuration
TODOIST_ACCESS_TOKEN = ''
TODOIST_DATABASE = 'todoist'

# Trakt.tv configuration
TRAKT_CLIENT_ID = ''
TRAKT_CLIENT_SECRET = ''
TRAKT_OAUTH_CODE = ''
TMDB_API_KEY = ''
TMDB_IMAGE_BASE = 'https://image.tmdb.org/t/p/'
TRAKT_DATABASE = 'trakt'

# Xbox configuration
XBOX_GAMERTAG = ''
TRUE_ACHIEVEMENTS_ID = ''
XBOX_DATABASE = GAMING_DATABASE

# Logging configuration
LOG_LEVEL = logging.INFO
# %(log_color)s is a colorlog extension; it is stripped for non-TTY output below.
LOG_FORMAT = '%(asctime)s %(log_color)s%(message)s'
LOG_COLORS = {
    'WARNING': 'yellow',
    'ERROR': 'red',
    'CRITICAL': 'red',
}
156 |
def connect(db):
    """Open the module-wide InfluxDB connection and select database *db*.

    The database is created if it does not already exist. A client error
    is fatal, since no importer can proceed without a working connection.

    Returns the connected InfluxDBClient instance.
    """
    global client
    try:
        logging.info("Connecting to %s:%s", INFLUXDB_HOST, INFLUXDB_PORT)
        client = InfluxDBClient(host=INFLUXDB_HOST, port=INFLUXDB_PORT,
                                username=INFLUXDB_USERNAME,
                                password=INFLUXDB_PASSWORD)
        client.create_database(db)
        client.switch_database(db)
    except InfluxDBClientError as err:
        logging.error("InfluxDB connection failed: %s", err)
        sys.exit(1)
    return client
168 |
def write_points(points):
    """Write *points* to InfluxDB in batches of INFLUXDB_CHUNK_SIZE.

    Chunking keeps individual HTTP requests small. A client error is
    fatal: the process exits rather than silently dropping data.
    """
    global client
    total = len(points)
    try:
        for start in range(0, total, INFLUXDB_CHUNK_SIZE):
            stop = min(start + INFLUXDB_CHUNK_SIZE, total)
            client.write_points(points[start:stop])
            logging.debug(f"Wrote {stop} / {total} points")
    except InfluxDBClientError as err:
        logging.error("Unable to write points to InfluxDB: %s", err)
        sys.exit(1)

    logging.info("Successfully wrote %s data points to InfluxDB", total)
189 |
# Shared InfluxDB client handle; populated by connect() before any writes.
client = None

# Colored log output when attached to a terminal, plain text otherwise
# (the colorlog-only %(log_color)s token is stripped from the format).
if sys.stdout.isatty():
    colorlog.basicConfig(level=LOG_LEVEL, format=LOG_FORMAT, log_colors=LOG_COLORS, stream=sys.stdout)
else:
    logging.basicConfig(level=LOG_LEVEL, format=LOG_FORMAT.replace(f'%(log_color)s', ''), stream=sys.stdout)
196 |
def handle_exception(exc_type, exc_value, exc_traceback):
    """Global excepthook: route uncaught exceptions through the logger.

    KeyboardInterrupt is forwarded to the default hook so Ctrl-C keeps
    its normal behavior instead of producing a CRITICAL log entry.
    """
    if issubclass(exc_type, KeyboardInterrupt):
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return

    logging.critical("Uncaught exception:", exc_info=(exc_type, exc_value, exc_traceback))

sys.excepthook = handle_exception
--------------------------------------------------------------------------------
/docker-example/Dockerfile-librelinkup:
--------------------------------------------------------------------------------
FROM python:3.11.4-alpine3.18

LABEL maintainer="Evan Richardson (evanrich81[at]gmail.com)"

# Set the working directory to /app
WORKDIR /app

# Copy the requirements.txt file into the container at /app
COPY requirements.txt /app/

# Install the Python dependencies specified in requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Copy the librelinkup.py and config.py scripts to the image root,
# where the CMD below expects to find them
COPY librelinkup.py /
COPY config.py /

# Placeholder defaults; override at `docker run` time.
# NOTE(review): the config.py in this repository hard-codes its settings
# rather than reading environment variables -- confirm these ENV values
# are actually consumed before relying on them.
ENV INFLUXDB_HOST="default_host"
ENV INFLUXDB_PORT="default_port"
ENV INFLUXDB_USER="default_username"
ENV INFLUXDB_PASSWORD="default_password"
ENV LIBRELINKUP_DATABASE="database"
ENV LIBRELINKUP_USERNAME="librelinkup_username"
ENV LIBRELINKUP_PASSWORD="librelinkup_password"

# Run the importer once; schedule repeated runs externally (cron / CronJob)
CMD ["python", "/librelinkup.py"]
--------------------------------------------------------------------------------
/docker-example/README.md:
--------------------------------------------------------------------------------
1 | ## Dockerfile
2 | You can obviously customize this however you want, and likely will. I wrote this quick and dirty to extract all the parameters I needed for the librelinkup and config python files to work. You can add/delete as necessary.
3 |
4 | Breaking the file down:
5 |
6 | `FROM python:3.11.4-alpine3.18`
7 | ###### this defines the base image to use. I wanted to keep things nice and tidy, so I used the python 3.11 image based on alpine 3.18. you can use whatever you want.
8 |
9 | `LABEL maintainer="Evan Richardson (evanrich81[at]gmail.com)"`
10 | ###### **The maintainer of the image. I posted this to dockerhub, so I stuck my name and email on there, please don't spam me. You can change to whatever you want.**
11 |
12 | `WORKDIR /app`
13 | ###### This sets the working directory for the container to `/app`
14 |
15 | `COPY requirements.txt /app/`
###### Self-explanatory.
17 |
18 | `RUN pip install --no-cache-dir -r requirements.txt`
19 | ###### Installs the requirements, leaving no cache behind (smaller final image)
20 |
21 | COPY librelinkup.py /
22 | COPY config.py /
23 | ###### These lines copy the python files to the root directory (could go in app if you wanted). These could also be consolidated on one line for one less layer, but not the end of the world.
24 |
25 |
26 | ENV INFLUXDB_HOST="default_host"
27 | ENV INFLUXDB_PORT="default_port"
28 | ENV INFLUXDB_USER="default_username"
29 | ENV INFLUXDB_PASSWORD="default_password"
30 | ENV LIBRELINKUP_DATABASE="database"
31 | ENV LIBRELINKUP_USERNAME="librelinkup_username"
32 | ENV LIBRELINKUP_PASSWORD="librelinkup_password"
33 | ##### This block of env variables defines env variables that will be available to the container. Set these to whatever you want.
34 |
35 | `CMD ["python", "/librelinkup.py"]`
36 | ##### And finally, the run line. This runs the selected python file upon start of the container.
37 |
38 |
39 | ## Running the container
If you use the cronjob in the kubernetes folder, it will add the environment variables automatically. If not, you should run this image in a way similar to:
`docker run -e VAR1=VALUE1 -e VAR2=VALUE2 evanrich/libre2influx:latest`
--------------------------------------------------------------------------------
/docker-example/config.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import sys, logging, colorlog, pytz, os
17 | from influxdb import InfluxDBClient
18 | from influxdb.exceptions import InfluxDBClientError
19 |
TIMEZONE = os.getenv('TZ', 'America/Los_Angeles')  # IANA timezone name; defaults to US Pacific
LOCAL_TIMEZONE = pytz.timezone(TIMEZONE)

# InfluxDB Configuration
INFLUXDB_HOST = os.getenv('INFLUXDB_HOST')  # required; no default
# NOTE(review): os.getenv returns a str when the env var is set, but the default
# here is an int (8086) — confirm InfluxDBClient accepts both representations.
INFLUXDB_PORT = os.getenv('INFLUXDB_PORT', 8086)
INFLUXDB_USERNAME = os.getenv('INFLUXDB_USER', 'admin')
INFLUXDB_PASSWORD = os.getenv('INFLUXDB_PASSWORD', 'admin')
INFLUXDB_CHUNK_SIZE = 50 # How many points to send per request

# Freestyle LibreLinkUp configuration
LIBRELINKUP_USERNAME = os.getenv('LIBRELINKUP_USERNAME')
LIBRELINKUP_PASSWORD = os.getenv('LIBRELINKUP_PASSWORD')
LIBRELINKUP_DATABASE = os.getenv('LIBRELINKUP_DATABASE')
LIBRELINKUP_URL = 'https://api-us.libreview.io'  # US-region API endpoint
LIBRELINKUP_VERSION = '4.7.0'    # sent as the "version" request header
LIBRELINKUP_PRODUCT = 'llu.ios'  # sent as the "product" request header

# Logging configuration
LOG_LEVEL = logging.INFO
LOG_FORMAT = '%(asctime)s %(log_color)s%(message)s'  # %(log_color)s is consumed by colorlog
LOG_COLORS = {
    'WARNING': 'yellow',
    'ERROR': 'red',
    'CRITICAL': 'red',
}
46 |
def connect(db):
    """Connect to InfluxDB, create database *db* if needed, and select it.

    Stores the client in the module-level ``client`` and also returns it.
    Exits the process with status 1 if the connection or setup fails.
    """
    global client
    try:
        logging.info("Connecting to %s:%s", INFLUXDB_HOST, INFLUXDB_PORT)
        conn = InfluxDBClient(host=INFLUXDB_HOST, port=INFLUXDB_PORT,
                              username=INFLUXDB_USERNAME, password=INFLUXDB_PASSWORD)
        conn.create_database(db)  # no-op if the database already exists
        conn.switch_database(db)
        client = conn
    except InfluxDBClientError as err:
        logging.error("InfluxDB connection failed: %s", err)
        sys.exit(1)
    return client
58 |
def write_points(points):
    """Write *points* to the connected InfluxDB database in fixed-size chunks.

    Exits the process with status 1 on any InfluxDB client error.
    """
    global client
    total = len(points)
    try:
        offset = 0
        while offset < total:
            # Send at most INFLUXDB_CHUNK_SIZE points per request.
            chunk = points[offset:offset + INFLUXDB_CHUNK_SIZE]
            client.write_points(chunk)
            offset += len(chunk)
            logging.debug(f"Wrote {offset} / {total} points")
    except InfluxDBClientError as err:
        logging.error("Unable to write points to InfluxDB: %s", err)
        sys.exit(1)

    logging.info("Successfully wrote %s data points to InfluxDB", total)
79 |
client = None  # module-level InfluxDB client, set by connect()

if sys.stdout.isatty():
    # Interactive terminal: colorized log output.
    colorlog.basicConfig(level=LOG_LEVEL, format=LOG_FORMAT, log_colors=LOG_COLORS, stream=sys.stdout)
else:
    # Non-interactive (cron, pipe): strip the colorlog placeholder from the format.
    # Fixed: the replace() argument was a pointless f-string with no placeholders.
    logging.basicConfig(level=LOG_LEVEL, format=LOG_FORMAT.replace('%(log_color)s', ''), stream=sys.stdout)
86 |
def handle_exception(exc_type, exc_value, exc_traceback):
    """Log any uncaught exception; let Ctrl-C fall through to the default hook."""
    if issubclass(exc_type, KeyboardInterrupt):
        # Preserve normal SIGINT behavior instead of logging it as a crash.
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
    else:
        logging.critical("Uncaught exception:", exc_info=(exc_type, exc_value, exc_traceback))

sys.excepthook = handle_exception
--------------------------------------------------------------------------------
/docker-example/librelinkup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys, os, json, time
17 | from datetime import datetime
18 | from config import *
19 |
def append_reading(points, data, reading):
    """Convert one LibreLinkUp glucose reading into an InfluxDB point and append it to *points*."""
    # FactoryTimestamp carries no zone suffix; the code treats it as UTC by
    # appending an explicit +00:00 offset before parsing.
    when = datetime.strptime(reading['FactoryTimestamp'] + '+00:00', '%m/%d/%Y %I:%M:%S %p%z')
    sensor_serial = data['data']['connection']['sensor']['sn']
    point = {
        "measurement": "glucose",
        "time": when,
        "tags": {
            "deviceType": "Libre",
            "deviceSerialNumber": sensor_serial,
        },
        "fields": {
            "value": int(reading['ValueInMgPerDl']),
            "units": 'mg/dL',
        },
    }
    points.append(point)
34 |
# Abort early if credentials were not configured.
if not LIBRELINKUP_USERNAME:
    logging.error("LIBRELINKUP_USERNAME not set in config.py")
    sys.exit(1)

points = []  # InfluxDB points accumulated below, written in one batch at the end

connect(LIBRELINKUP_DATABASE)

# Headers required by the LibreLinkUp API on every request.
LIBRELINKUP_HEADERS = {
    "version": LIBRELINKUP_VERSION,
    "product": LIBRELINKUP_PRODUCT,
}

# Reuse a cached auth ticket from disk if it has not expired yet.
LIBRELINKUP_TOKEN = None
script_dir = os.path.dirname(__file__)
auth_token_path = os.path.join(script_dir, '.librelinkup-authtoken')
if os.path.isfile(auth_token_path):
    with open(auth_token_path) as json_file:
        auth = json.load(json_file)
        # 'expires' is a Unix timestamp; only use the ticket while still valid.
        if auth['expires'] > time.time():
            LIBRELINKUP_TOKEN = auth['token']
            logging.info("Using cached authTicket, expiration: %s", datetime.fromtimestamp(auth['expires']).isoformat())

# No valid cached ticket: log in with username/password and cache the new one.
if LIBRELINKUP_TOKEN is None:
    logging.info("Auth ticket not found or expired, requesting a new one")
    try:
        response = requests.post(f'{LIBRELINKUP_URL}/llu/auth/login',
            headers=LIBRELINKUP_HEADERS, json = {'email': LIBRELINKUP_USERNAME, 'password': LIBRELINKUP_PASSWORD})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    data = response.json()
    # A 200 response without an authTicket means the credentials were rejected.
    if not 'authTicket' in data['data']:
        logging.error("Authentication failed")
        sys.exit(1)

    # Persist the ticket (token + expires) for subsequent runs.
    with open(auth_token_path, 'w') as outfile:
        json.dump(data['data']['authTicket'], outfile)

    LIBRELINKUP_TOKEN = data['data']['authTicket']['token']

LIBRELINKUP_HEADERS['Authorization'] = 'Bearer ' + LIBRELINKUP_TOKEN

# List patient connections; the first one is used as the data source.
try:
    response = requests.get(f'{LIBRELINKUP_URL}/llu/connections', headers=LIBRELINKUP_HEADERS)
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

connections = response.json()
if not 'data' in connections or len(connections['data']) < 1:
    logging.error("No connections configured. Accept an invitation in the mobile app first.")
    sys.exit(1)

logging.info("Using connection %s: %s %s", connections['data'][0]['patientId'], connections['data'][0]['firstName'], connections['data'][0]['lastName'])

# Fetch the latest measurement plus the recent graph history for that patient.
try:
    response = requests.get(f'{LIBRELINKUP_URL}/llu/connections/{connections["data"][0]["patientId"]}/graph', headers=LIBRELINKUP_HEADERS)
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
# Current reading first, then every historical graph sample.
append_reading(points, data, data['data']['connection']['glucoseMeasurement'])

if len(data['data']['graphData']) > 0:
    for reading in data['data']['graphData']:
        append_reading(points, data, reading)

write_points(points)
--------------------------------------------------------------------------------
/docker-example/requirements.txt:
--------------------------------------------------------------------------------
1 | influxdb==5.3.1
2 | requests==2.25.1
3 | colorlog==6.7.0
4 | pytz==2023.3
--------------------------------------------------------------------------------
/edsm.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, requests_cache, sys, math, logging
17 | from datetime import datetime, date
18 | from config import *
19 |
# Abort early if the EDSM API key was not configured.
if not EDSM_API_KEY:
    logging.error("EDSM_API_KEY not set in config.py")
    sys.exit(1)

points = []  # InfluxDB points accumulated by the helpers below
last = None  # previously seen jump log entry; fetch_jumps pairs it with the next one
26 |
def add_rank(data, activity):
    """Append today's rank point for *activity* from an EDSM get-ranks payload."""
    global points
    midnight = date.today().isoformat() + "T00:00:00"
    points.append({
        "measurement": "rank",
        "time": midnight,
        "tags": {
            "commander": EDSM_COMMANDER_NAME,
            "activity": activity,
        },
        "fields": {
            "value": data['ranks'][activity],
            "progress": data['progress'][activity],
            "name": data['ranksVerbose'][activity],
        },
    })
42 |
def fetch_system(name):
    """Fetch coordinates and primary-star details for one named system from EDSM."""
    params = {
        'systemName': name,
        'showCoordinates': 1,
        'showPrimaryStar': 1,
        'apiKey': EDSM_API_KEY,
    }
    try:
        response = requests.get('https://www.edsm.net/api-v1/system', params=params)
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    return response.json()
53 |
def distance(system1, system2):
    """Euclidean distance between the coordinates of two named systems."""
    a = fetch_system(system1)['coords']
    b = fetch_system(system2)['coords']
    # Same x, y, z ordering as before, so float summation order is unchanged.
    deltas = [float(a[axis]) - float(b[axis]) for axis in ('x', 'y', 'z')]
    return math.sqrt(sum(d * d for d in deltas))
63 |
def add_jump(src, dst):
    """Append a point for the hyperspace jump from *src* to *dst*.

    Tags the point with the destination's primary star type when EDSM reports
    one; otherwise that tag is omitted. Fields are identical either way.
    """
    global points
    system = fetch_system(dst['system'])

    tags = {
        "commander": EDSM_COMMANDER_NAME,
        "system": dst['system'],
        "firstDiscover": dst['firstDiscover'],
    }
    # Not every system payload includes a typed primary star.
    if 'type' in system['primaryStar']:
        tags["primaryStarType"] = system['primaryStar']['type']

    # Previously two nearly identical 15-line branches differing only in the
    # primaryStarType tag; collapsed into one construction.
    points.append({
        "measurement": "jump",
        "time": datetime.fromisoformat(dst['date']).isoformat(),
        "tags": tags,
        "fields": {
            "distance": distance(src['system'], dst['system']),
            "x": float(system['coords']['x']),
            "y": float(system['coords']['y']),
            "z": float(system['coords']['z']),
        },
    })
100 |
def fetch_jumps(time):
    """Fetch one page of flight logs ending at *time* and queue them as jump points.

    Uses the module-level *last* to pair each log entry with the previously
    seen (chronologically later) entry so add_jump can compute distances.
    Returns the decoded API response.
    """
    global last
    try:
        response = requests.get('https://www.edsm.net/api-logs-v1/get-logs',
            params={'commanderName':EDSM_COMMANDER_NAME, 'apiKey':EDSM_API_KEY, 'endDateTime':time})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        # Fixed: was print("HTTP request failed: %s", err), which never
        # interpolated the placeholder and bypassed the logging setup.
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    data = response.json()
    logging.info("Got %s jumps from EDSM", len(data['logs']))

    for jump in data['logs']:
        if last is not None:
            add_jump(jump, last)
        last = jump

    return data
120 |
connect(EDSM_DATABASE)

# Credit balance history.
try:
    response = requests.get('https://www.edsm.net/api-commander-v1/get-credits',
        params={'commanderName':EDSM_COMMANDER_NAME, 'apiKey':EDSM_API_KEY})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
if 'credits' not in data:
    logging.error("Unable to fetch data from EDSM: %s", data['msg'])
    sys.exit(1)

logging.info("Got credits from EDSM")

for credits in data['credits']:
    points.append({
        "measurement": "credits",
        "time": datetime.fromisoformat(credits['date']).isoformat(),
        "tags": {
            "commander": EDSM_COMMANDER_NAME
        },
        "fields": {
            "value": credits['balance']
        }
    })

# Rank and progress per activity.
try:
    response = requests.get('https://www.edsm.net/api-commander-v1/get-ranks',
        params={'commanderName':EDSM_COMMANDER_NAME, 'apiKey':EDSM_API_KEY})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    # Fixed: used eager %-interpolation and a bare sys.exit() (exit code 0 on
    # failure); now lazy args and exit code 1, matching every other error path.
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
logging.info("Got ranks from EDSM")
add_rank(data, "Combat")
add_rank(data, "Trade")
add_rank(data, "Explore")
add_rank(data, "CQC")
add_rank(data, "Federation")
add_rank(data, "Empire")
add_rank(data, "Soldier")
add_rank(data, "Exobiologist")

# Cache HTTP responses on disk so repeated system lookups don't hit the API.
requests_cache.install_cache('edsm')
data = fetch_jumps(date.today().isoformat() + " 00:00:00")
if len(data['logs']) > 0:
    data = fetch_jumps(data['startDateTime'])
    # NOTE(review): pages backward while a window returns no logs — presumably
    # to skip empty date ranges; confirm this terminates for sparse histories.
    while len(data['logs']) == 0:
        data = fetch_jumps(data['startDateTime'])

write_points(points)
177 |
--------------------------------------------------------------------------------
/exist.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys, logging
17 | from datetime import date, datetime, time, timedelta
18 | from publicsuffix2 import PublicSuffixList
19 | from config import *
20 |
# Abort early if the exist.io token was not configured.
if not EXIST_ACCESS_TOKEN:
    logging.error("EXIST_ACCESS_TOKEN not set in config.py")
    sys.exit(1)

points = []  # InfluxDB points accumulated below
# Epoch-milliseconds InfluxQL time literal (e.g. "1680000000000ms") for local
# midnight seven days ago, converted to UTC; used as the query lower bound.
start_time = str(int(LOCAL_TIMEZONE.localize(datetime.combine(date.today(), time(0,0)) - timedelta(days=7)).astimezone(pytz.utc).timestamp()) * 1000) + 'ms'
27 |
def append_tags(tags):
    """Send custom tag values to exist.io; exits with status 1 on any failure."""
    try:
        response = requests.post('https://exist.io/api/1/attributes/custom/append/',
                                 headers={'Authorization': f'Bearer {EXIST_ACCESS_TOKEN}'},
                                 json=tags)
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    result = response.json()
    # The API reports per-item outcomes; any failure aborts the run.
    if result['failed']:
        logging.error("Request failed: %s", result['failed'])
        sys.exit(1)
    if result['success']:
        logging.info("Successfully sent %s tags", len(result['success']))
45 |
def acquire_attributes(attributes):
    """Claim ownership of custom attributes on exist.io; exits with status 1 on failure."""
    try:
        response = requests.post('https://exist.io/api/1/attributes/acquire/',
                                 headers={'Authorization': f'Bearer {EXIST_ACCESS_TOKEN}'},
                                 json=attributes)
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    result = response.json()
    # Abort if the service rejected any of the requested attributes.
    if result['failed']:
        logging.error("Request failed: %s", result['failed'])
        sys.exit(1)
60 |
def post_attributes(values):
    """Update attribute values on exist.io; exits with status 1 on any failure."""
    try:
        response = requests.post('https://exist.io/api/1/attributes/update/',
            headers={'Authorization':f'Bearer {EXIST_ACCESS_TOKEN}'},
            json=values)
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    result = response.json()
    if len(result['failed']) > 0:
        logging.error("Request failed: %s", result['failed'])
        sys.exit(1)

    if len(result['success']) > 0:
        # Fixed: was eager "%s" % interpolation; lazy logging args match the
        # rest of the file (and append_tags above).
        logging.info("Successfully sent %s attributes", len(result['success']))
78 |
client = connect(EXIST_DATABASE)

# Register the custom attributes this script updates so exist.io accepts them.
acquire_attributes([{"name":"gaming_min", "active":True}, {"name":"tv_min", "active":True}])

try:
    response = requests.get('https://exist.io/api/1/users/' + EXIST_USERNAME + '/insights/',
        headers={'Authorization':f'Bearer {EXIST_ACCESS_TOKEN}'})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
logging.info("Got %s insights from exist.io", len(data['results']))

for insight in data['results']:
    # Prefer the insight's target date; fall back to its creation date.
    # Fixed: was `== None`; identity comparison is the idiomatic None check.
    if insight['target_date'] is None:
        date = datetime.fromisoformat(insight['created'].strip('Z')).strftime('%Y-%m-%d')
    else:
        date = insight['target_date']
    points.append({
        "measurement": "insight",
        "time": date + "T00:00:00",
        "tags": {
            "type": insight['type']['name'],
            "attribute": insight['type']['attribute']['label'],
            "group": insight['type']['attribute']['group']['label'],
        },
        "fields": {
            "html": insight['html'].replace("\n", "").replace("\r", ""),
            "text": insight['text']
        }
    })
112 |
# Pull the last week of custom and mood attribute values.
try:
    response = requests.get('https://exist.io/api/1/users/' + EXIST_USERNAME + '/attributes/?limit=7&groups=custom,mood',
        headers={'Authorization':f'Bearer {EXIST_ACCESS_TOKEN}'})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
logging.info("Got attributes from exist.io")

# One point per attribute value. Custom tags go under a "custom" measurement
# keyed by label; every other attribute gets its own measurement name.
for result in data:
    for value in result['values']:
        if value['value'] and result['attribute'] != 'custom':
            if result['group']['name'] == 'custom':
                point = {
                    "measurement": result['group']['name'],
                    "time": value['date'] + "T00:00:00",
                    "tags": {"tag": result['label']},
                    "fields": {"value": value['value']},
                }
            else:
                point = {
                    "measurement": result['attribute'],
                    "time": value['date'] + "T00:00:00",
                    "fields": {"value": value['value']},
                }
            points.append(point)

write_points(points)
148 |
# Build attribute updates (values) and day tags from the local InfluxDB
# databases, then push them back to exist.io.
values = []
tags = []
if FITBIT_DATABASE and EXIST_USE_FITBIT:
    client.switch_database(FITBIT_DATABASE)
    durations = client.query(f'SELECT "duration" FROM "activity" WHERE (activityName = \'Meditating\' OR activityName = \'Meditation\')AND time >= {start_time}')
    for duration in list(durations.get_points()):
        if duration['duration'] > 0:
            date = datetime.fromisoformat(duration['time'].strip('Z') + "+00:00").astimezone(LOCAL_TIMEZONE).strftime('%Y-%m-%d')
            tags.append({'date': date, 'value': 'meditation'})

    # Any non-meditation activity counts as exercise, plus a per-activity tag.
    durations = client.query(f'SELECT "duration","activityName" FROM "activity" WHERE activityName != \'Meditating\' AND activityName != \'Meditation\' AND time >= {start_time}')
    for duration in list(durations.get_points()):
        if duration['duration'] > 0:
            date = datetime.fromisoformat(duration['time'].strip('Z') + "+00:00").astimezone(LOCAL_TIMEZONE).strftime('%Y-%m-%d')
            tags.append({'date': date, 'value': 'exercise'})
            tags.append({'date': date, 'value': duration['activityName'].lower().replace(" ", "_")})

if TRAKT_DATABASE and EXIST_USE_TRAKT:
    # Sum watch durations per local day into tv_min.
    totals = {}
    client.switch_database(TRAKT_DATABASE)
    durations = client.query(f'SELECT "duration" FROM "watch" WHERE time >= {start_time}')
    for duration in list(durations.get_points()):
        date = datetime.fromisoformat(duration['time'].strip('Z') + "+00:00").astimezone(LOCAL_TIMEZONE).strftime('%Y-%m-%d')
        if date in totals:
            totals[date] = totals[date] + duration['duration']
        else:
            totals[date] = duration['duration']

    for date in totals:
        values.append({'date': date, 'name': 'tv_min', 'value': int(totals[date])})
        tags.append({'date': date, 'value': 'tv'})

if GAMING_DATABASE and EXIST_USE_GAMING:
    # Sum gaming seconds per local day; gaming_min is sent in minutes.
    totals = {}
    client.switch_database(GAMING_DATABASE)
    durations = client.query(f'SELECT "value" FROM "time" WHERE "value" > 0 AND time >= {start_time}')
    for duration in list(durations.get_points()):
        date = datetime.fromisoformat(duration['time'].strip('Z') + "+00:00").astimezone(LOCAL_TIMEZONE).strftime('%Y-%m-%d')
        if date in totals:
            totals[date] = totals[date] + duration['value']
        else:
            totals[date] = duration['value']

    for date in totals:
        values.append({'date': date, 'name': 'gaming_min', 'value': int(totals[date] / 60)})
        tags.append({'date': date, 'value': 'gaming'})
elif RESCUETIME_DATABASE and EXIST_USE_RESCUETIME:
    # Fallback: derive gaming time from RescueTime's "Games" category,
    # skipping launchers/VR helpers and website entries.
    psl = PublicSuffixList()
    totals = {}
    client.switch_database(RESCUETIME_DATABASE)
    durations = client.query(f'SELECT "duration","activity" FROM "activity" WHERE category = \'Games\' AND activity != \'Steam\' AND activity != \'steamwebhelper\' AND activity != \'origin\' AND activity != \'mixedrealityportal\' AND activity != \'holoshellapp\' AND activity != \'vrmonitor\' AND activity != \'vrserver\' AND activity != \'oculusclient\' AND activity != \'vive\' AND activity != \'obs64\' AND time >= {start_time}')
    for duration in list(durations.get_points()):
        date = datetime.fromisoformat(duration['time'].strip('Z') + "+00:00").astimezone(LOCAL_TIMEZONE).strftime('%Y-%m-%d')
        # A parseable public suffix means the "activity" is a website, not a game.
        if psl.get_public_suffix(duration['activity'], strict=True) is None:
            if date in totals:
                totals[date] = totals[date] + duration['duration']
            else:
                totals[date] = duration['duration']

    for date in totals:
        # Fixed: the payload key was 'duration', but post_attributes payloads
        # use 'value' (see the tv_min and gaming branches above), so this
        # branch never sent a usable gaming_min value.
        values.append({'date': date, 'name': 'gaming_min', 'value': int(totals[date] / 60)})
        tags.append({'date': date, 'value': 'gaming'})

if len(tags) > 0:
    append_tags(tags)

if len(values) > 0:
    post_attributes(values)
217 |
--------------------------------------------------------------------------------
/fitbit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys, os, pytz
17 | from datetime import datetime, date, timedelta
18 | from config import *
19 |
# Abort early if the OAuth client credentials were not configured.
if not FITBIT_CLIENT_ID or not FITBIT_CLIENT_SECRET:
    logging.error("FITBIT_CLIENT_ID or FITBIT_CLIENT_SECRET not set in config.py")
    sys.exit(1)
points = []  # InfluxDB points accumulated by the fetch_* helpers below
24 |
def fetch_data(category, type):
    """Fetch today's value for one Fitbit time-series resource and buffer it as points."""
    url = f'https://api.fitbit.com/1/user/-/{category}/{type}/date/today/1d.json'
    try:
        response = requests.get(url,
            headers={'Authorization': f'Bearer {FITBIT_ACCESS_TOKEN}', 'Accept-Language': FITBIT_LANGUAGE})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    data = response.json()
    logging.info(f"Got {type} from Fitbit")

    # The response key mirrors the request path with '/' replaced by '-',
    # e.g. "activities-steps".
    series_key = category.replace('/', '-') + '-' + type
    for day in data[series_key]:
        local_midnight = LOCAL_TIMEZONE.localize(datetime.fromisoformat(day['dateTime']))
        points.append({
            "measurement": type,
            "time": local_midnight.astimezone(pytz.utc).isoformat(),
            "fields": {
                "value": float(day['value'])
            }
        })
45 |
def fetch_heartrate(date):
    """Fetch daily and intraday (1-minute) heart-rate data for *date* and buffer it as points.

    Emits three measurements: restingHeartRate (per day), heartRateZones
    (per zone per day), and heartrate (per intraday sample).
    """
    try:
        response = requests.get(f'https://api.fitbit.com/1/user/-/activities/heart/date/{date}/1d/1min.json',
            headers={'Authorization': f'Bearer {FITBIT_ACCESS_TOKEN}', 'Accept-Language': FITBIT_LANGUAGE})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    data = response.json()
    logging.info("Got heartrates from Fitbit")

    for day in data['activities-heart']:
        # Resting heart rate is only present in the payload on some days.
        if 'restingHeartRate' in day['value']:
            points.append({
                "measurement": "restingHeartRate",
                "time": datetime.fromisoformat(day['dateTime']),
                "fields": {
                    "value": float(day['value']['restingHeartRate'])
                }
            })

        if 'heartRateZones' in day['value']:
            for zone in day['value']['heartRateZones']:
                # caloriesOut may be absent; emit the zone point with or without it.
                if 'caloriesOut' in zone and 'min' in zone and 'max' in zone and 'minutes' in zone:
                    points.append({
                        "measurement": "heartRateZones",
                        "time": datetime.fromisoformat(day['dateTime']),
                        "tags": {
                            "zone": zone['name']
                        },
                        "fields": {
                            "caloriesOut": float(zone['caloriesOut']),
                            "min": float(zone['min']),
                            "max": float(zone['max']),
                            "minutes": float(zone['minutes'])
                        }
                    })
                elif 'min' in zone and 'max' in zone and 'minutes' in zone:
                    points.append({
                        "measurement": "heartRateZones",
                        "time": datetime.fromisoformat(day['dateTime']),
                        "tags": {
                            "zone": zone['name']
                        },
                        "fields": {
                            "min": float(zone['min']),
                            "max": float(zone['max']),
                            "minutes": float(zone['minutes'])
                        }
                    })

    # Per-minute samples; timestamps are local times, converted to UTC here.
    if 'activities-heart-intraday' in data:
        for value in data['activities-heart-intraday']['dataset']:
            time = datetime.fromisoformat(date + "T" + value['time'])
            utc_time = LOCAL_TIMEZONE.localize(time).astimezone(pytz.utc).isoformat()
            points.append({
                "measurement": "heartrate",
                "time": utc_time,
                "fields": {
                    "value": float(value['value'])
                }
            })
109 |
def process_levels(levels):
    """Buffer sleep-stage segments as "sleep_levels" points (seconds per segment)."""
    for level in levels:
        # Normalize "classic" sleep-log names onto the "stages" vocabulary.
        # NOTE(review): `type` is computed here but never used below — it looks
        # like it was meant to be a tag on the emitted point; confirm intent
        # before relying on the stage names in the database.
        type = level['level']
        if type == "asleep":
            type = "light"
        if type == "restless":
            type = "rem"
        if type == "awake":
            type = "wake"

        # Segment timestamps are local; convert to UTC for storage.
        time = datetime.fromisoformat(level['dateTime'])
        utc_time = LOCAL_TIMEZONE.localize(time).astimezone(pytz.utc).isoformat()
        points.append({
            "measurement": "sleep_levels",
            "time": utc_time,
            "fields": {
                "seconds": int(level['seconds'])
            }
        })
129 |
def fetch_activities(date):
    """Fetch the 10 most recent logged activities before *date* and buffer them as points.

    Each activity becomes one "activity" point tagged by activityName, with
    whichever numeric fields Fitbit included for that activity type.
    """
    try:
        response = requests.get('https://api.fitbit.com/1/user/-/activities/list.json',
            headers={'Authorization': f'Bearer {FITBIT_ACCESS_TOKEN}', 'Accept-Language': FITBIT_LANGUAGE},
            params={'beforeDate': date, 'sort':'desc', 'limit':10, 'offset':0})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    data = response.json()
    logging.info("Got activities from Fitbit")

    for activity in data['activities']:
        fields = {}

        # Fields vary by activity type; copy only those present, coercing to
        # consistent numeric types for InfluxDB.
        if 'activeDuration' in activity:
            fields['activeDuration'] = int(activity['activeDuration'])
        if 'averageHeartRate' in activity:
            fields['averageHeartRate'] = int(activity['averageHeartRate'])
        if 'calories' in activity:
            fields['calories'] = int(activity['calories'])
        if 'duration' in activity:
            fields['duration'] = int(activity['duration'])
        if 'distance' in activity:
            fields['distance'] = float(activity['distance'])
            fields['distanceUnit'] = activity['distanceUnit']
        if 'pace' in activity:
            fields['pace'] = float(activity['pace'])
        if 'speed' in activity:
            fields['speed'] = float(activity['speed'])
        if 'elevationGain' in activity:
            fields['elevationGain'] = int(activity['elevationGain'])
        if 'steps' in activity:
            fields['steps'] = int(activity['steps'])

        # Minutes per intensity level; "sedentary" gets a distinct field name.
        for level in activity['activityLevel']:
            if level['name'] == 'sedentary':
                fields[level['name'] + "Minutes"] = int(level['minutes'])
            else:
                fields[level['name'] + "ActiveMinutes"] = int(level['minutes'])


        time = datetime.fromisoformat(activity['startTime'].strip("Z"))
        utc_time = time.astimezone(pytz.utc).isoformat()
        points.append({
            "measurement": "activity",
            "time": utc_time,
            "tags": {
                "activityName": activity['activityName']
            },
            "fields": fields
        })
183 |
# ---------------------------------------------------------------------------
# Main script: authenticate with Fitbit, then collect device battery, sleep,
# daily time series, heart rate and activities into InfluxDB.
# ---------------------------------------------------------------------------
connect(FITBIT_DATABASE)

if not FITBIT_ACCESS_TOKEN:
    # No access token in config: run the OAuth2 flow. Prefer the refresh token
    # cached next to this script; fall back to the one-time FITBIT_INITIAL_CODE
    # for the very first run.
    script_dir = os.path.dirname(__file__)
    refresh_token_path = os.path.join(script_dir, '.fitbit-refreshtoken')
    if os.path.isfile(refresh_token_path):
        # Context manager guarantees the handle is closed even on error.
        with open(refresh_token_path, "r") as f:
            token = f.read().strip()
        response = requests.post('https://api.fitbit.com/oauth2/token',
            data={
                "client_id": FITBIT_CLIENT_ID,
                "grant_type": "refresh_token",
                "redirect_uri": FITBIT_REDIRECT_URI,
                "refresh_token": token
            }, auth=(FITBIT_CLIENT_ID, FITBIT_CLIENT_SECRET))
    else:
        response = requests.post('https://api.fitbit.com/oauth2/token',
            data={
                "client_id": FITBIT_CLIENT_ID,
                "grant_type": "authorization_code",
                "redirect_uri": FITBIT_REDIRECT_URI,
                "code": FITBIT_INITIAL_CODE
            }, auth=(FITBIT_CLIENT_ID, FITBIT_CLIENT_SECRET))

    response.raise_for_status()

    # Renamed from "json" to avoid shadowing the stdlib module name.
    token_data = response.json()
    FITBIT_ACCESS_TOKEN = token_data['access_token']
    # Fitbit rotates refresh tokens; persist the new one for the next run.
    with open(refresh_token_path, "w+") as f:
        f.write(token_data['refresh_token'])

try:
    response = requests.get('https://api.fitbit.com/1/user/-/devices.json',
        headers={'Authorization': f'Bearer {FITBIT_ACCESS_TOKEN}', 'Accept-Language': FITBIT_LANGUAGE})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
logging.info("Got devices from Fitbit")

for device in data:
    points.append({
        "measurement": "deviceBatteryLevel",
        "time": LOCAL_TIMEZONE.localize(datetime.fromisoformat(device['lastSyncTime'])).astimezone(pytz.utc).isoformat(),
        "tags": {
            "id": device['id'],
            "deviceVersion": device['deviceVersion'],
            "type": device['type']
        },
        "fields": {
            "value": float(device['batteryLevel'])
        }
    })

# Sleep sessions covering yesterday and today.
end = date.today()
start = end - timedelta(days=1)

try:
    response = requests.get(f'https://api.fitbit.com/1.2/user/-/sleep/date/{start.isoformat()}/{end.isoformat()}.json',
        headers={'Authorization': f'Bearer {FITBIT_ACCESS_TOKEN}', 'Accept-Language': FITBIT_LANGUAGE})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
logging.info("Got sleep sessions from Fitbit")

for day in data['sleep']:
    start_local = datetime.fromisoformat(day['startTime'])
    utc_time = LOCAL_TIMEZONE.localize(start_local).astimezone(pytz.utc).isoformat()
    # Summary fields common to both sleep-log formats.
    fields = {
        "duration": int(day['duration']),
        "efficiency": int(day['efficiency']),
        "is_main_sleep": bool(day['isMainSleep']),
        "minutes_asleep": int(day['minutesAsleep']),
        "minutes_awake": int(day['minutesAwake']),
        "time_in_bed": int(day['timeInBed']),
    }
    summary = day['levels']['summary']
    if day['type'] == 'stages':
        # "stages" format reports deep/light/rem/wake directly.
        fields["minutes_deep"] = int(summary['deep']['minutes'])
        fields["minutes_light"] = int(summary['light']['minutes'])
        fields["minutes_rem"] = int(summary['rem']['minutes'])
        fields["minutes_wake"] = int(summary['wake']['minutes'])
    else:
        # "classic" format: map asleep/restless/awake onto the stages names;
        # classic logs have no notion of deep sleep.
        fields["minutes_deep"] = 0
        fields["minutes_light"] = int(summary['asleep']['minutes'])
        fields["minutes_rem"] = int(summary['restless']['minutes'])
        fields["minutes_wake"] = int(summary['awake']['minutes'])
    points.append({
        "measurement": "sleep",
        "time": utc_time,
        "fields": fields
    })

    if 'data' in day['levels']:
        process_levels(day['levels']['data'])

    if 'shortData' in day['levels']:
        process_levels(day['levels']['shortData'])

fetch_data('activities', 'steps')
fetch_data('activities', 'distance')  # was fetched twice; duplicate removed
fetch_data('activities', 'floors')
fetch_data('activities', 'elevation')
fetch_data('activities', 'minutesSedentary')
fetch_data('activities', 'minutesLightlyActive')
fetch_data('activities', 'minutesFairlyActive')
fetch_data('activities', 'minutesVeryActive')
fetch_data('activities', 'calories')
fetch_data('activities', 'activityCalories')
fetch_data('body', 'weight')
fetch_data('body', 'fat')
fetch_data('body', 'bmi')
fetch_data('foods/log', 'water')
fetch_data('foods/log', 'caloriesIn')
fetch_heartrate(date.today().isoformat())
fetch_activities((date.today() + timedelta(days=1)).isoformat())

write_points(points)
321 |
--------------------------------------------------------------------------------
/foursquare.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys
17 | from datetime import datetime, date, timedelta
18 | from config import *
19 |
# Fail fast when the Foursquare credentials were never configured.
if not FOURSQUARE_ACCESS_TOKEN:
    logging.error("FOURSQUARE_ACCESS_TOKEN not set in config.py")
    sys.exit(1)

# InfluxDB points accumulated by fetch_checkins() and written at the end.
points = []
25 |
# Map full US state/territory names (as returned by the Foursquare API) to
# their two-letter postal abbreviations, used to normalize the "state" tag.
# NOTE(review): the list is not exhaustive (e.g. Guam and American Samoa are
# absent); unmatched names pass through unabbreviated in fetch_checkins().
us_states = {
    'Alabama': 'AL',
    'Alaska': 'AK',
    'Arizona': 'AZ',
    'Arkansas': 'AR',
    'California': 'CA',
    'Colorado': 'CO',
    'Connecticut': 'CT',
    'Delaware': 'DE',
    'District of Columbia': 'DC',
    'Florida': 'FL',
    'Georgia': 'GA',
    'Hawaii': 'HI',
    'Idaho': 'ID',
    'Illinois': 'IL',
    'Indiana': 'IN',
    'Iowa': 'IA',
    'Kansas': 'KS',
    'Kentucky': 'KY',
    'Louisiana': 'LA',
    'Maine': 'ME',
    'Maryland': 'MD',
    'Massachusetts': 'MA',
    'Michigan': 'MI',
    'Minnesota': 'MN',
    'Mississippi': 'MS',
    'Missouri': 'MO',
    'Montana': 'MT',
    'Nebraska': 'NE',
    'Nevada': 'NV',
    'New Hampshire': 'NH',
    'New Jersey': 'NJ',
    'New Mexico': 'NM',
    'New York': 'NY',
    'North Carolina': 'NC',
    'North Dakota': 'ND',
    'Northern Mariana Islands':'MP',
    'Ohio': 'OH',
    'Oklahoma': 'OK',
    'Oregon': 'OR',
    'Palau': 'PW',
    'Pennsylvania': 'PA',
    'Puerto Rico': 'PR',
    'Rhode Island': 'RI',
    'South Carolina': 'SC',
    'South Dakota': 'SD',
    'Tennessee': 'TN',
    'Texas': 'TX',
    'Utah': 'UT',
    'Vermont': 'VT',
    'Virgin Islands': 'VI',
    'Virginia': 'VA',
    'Washington': 'WA',
    'West Virginia': 'WV',
    'Wisconsin': 'WI',
    'Wyoming': 'WY',
}
83 |
def fetch_checkins(offset):
    """Download one page of check-ins (newest first, up to 250) starting at
    *offset* and append one "checkin" point per venue to the global points
    list.

    Returns the number of check-ins received, so a caller could page through
    history by advancing the offset. Exits the process on HTTP failure.
    """
    try:
        response = requests.get('https://api.foursquare.com/v2/users/self/checkins',
            params={'sort': 'newestfirst', 'offset': offset, 'oauth_token':FOURSQUARE_ACCESS_TOKEN, 'v':'20191201', 'limit':250})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    checkins = response.json()['response']['checkins']['items']
    logging.info("Got %s checkins from Foursquare", len(checkins))

    for checkin in checkins:
        # Check-ins without venue details carry no location; skip them.
        if 'venue' not in checkin:
            continue
        venue = checkin['venue']
        location = venue['location']

        # Tag with the venue's primary category name, if one is flagged.
        primary_category = ''
        for category in venue['categories']:
            if category['primary']:
                primary_category = category['name']

        tags = {
            "category": primary_category,
            "venue_id": venue['id'],
            "venue_name": venue['name'],
            "mayor": checkin['isMayor']
        }
        if 'country' in location:
            tags['country'] = location['country']
        if 'city' in location:
            tags['city'] = location['city']
        if 'state' in location:
            # Abbreviate known US state names; pass anything else through.
            tags['state'] = us_states.get(location['state'], location['state'])

        points.append({
            "measurement": "checkin",
            "time": datetime.fromtimestamp(checkin['createdAt']).isoformat(),
            "tags": tags,
            "fields": {
                "latitude": float(location['lat']),
                "longitude": float(location['lng'])
            }
        })

    return len(checkins)
128 |
129 | connect(FOURSQUARE_DATABASE)
130 | fetch_checkins(0)
131 | write_points(points)
132 |
--------------------------------------------------------------------------------
/fshub.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys
17 | from config import *
18 |
# Fail fast when the FsHub credentials were never configured.
if not FSHUB_API_KEY:
    logging.error("FSHUB_API_KEY not set in config.py")
    sys.exit(1)

# InfluxDB points for the current page of flights; reset after each write.
points = []
24 |
def fetch(limit, cursor):
    """Fetch one page of flights from FsHub and append "flight" and "airport"
    points to the global points list.

    :param limit: page size requested from the API.
    :param cursor: pagination cursor (0 for the first page).
    :return: the next cursor when a full page was returned, otherwise -1 to
        signal that the final page has been processed.
    Exits the process on HTTP failure.
    """
    try:
        response = requests.get(f'https://fshub.io/api/v3/pilot/{FSHUB_PILOT_ID}/flight',
            params={'limit': limit, 'cursor': cursor},
            headers={'X-Pilot-Token': FSHUB_API_KEY, 'Content-Type': 'application/json'})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    data = response.json()
    logging.info("Got flights %s from FsHub", len(data['data']))

    for flight in data['data']:
        departure = flight['departure']
        arrival = flight['arrival']
        # Only record flights with complete departure and arrival airports.
        # (idiom fix: identity comparison with None instead of "!= None")
        if departure is None or departure['icao'] is None or arrival is None or arrival['icao'] is None:
            continue

        points.append({
            "measurement": "flight",
            "time": departure['time'],
            "tags": {
                "flight_id": flight['id'],
                "pilot_id": flight['user']['id']
            },
            "fields": {
                "aircraft": flight['aircraft']['name'],
                "fuel_used": flight['fuel_used'],
                "landing_rate": flight['landing_rate'],
                "distance_nm": flight['distance']['nm'],
                "distance_km": flight['distance']['km'],
                "max_alt": flight['max']['alt'],
                "max_spd": flight['max']['spd'],
                "duration": flight['time'],
                "departure_icao": departure['icao'],
                "departure_iata": departure['iata'],
                "departure_name": departure['name'],
                "departure_time": departure['time'],
                "departure_lat": departure['geo']['lat'],
                "departure_long": departure['geo']['lng'],
                "departure_hdg_mag": departure['hdg']['mag'],
                "departure_hdg_true": departure['hdg']['true'],
                "departure_spd": departure['spd']['tas'],
                # NOTE: the "depature_*" misspellings are kept on purpose —
                # existing InfluxDB series and dashboards query these keys.
                "depature_fuel": departure['fuel'],
                "depature_pitch": departure['pitch'],
                "depature_bank": departure['bank'],
                "depature_wind_spd": departure['wind']['spd'],
                "depature_wind_dir": departure['wind']['dir'],
                "departure_url": f"https://fshub.io/airport/{departure['icao'].upper()}",
                "arrival_icao": arrival['icao'],
                "arrival_iata": arrival['iata'],
                "arrival_name": arrival['name'],
                "arrival_time": arrival['time'],
                "arrival_lat": arrival['geo']['lat'],
                "arrival_long": arrival['geo']['lng'],
                "arrival_hdg_mag": arrival['hdg']['mag'],
                "arrival_hdg_true": arrival['hdg']['true'],
                "arrival_spd": arrival['spd']['tas'],
                "arrival_fuel": arrival['fuel'],
                "arrival_pitch": arrival['pitch'],
                "arrival_bank": arrival['bank'],
                "arrival_wind_spd": arrival['wind']['spd'],
                "arrival_wind_dir": arrival['wind']['dir'],
                "arrival_url": f"https://fshub.io/airport/{arrival['icao'].upper()}",
                "flight_url": f"https://fshub.io/flight/{str(flight['id'])}",
                "pilot_url": f"https://fshub.io/pilot/{str(flight['user']['id'])}"
            }
        })
        points.append({
            "measurement": "airport",
            "time": departure['time'],
            "tags": {
                "flight_id": flight['id'],
                "pilot_id": flight['user']['id'],
                "icao": departure['icao'],
                "iata": departure['iata']
            },
            "fields": {
                "name": departure['name'],
                "lat": departure['geo']['lat'],
                "long": departure['geo']['lng'],
                "url": f"https://fshub.io/airport/{departure['icao'].upper()}"
            }
        })
        points.append({
            "measurement": "airport",
            "time": arrival['time'],
            "tags": {
                "flight_id": flight['id'],
                "pilot_id": flight['user']['id'],
                "icao": arrival['icao'],
                "iata": arrival['iata']
            },
            "fields": {
                "name": arrival['name'],
                "lat": arrival['geo']['lat'],
                "long": arrival['geo']['lng'],
                "url": f"https://fshub.io/airport/{arrival['icao'].upper()}"
            }
        })

    # A full page suggests more data; a short page ends pagination.
    if data['meta']['cursor']['count'] == limit:
        return data['meta']['cursor']['next']
    return -1
126 |
connect(FSHUB_DATABASE)

# Page through the entire flight history, 100 flights at a time, writing each
# page to InfluxDB before fetching the next. fetch() returns -1 after the
# final (short) page has been processed.
cursor = 0

while cursor != -1:
    cursor = fetch(100,cursor)
    write_points(points)
    points = []
135 |
--------------------------------------------------------------------------------
/github.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys
17 | from datetime import datetime
18 | from config import *
19 |
# Fail fast when the GitHub credentials were never configured.
if not GITHUB_API_KEY:
    logging.error("GITHUB_API_KEY not set in config.py")
    sys.exit(1)
23 |
def add_week(week):
    """Record a weekly commit-count point for the repo currently being
    iterated (read from the module-level ``repo``), skipping empty weeks.

    ``week`` is a GitHub contributor-stats dict: 'w' is the week's Unix
    timestamp and 'c' is the number of commits in that week.
    """
    commits = week['c']
    if commits <= 0:
        # No commits that week — don't write a zero-valued point.
        return
    points.append({
        "measurement": "commits",
        "time": datetime.fromtimestamp(week['w']).isoformat(),
        "tags": {
            "username": GITHUB_USERNAME,
            "repo": repo['full_name']
        },
        "fields": {
            "value": commits
        }
    })
37 |
connect(GITHUB_DATABASE)

# Fetch the 10 most recently pushed repos for the authenticated user.
try:
    response = requests.get('https://api.github.com/user/repos',
        params={'sort': 'pushed', 'per_page':10},
        headers={'Authorization': f'token {GITHUB_API_KEY}', 'User-Agent': GITHUB_USERNAME})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

repos = response.json()
if len(repos) == 0:
    logging.error("No GitHub repos found")
    sys.exit(1)
logging.info("Got %s repos from GitHub", len(repos))

points = []

for repo in repos:
    logging.info("Fetch statistics for %s", repo['full_name'])
    try:
        response = requests.get(repo['url'] + '/stats/contributors',
            params={'sort': 'pushed'},
            headers={'Authorization': f'token {GITHUB_API_KEY}', 'User-Agent': GITHUB_USERNAME})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        # Consistency fix: this branch previously print()ed and exited with
        # status 0; log and exit non-zero like every other fetcher here.
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    contributors = response.json()
    for contributor in contributors:
        if contributor['author']['login'] == GITHUB_USERNAME:
            # Only the two most recent weeks are written: re-adding the full
            # history each run put unnecessary load on InfluxDB.
            for week in contributor['weeks'][-2:]:
                add_week(week)

write_points(points)
80 |
--------------------------------------------------------------------------------
/google-play.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import sys, os
17 | from bs4 import BeautifulSoup
18 | from config import *
19 |
20 | points = []
21 |
def parse_activity(game):
    """Parse a game's Activity.html from a Google Takeout export and record
    its "Time Last Played" timestamp as a "time" point.

    :param game: directory name of the game inside the Takeout export,
        also used as the title tag.
    The point's value field is always 0; only the timestamp and tags carry
    information.
    """
    # Context manager: the file is closed even if BeautifulSoup raises.
    with open(f'Takeout/Google Play Games Services/Games/{game}/Activity.html') as file:
        html = BeautifulSoup(file, 'html.parser')

    for row in html.find_all('tr'):
        if row.contents[0].name == 'td' and row.contents[0].string == 'Time Last Played':
            points.append({
                "measurement": "time",
                "time": row.contents[1].string,
                "tags": {
                    "platform": "Google Play",
                    "title": game,
                },
                "fields": {
                    "value": 0
                }
            })
40 |
def parse_achievements(game):
    """Parse a game's Experience.html from a Google Takeout export and append
    one "achievement" point per "Achievement unlocked" row.

    :param game: directory name of the game inside the Takeout export; note
        the title tag is taken from the table row itself, not this argument.
    """
    # Context manager: the file is closed even if BeautifulSoup raises.
    with open(f'Takeout/Google Play Games Services/Games/{game}/Experience.html') as file:
        html = BeautifulSoup(file, 'html.parser')

    for row in html.find_all('tr'):
        if row.contents[0].name == 'td' and row.contents[0].string == 'Achievement unlocked':
            # Use distinct locals instead of reassigning the game parameter.
            unlock_time = row.contents[2].string
            title = row.contents[6].string
            apiname = row.contents[1].string

            points.append({
                "measurement": "achievement",
                "time": unlock_time,
                "tags": {
                    "platform": "Google Play",
                    "title": title,
                    "apiname": apiname
                },
                "fields": {
                    # Title-cased display name derived from the raw API name.
                    "name": apiname.title()
                }
            })
65 |
# Walk every game directory in the Takeout export and parse whichever of the
# two per-game HTML files is present, then flush everything to InfluxDB.
games_dir = 'Takeout/Google Play Games Services/Games'

if not os.path.isdir(games_dir):
    logging.error("Google Takeout files not found. Please extract the archive into Takeout/")
    sys.exit(1)

connect(GAMING_DATABASE)

for game in os.listdir(games_dir):
    if os.path.isfile(os.path.join(games_dir, game, 'Activity.html')):
        parse_activity(game)
    if os.path.isfile(os.path.join(games_dir, game, 'Experience.html')):
        parse_achievements(game)

write_points(points)
79 |
--------------------------------------------------------------------------------
/grafana/exist.json:
--------------------------------------------------------------------------------
1 | {
2 | "__inputs": [
3 | {
4 | "name": "DS_EXIST",
5 | "label": "exist",
6 | "description": "",
7 | "type": "datasource",
8 | "pluginId": "influxdb",
9 | "pluginName": "InfluxDB"
10 | }
11 | ],
12 | "__requires": [
13 | {
14 | "type": "grafana",
15 | "id": "grafana",
16 | "name": "Grafana",
17 | "version": "6.5.1"
18 | },
19 | {
20 | "type": "panel",
21 | "id": "graph",
22 | "name": "Graph",
23 | "version": ""
24 | },
25 | {
26 | "type": "datasource",
27 | "id": "influxdb",
28 | "name": "InfluxDB",
29 | "version": "1.0.0"
30 | },
31 | {
32 | "type": "panel",
33 | "id": "table",
34 | "name": "Table",
35 | "version": ""
36 | }
37 | ],
38 | "annotations": {
39 | "list": [
40 | {
41 | "builtIn": 1,
42 | "datasource": "-- Grafana --",
43 | "enable": true,
44 | "hide": true,
45 | "iconColor": "rgba(0, 211, 255, 1)",
46 | "name": "Annotations & Alerts",
47 | "type": "dashboard"
48 | }
49 | ]
50 | },
51 | "editable": true,
52 | "gnetId": null,
53 | "graphTooltip": 0,
54 | "id": null,
55 | "iteration": 1578395319706,
56 | "links": [],
57 | "panels": [
58 | {
59 | "aliasColors": {},
60 | "bars": true,
61 | "dashLength": 10,
62 | "dashes": false,
63 | "datasource": "${DS_EXIST}",
64 | "decimals": 0,
65 | "fill": 1,
66 | "fillGradient": 0,
67 | "gridPos": {
68 | "h": 8,
69 | "w": 24,
70 | "x": 0,
71 | "y": 0
72 | },
73 | "hiddenSeries": false,
74 | "id": 4,
75 | "legend": {
76 | "alignAsTable": true,
77 | "avg": false,
78 | "current": false,
79 | "hideEmpty": true,
80 | "hideZero": true,
81 | "max": false,
82 | "min": false,
83 | "rightSide": true,
84 | "show": true,
85 | "sort": "total",
86 | "sortDesc": true,
87 | "total": true,
88 | "values": true
89 | },
90 | "lines": false,
91 | "linewidth": 1,
92 | "nullPointMode": "null",
93 | "options": {
94 | "dataLinks": []
95 | },
96 | "percentage": false,
97 | "pointradius": 2,
98 | "points": false,
99 | "renderer": "flot",
100 | "seriesOverrides": [],
101 | "spaceLength": 10,
102 | "stack": true,
103 | "steppedLine": false,
104 | "targets": [
105 | {
106 | "alias": "$tag_tag",
107 | "groupBy": [
108 | {
109 | "params": [
110 | "1d"
111 | ],
112 | "type": "time"
113 | },
114 | {
115 | "params": [
116 | "tag"
117 | ],
118 | "type": "tag"
119 | }
120 | ],
121 | "measurement": "custom",
122 | "orderByTime": "ASC",
123 | "policy": "default",
124 | "refId": "A",
125 | "resultFormat": "time_series",
126 | "select": [
127 | [
128 | {
129 | "params": [
130 | "value"
131 | ],
132 | "type": "field"
133 | },
134 | {
135 | "params": [],
136 | "type": "count"
137 | }
138 | ]
139 | ],
140 | "tags": [
141 | {
142 | "key": "value",
143 | "operator": ">",
144 | "value": "0"
145 | }
146 | ]
147 | }
148 | ],
149 | "thresholds": [],
150 | "timeFrom": null,
151 | "timeRegions": [],
152 | "timeShift": null,
153 | "title": "Daily Tags",
154 | "tooltip": {
155 | "shared": true,
156 | "sort": 0,
157 | "value_type": "individual"
158 | },
159 | "type": "graph",
160 | "xaxis": {
161 | "buckets": null,
162 | "mode": "time",
163 | "name": null,
164 | "show": true,
165 | "values": []
166 | },
167 | "yaxes": [
168 | {
169 | "format": "short",
170 | "label": null,
171 | "logBase": 1,
172 | "max": null,
173 | "min": null,
174 | "show": false
175 | },
176 | {
177 | "format": "short",
178 | "label": null,
179 | "logBase": 1,
180 | "max": null,
181 | "min": null,
182 | "show": false
183 | }
184 | ],
185 | "yaxis": {
186 | "align": false,
187 | "alignLevel": null
188 | }
189 | },
190 | {
191 | "columns": [],
192 | "datasource": "${DS_EXIST}",
193 | "fontSize": "100%",
194 | "gridPos": {
195 | "h": 8,
196 | "w": 11,
197 | "x": 0,
198 | "y": 8
199 | },
200 | "id": 2,
201 | "options": {},
202 | "pageSize": null,
203 | "showHeader": true,
204 | "sort": {
205 | "col": 0,
206 | "desc": true
207 | },
208 | "styles": [
209 | {
210 | "alias": "Date",
211 | "dateFormat": "YYYY-MM-DD",
212 | "pattern": "Time",
213 | "type": "date"
214 | },
215 | {
216 | "alias": "Insight",
217 | "colorMode": null,
218 | "colors": [
219 | "rgba(245, 54, 54, 0.9)",
220 | "rgba(237, 129, 40, 0.89)",
221 | "rgba(50, 172, 45, 0.97)"
222 | ],
223 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
224 | "decimals": 2,
225 | "mappingType": 1,
226 | "pattern": "html",
227 | "preserveFormat": false,
228 | "sanitize": true,
229 | "thresholds": [],
230 | "type": "string",
231 | "unit": "short"
232 | }
233 | ],
234 | "targets": [
235 | {
236 | "groupBy": [],
237 | "measurement": "insight",
238 | "orderByTime": "ASC",
239 | "policy": "default",
240 | "refId": "A",
241 | "resultFormat": "table",
242 | "select": [
243 | [
244 | {
245 | "params": [
246 | "html"
247 | ],
248 | "type": "field"
249 | }
250 | ]
251 | ],
252 | "tags": []
253 | }
254 | ],
255 | "timeFrom": null,
256 | "timeShift": null,
257 | "title": "Insights",
258 | "transform": "table",
259 | "type": "table"
260 | },
261 | {
262 | "aliasColors": {},
263 | "bars": false,
264 | "dashLength": 10,
265 | "dashes": false,
266 | "datasource": "${DS_EXIST}",
267 | "fill": 1,
268 | "fillGradient": 0,
269 | "gridPos": {
270 | "h": 8,
271 | "w": 13,
272 | "x": 11,
273 | "y": 8
274 | },
275 | "hiddenSeries": false,
276 | "id": 6,
277 | "legend": {
278 | "avg": true,
279 | "current": false,
280 | "max": true,
281 | "min": true,
282 | "show": true,
283 | "total": false,
284 | "values": true
285 | },
286 | "lines": true,
287 | "linewidth": 1,
288 | "nullPointMode": "connected",
289 | "options": {
290 | "dataLinks": []
291 | },
292 | "percentage": false,
293 | "pointradius": 2,
294 | "points": false,
295 | "renderer": "flot",
296 | "seriesOverrides": [],
297 | "spaceLength": 10,
298 | "stack": false,
299 | "steppedLine": true,
300 | "targets": [
301 | {
302 | "alias": "Mood",
303 | "groupBy": [
304 | {
305 | "params": [
306 | "1d"
307 | ],
308 | "type": "time"
309 | },
310 | {
311 | "params": [
312 | "null"
313 | ],
314 | "type": "fill"
315 | }
316 | ],
317 | "measurement": "mood",
318 | "orderByTime": "ASC",
319 | "policy": "default",
320 | "refId": "A",
321 | "resultFormat": "time_series",
322 | "select": [
323 | [
324 | {
325 | "params": [
326 | "value"
327 | ],
328 | "type": "field"
329 | },
330 | {
331 | "params": [],
332 | "type": "mean"
333 | }
334 | ]
335 | ],
336 | "tags": []
337 | }
338 | ],
339 | "thresholds": [],
340 | "timeFrom": null,
341 | "timeRegions": [],
342 | "timeShift": null,
343 | "title": "Mood",
344 | "tooltip": {
345 | "shared": true,
346 | "sort": 0,
347 | "value_type": "individual"
348 | },
349 | "type": "graph",
350 | "xaxis": {
351 | "buckets": null,
352 | "mode": "time",
353 | "name": null,
354 | "show": true,
355 | "values": []
356 | },
357 | "yaxes": [
358 | {
359 | "format": "short",
360 | "label": null,
361 | "logBase": 1,
362 | "max": null,
363 | "min": null,
364 | "show": true
365 | },
366 | {
367 | "format": "short",
368 | "label": null,
369 | "logBase": 1,
370 | "max": null,
371 | "min": null,
372 | "show": true
373 | }
374 | ],
375 | "yaxis": {
376 | "align": false,
377 | "alignLevel": null
378 | }
379 | },
380 | {
381 | "datasource": null,
382 | "gridPos": {
383 | "h": 1,
384 | "w": 24,
385 | "x": 0,
386 | "y": 16
387 | },
388 | "id": 8,
389 | "title": "Custom Tags",
390 | "type": "row"
391 | },
392 | {
393 | "aliasColors": {
394 | "Anxiety": "green"
395 | },
396 | "bars": true,
397 | "dashLength": 10,
398 | "dashes": false,
399 | "datasource": "${DS_EXIST}",
400 | "decimals": 0,
401 | "fill": 1,
402 | "fillGradient": 0,
403 | "gridPos": {
404 | "h": 3,
405 | "w": 24,
406 | "x": 0,
407 | "y": 17
408 | },
409 | "hiddenSeries": false,
410 | "id": 9,
411 | "legend": {
412 | "alignAsTable": false,
413 | "avg": false,
414 | "current": false,
415 | "hideEmpty": true,
416 | "hideZero": true,
417 | "max": false,
418 | "min": false,
419 | "rightSide": false,
420 | "show": false,
421 | "sort": "total",
422 | "sortDesc": true,
423 | "total": false,
424 | "values": false
425 | },
426 | "lines": false,
427 | "linewidth": 1,
428 | "nullPointMode": "null",
429 | "options": {
430 | "dataLinks": []
431 | },
432 | "percentage": false,
433 | "pointradius": 2,
434 | "points": false,
435 | "renderer": "flot",
436 | "repeat": "tag",
437 | "repeatDirection": "v",
438 | "seriesOverrides": [],
439 | "spaceLength": 10,
440 | "stack": false,
441 | "steppedLine": false,
442 | "targets": [
443 | {
444 | "alias": "$tag",
445 | "groupBy": [
446 | {
447 | "params": [
448 | "1d"
449 | ],
450 | "type": "time"
451 | }
452 | ],
453 | "measurement": "custom",
454 | "orderByTime": "ASC",
455 | "policy": "default",
456 | "refId": "A",
457 | "resultFormat": "time_series",
458 | "select": [
459 | [
460 | {
461 | "params": [
462 | "value"
463 | ],
464 | "type": "field"
465 | },
466 | {
467 | "params": [],
468 | "type": "sum"
469 | }
470 | ]
471 | ],
472 | "tags": [
473 | {
474 | "key": "tag",
475 | "operator": "=",
476 | "value": "$tag"
477 | }
478 | ]
479 | }
480 | ],
481 | "thresholds": [],
482 | "timeFrom": null,
483 | "timeRegions": [],
484 | "timeShift": null,
485 | "title": "$tag",
486 | "tooltip": {
487 | "shared": true,
488 | "sort": 0,
489 | "value_type": "individual"
490 | },
491 | "type": "graph",
492 | "xaxis": {
493 | "buckets": null,
494 | "mode": "time",
495 | "name": null,
496 | "show": true,
497 | "values": []
498 | },
499 | "yaxes": [
500 | {
501 | "format": "short",
502 | "label": null,
503 | "logBase": 1,
504 | "max": null,
505 | "min": null,
506 | "show": false
507 | },
508 | {
509 | "format": "short",
510 | "label": null,
511 | "logBase": 1,
512 | "max": null,
513 | "min": null,
514 | "show": false
515 | }
516 | ],
517 | "yaxis": {
518 | "align": false,
519 | "alignLevel": null
520 | }
521 | }
522 | ],
523 | "schemaVersion": 21,
524 | "style": "dark",
525 | "tags": [],
526 | "templating": {
527 | "list": [
528 | {
529 | "allValue": null,
530 | "current": {},
531 | "datasource": "${DS_EXIST}",
532 | "definition": "SHOW TAG VALUES WITH KEY = \"tag\"",
533 | "hide": 2,
534 | "includeAll": true,
535 | "label": "",
536 | "multi": false,
537 | "name": "tag",
538 | "options": [],
539 | "query": "SHOW TAG VALUES WITH KEY = \"tag\"",
540 | "refresh": 1,
541 | "regex": "",
542 | "skipUrlSync": false,
543 | "sort": 0,
544 | "tagValuesQuery": "",
545 | "tags": [],
546 | "tagsQuery": "",
547 | "type": "query",
548 | "useTags": false
549 | }
550 | ]
551 | },
552 | "time": {
553 | "from": "now-90d",
554 | "to": "now"
555 | },
556 | "timepicker": {
557 | "refresh_intervals": [
558 | "5s",
559 | "10s",
560 | "30s",
561 | "1m",
562 | "5m",
563 | "15m",
564 | "30m",
565 | "1h",
566 | "2h",
567 | "1d"
568 | ]
569 | },
570 | "timezone": "",
571 | "title": "Exist.io",
572 | "uid": "Yui4lkiRz",
573 | "version": 14
574 | }
--------------------------------------------------------------------------------
/grafana/gaming.json:
--------------------------------------------------------------------------------
1 | {
2 | "__inputs": [
3 | {
4 | "name": "DS_GAMING",
5 | "label": "gaming",
6 | "description": "",
7 | "type": "datasource",
8 | "pluginId": "influxdb",
9 | "pluginName": "InfluxDB"
10 | }
11 | ],
12 | "__requires": [
13 | {
14 | "type": "grafana",
15 | "id": "grafana",
16 | "name": "Grafana",
17 | "version": "8.0.4"
18 | },
19 | {
20 | "type": "panel",
21 | "id": "grafana-piechart-panel",
22 | "name": "Pie Chart (old)",
23 | "version": "1.6.2"
24 | },
25 | {
26 | "type": "panel",
27 | "id": "graph",
28 | "name": "Graph (old)",
29 | "version": ""
30 | },
31 | {
32 | "type": "datasource",
33 | "id": "influxdb",
34 | "name": "InfluxDB",
35 | "version": "1.0.0"
36 | },
37 | {
38 | "type": "panel",
39 | "id": "table-old",
40 | "name": "Table (old)",
41 | "version": ""
42 | }
43 | ],
44 | "annotations": {
45 | "list": [
46 | {
47 | "builtIn": 1,
48 | "datasource": "-- Grafana --",
49 | "enable": true,
50 | "hide": true,
51 | "iconColor": "rgba(0, 211, 255, 1)",
52 | "name": "Annotations & Alerts",
53 | "type": "dashboard"
54 | }
55 | ]
56 | },
57 | "editable": true,
58 | "gnetId": null,
59 | "graphTooltip": 0,
60 | "id": null,
61 | "links": [],
62 | "panels": [
63 | {
64 | "aliasColors": {},
65 | "breakPoint": "50%",
66 | "cacheTimeout": null,
67 | "combine": {
68 | "label": "Others",
69 | "threshold": "0.02"
70 | },
71 | "datasource": "${DS_GAMING}",
72 | "decimals": 0,
73 | "fontSize": "80%",
74 | "format": "dtdurations",
75 | "gridPos": {
76 | "h": 9,
77 | "w": 10,
78 | "x": 0,
79 | "y": 0
80 | },
81 | "id": 8,
82 | "interval": null,
83 | "legend": {
84 | "header": "Time Played",
85 | "show": true,
86 | "sort": "current",
87 | "sortDesc": true,
88 | "values": true
89 | },
90 | "legendType": "Right side",
91 | "links": [],
92 | "maxDataPoints": 3,
93 | "nullPointMode": "connected",
94 | "pieType": "pie",
95 | "strokeWidth": 1,
96 | "targets": [
97 | {
98 | "alias": "$tag_title",
99 | "groupBy": [
100 | {
101 | "params": [
102 | "title"
103 | ],
104 | "type": "tag"
105 | }
106 | ],
107 | "measurement": "time",
108 | "orderByTime": "ASC",
109 | "policy": "default",
110 | "refId": "A",
111 | "resultFormat": "time_series",
112 | "select": [
113 | [
114 | {
115 | "params": [
116 | "value"
117 | ],
118 | "type": "field"
119 | },
120 | {
121 | "params": [],
122 | "type": "sum"
123 | }
124 | ]
125 | ],
126 | "tags": [
127 | {
128 | "key": "value",
129 | "operator": ">",
130 | "value": "0"
131 | }
132 | ]
133 | }
134 | ],
135 | "timeFrom": null,
136 | "timeShift": null,
137 | "title": "Most Played Games",
138 | "type": "grafana-piechart-panel",
139 | "valueName": "current"
140 | },
141 | {
142 | "aliasColors": {},
143 | "breakPoint": "50%",
144 | "cacheTimeout": null,
145 | "combine": {
146 | "label": "Others",
147 | "threshold": 0
148 | },
149 | "datasource": "${DS_GAMING}",
150 | "fontSize": "80%",
151 | "format": "dtdurations",
152 | "gridPos": {
153 | "h": 9,
154 | "w": 4,
155 | "x": 10,
156 | "y": 0
157 | },
158 | "id": 10,
159 | "interval": null,
160 | "legend": {
161 | "header": "Time Played",
162 | "show": true,
163 | "sort": "total",
164 | "sortDesc": true,
165 | "values": true
166 | },
167 | "legendType": "Under graph",
168 | "links": [],
169 | "maxDataPoints": 3,
170 | "nullPointMode": "connected",
171 | "pieType": "pie",
172 | "strokeWidth": 1,
173 | "targets": [
174 | {
175 | "alias": "$tag_platform",
176 | "groupBy": [
177 | {
178 | "params": [
179 | "platform"
180 | ],
181 | "type": "tag"
182 | }
183 | ],
184 | "measurement": "time",
185 | "orderByTime": "ASC",
186 | "policy": "default",
187 | "refId": "A",
188 | "resultFormat": "time_series",
189 | "select": [
190 | [
191 | {
192 | "params": [
193 | "value"
194 | ],
195 | "type": "field"
196 | },
197 | {
198 | "params": [],
199 | "type": "sum"
200 | }
201 | ]
202 | ],
203 | "tags": [
204 | {
205 | "key": "value",
206 | "operator": ">",
207 | "value": "0"
208 | }
209 | ]
210 | }
211 | ],
212 | "timeFrom": null,
213 | "timeShift": null,
214 | "title": "Time Per Platform",
215 | "type": "grafana-piechart-panel",
216 | "valueName": "total"
217 | },
218 | {
219 | "aliasColors": {},
220 | "bars": true,
221 | "dashLength": 10,
222 | "dashes": false,
223 | "datasource": "${DS_GAMING}",
224 | "fieldConfig": {
225 | "defaults": {
226 | "links": []
227 | },
228 | "overrides": []
229 | },
230 | "fill": 4,
231 | "fillGradient": 10,
232 | "gridPos": {
233 | "h": 9,
234 | "w": 10,
235 | "x": 14,
236 | "y": 0
237 | },
238 | "hiddenSeries": false,
239 | "id": 6,
240 | "interval": "1d",
241 | "legend": {
242 | "avg": false,
243 | "current": false,
244 | "max": true,
245 | "min": false,
246 | "show": true,
247 | "total": false,
248 | "values": true
249 | },
250 | "lines": false,
251 | "linewidth": 2,
252 | "nullPointMode": "null",
253 | "options": {
254 | "alertThreshold": true
255 | },
256 | "percentage": false,
257 | "pluginVersion": "8.0.4",
258 | "pointradius": 2,
259 | "points": false,
260 | "renderer": "flot",
261 | "seriesOverrides": [],
262 | "spaceLength": 10,
263 | "stack": true,
264 | "steppedLine": false,
265 | "targets": [
266 | {
267 | "alias": "$tag_platform",
268 | "groupBy": [
269 | {
270 | "params": [
271 | "1d"
272 | ],
273 | "type": "time"
274 | },
275 | {
276 | "params": [
277 | "platform"
278 | ],
279 | "type": "tag"
280 | }
281 | ],
282 | "measurement": "achievement",
283 | "orderByTime": "ASC",
284 | "policy": "default",
285 | "refId": "A",
286 | "resultFormat": "time_series",
287 | "select": [
288 | [
289 | {
290 | "params": [
291 | "name"
292 | ],
293 | "type": "field"
294 | },
295 | {
296 | "params": [],
297 | "type": "count"
298 | }
299 | ]
300 | ],
301 | "tags": []
302 | }
303 | ],
304 | "thresholds": [],
305 | "timeFrom": null,
306 | "timeRegions": [],
307 | "timeShift": null,
308 | "title": "Achievements Per Day",
309 | "tooltip": {
310 | "shared": true,
311 | "sort": 0,
312 | "value_type": "individual"
313 | },
314 | "type": "graph",
315 | "xaxis": {
316 | "buckets": null,
317 | "mode": "time",
318 | "name": null,
319 | "show": true,
320 | "values": []
321 | },
322 | "yaxes": [
323 | {
324 | "$$hashKey": "object:87",
325 | "format": "locale",
326 | "label": null,
327 | "logBase": 1,
328 | "max": null,
329 | "min": null,
330 | "show": true
331 | },
332 | {
333 | "$$hashKey": "object:88",
334 | "format": "short",
335 | "label": null,
336 | "logBase": 1,
337 | "max": null,
338 | "min": null,
339 | "show": false
340 | }
341 | ],
342 | "yaxis": {
343 | "align": false,
344 | "alignLevel": null
345 | }
346 | },
347 | {
348 | "columns": [],
349 | "datasource": "${DS_GAMING}",
350 | "fontSize": "100%",
351 | "gridPos": {
352 | "h": 10,
353 | "w": 10,
354 | "x": 0,
355 | "y": 9
356 | },
357 | "id": 2,
358 | "pageSize": null,
359 | "showHeader": true,
360 | "sort": {
361 | "col": 0,
362 | "desc": true
363 | },
364 | "styles": [
365 | {
366 | "alias": "Date",
367 | "align": "auto",
368 | "dateFormat": "YYYY-MM-DD",
369 | "pattern": "Time",
370 | "type": "date"
371 | },
372 | {
373 | "alias": "Time Played",
374 | "align": "auto",
375 | "colorMode": null,
376 | "colors": [
377 | "rgba(245, 54, 54, 0.9)",
378 | "rgba(237, 129, 40, 0.89)",
379 | "rgba(50, 172, 45, 0.97)"
380 | ],
381 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
382 | "decimals": 0,
383 | "mappingType": 1,
384 | "pattern": "value",
385 | "thresholds": [],
386 | "type": "hidden",
387 | "unit": "dtdurations"
388 | },
389 | {
390 | "alias": "Game",
391 | "align": "auto",
392 | "colorMode": null,
393 | "colors": [
394 | "rgba(245, 54, 54, 0.9)",
395 | "rgba(237, 129, 40, 0.89)",
396 | "rgba(50, 172, 45, 0.97)"
397 | ],
398 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
399 | "decimals": 2,
400 | "mappingType": 1,
401 | "pattern": "title",
402 | "thresholds": [],
403 | "type": "string",
404 | "unit": "short"
405 | },
406 | {
407 | "alias": "Platform",
408 | "align": "auto",
409 | "colorMode": null,
410 | "colors": [
411 | "rgba(245, 54, 54, 0.9)",
412 | "rgba(237, 129, 40, 0.89)",
413 | "rgba(50, 172, 45, 0.97)"
414 | ],
415 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
416 | "decimals": 2,
417 | "mappingType": 1,
418 | "pattern": "platform",
419 | "thresholds": [],
420 | "type": "string",
421 | "unit": "short"
422 | }
423 | ],
424 | "targets": [
425 | {
426 | "groupBy": [],
427 | "measurement": "time",
428 | "orderByTime": "ASC",
429 | "policy": "default",
430 | "query": "SELECT last(\"value\") AS \"value\" FROM \"time\" WHERE $timeFilter GROUP BY time(1d),\"title\",\"platform\" fill(none) ORDER BY time DESC",
431 | "rawQuery": true,
432 | "refId": "A",
433 | "resultFormat": "table",
434 | "select": [
435 | [
436 | {
437 | "params": [
438 | "title"
439 | ],
440 | "type": "field"
441 | }
442 | ],
443 | [
444 | {
445 | "params": [
446 | "value"
447 | ],
448 | "type": "field"
449 | }
450 | ]
451 | ],
452 | "tags": []
453 | }
454 | ],
455 | "timeFrom": null,
456 | "timeShift": null,
457 | "title": "Recent Games",
458 | "transform": "table",
459 | "type": "table-old"
460 | },
461 | {
462 | "columns": [],
463 | "datasource": "${DS_GAMING}",
464 | "fontSize": "100%",
465 | "gridPos": {
466 | "h": 10,
467 | "w": 14,
468 | "x": 10,
469 | "y": 9
470 | },
471 | "id": 4,
472 | "pageSize": null,
473 | "showHeader": true,
474 | "sort": {
475 | "col": 0,
476 | "desc": true
477 | },
478 | "styles": [
479 | {
480 | "alias": "Time",
481 | "align": "auto",
482 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
483 | "pattern": "Time",
484 | "type": "date"
485 | },
486 | {
487 | "alias": "Game",
488 | "align": "auto",
489 | "colorMode": null,
490 | "colors": [
491 | "rgba(245, 54, 54, 0.9)",
492 | "rgba(237, 129, 40, 0.89)",
493 | "rgba(50, 172, 45, 0.97)"
494 | ],
495 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
496 | "decimals": 2,
497 | "mappingType": 1,
498 | "pattern": "title",
499 | "thresholds": [],
500 | "type": "string",
501 | "unit": "short"
502 | },
503 | {
504 | "alias": "Achievement",
505 | "align": "auto",
506 | "colorMode": null,
507 | "colors": [
508 | "rgba(245, 54, 54, 0.9)",
509 | "rgba(237, 129, 40, 0.89)",
510 | "rgba(50, 172, 45, 0.97)"
511 | ],
512 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
513 | "decimals": 2,
514 | "mappingType": 1,
515 | "pattern": "name",
516 | "thresholds": [],
517 | "type": "string",
518 | "unit": "short"
519 | },
520 | {
521 | "alias": "Description",
522 | "align": "auto",
523 | "colorMode": null,
524 | "colors": [
525 | "rgba(245, 54, 54, 0.9)",
526 | "rgba(237, 129, 40, 0.89)",
527 | "rgba(50, 172, 45, 0.97)"
528 | ],
529 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
530 | "decimals": 2,
531 | "mappingType": 1,
532 | "pattern": "description",
533 | "thresholds": [],
534 | "type": "string",
535 | "unit": "short"
536 | },
537 | {
538 | "alias": "Platform",
539 | "align": "auto",
540 | "colorMode": null,
541 | "colors": [
542 | "rgba(245, 54, 54, 0.9)",
543 | "rgba(237, 129, 40, 0.89)",
544 | "rgba(50, 172, 45, 0.97)"
545 | ],
546 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
547 | "decimals": 2,
548 | "mappingType": 1,
549 | "pattern": "platform",
550 | "thresholds": [],
551 | "type": "string",
552 | "unit": "short"
553 | }
554 | ],
555 | "targets": [
556 | {
557 | "groupBy": [],
558 | "measurement": "achievement",
559 | "orderByTime": "ASC",
560 | "policy": "default",
561 | "refId": "A",
562 | "resultFormat": "table",
563 | "select": [
564 | [
565 | {
566 | "params": [
567 | "title"
568 | ],
569 | "type": "field"
570 | }
571 | ],
572 | [
573 | {
574 | "params": [
575 | "name"
576 | ],
577 | "type": "field"
578 | }
579 | ],
580 | [
581 | {
582 | "params": [
583 | "description"
584 | ],
585 | "type": "field"
586 | }
587 | ],
588 | [
589 | {
590 | "params": [
591 | "platform"
592 | ],
593 | "type": "field"
594 | }
595 | ]
596 | ],
597 | "tags": []
598 | }
599 | ],
600 | "timeFrom": null,
601 | "timeShift": null,
602 | "title": "Recent Achievements",
603 | "transform": "table",
604 | "type": "table-old"
605 | }
606 | ],
607 | "schemaVersion": 30,
608 | "style": "dark",
609 | "tags": [],
610 | "templating": {
611 | "list": []
612 | },
613 | "time": {
614 | "from": "now-90d",
615 | "to": "now"
616 | },
617 | "timepicker": {
618 | "refresh_intervals": [
619 | "5s",
620 | "10s",
621 | "30s",
622 | "1m",
623 | "5m",
624 | "15m",
625 | "30m",
626 | "1h",
627 | "2h",
628 | "1d"
629 | ]
630 | },
631 | "timezone": "utc",
632 | "title": "Gaming",
633 | "uid": "TIAeUxgRk",
634 | "version": 20
635 | }
--------------------------------------------------------------------------------
/grafana/github.json:
--------------------------------------------------------------------------------
1 | {
2 | "__inputs": [
3 | {
4 | "name": "DS_GITHUB",
5 | "label": "github",
6 | "description": "",
7 | "type": "datasource",
8 | "pluginId": "influxdb",
9 | "pluginName": "InfluxDB"
10 | }
11 | ],
12 | "__requires": [
13 | {
14 | "type": "grafana",
15 | "id": "grafana",
16 | "name": "Grafana",
17 | "version": "6.5.1"
18 | },
19 | {
20 | "type": "panel",
21 | "id": "graph",
22 | "name": "Graph",
23 | "version": ""
24 | },
25 | {
26 | "type": "datasource",
27 | "id": "influxdb",
28 | "name": "InfluxDB",
29 | "version": "1.0.0"
30 | }
31 | ],
32 | "annotations": {
33 | "list": [
34 | {
35 | "builtIn": 1,
36 | "datasource": "-- Grafana --",
37 | "enable": true,
38 | "hide": true,
39 | "iconColor": "rgba(0, 211, 255, 1)",
40 | "name": "Annotations & Alerts",
41 | "type": "dashboard"
42 | }
43 | ]
44 | },
45 | "editable": true,
46 | "gnetId": null,
47 | "graphTooltip": 0,
48 | "id": null,
49 | "links": [],
50 | "panels": [
51 | {
52 | "aliasColors": {},
53 | "bars": false,
54 | "dashLength": 10,
55 | "dashes": false,
56 | "datasource": "${DS_GITHUB}",
57 | "fill": 1,
58 | "fillGradient": 0,
59 | "gridPos": {
60 | "h": 8,
61 | "w": 12,
62 | "x": 0,
63 | "y": 0
64 | },
65 | "hiddenSeries": false,
66 | "id": 4,
67 | "interval": "1w",
68 | "legend": {
69 | "avg": false,
70 | "current": false,
71 | "max": false,
72 | "min": false,
73 | "show": false,
74 | "total": false,
75 | "values": false
76 | },
77 | "lines": true,
78 | "linewidth": 1,
79 | "nullPointMode": "null",
80 | "options": {
81 | "dataLinks": []
82 | },
83 | "percentage": false,
84 | "pointradius": 2,
85 | "points": false,
86 | "renderer": "flot",
87 | "seriesOverrides": [],
88 | "spaceLength": 10,
89 | "stack": false,
90 | "steppedLine": false,
91 | "targets": [
92 | {
93 | "alias": "Commits",
94 | "groupBy": [
95 | {
96 | "params": [
97 | "1w"
98 | ],
99 | "type": "time"
100 | },
101 | {
102 | "params": [
103 | "0"
104 | ],
105 | "type": "fill"
106 | }
107 | ],
108 | "measurement": "commits",
109 | "orderByTime": "ASC",
110 | "policy": "default",
111 | "refId": "A",
112 | "resultFormat": "time_series",
113 | "select": [
114 | [
115 | {
116 | "params": [
117 | "value"
118 | ],
119 | "type": "field"
120 | },
121 | {
122 | "params": [],
123 | "type": "sum"
124 | }
125 | ]
126 | ],
127 | "tags": []
128 | }
129 | ],
130 | "thresholds": [],
131 | "timeFrom": null,
132 | "timeRegions": [],
133 | "timeShift": null,
134 | "title": "Commits Per Week",
135 | "tooltip": {
136 | "shared": true,
137 | "sort": 0,
138 | "value_type": "individual"
139 | },
140 | "type": "graph",
141 | "xaxis": {
142 | "buckets": null,
143 | "mode": "time",
144 | "name": null,
145 | "show": true,
146 | "values": []
147 | },
148 | "yaxes": [
149 | {
150 | "format": "locale",
151 | "label": null,
152 | "logBase": 1,
153 | "max": null,
154 | "min": null,
155 | "show": true
156 | },
157 | {
158 | "format": "short",
159 | "label": null,
160 | "logBase": 1,
161 | "max": null,
162 | "min": null,
163 | "show": false
164 | }
165 | ],
166 | "yaxis": {
167 | "align": false,
168 | "alignLevel": null
169 | }
170 | },
171 | {
172 | "aliasColors": {},
173 | "bars": true,
174 | "dashLength": 10,
175 | "dashes": false,
176 | "datasource": "${DS_GITHUB}",
177 | "decimals": 0,
178 | "fill": 1,
179 | "fillGradient": 0,
180 | "gridPos": {
181 | "h": 8,
182 | "w": 12,
183 | "x": 0,
184 | "y": 8
185 | },
186 | "hiddenSeries": false,
187 | "id": 2,
188 | "interval": "1w",
189 | "legend": {
190 | "alignAsTable": true,
191 | "avg": true,
192 | "current": false,
193 | "hideEmpty": true,
194 | "hideZero": true,
195 | "max": false,
196 | "min": false,
197 | "rightSide": true,
198 | "show": true,
199 | "sort": "total",
200 | "sortDesc": true,
201 | "total": true,
202 | "values": true
203 | },
204 | "lines": false,
205 | "linewidth": 1,
206 | "nullPointMode": "null as zero",
207 | "options": {
208 | "dataLinks": []
209 | },
210 | "percentage": false,
211 | "pointradius": 2,
212 | "points": false,
213 | "renderer": "flot",
214 | "seriesOverrides": [],
215 | "spaceLength": 10,
216 | "stack": false,
217 | "steppedLine": false,
218 | "targets": [
219 | {
220 | "alias": "$tag_repo",
221 | "groupBy": [
222 | {
223 | "params": [
224 | "repo"
225 | ],
226 | "type": "tag"
227 | }
228 | ],
229 | "measurement": "commits",
230 | "orderByTime": "ASC",
231 | "policy": "default",
232 | "refId": "A",
233 | "resultFormat": "time_series",
234 | "select": [
235 | [
236 | {
237 | "params": [
238 | "value"
239 | ],
240 | "type": "field"
241 | }
242 | ]
243 | ],
244 | "tags": []
245 | }
246 | ],
247 | "thresholds": [],
248 | "timeFrom": null,
249 | "timeRegions": [],
250 | "timeShift": null,
251 | "title": "Commits By Repo",
252 | "tooltip": {
253 | "shared": false,
254 | "sort": 0,
255 | "value_type": "individual"
256 | },
257 | "type": "graph",
258 | "xaxis": {
259 | "buckets": null,
260 | "mode": "series",
261 | "name": null,
262 | "show": false,
263 | "values": [
264 | "total"
265 | ]
266 | },
267 | "yaxes": [
268 | {
269 | "decimals": 0,
270 | "format": "locale",
271 | "label": null,
272 | "logBase": 1,
273 | "max": null,
274 | "min": null,
275 | "show": true
276 | },
277 | {
278 | "format": "short",
279 | "label": null,
280 | "logBase": 1,
281 | "max": null,
282 | "min": null,
283 | "show": true
284 | }
285 | ],
286 | "yaxis": {
287 | "align": false,
288 | "alignLevel": null
289 | }
290 | }
291 | ],
292 | "schemaVersion": 21,
293 | "style": "dark",
294 | "tags": [],
295 | "templating": {
296 | "list": []
297 | },
298 | "time": {
299 | "from": "now-1y",
300 | "to": "now"
301 | },
302 | "timepicker": {},
303 | "timezone": "",
304 |   "title": "GitHub",
305 | "uid": "LRrhorRRk",
306 | "version": 2
307 | }
--------------------------------------------------------------------------------
/grafana/todoist.json:
--------------------------------------------------------------------------------
1 | {
2 | "__inputs": [
3 | {
4 | "name": "DS_TODOIST",
5 | "label": "todoist",
6 | "description": "",
7 | "type": "datasource",
8 | "pluginId": "influxdb",
9 | "pluginName": "InfluxDB"
10 | }
11 | ],
12 | "__requires": [
13 | {
14 | "type": "grafana",
15 | "id": "grafana",
16 | "name": "Grafana",
17 | "version": "8.0.4"
18 | },
19 | {
20 | "type": "panel",
21 | "id": "graph",
22 | "name": "Graph (old)",
23 | "version": ""
24 | },
25 | {
26 | "type": "datasource",
27 | "id": "influxdb",
28 | "name": "InfluxDB",
29 | "version": "1.0.0"
30 | },
31 | {
32 | "type": "panel",
33 | "id": "piechart",
34 | "name": "Pie chart",
35 | "version": ""
36 | }
37 | ],
38 | "annotations": {
39 | "list": [
40 | {
41 | "builtIn": 1,
42 | "datasource": "-- Grafana --",
43 | "enable": true,
44 | "hide": true,
45 | "iconColor": "rgba(0, 211, 255, 1)",
46 | "name": "Annotations & Alerts",
47 | "type": "dashboard"
48 | }
49 | ]
50 | },
51 | "editable": true,
52 | "gnetId": null,
53 | "graphTooltip": 0,
54 | "id": null,
55 | "links": [],
56 | "panels": [
57 | {
58 | "aliasColors": {},
59 | "bars": true,
60 | "dashLength": 10,
61 | "dashes": false,
62 | "datasource": "${DS_TODOIST}",
63 | "fieldConfig": {
64 | "defaults": {
65 | "links": []
66 | },
67 | "overrides": []
68 | },
69 | "fill": 1,
70 | "fillGradient": 0,
71 | "gridPos": {
72 | "h": 8,
73 | "w": 10,
74 | "x": 0,
75 | "y": 0
76 | },
77 | "hiddenSeries": false,
78 | "id": 6,
79 | "legend": {
80 | "avg": true,
81 | "current": false,
82 | "max": true,
83 | "min": false,
84 | "show": true,
85 | "total": true,
86 | "values": true
87 | },
88 | "lines": false,
89 | "linewidth": 1,
90 | "nullPointMode": "null",
91 | "options": {
92 | "alertThreshold": true
93 | },
94 | "percentage": false,
95 | "pluginVersion": "8.0.4",
96 | "pointradius": 2,
97 | "points": false,
98 | "renderer": "flot",
99 | "seriesOverrides": [],
100 | "spaceLength": 10,
101 | "stack": false,
102 | "steppedLine": false,
103 | "targets": [
104 | {
105 | "alias": "Added",
106 | "groupBy": [
107 | {
108 | "params": [
109 | "1w"
110 | ],
111 | "type": "time"
112 | }
113 | ],
114 | "measurement": "added",
115 | "orderByTime": "ASC",
116 | "policy": "default",
117 | "refId": "A",
118 | "resultFormat": "time_series",
119 | "select": [
120 | [
121 | {
122 | "params": [
123 | "content"
124 | ],
125 | "type": "field"
126 | },
127 | {
128 | "params": [],
129 | "type": "count"
130 | }
131 | ]
132 | ],
133 | "tags": []
134 | },
135 | {
136 | "alias": "Completed",
137 | "groupBy": [
138 | {
139 | "params": [
140 | "1w"
141 | ],
142 | "type": "time"
143 | }
144 | ],
145 | "measurement": "completed",
146 | "orderByTime": "ASC",
147 | "policy": "default",
148 | "refId": "B",
149 | "resultFormat": "time_series",
150 | "select": [
151 | [
152 | {
153 | "params": [
154 | "content"
155 | ],
156 | "type": "field"
157 | },
158 | {
159 | "params": [],
160 | "type": "count"
161 | }
162 | ]
163 | ],
164 | "tags": []
165 | }
166 | ],
167 | "thresholds": [],
168 | "timeFrom": null,
169 | "timeRegions": [],
170 | "timeShift": null,
171 | "title": "Added vs. Completed Tasks",
172 | "tooltip": {
173 | "shared": true,
174 | "sort": 0,
175 | "value_type": "individual"
176 | },
177 | "type": "graph",
178 | "xaxis": {
179 | "buckets": null,
180 | "mode": "time",
181 | "name": null,
182 | "show": true,
183 | "values": []
184 | },
185 | "yaxes": [
186 | {
187 | "format": "locale",
188 | "label": null,
189 | "logBase": 1,
190 | "max": null,
191 | "min": null,
192 | "show": true
193 | },
194 | {
195 | "format": "short",
196 | "label": null,
197 | "logBase": 1,
198 | "max": null,
199 | "min": null,
200 | "show": false
201 | }
202 | ],
203 | "yaxis": {
204 | "align": false,
205 | "alignLevel": null
206 | }
207 | },
208 | {
209 | "cacheTimeout": null,
210 | "datasource": "${DS_TODOIST}",
211 | "fieldConfig": {
212 | "defaults": {
213 | "color": {
214 | "mode": "palette-classic"
215 | },
216 | "custom": {
217 | "hideFrom": {
218 | "legend": false,
219 | "tooltip": false,
220 | "viz": false
221 | }
222 | },
223 | "decimals": 0,
224 | "mappings": [],
225 | "unit": "locale"
226 | },
227 | "overrides": []
228 | },
229 | "gridPos": {
230 | "h": 8,
231 | "w": 8,
232 | "x": 10,
233 | "y": 0
234 | },
235 | "id": 2,
236 | "interval": null,
237 | "links": [],
238 | "maxDataPoints": 3,
239 | "options": {
240 | "legend": {
241 | "calcs": [],
242 | "displayMode": "table",
243 | "placement": "right",
244 | "values": [
245 | "value",
246 | "percent"
247 | ]
248 | },
249 | "pieType": "pie",
250 | "reduceOptions": {
251 | "calcs": [
252 | "sum"
253 | ],
254 | "fields": "",
255 | "values": false
256 | },
257 | "tooltip": {
258 | "mode": "single"
259 | }
260 | },
261 | "targets": [
262 | {
263 | "alias": "$tag_project_name",
264 | "groupBy": [
265 | {
266 | "params": [
267 | "project_name"
268 | ],
269 | "type": "tag"
270 | }
271 | ],
272 | "measurement": "completed",
273 | "orderByTime": "ASC",
274 | "policy": "default",
275 | "refId": "A",
276 | "resultFormat": "time_series",
277 | "select": [
278 | [
279 | {
280 | "params": [
281 | "content"
282 | ],
283 | "type": "field"
284 | },
285 | {
286 | "params": [],
287 | "type": "count"
288 | }
289 | ]
290 | ],
291 | "tags": []
292 | }
293 | ],
294 | "timeFrom": null,
295 | "timeShift": null,
296 | "title": "Completed Tasks by Project",
297 | "type": "piechart"
298 | },
299 | {
300 | "aliasColors": {},
301 | "bars": false,
302 | "dashLength": 10,
303 | "dashes": false,
304 | "datasource": "${DS_TODOIST}",
305 | "fieldConfig": {
306 | "defaults": {
307 | "links": []
308 | },
309 | "overrides": []
310 | },
311 | "fill": 1,
312 | "fillGradient": 0,
313 | "gridPos": {
314 | "h": 8,
315 | "w": 18,
316 | "x": 0,
317 | "y": 8
318 | },
319 | "hiddenSeries": false,
320 | "id": 4,
321 | "interval": "1w",
322 | "legend": {
323 | "avg": false,
324 | "current": false,
325 | "max": false,
326 | "min": false,
327 | "show": false,
328 | "total": false,
329 | "values": false
330 | },
331 | "lines": true,
332 | "linewidth": 1,
333 | "nullPointMode": "null",
334 | "options": {
335 | "alertThreshold": true
336 | },
337 | "percentage": false,
338 | "pluginVersion": "8.0.4",
339 | "pointradius": 2,
340 | "points": false,
341 | "renderer": "flot",
342 | "seriesOverrides": [],
343 | "spaceLength": 10,
344 | "stack": false,
345 | "steppedLine": false,
346 | "targets": [
347 | {
348 | "alias": "Tasks",
349 | "groupBy": [
350 | {
351 | "params": [
352 | "1w"
353 | ],
354 | "type": "time"
355 | },
356 | {
357 | "params": [
358 | "0"
359 | ],
360 | "type": "fill"
361 | }
362 | ],
363 | "measurement": "completed",
364 | "orderByTime": "ASC",
365 | "policy": "default",
366 | "refId": "A",
367 | "resultFormat": "time_series",
368 | "select": [
369 | [
370 | {
371 | "params": [
372 | "content"
373 | ],
374 | "type": "field"
375 | },
376 | {
377 | "params": [],
378 | "type": "count"
379 | }
380 | ]
381 | ],
382 | "tags": []
383 | }
384 | ],
385 | "thresholds": [],
386 | "timeFrom": null,
387 | "timeRegions": [],
388 | "timeShift": null,
389 | "title": "Tasks Completed Per Week",
390 | "tooltip": {
391 | "shared": true,
392 | "sort": 0,
393 | "value_type": "individual"
394 | },
395 | "type": "graph",
396 | "xaxis": {
397 | "buckets": null,
398 | "mode": "time",
399 | "name": null,
400 | "show": true,
401 | "values": []
402 | },
403 | "yaxes": [
404 | {
405 | "format": "locale",
406 | "label": null,
407 | "logBase": 1,
408 | "max": null,
409 | "min": null,
410 | "show": true
411 | },
412 | {
413 | "format": "short",
414 | "label": null,
415 | "logBase": 1,
416 | "max": null,
417 | "min": null,
418 | "show": false
419 | }
420 | ],
421 | "yaxis": {
422 | "align": false,
423 | "alignLevel": null
424 | }
425 | }
426 | ],
427 | "schemaVersion": 30,
428 | "style": "dark",
429 | "tags": [],
430 | "templating": {
431 | "list": []
432 | },
433 | "time": {
434 | "from": "now-1y",
435 | "to": "now"
436 | },
437 | "timepicker": {
438 | "refresh_intervals": [
439 | "5s",
440 | "10s",
441 | "30s",
442 | "1m",
443 | "5m",
444 | "15m",
445 | "30m",
446 | "1h",
447 | "2h",
448 | "1d"
449 | ]
450 | },
451 | "timezone": "",
452 | "title": "Todoist",
453 | "uid": "avH3T9ggk",
454 | "version": 3
455 | }
--------------------------------------------------------------------------------
/grafana/trakt.json:
--------------------------------------------------------------------------------
1 | {
2 | "__inputs": [
3 | {
4 | "name": "DS_TRAKT",
5 | "label": "trakt",
6 | "description": "",
7 | "type": "datasource",
8 | "pluginId": "influxdb",
9 | "pluginName": "InfluxDB"
10 | }
11 | ],
12 | "__requires": [
13 | {
14 | "type": "grafana",
15 | "id": "grafana",
16 | "name": "Grafana",
17 | "version": "6.5.1"
18 | },
19 | {
20 | "type": "panel",
21 | "id": "graph",
22 | "name": "Graph",
23 | "version": ""
24 | },
25 | {
26 | "type": "datasource",
27 | "id": "influxdb",
28 | "name": "InfluxDB",
29 | "version": "1.0.0"
30 | },
31 | {
32 | "type": "panel",
33 | "id": "table",
34 | "name": "Table",
35 | "version": ""
36 | }
37 | ],
38 | "annotations": {
39 | "list": [
40 | {
41 | "builtIn": 1,
42 | "datasource": "-- Grafana --",
43 | "enable": true,
44 | "hide": true,
45 | "iconColor": "rgba(0, 211, 255, 1)",
46 | "name": "Annotations & Alerts",
47 | "type": "dashboard"
48 | }
49 | ]
50 | },
51 | "editable": true,
52 | "gnetId": null,
53 | "graphTooltip": 0,
54 | "id": null,
55 | "links": [],
56 | "panels": [
57 | {
58 | "aliasColors": {},
59 | "bars": true,
60 | "dashLength": 10,
61 | "dashes": false,
62 | "datasource": "${DS_TRAKT}",
63 | "fill": 1,
64 | "fillGradient": 0,
65 | "gridPos": {
66 | "h": 6,
67 | "w": 12,
68 | "x": 0,
69 | "y": 0
70 | },
71 | "hiddenSeries": false,
72 | "id": 5,
73 | "legend": {
74 | "avg": true,
75 | "current": false,
76 | "max": true,
77 | "min": false,
78 | "show": true,
79 | "total": true,
80 | "values": true
81 | },
82 | "lines": false,
83 | "linewidth": 1,
84 | "nullPointMode": "null",
85 | "options": {
86 | "dataLinks": []
87 | },
88 | "percentage": false,
89 | "pointradius": 2,
90 | "points": false,
91 | "renderer": "flot",
92 | "seriesOverrides": [],
93 | "spaceLength": 10,
94 | "stack": false,
95 | "steppedLine": false,
96 | "targets": [
97 | {
98 | "alias": "Episodes Watched",
99 | "groupBy": [
100 | {
101 | "params": [
102 | "1w"
103 | ],
104 | "type": "time"
105 | },
106 | {
107 | "params": [
108 | "0"
109 | ],
110 | "type": "fill"
111 | }
112 | ],
113 | "measurement": "watch",
114 | "orderByTime": "ASC",
115 | "policy": "default",
116 | "refId": "A",
117 | "resultFormat": "time_series",
118 | "select": [
119 | [
120 | {
121 | "params": [
122 | "title"
123 | ],
124 | "type": "field"
125 | },
126 | {
127 | "params": [],
128 | "type": "count"
129 | }
130 | ]
131 | ],
132 | "tags": [
133 | {
134 | "key": "type",
135 | "operator": "=",
136 | "value": "episode"
137 | }
138 | ]
139 | }
140 | ],
141 | "thresholds": [],
142 | "timeFrom": null,
143 | "timeRegions": [],
144 | "timeShift": null,
145 | "title": "Episodes Watched Per Week",
146 | "tooltip": {
147 | "shared": true,
148 | "sort": 0,
149 | "value_type": "individual"
150 | },
151 | "type": "graph",
152 | "xaxis": {
153 | "buckets": null,
154 | "mode": "time",
155 | "name": null,
156 | "show": true,
157 | "values": []
158 | },
159 | "yaxes": [
160 | {
161 | "decimals": 0,
162 | "format": "locale",
163 | "label": null,
164 | "logBase": 1,
165 | "max": null,
166 | "min": null,
167 | "show": true
168 | },
169 | {
170 | "format": "short",
171 | "label": null,
172 | "logBase": 1,
173 | "max": null,
174 | "min": null,
175 | "show": false
176 | }
177 | ],
178 | "yaxis": {
179 | "align": false,
180 | "alignLevel": null
181 | }
182 | },
183 | {
184 | "aliasColors": {},
185 | "bars": true,
186 | "dashLength": 10,
187 | "dashes": false,
188 | "datasource": "${DS_TRAKT}",
189 | "fill": 1,
190 | "fillGradient": 0,
191 | "gridPos": {
192 | "h": 6,
193 | "w": 12,
194 | "x": 12,
195 | "y": 0
196 | },
197 | "hiddenSeries": false,
198 | "id": 6,
199 | "legend": {
200 | "avg": true,
201 | "current": false,
202 | "max": true,
203 | "min": false,
204 | "show": true,
205 | "total": true,
206 | "values": true
207 | },
208 | "lines": false,
209 | "linewidth": 1,
210 | "nullPointMode": "null",
211 | "options": {
212 | "dataLinks": []
213 | },
214 | "percentage": false,
215 | "pointradius": 2,
216 | "points": false,
217 | "renderer": "flot",
218 | "seriesOverrides": [],
219 | "spaceLength": 10,
220 | "stack": false,
221 | "steppedLine": false,
222 | "targets": [
223 | {
224 | "alias": "Movies Watched",
225 | "groupBy": [
226 | {
227 | "params": [
228 | "30d"
229 | ],
230 | "type": "time"
231 | },
232 | {
233 | "params": [
234 | "0"
235 | ],
236 | "type": "fill"
237 | }
238 | ],
239 | "measurement": "watch",
240 | "orderByTime": "ASC",
241 | "policy": "default",
242 | "refId": "A",
243 | "resultFormat": "time_series",
244 | "select": [
245 | [
246 | {
247 | "params": [
248 | "title"
249 | ],
250 | "type": "field"
251 | },
252 | {
253 | "params": [],
254 | "type": "count"
255 | }
256 | ]
257 | ],
258 | "tags": [
259 | {
260 | "key": "type",
261 | "operator": "=",
262 | "value": "movie"
263 | }
264 | ]
265 | }
266 | ],
267 | "thresholds": [],
268 | "timeFrom": null,
269 | "timeRegions": [],
270 | "timeShift": null,
271 | "title": "Movies Watched Per Month",
272 | "tooltip": {
273 | "shared": true,
274 | "sort": 0,
275 | "value_type": "individual"
276 | },
277 | "type": "graph",
278 | "xaxis": {
279 | "buckets": null,
280 | "mode": "time",
281 | "name": null,
282 | "show": true,
283 | "values": []
284 | },
285 | "yaxes": [
286 | {
287 | "decimals": 0,
288 | "format": "locale",
289 | "label": null,
290 | "logBase": 1,
291 | "max": null,
292 | "min": null,
293 | "show": true
294 | },
295 | {
296 | "format": "short",
297 | "label": null,
298 | "logBase": 1,
299 | "max": null,
300 | "min": null,
301 | "show": false
302 | }
303 | ],
304 | "yaxis": {
305 | "align": false,
306 | "alignLevel": null
307 | }
308 | },
309 | {
310 | "columns": [],
311 | "datasource": "${DS_TRAKT}",
312 | "fontSize": "100%",
313 | "gridPos": {
314 | "h": 15,
315 | "w": 12,
316 | "x": 0,
317 | "y": 6
318 | },
319 | "id": 2,
320 | "options": {},
321 | "pageSize": null,
322 | "showHeader": true,
323 | "sort": {
324 | "col": 0,
325 | "desc": true
326 | },
327 | "styles": [
328 | {
329 | "alias": "Time",
330 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
331 | "pattern": "Time",
332 | "type": "date"
333 | },
334 | {
335 | "alias": "Poster",
336 | "colorMode": null,
337 | "colors": [
338 | "rgba(245, 54, 54, 0.9)",
339 | "rgba(237, 129, 40, 0.89)",
340 | "rgba(50, 172, 45, 0.97)"
341 | ],
342 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
343 | "decimals": 2,
344 | "mappingType": 1,
345 | "pattern": "poster_html",
346 | "sanitize": true,
347 | "thresholds": [],
348 | "type": "string",
349 | "unit": "short"
350 | },
351 | {
352 | "alias": "Show",
353 | "colorMode": null,
354 | "colors": [
355 | "rgba(245, 54, 54, 0.9)",
356 | "rgba(237, 129, 40, 0.89)",
357 | "rgba(50, 172, 45, 0.97)"
358 | ],
359 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
360 | "decimals": 2,
361 | "link": true,
362 | "linkUrl": "${__cell_4:raw}",
363 | "mappingType": 1,
364 | "pattern": "show",
365 | "thresholds": [],
366 | "type": "string",
367 | "unit": "short"
368 | },
369 | {
370 | "alias": "Episode",
371 | "colorMode": null,
372 | "colors": [
373 | "rgba(245, 54, 54, 0.9)",
374 | "rgba(237, 129, 40, 0.89)",
375 | "rgba(50, 172, 45, 0.97)"
376 | ],
377 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
378 | "decimals": 2,
379 | "link": true,
380 | "linkUrl": "${__cell_5:raw}",
381 | "mappingType": 1,
382 | "pattern": "title",
383 | "thresholds": [],
384 | "type": "string",
385 | "unit": "short"
386 | },
387 | {
388 | "alias": "",
389 | "colorMode": null,
390 | "colors": [
391 | "rgba(245, 54, 54, 0.9)",
392 | "rgba(237, 129, 40, 0.89)",
393 | "rgba(50, 172, 45, 0.97)"
394 | ],
395 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
396 | "decimals": 2,
397 | "mappingType": 1,
398 | "pattern": "url",
399 | "thresholds": [],
400 | "type": "hidden",
401 | "unit": "short"
402 | },
403 | {
404 | "alias": "",
405 | "colorMode": null,
406 | "colors": [
407 | "rgba(245, 54, 54, 0.9)",
408 | "rgba(237, 129, 40, 0.89)",
409 | "rgba(50, 172, 45, 0.97)"
410 | ],
411 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
412 | "decimals": 2,
413 | "mappingType": 1,
414 | "pattern": "episode_url",
415 | "thresholds": [],
416 | "type": "hidden",
417 | "unit": "short"
418 | }
419 | ],
420 | "targets": [
421 | {
422 | "groupBy": [],
423 | "measurement": "watch",
424 | "orderByTime": "ASC",
425 | "policy": "default",
426 | "refId": "A",
427 | "resultFormat": "table",
428 | "select": [
429 | [
430 | {
431 | "params": [
432 | "poster_html"
433 | ],
434 | "type": "field"
435 | }
436 | ],
437 | [
438 | {
439 | "params": [
440 | "show"
441 | ],
442 | "type": "field"
443 | }
444 | ],
445 | [
446 | {
447 | "params": [
448 | "title"
449 | ],
450 | "type": "field"
451 | }
452 | ],
453 | [
454 | {
455 | "params": [
456 | "url"
457 | ],
458 | "type": "field"
459 | }
460 | ],
461 | [
462 | {
463 | "params": [
464 | "episode_url"
465 | ],
466 | "type": "field"
467 | }
468 | ]
469 | ],
470 | "tags": [
471 | {
472 | "key": "type",
473 | "operator": "=",
474 | "value": "episode"
475 | }
476 | ]
477 | }
478 | ],
479 | "timeFrom": null,
480 | "timeShift": null,
481 | "title": "Recently Watched Episodes",
482 | "transform": "table",
483 | "type": "table"
484 | },
485 | {
486 | "columns": [],
487 | "datasource": "${DS_TRAKT}",
488 | "fontSize": "100%",
489 | "gridPos": {
490 | "h": 15,
491 | "w": 12,
492 | "x": 12,
493 | "y": 6
494 | },
495 | "id": 3,
496 | "options": {},
497 | "pageSize": null,
498 | "showHeader": true,
499 | "sort": {
500 | "col": 0,
501 | "desc": true
502 | },
503 | "styles": [
504 | {
505 | "alias": "Time",
506 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
507 | "pattern": "Time",
508 | "type": "date"
509 | },
510 | {
511 | "alias": "Poster",
512 | "colorMode": null,
513 | "colors": [
514 | "rgba(245, 54, 54, 0.9)",
515 | "rgba(237, 129, 40, 0.89)",
516 | "rgba(50, 172, 45, 0.97)"
517 | ],
518 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
519 | "decimals": 2,
520 | "mappingType": 1,
521 | "pattern": "poster_html",
522 | "sanitize": true,
523 | "thresholds": [],
524 | "type": "string",
525 | "unit": "short"
526 | },
527 | {
528 | "alias": "Movie",
529 | "colorMode": null,
530 | "colors": [
531 | "rgba(245, 54, 54, 0.9)",
532 | "rgba(237, 129, 40, 0.89)",
533 | "rgba(50, 172, 45, 0.97)"
534 | ],
535 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
536 | "decimals": 2,
537 | "link": true,
538 | "linkUrl": "${__cell_3:raw}",
539 | "mappingType": 1,
540 | "pattern": "title",
541 | "thresholds": [],
542 | "type": "string",
543 | "unit": "short"
544 | },
545 | {
546 | "alias": "",
547 | "colorMode": null,
548 | "colors": [
549 | "rgba(245, 54, 54, 0.9)",
550 | "rgba(237, 129, 40, 0.89)",
551 | "rgba(50, 172, 45, 0.97)"
552 | ],
553 | "dateFormat": "YYYY-MM-DD HH:mm:ss",
554 | "decimals": 2,
555 | "mappingType": 1,
556 | "pattern": "url",
557 | "thresholds": [],
558 | "type": "hidden",
559 | "unit": "short"
560 | }
561 | ],
562 | "targets": [
563 | {
564 | "groupBy": [],
565 | "measurement": "watch",
566 | "orderByTime": "ASC",
567 | "policy": "default",
568 | "refId": "A",
569 | "resultFormat": "table",
570 | "select": [
571 | [
572 | {
573 | "params": [
574 | "poster_html"
575 | ],
576 | "type": "field"
577 | }
578 | ],
579 | [
580 | {
581 | "params": [
582 | "title"
583 | ],
584 | "type": "field"
585 | }
586 | ],
587 | [
588 | {
589 | "params": [
590 | "url"
591 | ],
592 | "type": "field"
593 | }
594 | ]
595 | ],
596 | "tags": [
597 | {
598 | "key": "type",
599 | "operator": "=",
600 | "value": "movie"
601 | }
602 | ]
603 | }
604 | ],
605 | "timeFrom": null,
606 | "timeShift": null,
607 | "title": "Recently Watched Movies",
608 | "transform": "table",
609 | "type": "table"
610 | }
611 | ],
612 | "schemaVersion": 21,
613 | "style": "dark",
614 | "tags": [],
615 | "templating": {
616 | "list": []
617 | },
618 | "time": {
619 | "from": "now-1y",
620 | "to": "now"
621 | },
622 | "timepicker": {},
623 | "timezone": "",
624 | "title": "Trakt.tv",
625 | "uid": "QcJfM7kgk",
626 | "version": 10
627 | }
--------------------------------------------------------------------------------
/instagram.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import sys
17 | from datetime import datetime
18 | from instaloader import instaloader, Profile
19 | from config import *
20 |
# Fail fast when the collector is not configured.
if not INSTAGRAM_PROFILE:
    logging.error("INSTAGRAM_PROFILE not set in config.py")
    sys.exit(1)

# InfluxDB points collected here and written in one batch at the end.
points = []

connect(INSTAGRAM_DATABASE)

logging.info("Fetching profile for %s", INSTAGRAM_PROFILE)
L = instaloader.Instaloader()
try:
    # A saved session (created by `instaloader -l <profile>`) makes the
    # requests authenticated; anonymous scraping still works but is flakier.
    L.load_session_from_file(INSTAGRAM_PROFILE)
except FileNotFoundError:
    # BUG FIX: the original message was missing the f-prefix, so it printed
    # the literal text "{INSTAGRAM_PROFILE}". Use the logger's lazy
    # %-formatting instead, matching the rest of this file.
    logging.warning("Logging into Instagram can make this script more reliable. Try: instaloader -l %s", INSTAGRAM_PROFILE)
35 |
# Record a snapshot of the account's current follower count.
profile = Profile.from_username(L.context, INSTAGRAM_PROFILE)
points.append({
    "measurement": "followers",
    "time": datetime.utcnow().isoformat(),
    "tags": {"username": INSTAGRAM_PROFILE},
    "fields": {"value": profile.followers},
})
48 |
posts = list(profile.get_posts())
count = len(posts)
logging.info("Got %s posts from Instagram", count)
# INSTAGRAM_MAX_POSTS == 0 means "no limit": import every fetched post.
if INSTAGRAM_MAX_POSTS == 0:
    INSTAGRAM_MAX_POSTS = count

# One point per post, timestamped at the post's UTC publish time.
for post in posts:
    points.append({
        "measurement": "post",
        "time": post.date_utc.isoformat(),
        "tags": {
            "owner": post.owner_username,
            "shortcode": post.shortcode,
        },
        "fields": {
            "image": post.url,
            "url": f'https://instagram.com/p/{post.shortcode}/',
            # NOTE(review): this literal appears truncated in this view
            # (an HTML snippet was likely lost in extraction); confirm the
            # original value against version control history.
            "thumbnail_html": f'
',
            "caption": post.caption,
            "likes": post.likes,
            "comments": post.comments
        }
    })

    logging.debug("%s / %s", len(points), INSTAGRAM_MAX_POSTS)

    # NOTE(review): `points` also contains the followers point appended
    # above, so this stops one post earlier than INSTAGRAM_MAX_POSTS might
    # suggest -- confirm whether the off-by-one matters.
    if len(points) > INSTAGRAM_MAX_POSTS:
        break

write_points(points)
--------------------------------------------------------------------------------
/k8s-example/README.md:
--------------------------------------------------------------------------------
1 | ## Secrets
The Kubernetes manifests here are pretty straightforward. I personally use [external-secrets](https://external-secrets.io/v0.8.5/) with [doppler.com](https://doppler.com) to manage my secrets outside the cluster, but you can use a regular old Kubernetes secret if you want. I've included examples for both. Depending on which Python file you're using and how you modify config.py, you may need to rename, change, add, or delete entries here.
3 |
4 | Simply use the secret/external-secret examples as a starting point, and then apply with `kubectl apply -f secret.yaml` and you should be good to go.
5 |
6 | ## Cron Job
7 | The cronjob example runs the image at a preset time. I have mine currently set to run every hour, where it grabs librelinkup data for my pet's blood sugar monitor. You can customize as necessary, then same as the secret, deploy with `kubectl apply -f cronjob.yaml` or your favorite automation tool of choice. I use ArgoCD and have a manifest for that to deploy into my cluster.
8 |
9 |
10 | > Written with [StackEdit](https://stackedit.io/).
--------------------------------------------------------------------------------
/k8s-example/cronjob-example.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: batch/v1
2 | kind: CronJob
3 | metadata:
4 | name: freelibre-cron
5 | namespace: default
6 | spec:
7 | schedule: "0 */1 * * *"
8 | concurrencyPolicy: Forbid
9 | jobTemplate:
10 | spec:
11 | template:
12 | metadata:
13 | labels:
14 | app: freelibre-job
15 | spec:
16 | containers:
17 | - name: freelibre-upload-cron
18 | image: evanrich/freelibre2influx:latest
19 | imagePullPolicy: IfNotPresent
20 | envFrom:
21 | - secretRef:
22 | name: freestyle-secrets
23 | restartPolicy: OnFailure
--------------------------------------------------------------------------------
/k8s-example/external-secret-example.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: external-secrets.io/v1beta1
2 | kind: ExternalSecret
3 | metadata:
4 | name: freestyle-secrets
5 | namespace: default
6 | spec:
7 | secretStoreRef:
8 | kind: ClusterSecretStore
9 | name: doppler
10 | target:
11 | name: freestyle-secrets
12 | data:
13 | - secretKey: INFLUXDB_HOST
14 | remoteRef:
15 | key: INFLUXDB_HOST
16 | - secretKey: INFLUXDB_PORT
17 | remoteRef:
18 | key: INFLUXDB_PORT
19 | - secretKey: INFLUXDB_USERNAME
20 | remoteRef:
21 | key: INFLUXDB_ADMIN_USER
22 | - secretKey: INFLUXDB_PASSWORD
23 | remoteRef:
24 | key: INFLUXDB_ADMIN_PASSWORD
25 | - secretKey: LIBRELINKUP_USERNAME
26 | remoteRef:
27 | key: LIBRELINKUP_USERNAME
28 | - secretKey: LIBRELINKUP_PASSWORD
29 | remoteRef:
30 | key: LIBRELINKUP_PASSWORD
31 | - secretKey: LIBRELINKUP_DATABASE
32 | remoteRef:
33 | key: LIBRELINKUP_DATABASE
--------------------------------------------------------------------------------
/k8s-example/secret-example.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Secret
3 | metadata:
4 | name: freestyle-secrets
5 | type: Opaque
6 | data:
7 | INFLUXDB_HOST: dXNlcg==
8 | INFLUXDB_PORT: dXNlcg==
9 | INFLUXDB_ADMIN_USER: dXNlcg==
10 | INFLUXDB_ADMIN_PASSWORD: dXNlcg==
11 | LIBRELINKUP_USERNAME: dXNlcg==
12 | LIBRELINKUP_PASSWORD: dXNlcg==
13 | LIBRELINKUP_DATABASE: dXNlcg==
--------------------------------------------------------------------------------
/librelinkup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys, os, json, time
17 | from datetime import datetime
18 | from config import *
19 |
def append_reading(points, data, reading):
    """Convert one LibreLinkUp glucose reading into an InfluxDB point.

    Appends the point to *points* in place.

    :param points: list of InfluxDB point dicts to extend
    :param data: full /graph API response; the sensor serial number is read
                 from data['data']['connection']['sensor']['sn']
    :param reading: one measurement dict carrying 'FactoryTimestamp'
                    (e.g. '1/2/2023 3:04:05 PM') and 'ValueInMgPerDl'
    """
    # FactoryTimestamp carries no zone marker; the script treats it as UTC
    # by pinning an explicit +00:00 offset before parsing.
    # (Local renamed from `time` to avoid shadowing the imported `time`
    # module from this file's header.)
    timestamp = datetime.strptime(reading['FactoryTimestamp'] + '+00:00', '%m/%d/%Y %I:%M:%S %p%z')
    points.append({
        "measurement": "glucose",
        "time": timestamp,
        "tags": {
            "deviceType": "Libre",
            "deviceSerialNumber": data['data']['connection']['sensor']['sn'],
        },
        "fields": {
            "value": int(reading['ValueInMgPerDl']),
            "units": 'mg/dL',
        }
    })
34 |
# Fail fast when the collector is not configured.
if not LIBRELINKUP_USERNAME:
    logging.error("LIBRELINKUP_USERNAME not set in config.py")
    sys.exit(1)

# InfluxDB points accumulated here and written in one batch at the end.
points = []

connect(LIBRELINKUP_DATABASE)

# App-identification headers sent on every LibreLinkUp API call; the
# Authorization header is added later, once a token is available.
LIBRELINKUP_HEADERS = {
    "version": LIBRELINKUP_VERSION,
    "product": LIBRELINKUP_PRODUCT,
}
47 |
# Reuse a cached auth ticket from disk when one exists and has not expired,
# so each run does not need a fresh login.
LIBRELINKUP_TOKEN = None
script_dir = os.path.dirname(__file__)
auth_token_path = os.path.join(script_dir, '.librelinkup-authtoken')
if os.path.isfile(auth_token_path):
    with open(auth_token_path) as json_file:
        auth = json.load(json_file)
        # 'expires' is a unix timestamp; only trust tickets still in the future.
        if auth['expires'] > time.time():
            LIBRELINKUP_TOKEN = auth['token']
            logging.info("Using cached authTicket, expiration: %s", datetime.fromtimestamp(auth['expires']).isoformat())

if LIBRELINKUP_TOKEN is None:
    logging.info("Auth ticket not found or expired, requesting a new one")
    try:
        response = requests.post(f'{LIBRELINKUP_URL}/llu/auth/login',
            headers=LIBRELINKUP_HEADERS, json = {'email': LIBRELINKUP_USERNAME, 'password': LIBRELINKUP_PASSWORD})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    data = response.json()
    # Treat a response without an authTicket as failed authentication.
    if not 'authTicket' in data['data']:
        logging.error("Authentication failed")
        sys.exit(1)

    # Cache the whole ticket (token + expiry) for subsequent runs.
    with open(auth_token_path, 'w') as outfile:
        json.dump(data['data']['authTicket'], outfile)

    LIBRELINKUP_TOKEN = data['data']['authTicket']['token']

LIBRELINKUP_HEADERS['Authorization'] = 'Bearer ' + LIBRELINKUP_TOKEN
79 |
# List the patients sharing data with this LibreLinkUp account.
try:
    response = requests.get(f'{LIBRELINKUP_URL}/llu/connections', headers=LIBRELINKUP_HEADERS)
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

connections = response.json()
if not 'data' in connections or len(connections['data']) < 1:
    logging.error("No connections configured. Accept an invitation in the mobile app first.")
    sys.exit(1)

# Only the first connection is collected.
logging.info("Using connection %s: %s %s", connections['data'][0]['patientId'], connections['data'][0]['firstName'], connections['data'][0]['lastName'])

# Fetch the recent glucose graph for that patient.
try:
    response = requests.get(f'{LIBRELINKUP_URL}/llu/connections/{connections["data"][0]["patientId"]}/graph', headers=LIBRELINKUP_HEADERS)
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
# The latest reading lives outside graphData; record it first, then the
# historical points from the graph itself.
append_reading(points, data, data['data']['connection']['glucoseMeasurement'])

if len(data['data']['graphData']) > 0:
    for reading in data['data']['graphData']:
        append_reading(points, data, reading)

write_points(points)
109 |
--------------------------------------------------------------------------------
/nintendo-switch.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys
17 | from config import *
18 |
# Fail fast when the collector is not configured.
if not NS_DEVICE_ID:
    logging.error("NS_DEVICE_ID not set in config.py")
    sys.exit(1)

# OAuth grant type used to trade the long-lived session token for an
# access token on the Nintendo accounts endpoint.
GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:jwt-bearer-session-token'
# InfluxDB points collected here and written in one batch at the end.
points = []
25 |
26 |
def get_access_token():
    """Exchange the configured Nintendo session token for an access token.

    :return: parsed JSON response; contains 'token_type' and 'access_token'
             on success
    """
    try:
        response = requests.post('https://accounts.nintendo.com/connect/1.0.0/api/token', data={
            'session_token': NS_SESSION_TOKEN,
            'client_id': NS_CLIENT_ID,
            'grant_type': GRANT_TYPE
        })
        # Consistency fix: every other collector in this repo checks the HTTP
        # status and exits; without it a failed login surfaces later as a
        # confusing KeyError on the JSON body.
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    return response.json()
34 |
35 |
def get_daily_summary(access):
    """Fetch per-day play activity for the configured Switch console from the
    Nintendo parental-controls ("moon") API.

    :param access: token dict from get_access_token()
    :return: parsed JSON response
    """
    headers = {
        'x-moon-os-language': 'en-US',
        'x-moon-app-language': 'en-US',
        'authorization': f"{access['token_type']} {access['access_token']}",
        'x-moon-app-internal-version': NS_INTERNAL_VERSION,
        'x-moon-app-display-version': NS_DISPLAY_VERSION,
        'x-moon-app-id': 'com.nintendo.znma',
        'x-moon-os': 'IOS',
        'x-moon-os-version': NS_OS_VERSION,
        'x-moon-model': 'iPhone11,8',
        'accept-encoding': 'gzip;q=1.0, compress;q=0.5',
        'accept-language': 'en-US;q=1.0',
        # Impersonates the official iOS parental-controls app.
        'user-agent': f'moon_ios/{NS_DISPLAY_VERSION} (com.nintendo.znma; build:{NS_INTERNAL_VERSION}; iOS {NS_OS_VERSION}) Alamofire/4.8.2',
        'x-moon-timezone': 'America/Los_Angeles',
        'x-moon-smart-device-id': NS_SMART_DEVICE_ID,
    }
    url = f'https://api-lp1.pctl.srv.nintendo.net/moon/v1/devices/{NS_DEVICE_ID}/daily_summaries'
    return requests.get(url, headers=headers).json()
54 |
def get_monthly_summary(month, access):
    """Fetch one month's play activity for the configured Switch console from
    the Nintendo parental-controls ("moon") API.

    :param month: month identifier used as the URL path segment
    :param access: token dict from get_access_token()
    :return: parsed JSON response
    """
    headers = {
        'x-moon-os-language': 'en-US',
        'x-moon-app-language': 'en-US',
        'authorization': f"{access['token_type']} {access['access_token']}",
        'x-moon-app-internal-version': NS_INTERNAL_VERSION,
        'x-moon-app-display-version': NS_DISPLAY_VERSION,
        'x-moon-app-id': 'com.nintendo.znma',
        'x-moon-os': 'IOS',
        'x-moon-os-version': NS_OS_VERSION,
        'x-moon-model': 'iPhone11,8',
        'accept-encoding': 'gzip;q=1.0, compress;q=0.5',
        'accept-language': 'en-US;q=1.0',
        # Impersonates the official iOS parental-controls app.
        'user-agent': f'moon_ios/{NS_DISPLAY_VERSION} (com.nintendo.znma; build:{NS_INTERNAL_VERSION}; iOS {NS_OS_VERSION}) Alamofire/4.8.2',
        'x-moon-timezone': 'America/Los_Angeles',
        'x-moon-smart-device-id': NS_SMART_DEVICE_ID,
    }
    url = f'https://api-lp1.pctl.srv.nintendo.net/moon/v1/devices/{NS_DEVICE_ID}/monthly_summaries/{month}'
    return requests.get(url, headers=headers).json()
73 |
connect(NS_DATABASE)
token = get_access_token()
summary = get_daily_summary(token)

# Each daily summary lists play sessions per player; the app metadata
# (title, image, store URL) lives in the day's separate playedApps catalog.
for day in summary['items']:
    # Index the catalog once per day instead of rescanning it for every
    # player/app pair (the original nested loop was accidentally quadratic).
    # Assumes applicationId is unique within day['playedApps'] -- the
    # original code would have emitted duplicate points otherwise.
    apps_by_id = {app['applicationId']: app for app in day['playedApps']}
    for player in day['devicePlayers']:
        for playedApp in player['playedApps']:
            app = apps_by_id.get(playedApp['applicationId'])
            if app is None:
                continue  # no catalog entry for this app on this day
            points.append({
                "measurement": "time",
                "time": day['date'],
                "tags": {
                    "player_id": player['playerId'],
                    "application_id": app['applicationId'],
                    "platform": "Nintendo Switch",
                    "player_name": player['nickname'],
                    "title": app['title'],
                },
                "fields": {
                    "value": playedApp['playingTime'],
                    "image": app['imageUri']['large'],
                    "url": app['shopUri']
                }
            })

write_points(points)
101 |
--------------------------------------------------------------------------------
/onetouchreveal.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys, pytz
17 | from datetime import datetime, date, timedelta
18 | from config import *
19 |
# Fail fast when the collector is not configured.
if not ONETOUCH_USERNAME:
    logging.error("ONETOUCH_USERNAME not set in config.py")
    sys.exit(1)

# Start of the current week (Monday, time fields zeroed) in the API's
# expected "YYYY-MM-DD HH:MM:SS" format; only this week's data is pulled.
STARTDATE = (date.today() - timedelta(days=date.today().weekday())).strftime("%Y-%m-%d %H:%M:%S")
points = []

connect(ONETOUCH_DATABASE)

# Authenticate; the service expects the credentials as request headers.
try:
    response = requests.post(f'{ONETOUCH_URL}/mobile/user/v3/authenticate',
        headers={'Content-Type': 'application/json', 'login': ONETOUCH_USERNAME, 'password':ONETOUCH_PASSWORD})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
# Treat a response without a token as failed authentication.
if not 'token' in data['result']:
    logging.error("Authentication failed")
    sys.exit(1)

ONETOUCH_TOKEN = data['result']['token']
# Request this week's blood-glucose readings.
try:
    response = requests.post(f'{ONETOUCH_URL}/mobile/health/v1/data/subscribe',
        json={'endDate':'', 'lastSyncTime':0,'readingTypes':['bgReadings'], 'startDate':STARTDATE},
        headers={'Content-Type': 'application/json', 'authenticationtoken': ONETOUCH_TOKEN, 'token':ONETOUCH_TOKEN})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
53 |
# Each reading carries a local-time wall clock; convert it to UTC before
# writing. (Iterating the list directly makes the old emptiness guard
# unnecessary -- an empty list simply produces no points.)
for reading in data['result']['bgReadings']:
    local_dt = datetime.strptime(reading['readingDate'], "%Y-%m-%d %H:%M:%S")
    when = LOCAL_TIMEZONE.localize(local_dt).astimezone(pytz.utc).isoformat()
    points.append({
        "measurement": "glucose",
        "time": when,
        "tags": {
            "deviceType": reading['deviceType'],
            "deviceSerialNumber": reading['deviceSerialNumber'],
        },
        "fields": {
            "value": int(reading['bgValue']['value']),
            "units": reading['bgValue']['units'],
        },
    })

write_points(points)
72 |
--------------------------------------------------------------------------------
/psn.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests
17 | import sys
18 | import json
19 | from datetime import datetime
20 | from bs4 import BeautifulSoup
21 | from urllib.parse import urljoin, urlparse
22 | from config import *
23 |
24 |
25 | if not EXOPHASE_NAME:
26 | logging.error("EXOPHASE_NAME not set in config.py")
27 | sys.exit(1)
28 |
29 | points = []
30 |
31 |
def scrape_exophase_id():
    """Scrape Exophase's profile page for the numeric player and user ids.

    :return: [playerid, userid] as strings taken from data-* attributes
    """
    try:
        response = requests.get(
            f"https://www.exophase.com/user/{EXOPHASE_NAME}")
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    page = BeautifulSoup(response.text, 'html.parser')
    player_id = page.find("a", attrs={'data-playerid': True})['data-playerid']
    user_id = page.find("div", attrs={'data-userid': True})['data-userid']
    return [player_id, user_id]
42 |
43 |
def scrape_latest_games(platform):
    """Scrape the user's recently played games for *platform* from Exophase.

    :param platform: Exophase URL path segment, e.g. 'psn'
    :return: list of dicts with gameid, time (last-played timestamp),
             title, url, image, and playtime in minutes

    NOTE(review): the URL is built from PSN_NAME while scrape_exophase_id()
    uses EXOPHASE_NAME -- confirm both config values name the same user.
    """
    games = []
    try:
        response = requests.get(
            f"https://www.exophase.com/{platform}/user/{PSN_NAME}")
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    soup = BeautifulSoup(response.text, 'html.parser')
    for game in soup.find_all("li", attrs={'data-gameid': True}):
        try:
            # "span.hours" text ends in a unit character; strip it and
            # convert hours to whole minutes.
            playtime = int(float(game.select_one(
                "span.hours").get_text()[:-1]) * 60)
            # Strip query/fragment from the cover URL and swap the
            # medium-size art path for the large one.
            img = game.select_one("div.image > img")['src']
            img = urljoin(img, urlparse(img).path).replace(
                "/games/m/", "/games/l/")
            games.append({'gameid': game['data-gameid'],
                          'time': datetime.fromtimestamp(float(game['data-lastplayed'])),
                          'title': game.select_one("h3 > a").string,
                          'url': game.select_one("h3 > a")['href'],
                          'image': img,
                          'playtime': playtime,
                          })
        except Exception:  # Games without a total played time are skipped
            pass

    return games
72 |
73 |
def scrape_achievements(url, gameid):
    """Fetch the earned achievements for one game from the Exophase API.

    :param url: the game's page URL; its fragment identifies the player
    :param gameid: Exophase's numeric game id
    :return: list of dicts with id, name, image, time, and description
    """
    achievements = []
    try:
        response = requests.get(
            f"https://api.exophase.com/public/player/{urlparse(url).fragment}/game/{gameid}/earned")
        response.raise_for_status()
        api_data = response.json()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    # Idiom fix: truthiness test instead of `== True`; also dropped the dead
    # `achievement_data = {}` pre-initialization the loop always overwrote.
    if api_data['success']:
        for achievement in api_data['list']:
            # One extra page fetch per achievement to recover its description
            # (N+1 requests; the list endpoint does not include it).
            api_desc_response = requests.get(achievement["endpoint"])
            soup = BeautifulSoup(api_desc_response.text, 'html.parser')
            award = soup.find("div", {"class": "col award-details snippet"}).p

            achievements.append({
                'id': achievement['awardid'],
                # The API exposes only a slug here; derive a display name
                # from it ("my-achievement" -> "My Achievement").
                'name': achievement["slug"].replace("-", " ").title(),
                'image': achievement["icons"]["o"],
                'time': datetime.fromtimestamp(achievement['timestamp']),
                'description': award.text,
            })

    return achievements
105 |
106 |
# Keep the client so previously written running totals can be queried back.
client = connect(PSN_DATABASE)

PLAYERID, USERID = scrape_exophase_id()
# Last recorded cumulative total per game, used to convert Exophase's
# lifetime playtime into a per-run delta.
# NOTE(review): PLAYERID is interpolated straight into the InfluxQL string;
# it is scraped from an external page, so confirm it is always numeric.
totals = client.query(
    f'SELECT last("total") AS "total" FROM "time" WHERE "platform" = \'PSN\' AND "total" > 0 AND "player_id" = \'{PLAYERID}\' GROUP BY "application_id" ORDER BY "time" DESC')

for game in scrape_latest_games('psn'):
    value = game['playtime']
    total = list(totals.get_points(
        tags={'application_id': str(game['gameid'])}))
    if len(total) == 1 and total[0]['total'] > 0:
        # Record only the time played since the last stored total.
        value = game['playtime'] - total[0]['total']
    if value > 1:
        points.append({
            "measurement": "time",
            "time": game['time'].isoformat(),
            "tags": {
                "player_id": PLAYERID,
                "application_id": game['gameid'],
                "platform": "PSN",
                "player_name": PSN_NAME,
                "title": game['title'],
            },
            "fields": {
                # NOTE(review): 'value' is written in seconds (minutes * 60)
                # while 'total' stays in minutes -- confirm dashboards
                # expect this mix of units.
                "value": int(value) * 60,
                "total": game['playtime'],
                "image": game['image'],
                "url": game['url']
            }
        })

    for achievement in scrape_achievements(game['url'], game['gameid']):
        points.append({
            "measurement": "achievement",
            "time": achievement['time'].isoformat(),
            "tags": {
                "player_id": PLAYERID,
                "application_id": game['gameid'],
                "apiname": achievement['id'],
                "platform": "PSN",
                "player_name": PSN_NAME,
                "title": game['title'],
            },
            "fields": {
                "name": achievement['name'],
                "description": achievement['description'],
                # No grayscale art is available from Exophase; reuse the
                # regular icon for both states.
                "icon": achievement['image'],
                "icon_gray": achievement['image'],
            }
        })

write_points(points)
160 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
appdirs==1.4.4
arrow==1.2.2
attrs==21.4.0
beautifulsoup4==4.10.0
bs4==0.0.1
cattrs==1.10.0
certifi==2022.12.7
charset-normalizer==2.0.12
colorlog==6.6.0
idna==3.3
influxdb==5.3.1
instaloader==4.8.4
msgpack==1.0.3
publicsuffix2==2.20191221
python-dateutil==2.8.2
pytz==2021.3
requests==2.27.1
requests-cache==0.9.3
six==1.16.0
soupsieve==2.3.1
todoist-python==8.1.3
trakt.py==4.4.0
url-normalize==1.4.3
urllib3==1.26.8
39 |
--------------------------------------------------------------------------------
/rescuetime-games.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import pytz
17 | from datetime import datetime, date, timedelta, time
18 | from config import *
19 |
# Metadata lookup for gaming sessions logged by RescueTime.
# Keys must exactly match the 'activity' value RescueTime reports for the
# game's process (see the query below) — note some keys are lowercase
# executable names while others (e.g. "Solitaire", "twilightpath_Viveport")
# keep the original casing, so matching is case-sensitive.
# Each entry supplies the artwork, display title, platform tag, and store
# URL attached to the InfluxDB point.
games = {
    "angry-birds-vr-isle-of-pigs": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/1001140/header.jpg',
        "title": "Angry Birds VR: Isle of Pigs",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/1001140/'
    },
    "arcade": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/435490/header.jpg',
        "title": "Pierhead Arcade",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/435490/'
    },
    "cloudlands": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/425720/header.jpg',
        "title": "Cloudlands: VR Minigolf",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/425720/'
    },
    "fuji": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/589040/header.jpg',
        "title": "Fuji",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/589040/'
    },
    "half + half": {
        "image": 'https://halfandhalf.fun/assets/images/logo.png',
        "title": "Half + Half",
        "platform": "Oculus",
        "url": 'https://www.oculus.com/experiences/quest/2035353573194060/'
    },
    "openttd": {
        "image": 'https://www.openttd.org/static/img/layout/openttd-128.gif',
        "title": "OpenTTD",
        "platform": "Mac",
        "url": 'https://www.openttd.org'
    },
    "pixelripped1989": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/577530/header.jpg',
        "title": "Pixel Ripped 1989",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/577530/'
    },
    "proze-win64-shipping": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/924250/header.jpg',
        "title": "Proze: Enlightenment",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/924250/'
    },
    "shenmue3-win64-shipping": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/878670/header.jpg',
        "title": "Shenmue III",
        "platform": "Epic Games Store",
        "url": 'https://store.steampowered.com/app/878670/'
    },
    "starcitizen": {
        "image": 'https://robertsspaceindustries.com/rsi/static/wsc/images/Logo-SC@2x.png',
        "title": "Star Citizen",
        "platform": "Windows",
        "url": 'https://robertsspaceindustries.com/star-citizen'
    },
    "synthriders": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/885000/header.jpg',
        "title": "Synth Riders",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/885000/'
    },
    "the sims 4": {
        "image": 'https://media.contentapi.ea.com/content/dam/eacom/SIMS/brand-refresh-assets/images/2019/06/ts4-adaptive-logo-primary-white-7x2-xl-5x2-lg-2x1-md-16x9-sm-xs.png',
        "title": "The Sims 4",
        "platform": "Origin",
        "url": 'https://www.ea.com/games/the-sims/the-sims-4'
    },
    "transpose": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/835950/header.jpg',
        "title": "Transpose",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/835950/'
    },
    "twilightpath_Viveport": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/770110/header.jpg',
        "title": "Twilight Path",
        "platform": "Viveport",
        "url": 'https://store.steampowered.com/app/770110/'
    },
    "Solitaire": {
        "image": 'https://lh3.googleusercontent.com/trsFOWkeuVbmN40ss88nfXDxXcOiH1IF3oJJOueRvcrQEf0gMYsTCzGbC6C-kgqZow=s180-rw',
        "title": "Solitaire",
        "platform": "Android",
        "url": 'https://play.google.com/store/apps/details?id=com.mobilityware.solitaire'
    },
    "flightsimulator": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/1250410/header.jpg',
        "title": "Microsoft Flight Simulator",
        "platform": "Windows",
        "url": 'https://store.steampowered.com/app/1250410/'
    },
    "movingout": {
        "image": 'https://steamcdn-a.akamaihd.net/steam/apps/996770/header.jpg',
        "title": "Moving Out",
        "platform": "Windows",
        "url": 'https://store.steampowered.com/app/996770/'
    },
    "cengine": {
        "image": 'https://store-images.s-microsoft.com/image/apps.53972.13524928534337711.061b795b-d8df-4621-98ec-a96089e571a1.51af2134-99b3-46b4-bdd9-7c7f29c0655e?mode=scale&q=90&h=300&w=200',
        "title": "The Touryst",
        "platform": "Windows",
        "url": 'https://www.microsoft.com/en-us/p/the-touryst/9n9w1jk1x5qj'
    },
}
# Copy the last week of gaming activity out of the RescueTime database into
# the gaming database, enriching each point with metadata from `games`.
points = []

# Start of today (local midnight) minus 7 days, expressed as an InfluxQL
# epoch-milliseconds literal.
week_start = LOCAL_TIMEZONE.localize(
    datetime.combine(date.today(), time(0, 0)) - timedelta(days=7))
start_time = f"{int(week_start.astimezone(pytz.utc).timestamp()) * 1000}ms"

client = connect(GAMING_DATABASE)
client.switch_database(RESCUETIME_DATABASE)
durations = client.query('SELECT "duration","activity" FROM "activity" WHERE time >= ' + start_time)

for row in durations.get_points():
    game = games.get(row['activity'])
    if game is None:
        # Activity isn't a known game — skip it.
        continue
    points.append({
        "measurement": "time",
        "time": row['time'],
        "tags": {
            "application_id": row['activity'],
            "platform": game['platform'],
            "title": game['title'],
        },
        "fields": {
            "value": row['duration'],
            "image": game['image'],
            "url": game['url']
        }
    })

client.switch_database(GAMING_DATABASE)
write_points(points)
155 |
--------------------------------------------------------------------------------
/rescuetime.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, pytz, sys
17 | from datetime import datetime
18 | from config import *
19 |
# Fail fast when the API key is missing; nothing below can succeed without it.
if not RESCUETIME_API_KEY:
    logging.error("RESCUETIME_API_KEY not set in config.py")
    sys.exit(1)

connect(RESCUETIME_DATABASE)

# Fetch per-interval, per-activity data from the RescueTime Analytic Data API.
try:
    response = requests.get('https://www.rescuetime.com/anapi/data',
        params={"key":RESCUETIME_API_KEY, "perspective":"interval", "restrict_kind":"activity", "format":"json"})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

activities = response.json()
# Fixed typo: "activites" -> "activities".
logging.info("Got %s activities from RescueTime", len(activities['rows']))
if len(activities['rows']) == 0:
    sys.exit()

points = []

# Row indices follow the API's row_headers for this query:
# [0]=date, [1]=time spent (seconds), [3]=activity, [4]=category,
# [5]=productivity (-2..2). Timestamps are local; convert to UTC for InfluxDB.
for activity in activities['rows']:
    time = datetime.fromisoformat(activity[0])
    utc_time = LOCAL_TIMEZONE.localize(time).astimezone(pytz.utc).isoformat()
    points.append({
        "measurement": "activity",
        "time": utc_time,
        "tags": {
            "activity": activity[3],
            "category": activity[4]
        },
        "fields": {
            "duration": activity[1],
            "productivity": activity[5],
            # Weighted score: duration scaled by the productivity rating.
            "score": activity[1] * activity[5]
        }
    })

write_points(points)
59 |
--------------------------------------------------------------------------------
/retroachievements.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys
17 | from datetime import datetime, date, timedelta, time
18 | from config import *
19 |
# Fail fast when credentials are missing.
if not RA_API_KEY:
    logging.error("RA_API_KEY not set in config.py")
    sys.exit(1)

points = []
connect(RA_DATABASE)

# Query the trailing 7 days of earned achievements (epoch-second bounds).
end = datetime.utcnow().timestamp()
start = end - timedelta(days=7).total_seconds()

try:
    response = requests.get('https://retroachievements.org/API/API_GetAchievementsEarnedBetween.php',
        params={'z': RA_USERNAME, 'y': RA_API_KEY, 'u': RA_USERNAME, 'f': start, 't': end})
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)

data = response.json()
logging.info("Got %s achievements from RetroAchievements", len(data))

for achievement in data:
    # Renamed from `date`, which shadowed the `date` class imported from
    # datetime above.
    earned_at = datetime.strptime(achievement['Date'], "%Y-%m-%d %H:%M:%S")

    points.append({
        "measurement": "achievement",
        "time": earned_at.isoformat(),
        "tags": {
            "player_id": RA_USERNAME,
            "platform": achievement['ConsoleName'],
            "player_name": RA_USERNAME,
            "title": achievement['GameTitle'],
            "application_id": str(achievement['GameID']),
            "apiname": str(achievement['AchievementID']),
        },
        "fields": {
            "name": achievement['Title'],
            "description": achievement['Description'],
            "icon": f'https://retroachievements.org{achievement["BadgeURL"]}'
        }
    })

write_points(points)
63 |
--------------------------------------------------------------------------------
/retroarch_emulationstation.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
import json
import ntpath
import os
import sys
import urllib.parse
import xml.etree.ElementTree as ET
from datetime import datetime

import pytz

from config import *
20 |
# Abort if the EmulationStation ROMs directory isn't mounted/configured.
if not os.path.isdir(EMULATIONSTATION_ROMS):
    logging.error("Unable to find path: %s", EMULATIONSTATION_ROMS)
    sys.exit(1)

points = []
client = connect(GAMING_DATABASE)

# Index every scraped ROM by filename-without-extension so RetroArch
# playtime logs (named after the ROM) can be joined to gamelist metadata.
roms = {}
for platform in os.listdir(EMULATIONSTATION_ROMS):
    gamelist_path = os.path.join(EMULATIONSTATION_ROMS, platform, 'gamelist.xml')
    if os.path.exists(gamelist_path):
        gamelist = ET.parse(gamelist_path).getroot()
        # provider/System is gamelist-level scraper metadata; look it up once
        # instead of re-querying for every <game> element.
        system = gamelist.find('provider/System')
        if system is None:
            continue
        for game in gamelist.findall('game'):
            rom = {}
            rom['name'] = game.find('name').text
            rom['filename'] = ntpath.basename(game.find('path').text)
            rom['key'] = os.path.splitext(rom['filename'])[0]
            rom['path'] = platform
            # Normalize the scraper's 'Mame' label to the friendlier 'Arcade'.
            rom['platform'] = 'Arcade' if system.text == 'Mame' else system.text
            roms[rom['key']] = rom
44 |
# For each RetroArch core, read its per-ROM playtime logs and write the time
# played since the previous run of this script.
for core in os.listdir(RETROARCH_LOGS):
    # Last cumulative total previously stored per game, used to turn
    # RetroArch's absolute runtime into a delta for this run.
    totals = client.query(f'SELECT last("total") AS "total" FROM "time" WHERE "total" > 0 AND "player_id" = \'{core}\' GROUP BY "application_id" ORDER BY "time" DESC')

    for log in os.listdir(RETROARCH_LOGS + '/' + core):
        key = os.path.splitext(log)[0]
        if key in roms:
            with open(RETROARCH_LOGS + '/' + core + '/' + log, 'r') as f:
                playtime = json.load(f)

            rom = roms[key]
            # runtime is "H:MM:SS"; convert to total seconds.
            h, m, s = playtime['runtime'].split(':')
            runtime = value = int(h) * 3600 + int(m) * 60 + int(s)
            total = list(totals.get_points(tags={'application_id': rom['key']}))
            if len(total) == 1 and total[0]['total'] > 0:
                # Subtract the previously recorded total to get new playtime.
                value -= total[0]['total']
            # Skip sessions of ~a second or less (also skips unchanged totals).
            if value > 1:
                # last_played is a local timestamp; normalize to UTC.
                time = datetime.fromisoformat(playtime['last_played'])
                utc_time = LOCAL_TIMEZONE.localize(time).astimezone(pytz.utc).isoformat()
                points.append({
                    "measurement": "time",
                    "time": utc_time,
                    "tags": {
                        "player_id": core,
                        "application_id": rom['key'],
                        "platform": rom['platform'],
                        "player_name": core,
                        "title": rom['name'],
                    },
                    "fields": {
                        "value": int(value),
                        "total": runtime,
                        "image": f"{RETROARCH_IMAGE_WEB_PREFIX}{urllib.parse.quote(rom['path'])}/{urllib.parse.quote(rom['key'])}.png",
                        "url": f"https://thegamesdb.net/search.php?name={urllib.parse.quote_plus(rom['name'])}"
                    }
                })

write_points(points)
82 |
--------------------------------------------------------------------------------
/retropie/influx-onend.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
# Append the session end timestamp, push the completed session to InfluxDB,
# then remove the session file so a stale record is never re-submitted.
date +%s >> /run/shm/influx-retropie
/usr/bin/python3 /home/pi/influx-retropie.py >&2
rm /run/shm/influx-retropie
19 |
20 |
--------------------------------------------------------------------------------
/retropie/influx-onstart.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
# Record session start time and the runcommand arguments for influx-retropie.py.
# Line order matters: start epoch, $1 (system), $2 (emulator), $3 (ROM path).
# Expansions are quoted so ROM paths containing spaces or glob characters
# are written verbatim instead of being word-split by the shell.
date +%s > /run/shm/influx-retropie
echo "$1" >> /run/shm/influx-retropie
echo "$2" >> /run/shm/influx-retropie
echo "$3" >> /run/shm/influx-retropie
20 |
--------------------------------------------------------------------------------
/retropie/influx-retropie.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import os, sys
17 | import xml.etree.ElementTree as ET
18 | from datetime import datetime
19 | from influxdb import InfluxDBClient
20 | from influxdb.exceptions import InfluxDBClientError
21 |
22 | INFLUXDB_HOST = 'localhost'
23 | INFLUXDB_PORT = 8086
24 | INFLUXDB_USERNAME = 'root'
25 | INFLUXDB_PASSWORD = 'root'
26 | GAMING_DATABASE = 'gaming'
27 |
# Parse the session record written by the runcommand hooks
# (influx-onstart.sh / influx-onend.sh). Line order:
#   1: start epoch, 2: platform, 3: emulator, 4: ROM path, 5: end epoch.
# A context manager guarantees the handle is closed even if parsing raises.
with open('/run/shm/influx-retropie', 'r') as f:
    start = datetime.utcfromtimestamp(int(f.readline().strip()))
    platform = f.readline().strip()
    emulator = f.readline().strip()
    rom = name = os.path.basename(f.readline().strip())
    end = datetime.utcfromtimestamp(int(f.readline().strip()))
duration = (end - start).seconds
36 |
# An empty ROM line means a standalone emulator/port was launched with no ROM
# argument; report the emulator itself as the title under a generic platform.
if not rom:
    rom = name = emulator
    platform = "Linux"

# Ignore games played less than 60 seconds
if duration < 60:
    print("Ignoring '" + emulator + ": " + name +"' played less than 60 seconds")
    sys.exit()

# Ignore non-games and Macintosh platform which doesn't provide game names
if platform == "macintosh" or rom.startswith("+") or rom == "Desktop.sh" or rom == "Kodi.sh" or rom == "Steam Link.sh":
    print("Ignoring non-game: '" + emulator + ": " + name +"'")
    sys.exit()
50 |
gamelist = os.path.expanduser('~/.emulationstation/gamelists/' + platform + '/gamelist.xml')

# Resolve the ROM filename to its scraped display name when a gamelist exists;
# the first matching <game> entry wins.
if os.path.exists(gamelist):
    for entry in ET.parse(gamelist).getroot().findall('game'):
        if os.path.basename(entry.find('path').text) == name:
            name = entry.find('name').text
            break
60 |
# Map RetroPie directory names to human-readable platform labels; anything
# not listed falls back to simple capitalization (e.g. "psx" -> "Psx").
_PLATFORM_LABELS = {
    "nes": "NES",
    "snes": "SNES",
    "gba": "Game Boy Advance",
    "gbc": "Game Boy Color",
    "megadrive": "Sega Genesis",
    "genesis": "Sega Genesis",
    "sega32x": "Sega 32X",
    "segacd": "Sega CD",
    "pc": "MS-DOS",
    "scummvm": "ScummVM",
    "mame-libretro": "Arcade",
    "mastersystem": "Sega MasterSystem",
}
platform = _PLATFORM_LABELS.get(platform, platform.capitalize())
85 |
url = ""
image = ""

# Known standalone ports get hand-curated artwork and store/homepage links.
if name == "openttd":
    name = "OpenTTD"
    url = "https://www.openttd.org"
    image = "https://www.openttd.org/static/img/layout/openttd-128.gif"

# Build the single session point. The two previous near-identical dict
# literals are collapsed: image/url fields are attached only when both are
# known, matching the original branching exactly.
fields = {"value": duration}
if url and image:
    fields["image"] = image
    fields["url"] = url

points = [{
    "measurement": "time",
    "time": start,
    "tags": {
        "application_id": rom,
        "platform": platform,
        "title": name,
    },
    "fields": fields
}]
122 |
# Connect and ensure the target database exists (create_database appears to
# be safe to call repeatedly here — NOTE(review): confirm idempotence for the
# installed influxdb client version).
try:
    client = InfluxDBClient(host=INFLUXDB_HOST, port=INFLUXDB_PORT, username=INFLUXDB_USERNAME, password=INFLUXDB_PASSWORD)
    client.create_database(GAMING_DATABASE)
except InfluxDBClientError as err:
    print("InfluxDB connection failed: %s" % (err))
    sys.exit()

# Write the session point; failures are reported but not retried.
try:
    client.switch_database(GAMING_DATABASE)
    client.write_points(points)
except InfluxDBClientError as err:
    print("Unable to write points to InfluxDB: %s" % (err))
    sys.exit()

print("Successfully wrote %s data points to InfluxDB" % (len(points)))
138 |
--------------------------------------------------------------------------------
/screenshots/grafana-edsm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-edsm.png
--------------------------------------------------------------------------------
/screenshots/grafana-exist.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-exist.png
--------------------------------------------------------------------------------
/screenshots/grafana-fitbit.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-fitbit.png
--------------------------------------------------------------------------------
/screenshots/grafana-foursquare.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-foursquare.png
--------------------------------------------------------------------------------
/screenshots/grafana-fshub.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-fshub.png
--------------------------------------------------------------------------------
/screenshots/grafana-gaming.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-gaming.png
--------------------------------------------------------------------------------
/screenshots/grafana-github.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-github.png
--------------------------------------------------------------------------------
/screenshots/grafana-instagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-instagram.png
--------------------------------------------------------------------------------
/screenshots/grafana-rescuetime.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-rescuetime.png
--------------------------------------------------------------------------------
/screenshots/grafana-todoist.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-todoist.png
--------------------------------------------------------------------------------
/screenshots/grafana-trakt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c99koder/personal-influxdb/eb521e28ceea5e82cac4b8e2fdcae7005ea540e1/screenshots/grafana-trakt.png
--------------------------------------------------------------------------------
/stadia.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests
17 | import sys
18 | from datetime import datetime
19 | from bs4 import BeautifulSoup
20 | from urllib.parse import urljoin, urlparse
21 | from config import *
22 |
# Fail fast when the Exophase profile name hasn't been configured.
if not EXOPHASE_NAME:
    logging.error("EXOPHASE_NAME not set in config.py")
    sys.exit(1)

# Points accumulated across all games, written in one batch at the end.
points = []
28 |
29 |
def scrape_exophase_id():
    """Fetch the Exophase profile page and return [playerid, userid].

    Exits the process on HTTP errors, matching the other fetchers here.
    """
    try:
        response = requests.get(f"https://www.exophase.com/user/{EXOPHASE_NAME}")
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    soup = BeautifulSoup(response.text, 'html.parser')
    # The ids live in data- attributes on the first matching elements.
    player_el = soup.find("a", attrs={'data-playerid': True})
    user_el = soup.find("div", attrs={'data-userid': True})
    return [player_el['data-playerid'], user_el['data-userid']]
40 |
41 |
def scrape_latest_games(platform):
    """Scrape the user's recently played games for `platform` from Exophase.

    Returns a list of dicts: gameid, time (last played, local datetime),
    title, url, image, playtime (minutes). Exits on HTTP errors.
    """
    try:
        response = requests.get(f"https://www.exophase.com/{platform}/user/{EXOPHASE_NAME}")
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)

    games = []
    soup = BeautifulSoup(response.text, 'html.parser')
    for entry in soup.find_all("li", attrs={'data-gameid': True}):
        # "span.hours" text ends with a unit suffix; strip it and convert
        # hours to whole minutes.
        minutes = int(float(entry.select_one("span.hours").get_text()[:-1]) * 60)
        img = entry.select_one("div.image > img")['src']
        # Drop any query string and request the large cover variant.
        img = urljoin(img, urlparse(img).path).replace("/games/m/", "/games/l/")
        link = entry.select_one("h3 > a")
        games.append({
            'gameid': entry['data-gameid'],
            'time': datetime.fromtimestamp(float(entry['data-lastplayed'])),
            'title': link.string,
            'url': link['href'],
            'image': img,
            'playtime': minutes,
        })

    return games
67 |
68 |
def scrape_achievements(url, gameid):
    """Return earned achievements for one game.

    Merges the public Exophase API (award ids + unlock timestamps) with
    name/description/icon details scraped from the game's page at `url`.
    Exits on HTTP errors, like the other fetchers in this file.
    """
    achievements = []
    try:
        response = requests.get(
            f"https://api.exophase.com/public/player/{urlparse(url).fragment}/game/{gameid}/earned")
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    api_data = response.json()
    # Idiomatic truthiness check (was `== True`).
    if api_data['success']:
        achievement_data = {}
        # The page fetch previously had no error handling, unlike every other
        # request in this file; a failed fetch would have produced an empty
        # soup and KeyErrors below.
        try:
            response = requests.get(url)
            response.raise_for_status()
        except requests.exceptions.HTTPError as err:
            logging.error("HTTP request failed: %s", err)
            sys.exit(1)
        soup = BeautifulSoup(response.text, 'html.parser')
        for achievement in soup.find_all("li", attrs={'data-type': 'achievement'}):
            img = achievement.select_one("div.image > img")['src']
            img = urljoin(img, urlparse(img).path)
            achievement_data[achievement['id']] = {'id': achievement['id'],
                'name': achievement.select_one("div.award-title > a").string.replace("\xa0", " "),
                'description': achievement.select_one("div.award-description > p").string.replace("\xa0", " "),
                'image': img
            }

        # Attach unlock times from the API to the scraped details.
        for achievement in api_data['list']:
            data = achievement_data[str(achievement['awardid'])]
            data['time'] = datetime.fromtimestamp(achievement['timestamp'])
            achievements.append(data)

    return achievements
98 |
99 |
client = connect(STADIA_DATABASE)

# Player/user ids scraped from the Exophase profile page markup.
PLAYERID, USERID = scrape_exophase_id()
# Last recorded cumulative playtime per game, used to turn absolute totals
# into per-run deltas.
totals = client.query(
    f'SELECT last("total") AS "total" FROM "time" WHERE "platform" = \'Stadia\' AND "total" > 0 AND "player_id" = \'{PLAYERID}\' GROUP BY "application_id" ORDER BY "time" DESC')

for game in scrape_latest_games('stadia'):
    # playtime is minutes (scrape_latest_games converts hours * 60).
    value = game['playtime']
    total = list(totals.get_points(
        tags={'application_id': str(game['gameid'])}))
    if len(total) == 1 and total[0]['total'] > 0:
        # Only record time played since the previous run of this script.
        value = game['playtime'] - total[0]['total']
    # Skip unchanged games and sub-minute deltas.
    if value > 1:
        points.append({
            "measurement": "time",
            "time": game['time'].isoformat(),
            "tags": {
                "player_id": PLAYERID,
                "application_id": game['gameid'],
                "platform": "Stadia",
                "player_name": STADIA_NAME,
                "title": game['title'],
            },
            "fields": {
                # Delta minutes converted to seconds; total stays in minutes.
                "value": int(value) * 60,
                "total": game['playtime'],
                "image": game['image'],
                "url": game['url']
            }
        })

        # Achievements are only refreshed for games whose playtime changed.
        for achievement in scrape_achievements(game['url'], game['gameid']):
            points.append({
                "measurement": "achievement",
                "time": achievement['time'].isoformat(),
                "tags": {
                    "player_id": PLAYERID,
                    "application_id": game['gameid'],
                    "apiname": achievement['id'],
                    "platform": "Stadia",
                    "player_name": STADIA_NAME,
                    "title": game['title'],
                },
                "fields": {
                    "name": achievement['name'],
                    "description": achievement['description'],
                    "icon": achievement['image'],
                    # No gray variant is scraped; reuse the color icon.
                    "icon_gray": achievement['image'],
                }
            })

write_points(points)
152 |
--------------------------------------------------------------------------------
/steam.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2023 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys, re, json
17 | from datetime import datetime
18 | from config import *
19 |
# Fail fast if the Steam Web API key hasn't been configured.
if not STEAM_API_KEY:
    logging.error("STEAM_API_KEY not set in config.py")
    sys.exit(1)

# Points accumulated across all games, written in one batch at the end.
points = []
25 |
def fetch_schema(appId):
    """Fetch the stats/achievements schema for `appId`.

    Returns the 'game' object from GetSchemaForGame, or {} when the app
    publishes no schema. Exits the process on HTTP errors.
    """
    try:
        response = requests.get('https://api.steampowered.com/ISteamUserStats/GetSchemaForGame/v1/',
            params={'key': STEAM_API_KEY, 'steamid': STEAM_ID, 'appid': appId, 'l': STEAM_LANGUAGE})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    # Renamed from `json`, which shadowed the json module imported above.
    data = response.json()
    return data.get('game', {})
39 |
def fetch_achievements(appId):
    """Return the player's achievement list for `appId` ([] when absent).

    Exits the process on HTTP errors.
    """
    try:
        response = requests.get('https://api.steampowered.com/ISteamUserStats/GetPlayerAchievements/v1/',
            params={'key': STEAM_API_KEY, 'steamid': STEAM_ID, 'appid': appId})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    # Renamed from `json`, which shadowed the json module imported above.
    data = response.json()
    if 'playerstats' in data and 'achievements' in data['playerstats']:
        logging.info("Got %s achievements from Steam for appId %s", len(data['playerstats']['achievements']), appId)
        return data['playerstats']['achievements']
    return []
54 |
def fetch_recents():
    """Return games played recently per GetRecentlyPlayedGames ([] when none).

    Exits the process on HTTP errors.
    """
    try:
        response = requests.get('https://api.steampowered.com/IPlayerService/GetRecentlyPlayedGames/v1/',
            params={'key': STEAM_API_KEY, 'steamid': STEAM_ID})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    # Renamed from `json`, which shadowed the json module imported above.
    data = response.json()
    if 'response' in data and 'games' in data['response']:
        logging.info("Got %s games from Steam recents", data['response']['total_count'])
        return data['response']['games']
    return []
69 |
def fetch_owned_games():
    """Return the player's full game library per GetOwnedGames ([] when empty).

    Exits the process on HTTP errors.
    """
    try:
        response = requests.get('https://api.steampowered.com/IPlayerService/GetOwnedGames/v1/',
            params={'key': STEAM_API_KEY, 'steamid': STEAM_ID})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        logging.error("HTTP request failed: %s", err)
        sys.exit(1)
    # Renamed from `json`, which shadowed the json module imported above.
    data = response.json()
    if 'response' in data and 'games' in data['response']:
        logging.info("Got %s games from Steam library", data['response']['game_count'])
        return data['response']['games']
    return []
84 |
client = connect(STEAM_DATABASE)

# Last recorded cumulative playtime per game, used to compute deltas below.
totals = client.query(f'SELECT last("total") AS "total" FROM "time" WHERE "platform" = \'Steam\' AND "total" > 0 AND "player_id" = \'{STEAM_ID}\' GROUP BY "application_id" ORDER BY "time" DESC')
games = fetch_owned_games()

for app in fetch_recents():
    # Find the owned-games record for this recent app; it supplies
    # rtime_last_played, which the recents payload doesn't include.
    for game in games:
        if game['appid'] == app['appid']:
            # playtime_* fields are minutes.
            value = app['playtime_2weeks']
            total = list(totals.get_points(tags={'application_id': str(app['appid'])}))
            if len(total) == 1 and total[0]['total'] > 0:
                # Prefer delta since last run over the raw 2-week figure.
                value = app['playtime_forever'] - total[0]['total']
            # Skip unchanged games and sub-minute deltas.
            if value > 1:
                points.append({
                    "measurement": "time",
                    "time": datetime.fromtimestamp(game['rtime_last_played']).isoformat(),
                    "tags": {
                        "player_id": STEAM_ID,
                        "application_id": app['appid'],
                        "platform": "Steam",
                        "player_name": STEAM_USERNAME,
                        "title": app['name'],
                    },
                    "fields": {
                        # Delta minutes converted to seconds; total stays in minutes.
                        "value": int(value) * 60,
                        "total": app['playtime_forever'],
                        "image": f"https://steamcdn-a.akamaihd.net/steam/apps/{app['appid']}/header.jpg",
                        "url": f"https://store.steampowered.com/app/{app['appid']}/"
                    }
                })

                schema = fetch_schema(app['appid'])
                if 'availableGameStats' in schema and 'achievements' in schema['availableGameStats']:
                    achievements = schema['availableGameStats']['achievements']
                    # NOTE(review): GetSchemaForGame documents `achievements`
                    # as an array; indexing it by the string apiname below
                    # looks like it would raise TypeError — confirm the
                    # response shape actually seen here.
                    for achievement in fetch_achievements(app['appid']):
                        # unlocktime == 0 means not yet earned.
                        if achievement['unlocktime'] > 0:
                            description = None
                            if 'description' in achievements[achievement['apiname']]:
                                description = achievements[achievement['apiname']]['description']

                            points.append({
                                "measurement": "achievement",
                                "time": datetime.fromtimestamp(achievement['unlocktime']).isoformat(),
                                "tags": {
                                    "player_id": STEAM_ID,
                                    "application_id": app['appid'],
                                    "apiname":achievement['apiname'],
                                    "platform": "Steam",
                                    "player_name": STEAM_USERNAME,
                                    "title": app['name'],
                                },
                                "fields": {
                                    "name": achievements[achievement['apiname']]['displayName'],
                                    "description": description,
                                    "icon": achievements[achievement['apiname']]['icon'],
                                    "icon_gray": achievements[achievement['apiname']]['icongray'],
                                }
                            })

write_points(points)
145 |
--------------------------------------------------------------------------------
/trakt-tv.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, requests_cache, sys, os, json
17 | from datetime import datetime, date
18 | from trakt import Trakt
19 | from trakt.objects import Episode, Movie
20 | from config import *
21 |
# Bail out immediately when the Trakt API client id has not been configured.
if not TRAKT_CLIENT_ID:
    logging.error("TRAKT_CLIENT_ID not set in config.py")
    sys.exit(1)

# Accumulated InfluxDB points and a cache of poster URLs keyed by TMDB id.
points, posters = [], {}
28 |
def fetch_poster(type, tmdb_id):
    """Return a TMDB poster URL (w154 size) for a TV show or movie.

    :param type: TMDB media type path segment ('tv' or 'movie'); note the
        name shadows the builtin but is kept for caller compatibility
    :param tmdb_id: TMDB id, or None when the item has no TMDB mapping
    :return: full poster image URL, or None when unavailable
    """
    if tmdb_id is None:
        return None
    logging.debug("Fetching poster for type=%s id=%s", type, tmdb_id)
    try:
        # Cache TMDB lookups on disk so repeated runs don't re-fetch metadata.
        with requests_cache.enabled('tmdb'):
            response = requests.get(f'https://api.themoviedb.org/3/{type}/{tmdb_id}',
                params={'api_key': TMDB_API_KEY})
        response.raise_for_status()
    except requests.exceptions.HTTPError as err:
        # A missing poster isn't fatal — log and fall back to no image.
        logging.error("HTTP request failed: %s", err)
        return None

    data = response.json()
    poster_path = data.get('poster_path')
    if poster_path is not None:
        return TMDB_IMAGE_BASE + 'w154' + poster_path
    return None
47 |
connect(TRAKT_DATABASE)

Trakt.configuration.defaults.client(
    id=TRAKT_CLIENT_ID,
    secret=TRAKT_CLIENT_SECRET
)

# Persist the OAuth token beside this script; the one-time TRAKT_OAUTH_CODE
# exchange only happens on the first run, later runs reuse the saved token.
script_dir = os.path.dirname(__file__)
oauth_config_file = os.path.join(script_dir, '.trakt.json')
if not os.path.exists(oauth_config_file):
    auth = Trakt['oauth'].token_exchange(TRAKT_OAUTH_CODE, 'urn:ietf:wg:oauth:2.0:oob')
    with open(oauth_config_file, 'w') as outfile:
        json.dump(auth, outfile)
else:
    with open(oauth_config_file) as json_file:
        auth = json.load(json_file)

Trakt.configuration.defaults.oauth.from_response(auth)

# Import everything watched or scrobbled since the start of the current month.
for item in Trakt['sync/history'].get(pagination=True, per_page=100, start_at=datetime(date.today().year, date.today().month, 1), extended='full'):
    if item.action == "watch" or item.action == "scrobble":
        if isinstance(item, Episode):
            tmdb_id = item.show.get_key('tmdb')
            if tmdb_id not in posters:
                posters[tmdb_id] = fetch_poster('tv', tmdb_id)
            if posters[tmdb_id] is None:
                html = None
            else:
                # NOTE(review): this literal was corrupted in the source (and
                # the episode branch was missing its f-prefix, which would have
                # produced a literal, unformatted string). Reconstructed as a
                # poster <img> tag — confirm against the Grafana panel markup.
                html = f'<img src="{posters[tmdb_id]}"/>'
            points.append({
                "measurement": "watch",
                "time": item.watched_at.isoformat(),
                "tags": {
                    "id": item.get_key('trakt'),
                    "show": item.show.title,
                    "show_id": item.show.get_key('trakt'),
                    "season": item.pk[0],
                    "episode": item.pk[1],
                    "type": "episode"
                },
                "fields": {
                    "title": item.title,
                    "tmdb_id": tmdb_id,
                    "duration": item.show.runtime,
                    "poster": posters[tmdb_id],
                    "poster_html": html,
                    "slug": item.show.get_key('slug'),
                    "url": f"https://trakt.tv/shows/{item.show.get_key('slug')}",
                    "episode_url": f"https://trakt.tv/shows/{item.show.get_key('slug')}/seasons/{item.pk[0]}/episodes/{item.pk[1]}"
                }
            })
        if isinstance(item, Movie):
            tmdb_id = item.get_key('tmdb')
            if tmdb_id not in posters:
                posters[tmdb_id] = fetch_poster('movie', tmdb_id)
            if posters[tmdb_id] is None:
                html = None
            else:
                # NOTE(review): reconstructed literal — see episode branch above.
                html = f'<img src="{posters[tmdb_id]}"/>'
            points.append({
                "measurement": "watch",
                "time": item.watched_at.isoformat(),
                "tags": {
                    "id": item.get_key('trakt'),
                    "type": "movie"
                },
                "fields": {
                    "title": item.title,
                    "tmdb_id": tmdb_id,
                    "duration": item.runtime,
                    "poster": posters[tmdb_id],
                    "poster_html": html,
                    "slug": item.get_key('slug'),
                    "url": f"https://trakt.tv/movie/{item.get_key('slug')}"
                }
            })

        # Flush in batches so a very large history doesn't build one huge write.
        if len(points) >= 5000:
            write_points(points)
            points = []

write_points(points)
128 |
--------------------------------------------------------------------------------
/xbox.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Copyright 2022 Sam Steele
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | import requests, sys, re
17 | from datetime import datetime, date
18 | from bs4 import BeautifulSoup
19 | from config import *
20 |
# Abort early when the TrueAchievements gamer id has not been configured.
if not TRUE_ACHIEVEMENTS_ID:
    logging.error("TRUE_ACHIEVEMENTS_ID not set in config.py")
    sys.exit(1)

points = []

connect(XBOX_DATABASE)

# Scrape the TrueAchievements AJAX achievement list for this gamertag
# (100 most recent unlocks, newest first, per the URL-encoded form params).
try:
    response = requests.get(f'https://www.trueachievements.com/gamer/{XBOX_GAMERTAG}/achievements?executeformfunction&function=AjaxList&params=oAchievementList%7C%26ddlPlatformIDs%3D%26ddlGenreIDs%3D%26ddlDLCFilter%3DInclude%20DLC%26ddlFlagIDs%3D%26ddlGamerScore%3D-1%26AchievementFilter%3DrdoAchievementsIHave%26chkExcludeDoneWith%3DTrue%26oAchievementList_Order%3DWonTimeStamp%26oAchievementList_Page%3D1%26oAchievementList_ItemsPerPage%3D100%26oAchievementList_ResponsiveMode%3DTrue%26oAchievementList_TimeZone%3DEastern%20Standard%20Time%26oAchievementList_ShowAll%3DFalse%26txtHideUnobtainableAchievement%3DFalse%26txtGamerID%3D{TRUE_ACHIEVEMENTS_ID}%26txtEasy%3DFalse%26txtShowDescriptions%3DTrue%26txtAlwaysShowUnlockedAchievementDescriptions%3DFalse%26txtYearWon%3D0%26txtMinRatio%3D0%26txtMaxRatio%3D0%26txtMaxTrueAchievement%3D0%26txtLastCharAlpha%3DFalse%26txtFirstCharAlpha%3DFalse%26txtOnlySecret%3DFalse%26txtChallenges%3DFalse%26txtContestID%3D0%26txtUseStringSQL%3DTrue%26txtOddGamerScore%3DFalse%26txtAchievementNameCharacters%3D0')
    response.raise_for_status()
except requests.exceptions.HTTPError as err:
    logging.error("HTTP request failed: %s", err)
    sys.exit(1)
html = BeautifulSoup(response.text, 'html.parser')

table = html.find('table', id='oAchievementList')
for row in table.find_all('tr'):
    # Data rows alternate 'odd'/'even' classes; header/footer rows are skipped.
    if row['class'][0] == 'odd' or row['class'][0] == 'even':
        # 'Offline' marks unlocks without a recorded timestamp — skip those.
        if row.find('td', class_='date').string != 'Offline':
            # Renamed from `date` — the original shadowed datetime.date
            # imported at the top of this file.
            unlocked_at = datetime.strptime(row.find('td', class_='date').string, '%d %b %y')
            game = row.find('td', class_='gamethumb').find('img')['alt']
            # Swap the thumbnail path for the full-size achievement icon.
            icon = 'https://www.trueachievements.com' + row.find('td', class_='achthumb').find('img')['src'].replace('/thumbs/', '/')
            achievement = row.find('td', class_='wideachievement').find('a').string
            description = list(row.find('td', class_='wideachievement').find('span').stripped_strings)[0]
            # Raw string: '\w' in a plain literal is an invalid escape
            # (DeprecationWarning); the pattern bytes are unchanged.
            apiname = re.search(r'(?<=/)\w+', row.find('td', class_='achthumb').find('a')['href'])[0]

            points.append({
                "measurement": "achievement",
                "time": unlocked_at.isoformat(),
                "tags": {
                    "player_id": TRUE_ACHIEVEMENTS_ID,
                    "platform": "Xbox Live",
                    "player_name": XBOX_GAMERTAG,
                    "title": game,
                    "apiname": apiname
                },
                "fields": {
                    "name": achievement,
                    "description": description,
                    "icon": icon
                }
            })

write_points(points)
66 |
--------------------------------------------------------------------------------