├── .gitignore
├── Enpahse Enlighten v4.postman_collection.json
├── License.txt
├── README.md
├── enlighten_v4_config.json
├── populate_google_sheet.py
├── requirements.txt
├── run_inverter_daily_stats.py
├── run_inverter_daily_stats.sh
├── solar_performance_example.png
└── utils
├── enlightenAPI_v4.py
└── googleSheetsAPI.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
--------------------------------------------------------------------------------
/Enpahse Enlighten v4.postman_collection.json:
--------------------------------------------------------------------------------
1 | {
2 | "info": {
3 | "_postman_id": "274a5c9b-df54-48b1-a24e-25405890f016",
4 | "name": "Enpahse Enlighten v4",
5 | "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
6 | },
7 | "item": [
8 | {
9 | "name": "Fetch systems",
10 | "request": {
11 | "auth": {
12 | "type": "bearer",
13 | "bearer": [
14 | {
15 | "key": "token",
16 | "value": "{{access_token}}",
17 | "type": "string"
18 | }
19 | ]
20 | },
21 | "method": "GET",
22 | "header": [
23 | {
24 | "key": "Host",
25 | "value": "api.enphaseenergy.com",
26 | "type": "text"
27 | }
28 | ],
29 | "url": {
30 | "raw": "https://api.enphaseenergy.com/api/v4/systems?key={{app_api_key}}",
31 | "protocol": "https",
32 | "host": [
33 | "api",
34 | "enphaseenergy",
35 | "com"
36 | ],
37 | "path": [
38 | "api",
39 | "v4",
40 | "systems"
41 | ],
42 | "query": [
43 | {
44 | "key": "key",
45 | "value": "{{app_api_key}}"
46 | }
47 | ]
48 | }
49 | },
50 | "response": []
51 | },
52 | {
53 | "name": "Generate OAuth2 access_token",
54 | "request": {
55 | "auth": {
56 | "type": "basic",
57 | "basic": [
58 | {
59 | "key": "password",
60 | "value": "{{client_secret}}",
61 | "type": "string"
62 | },
63 | {
64 | "key": "username",
65 | "value": "{{client_id}}",
66 | "type": "string"
67 | }
68 | ]
69 | },
70 | "method": "POST",
71 | "header": [],
72 | "url": {
73 | "raw": "https://api.enphaseenergy.com/oauth/token?grant_type=authorization_code&redirect_uri=https://api.enphaseenergy.com/oauth/redirect_uri&code={{auth_code}}",
74 | "protocol": "https",
75 | "host": [
76 | "api",
77 | "enphaseenergy",
78 | "com"
79 | ],
80 | "path": [
81 | "oauth",
82 | "token"
83 | ],
84 | "query": [
85 | {
86 | "key": "grant_type",
87 | "value": "authorization_code"
88 | },
89 | {
90 | "key": "redirect_uri",
91 | "value": "https://api.enphaseenergy.com/oauth/redirect_uri"
92 | },
93 | {
94 | "key": "code",
95 | "value": "{{auth_code}}"
96 | }
97 | ]
98 | }
99 | },
100 | "response": []
101 | },
102 | {
103 | "name": "Refresh access_token",
104 | "event": [
105 | {
106 | "listen": "test",
107 | "script": {
108 | "exec": [
109 | ""
110 | ],
111 | "type": "text/javascript"
112 | }
113 | }
114 | ],
115 | "request": {
116 | "auth": {
117 | "type": "basic",
118 | "basic": [
119 | {
120 | "key": "password",
121 | "value": "{{client_secret}}",
122 | "type": "string"
123 | },
124 | {
125 | "key": "username",
126 | "value": "{{client_id}}",
127 | "type": "string"
128 | }
129 | ]
130 | },
131 | "method": "POST",
132 | "header": [],
133 | "url": {
134 | "raw": "https://api.enphaseenergy.com/oauth/token?grant_type=refresh_token&refresh_token={{refresh_token}}",
135 | "protocol": "https",
136 | "host": [
137 | "api",
138 | "enphaseenergy",
139 | "com"
140 | ],
141 | "path": [
142 | "oauth",
143 | "token"
144 | ],
145 | "query": [
146 | {
147 | "key": "grant_type",
148 | "value": "refresh_token"
149 | },
150 | {
151 | "key": "refresh_token",
152 | "value": "{{refresh_token}}"
153 | }
154 | ]
155 | }
156 | },
157 | "response": []
158 | },
159 | {
160 | "name": "Inverters Summary by Envoy or Site",
161 | "request": {
162 | "auth": {
163 | "type": "bearer",
164 | "bearer": [
165 | {
166 | "key": "token",
167 | "value": "{{access_token}}",
168 | "type": "string"
169 | }
170 | ]
171 | },
172 | "method": "GET",
173 | "header": [
174 | {
175 | "key": "Host",
176 | "value": "api.enphaseenergy.com",
177 | "type": "text"
178 | }
179 | ],
180 | "url": {
181 | "raw": "https://api.enphaseenergy.com/api/v4/systems/inverters_summary_by_envoy_or_site?key={{app_api_key}}&site_id={{system_id}}",
182 | "protocol": "https",
183 | "host": [
184 | "api",
185 | "enphaseenergy",
186 | "com"
187 | ],
188 | "path": [
189 | "api",
190 | "v4",
191 | "systems",
192 | "inverters_summary_by_envoy_or_site"
193 | ],
194 | "query": [
195 | {
196 | "key": "key",
197 | "value": "{{app_api_key}}"
198 | },
199 | {
200 | "key": "site_id",
201 | "value": "{{system_id}}"
202 | }
203 | ]
204 | }
205 | },
206 | "response": []
207 | },
208 | {
209 | "name": "Microinverter Telemetry",
210 | "request": {
211 | "auth": {
212 | "type": "bearer",
213 | "bearer": [
214 | {
215 | "key": "token",
216 | "value": "{{access_token}}",
217 | "type": "string"
218 | }
219 | ]
220 | },
221 | "method": "GET",
222 | "header": [],
223 | "url": {
224 | "raw": "https://api.enphaseenergy.com/api/v4/systems/{{system_id}}/telemetry/production_micro?key={{app_api_key}}",
225 | "protocol": "https",
226 | "host": [
227 | "api",
228 | "enphaseenergy",
229 | "com"
230 | ],
231 | "path": [
232 | "api",
233 | "v4",
234 | "systems",
235 | "{{system_id}}",
236 | "telemetry",
237 | "production_micro"
238 | ],
239 | "query": [
240 | {
241 | "key": "key",
242 | "value": "{{app_api_key}}"
243 | }
244 | ]
245 | }
246 | },
247 | "response": []
248 | }
249 | ]
250 | }
--------------------------------------------------------------------------------
/License.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020-Present Daniel Patenaude
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # python_enlighten_api (v4)
2 |
3 | _Note: this repository has been updated to include using the new Enphase Enlighten v4 API (https://developer-v4.enphase.com/aboutproduct.html)._
4 |
5 | Enphase Enlighten API & Google Sheets application to pull data and monitor panel performance and populate a Google Sheet with historical and visual data. This allows tracking individual panel performance over the lifetime of the system. Most of the functionality provided here is also provided by the Enphase Enlighten website and app, but this allows for granular panel tracking and performance over time.
6 |
7 | For example: the panel underperforming (red) is partially blocked by a neighbor's tree during the morning hours. So it's reporting performance outside a few standard deviations.
8 |
9 |
10 |
11 |
12 |
13 |
14 | ## Known Limitations
15 | 1. Token Expiration:
16 | * The Enlighten v4 API does not provide any documentation on what happens when the `refresh_token` expires (1 week validity) and how a new one is generated. The step to generate the original `access_token` and `refresh_token` in [Generate OAuth2 access_token and refresh_token](https://developer-v4.enphase.com/docs/quickstart.html#step_8) seems to only allow the route to be used a single time for an authorization code (auth_code).
17 | * As such, _the enlightenAPI_v4_ constructor provided in this repository will refresh the tokens using the [Generate new access_token and refresh_token using refresh_token](https://developer-v4.enphase.com/docs/quickstart.html#step_10) and update the _enlighten_v4_config.json_ to contain the new tokens. This ensures that each day when the script is run, the tokens are refreshed and kept well within the refresh_token's 1 week validity.
18 | 2. Inverter Telemetry Reporting:
19 | * It appears there's a new v4 route (/api/v4/systems/{System_id}/devices/micros/{serial_no}/telemetry) that can get microinverter data based on a date range. This seems the more ideal way to get daily inverter data than my current way of having to get the current lifetime data minus the stored value. Unfortunately, this route requires [paid plans and is not available using the free 'Watt' plan](https://github.com/danielpatenaude/python_enlighten_api/issues/4). For users of the free developer 'Watt' plan, you'll always receive a `401 - Not Authorized`.
20 |
21 | ## Requirements
22 | Requires Python 3.6.8 or later installed. Much effort has been taken to ensure this application does not require additional modules besides what is included standard with Python.
23 |
24 | pip install -r requirements.txt
25 |
26 | This will install the following:
27 | 1. requests
28 | 2. google-api-python-client
29 | 3. google-auth-httplib2
30 | 4. google-auth-oauthlib
31 |
32 | ## Getting Started
33 |
34 | ### Postman Scripts
35 | The _Enpahse Enlighten v4.postman_collection.json_ is provided to assist setting up the `access_token` and `refresh_token`. In addition, you can use the GET routes to test your API connectivity and configuration.
36 |
37 | ### Script/API Setup
38 | 1. [Enlighten API] Allow app access to your Enlighten Account:
39 | * Follow the Enlighten [Quickstart instructions Steps 1-4](https://developer-v4.enphase.com/docs/quickstart.html#step_1) to add a new application to your developer account
40 | 2. [Enlighten API] Open the _enlighten_v4_config.json_ and set the following:
41 | * `name`: a generic name to identify this system.
42 | * `system_id`: (previously known as `Site ID`) Within MyEnlighten the System ID should be displayed (or if using the web browser should be in the URL [https://enlighten.enphaseenergy.com/web/:SYSTEM_ID/today/graph/hours].
43 | * `app_api_key`: Within the [Enlighten API App Page](https://developer-v4.enphase.com/admin/applications) page, copy your app's API key.
44 | * `app_client_id`: Within the [Enlighten API App Page](https://developer-v4.enphase.com/admin/applications) page, copy your app's Client ID.
45 | * `app_client_secret`: Within the [Enlighten API App Page](https://developer-v4.enphase.com/admin/applications) page, copy your app's Client Secret.
46 | * `access/refresh_token`: See step 3 below.
47 | * `spreadsheet_id`: The SpreadSheet ID from Google Sheets. See step 5.
48 | 3. [Enlighten API] Generate access_token and refresh_token (this condenses some of the Enlighten API [Quickstart Guide](https://developer-v4.enphase.com/docs/quickstart.html)
49 | * Follow the Enlighten [Quickstart instructions Steps 6-7](https://developer-v4.enphase.com/docs/quickstart.html#step_6), generate the auth_code for your application
50 | * Using the _Enpahse Enlighten v4.postman_collection.json_ open the _Generate OAuth2 access_token_ request:
51 | * Update the Postman environment variables to set the: auth_code, client_id, and client_secret
52 | * Run the request. The resulting access_token and refresh_token should be added to your enlighten_v4_config.json
53 | * _Note: it appears that you can only run this request ONCE per app authorization code (auth_code). Once you have the tokens generated for this auth_code, it appears you will be unable to run this route again. So make sure to save them. The Enlighten v4 API does not provide any details on this, but this appears to be the case._
54 | * You can call the Postman _Fetch Systems_ request using your new tokens to ensure proper configuration of your application and API tokens.
55 | 4. [Google Sheets] Setup a Google API key for your python script and put the following files in your main working script directory: credentials.json, token.pickle.
56 | * Follow the [Google Python Quickstart Guide](https://developers.google.com/sheets/api/quickstart/python)
57 | * Allow your API token to access your Google Sheets account: https://console.developers.google.com/apis/api/sheets.googleapis.com/overview?
58 | 5. [Google Sheets] Google Sheet Setup
59 | * The _'Solar Performance'_ Google sheet can be accessed from the [Solar Performance (Template)](https://docs.google.com/spreadsheets/d/1JPnT5T4xvDIKaefL8Z7AoxRNFv6HnVBF7SH-J9Yqfdk). It is a working copy of an example system setup. You will need to manually clear and remove the demo data to use.
60 | * How to Setup the _'Solar Performance'_ Sheet:
61 | 1. You'll need to make a copy of this sheet to your personal Google Drive.
62 | 2. Populate each of your inverter serial numbers into the _'Panel Data-Template'_ Sheet.
63 | * This template sheet will automatically be created into a _'Panel Data-'_ sheet where the historical data for each panel per day will be stored.
64 | 3. Copy and Paste, using values and transpose, your inverter serial numbers into the 'Last 7 Days' sheet from the _'Panel Data-Template'_ sheet.
65 | 4. Update the _'Dashboard'_ Sheet panel layout to match your panels.
66 | 5. Update the _'Dashboard'_ Sheet panel numbers and serial numbers to match your panel data
67 | * Note: Enphase Enlighten did not provide a good way to do this. So I manually had to match up each inverter serial number with the panel number in the layout by tracking panel energy produced over a few days on the _'Panel Data-'_ sheet vs the Enphase Enlighten website/app. After a few days each panel's historical data output allowed me to match up each panel on the Dashboard/Panel Data Sheet with the layout of the Enphase app.
68 |
69 | ### Running
70 | The Enlighten API has a long lag time between when data is updated on their end. If you run these scripts once a day after the Enlighten data updates AND before your solar is producing power (e.g.: 4am) you get the total lifetime power produced by each inverter, including the previous day.
71 |
72 | Run with run_inverter_daily_stats.sh or copy the logic this script is using.
73 |
74 | #### Linux: Setting Automated Cron Jobs
75 | If you're using Linux, you can add these scripts to crontab jobs to run automatically at night by:
76 |
77 | Run:
78 |
79 | >crontab -e
80 | Add a crontab job:
81 |
82 | # At 4am local time run the python script via shell script to ensure we're in the right directory
83 | 0 4 * * * /home//python_enlighten_api/run_inverter_daily_stats.sh >> /home//python_enlighten_api/cron.log 2>&1
84 |
85 | #### Windows: Scheduled Task
86 | If you're using Windows, you can automate running this by doing similar to what the Linux job is doing:
87 | 1. Add a new Windows scheduled task to run at 4am
88 | 2. Create and have the scheduled task run a .bat file that performs the directory change and python call from `run_inverter_daily_stats.sh`
89 |
90 | ## Enphase Enlighten API Documentation
91 |
92 | * https://developer-v4.enphase.com/docs.html
93 |
94 | ## Scripts Explanation
95 |
96 | This repository contains a few scripts used to hit the Enphase Enlighten API and collect data. The scripts include:
97 |
98 | ### run_inverter_daily_stats.py
99 |
100 | Runs the Enlighten API route 'inverters_summary_by_envoy_or_site' to collect the lifetime energy produced by each inverter. The Enphase API lacks the granularity of seeing per inverter daily stats. So this script provides a means to do that. If you call this route once a day before your solar is producing power (e.g.: 4am) you get the total lifetime power produced by each inverter, including the previous day. If you track this total lifetime energy value every day, you can then subtract the current day's total from the previous day lifetime total. That gives you the daily production value for that inverter. Note: if your Envoy is connected via low bandwidth Cellular, data only refreshes to Enlighten every 6 hours. So perform this route the next day in the early morning to ensure you get complete data.
101 |
102 | The resulting data is stashed in a .json file. The file organizes the data by microinverter (by ID), then by day. So you can easily parse this historical data for daily production values.
103 | For example:
104 |
105 | {
106 | "micro_inverters":
107 | {
108 | "12345678":
109 | {
110 | "2020-10-14":
111 | {
112 | "daily_energy": 0,
113 | "lifetime_energy": 69496
114 | },
115 | "2020-10-15":
116 | {
117 | "daily_energy": 2,
118 | "lifetime_energy": 69498
119 | }
120 | },
121 | "12345679":
122 | {
123 | "2020-10-14":
124 | {
125 | "daily_energy": 0,
126 | "lifetime_energy": 68967
127 | },
128 | "2020-10-15":
129 | {
130 | "daily_energy": 7,
131 | "lifetime_energy": 68974
132 | }
133 | },
134 | }
135 | }
136 |
137 | ### populate_google_sheet.py
138 | Runs after the Enlighten API data has been captured (by run_inverter_daily_stats.py) and populates the specified sheet by:
139 | 1. Grab all inverter serial numbers from the linked google sheet's Named Range 'InverterSerialNumbers'
140 | 2. Load in captured enlighten historical data (from run_inverter_daily_stats.py)
141 | 3. Look for the sheet titled 'Panel Data-' or duplicate it from 'Panel Data-Template' if it's not found
142 | 4. Match up the Enlighten serial number list order vs the InverterSerialNumber range data and filter the data by the current day
143 | 5. Insert the data into the 'Panel Data-' sheet
144 |
--------------------------------------------------------------------------------
/enlighten_v4_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "",
3 | "system_id": "",
4 | "api_url": "https://api.enphaseenergy.com/",
5 | "app_api_key": "",
6 | "app_client_id": "",
7 | "app_client_secret": "",
8 | "access_token": "<obtained from 'Generate OAuth2 access_token'>",
9 | "refresh_token": "<obtained from 'Generate OAuth2 access_token'>",
10 | "spreadsheet_id": ""
11 | }
--------------------------------------------------------------------------------
/populate_google_sheet.py:
--------------------------------------------------------------------------------
1 | # Author: Daniel Patenaude
2 | # Date: 12/02/2022
3 | # Desc: Populate the enphase data into the google sheet template
4 |
5 | import datetime
6 | import json
7 | import os
8 | from utils.googleSheetsAPI import googleSheetsAPI
9 |
10 | INVERTER_STARTING_CELL = 'A3'
11 |
def run(config):
    """Push yesterday's per-inverter daily energy into the linked Google Sheet.

    Reads the inverter serial numbers from the sheet's 'InverterSerialNumbers'
    named range, loads the historical data file written by
    run_inverter_daily_stats.py, and appends one dated column of daily_energy
    values to the 'Panel Data-<year>' sheet (duplicating it from the template
    sheet if this year's sheet doesn't exist yet).

    Parameters:
        config: dict loaded from enlighten_v4_config.json; must contain
            'spreadsheet_id' and 'system_id'.

    Exits the process with status 1 when the data file, a date entry, or a
    serial number is missing.
    """
    SPREADSHEET_ID = config["spreadsheet_id"]
    print('Beginning push to Google Sheets...')
    api = googleSheetsAPI()

    # Get our inverter serial numbers from the google sheet's named range.
    inverter_sns = []
    result_values = api.readRange(SPREADSHEET_ID, 'InverterSerialNumbers')
    for row in result_values:
        for cell in row:
            inverter_sns.append(cell)

    # Grab the historical data from the data store written by run_inverter_daily_stats.py.
    inverter_historical_data = {}
    datafile = f'data/inverter_daily_data-{config["system_id"]}.json'
    if os.path.isfile(datafile) and os.access(datafile, os.R_OK):
        with open(datafile) as json_file:
            inverter_historical_data = json.load(json_file)
    else:
        print(f'Missing {datafile}')
        exit(1)

    yesterday = datetime.datetime.now() + datetime.timedelta(days=-1)
    daily_data_to_populate = yesterday.strftime('%Y-%m-%d')  # e.g. "2020-10-15"
    # First row of the appended column is the date header.
    values = [[yesterday.strftime("%m/%d/%Y")]]

    # Ensure we can find the sheet 'Panel Data-<year>' to insert data into.
    # If not, duplicate one from the template sheet.
    year = yesterday.strftime('%Y')
    sheets = api.getSheetList(SPREADSHEET_ID)
    target_sheet_found = False
    for sheet in sheets:
        if sheet['properties']['title'] == f'Panel Data-{year}':
            target_sheet_found = True
            break
    if target_sheet_found is False:
        template_sheet_id = api.getSheetId(SPREADSHEET_ID, "Panel Data-TEMPLATE")
        api.duplicateSheet(SPREADSHEET_ID, template_sheet_id, f'Panel Data-{year}', 6)

    # Build the column: one daily_energy value per serial number, in the same
    # order as the sheet's serial number list.
    for serial_num in inverter_sns:
        if serial_num in inverter_historical_data["micro_inverters"]:
            inverter_data = inverter_historical_data["micro_inverters"][serial_num]
            if daily_data_to_populate in inverter_data:
                values.append([inverter_data[daily_data_to_populate]["daily_energy"]])
            else:
                print(f'Date data: {daily_data_to_populate} missing from {datafile}')
                exit(1)
        else:
            # BUG FIX: this serial number came *from* the google sheet; what it
            # is missing from is the historical data file. The old message
            # ("missing from google sheet") pointed users at the wrong place.
            print(f'Serial Number: {serial_num} missing from {datafile}')
            exit(1)

    body = {
        'majorDimension': 'COLUMNS',
        'values': values
    }
    # Add our daily data to our 'Panel Data' sheet.
    api.appendDataToRange(SPREADSHEET_ID, f'Panel Data-{year}!{INVERTER_STARTING_CELL}', body)
72 |
if __name__ == '__main__':
    # BUG FIX: `config` was referenced here without ever being defined, so
    # running this script directly raised NameError. Load the user config the
    # same way run_inverter_daily_stats.py does before calling run().
    with open('enlighten_v4_config.json') as config_file:
        config = json.load(config_file)
    run(config)
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | requests==2.24.0
2 | google-api-python-client==1.12.3
3 | google-auth-httplib2==0.0.4
4 | google-auth-oauthlib==0.4.1
--------------------------------------------------------------------------------
/run_inverter_daily_stats.py:
--------------------------------------------------------------------------------
# Author: Daniel Patenaude
# Date: 12/02/2022
# Desc: Use the Enphase Enlighten v4 API application to pull inverter/panel daily energy production data

import datetime
import json
import os
from utils.enlightenAPI_v4 import enlightenAPI_v4
import populate_google_sheet

# Load the user config
with open('enlighten_v4_config.json') as config_file:
    config = json.load(config_file)

print(f'Beginning EnlightenAPI pull for System ID: {config["system_id"]}')
# Constructing the wrapper also refreshes the OAuth tokens and re-saves them
# to enlighten_v4_config.json (see utils/enlightenAPI_v4.py).
api = enlightenAPI_v4(config)

# Get the inverter data from the enlighten API
# NOTE(review): [0] takes the first entry of the returned summary list —
# presumably the site's single envoy; verify behavior on multi-envoy systems.
inverter_summary = api.inverter_summary()[0]

# Read in the existing data or create the folder/file if needed.
# This should be formatted as:
'''
{
    "micro_inverters": {
        "<serial_number>": {
            "<date YYYY-MM-DD>": {
                "daily_energy": <Wh>,
                "lifetime_energy": <Wh>
            }
        }
    }
}
'''
inverter_historical_data = {}
datafile = f'data/inverter_daily_data-{config["system_id"]}.json'
if os.path.isfile(datafile) and os.access(datafile, os.R_OK):
    with open(datafile) as json_file:
        inverter_historical_data = json.load(json_file)
else:
    # First run: create the data directory and start an empty store.
    os.makedirs('data', exist_ok=True)
    inverter_historical_data["micro_inverters"] = {}

# Load the inverter data to the dictionary.
# daily_energy starts at 0; it is back-filled below once the prior day's
# lifetime total is available to subtract from.
yesterday = (datetime.datetime.now() + datetime.timedelta(days=-1)).strftime('%Y-%m-%d')
for inverter in inverter_summary["micro_inverters"]:
    inverter_sn = str(inverter["serial_number"])
    if inverter_sn not in inverter_historical_data["micro_inverters"]:
        inverter_historical_data["micro_inverters"][inverter_sn] = {}
    inverter_historical_data["micro_inverters"][inverter_sn][yesterday] = { "daily_energy": 0, "lifetime_energy": inverter["energy"]["value"]}

# Populate the daily_energy for each inverter for today's date based on the previous day's lifetime_energy and now's lifetime_energy
# (daily = lifetime[yesterday] - lifetime[two days ago]; skipped when the
# script has fewer than two days of stored data for an inverter).
two_days_ago = (datetime.datetime.now() + datetime.timedelta(days=-2)).strftime('%Y-%m-%d')
total_daily_wh = 0
for inverter_sn, inverter_data in inverter_historical_data["micro_inverters"].items():
    if two_days_ago in inverter_data:
        two_days_ago_lifetime_energy = inverter_data[two_days_ago]["lifetime_energy"]
        yesterday_lifetime_energy = inverter_data[yesterday]["lifetime_energy"]
        yesterday_energy = yesterday_lifetime_energy - two_days_ago_lifetime_energy
        inverter_historical_data["micro_inverters"][inverter_sn][yesterday]["daily_energy"] = yesterday_energy
        total_daily_wh = total_daily_wh + yesterday_energy

# Write new data to file
with open(datafile, 'w') as outfile:
    json.dump(inverter_historical_data, outfile)

print(f'Yesterdays\'s Total Energy: {total_daily_wh}Wh')

# Push the freshly captured data into the Google Sheet.
populate_google_sheet.run(config)

print('Complete...')
--------------------------------------------------------------------------------
/run_inverter_daily_stats.sh:
--------------------------------------------------------------------------------
#!/bin/sh
# Change into the repo directory first so the script's relative paths
# (enlighten_v4_config.json, data/) resolve, then run the daily pull.
# NOTE: fill in your username in the paths (/home/<user>/...) before use.
cd /home//python_enlighten_api && python3 /home//python_enlighten_api/run_inverter_daily_stats.py
--------------------------------------------------------------------------------
/solar_performance_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/danielpatenaude/python_enlighten_api/50145d3c2da9c46327b7f0feb2dbbd69ca346ef5/solar_performance_example.png
--------------------------------------------------------------------------------
/utils/enlightenAPI_v4.py:
--------------------------------------------------------------------------------
1 | # Author: Daniel Patenaude
2 | # Date: 10/13/2020
3 | # Desc: API utilities for calling the Enphase Enlighten API v4
4 |
5 | import datetime
6 | import json
7 | import requests
8 | from base64 import b64encode
9 | from datetime import date
10 |
class enlightenAPI_v4:
    '''
    Thin wrapper around the Enphase Enlighten v4 REST API.
    Constructing an instance refreshes the OAuth access/refresh tokens and
    persists them back to enlighten_v4_config.json.
    '''

    def __assert_success(self, res, exit_on_failure = True):
        '''
        Determine if the web request was successful (HTTP 200)
        Parameters:
            res: a requests.Response-like object (uses .status_code and .text)
            exit_on_failure: when True, terminate the process on a non-200 response
        Returns:
            True on HTTP 200. On failure: False if exit_on_failure is False,
            otherwise the process exits via quit().
        '''
        if res.status_code != 200:
            print("Server Responded: " + str(res.status_code) + " - " + res.text)
            if exit_on_failure:
                quit()
            else:
                return False
        return True

    def __log_time(self):
        # Timestamp prefix for log lines.
        # NOTE(review): '%I' is a 12-hour clock with no AM/PM marker; kept
        # as-is to preserve the existing log format.
        return datetime.datetime.now().strftime('%Y-%m-%d %I:%M:%S') + ": "

    def fetch_systems(self):
        '''
        Run the enlighten API Fetch Systems route
        Returns:
            Returns a list of systems for which the user can make API requests. By default, systems are returned in batches of 10. The maximum size is 100.
        '''
        url = f'{self.config["api_url"]}api/v4/systems/?key={self.config["app_api_key"]}'
        response = requests.get(url, headers={'Authorization': 'Bearer ' + self.config["access_token"]})
        self.__assert_success(response)
        # BUG FIX: previously `json.loads(result.text)` referenced `result`
        # before it existed, raising NameError on every call. The response
        # body is what must be decoded.
        result = json.loads(response.text)
        return result

    def __refresh_access_token(self):
        '''
        Refresh the access_token (1 day expiration) using the refresh_token (1 week expiration) using the steps detailed
        at: https://developer-v4.enphase.com/docs/quickstart.html#step_10.
        This will override the current self.config and save the new config to local disk to ensure we have the latest access
        and refresh tokens for the next use.

        Note: It's unclear from the Enlighten API docs how to refresh the refresh_token once it expires. If the refresh_token expires
        we're unable to call this route. Generating an access/refresh token via the API (https://developer-v4.enphase.com/docs/quickstart.html#step_8)
        seems to only be usable once per app auth_code.
        Returns:
            The full web request result of the token refresh
        '''
        print(self.__log_time() + "Refreshing access_token...")
        url = f'{self.config["api_url"]}/oauth/token?grant_type=refresh_token&refresh_token={self.config["refresh_token"]}'
        # Enlighten API v4 Quickstart says this should be a GET request, but that seems to be incorrect. POST works.
        # client_id/client_secret are sent via HTTP Basic auth.
        response = requests.post(url, auth=(self.config['app_client_id'], self.config['app_client_secret']))
        refresh_successful = self.__assert_success(response, False)
        if not refresh_successful:
            print("Unable to refresh access_token. Please set a new access_token and refresh_token in the enlighten_v4_config.json. Quitting...")
            quit()

        result = json.loads(response.text)

        # Persist the rotated tokens so the next run can refresh again.
        self.config['access_token'] = result['access_token']
        self.config['refresh_token'] = result['refresh_token']

        with open('enlighten_v4_config.json', 'w') as f:
            json.dump(self.config, f, ensure_ascii=False, indent=4)

        return result

    def inverter_summary(self):
        '''
        Run the enlighten API inverters_summary_by_envoy_or_site route (https://developer-v4.enphase.com/docs.html).
        This route returns the detailed information for each inverter (including lifetime power produced). Note: if your Envoy is connected via low
        bandwidth Cellular, data only refreshes to Enlighten every 6 hours. So perform this route the next day in the early morning to ensure you get
        complete data.
        Returns:
            Returns the microinverters summary based on the specified active envoy serial number or system.
        '''
        print(self.__log_time() + "Pulling EnlightenAPI inverter summary...")
        url = f'{self.config["api_url"]}api/v4/systems/inverters_summary_by_envoy_or_site?key={self.config["app_api_key"]}&site_id={self.config["system_id"]}'
        response = requests.get(url, headers={'Authorization': 'Bearer ' + self.config["access_token"]})
        self.__assert_success(response)
        result = json.loads(response.text)
        return result

    def production_telemetry(self):
        '''
        Run the enlighten API telemetry/production_micro route (https://developer-v4.enphase.com/docs.html).
        This route returns the telemetry for all the production micros of a system.
        It will return the default 'day' granularity i.e. start from midnight today in 5 minutes increments
        Returns:
            The decoded JSON telemetry payload.
        '''
        print(self.__log_time() + "Pulling EnlightenAPI inverter summary...")
        url = f'{self.config["api_url"]}api/v4/systems/{self.config["system_id"]}/telemetry/production_micro?key={self.config["app_api_key"]}'
        response = requests.get(url, headers={'Authorization': 'Bearer ' + self.config["access_token"]})
        self.__assert_success(response)
        result = json.loads(response.text)
        return result

    def __init__(self, config):
        '''
        Initialize the enlightenAPI_v4 class
        Parameters:
            config: The API configuration (as a dictionary). Must contain api_url,
                app_api_key, app_client_id, app_client_secret, system_id, and the
                current access_token/refresh_token.
        '''
        self.config = config

        # It seems the v4 API allows you to only call the OAuth POST route with grant_type=authorization_code a SINGLE time for a auth_code.
        # So we need to make sure those already exist.
        if not self.config["access_token"] or not self.config["refresh_token"]:
            print('Error: access_token or refresh_token not set in the enlighten_v4_config.json')
            quit()

        # Refresh and save out the new config with the refreshed access_token/refresh_token
        self.__refresh_access_token()
/utils/googleSheetsAPI.py:
--------------------------------------------------------------------------------
1 | # pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
2 |
3 | import pickle
4 | import os.path
5 | from googleapiclient.discovery import build
6 | from google_auth_oauthlib.flow import InstalledAppFlow
7 | from google.auth.transport.requests import Request
8 |
class googleSheetsAPI:
    # Refer to https://developers.google.com/sheets/api/quickstart/python for Google Sheets API documentation

    def __init__(self):
        '''Initialize the googleSheetsAPI wrapper with the Sheets OAuth scope.'''
        # If modifying these scopes, delete the file token.pickle.
        self.scopes = ['https://www.googleapis.com/auth/spreadsheets']

    def __connect(self, spreadsheet_id):
        '''
        Private helper: authenticate via the installed-app OAuth flow and build
        a Sheets API service object.

        Parameters:
            spreadsheet_id (string): google sheet ID (e.g.: pz2I46A6k7b197szhc91wKMkaXiOVpuiPk)
                (not used by the connection itself; kept for interface symmetry)
        Returns:
            An authorized googleapiclient service for the Sheets v4 API.
        '''
        creds = None
        # The file token.pickle stores the user's access and refresh tokens, and is
        # created automatically when the authorization flow completes for the first
        # time.
        if os.path.exists('token.pickle'):
            with open('token.pickle', 'rb') as token:
                creds = pickle.load(token)
        # If there are no (valid) credentials available, let the user log in.
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'credentials.json', self.scopes)
                creds = flow.run_local_server(port=0)
            # Save the credentials for the next run
            with open('token.pickle', 'wb') as token:
                pickle.dump(creds, token)

        service = build('sheets', 'v4', credentials=creds)
        return service

    def readRange(self, spreadsheet_id, range_name):
        '''
        Read the cell values from a range of a google sheet.

        Parameters:
            spreadsheet_id (string): google sheet ID (e.g.: pz2I46A6k7b197szhc91wKMkaXiOVpuiPk)
            range_name (string): range in which to read data (e.g.: a named range or a sheet and columns/rows [Class Data!A2:E])
        Returns:
            A 2-D list of row values, or None (after printing a notice) when
            the range contains no data.
        '''
        service = self.__connect(spreadsheet_id)

        # Call the Sheets API
        sheet = service.spreadsheets()
        result = sheet.values().get(
            spreadsheetId=spreadsheet_id,
            range=range_name).execute()
        values = result.get('values', [])

        if not values:
            print('No data found.')
        else:
            return values

    def appendDataToRange(self, spreadsheet_id, range_name, body):
        '''
        Append rows of data after the last populated row of a range.

        Parameters:
            spreadsheet_id (string): google sheet ID
            range_name (string): range in which to append data (e.g.: a named range or a sheet and columns/rows [Class Data!A2:E])
            body (dict): request body containing the 'values' rows to append
        '''
        service = self.__connect(spreadsheet_id)

        # Call the Sheets API
        result = \
            service.spreadsheets().values().append(
                spreadsheetId=spreadsheet_id, range=range_name,
                valueInputOption='USER_ENTERED', body=body).execute()
        print('{0} cells appended.'.format(result \
            .get('updates') \
            .get('updatedCells')))

    def getSheetList(self, spreadsheet_id):
        '''
        Get the list of sheets (sheetId + title properties) in a google sheet.

        Parameters:
            spreadsheet_id (string): google sheet ID
        Returns:
            A list of sheet property dicts, each shaped like
            {'properties': {'sheetId': ..., 'title': ...}}.
        '''
        service = self.__connect(spreadsheet_id)

        result = service.spreadsheets().get(
            spreadsheetId=spreadsheet_id,
            fields="sheets(properties(sheetId,title))").execute()
        sheets = result.get('sheets', [])
        return sheets

    def getSheetId(self, spreadsheet_id, sheet_title):
        '''
        Look up the sheetId of the sheet whose title matches sheet_title.

        Parameters:
            spreadsheet_id (string): google sheet ID
            sheet_title (string): Sheet's title to get ID for
        Returns:
            The matching sheetId, or None (after printing a notice) when no
            sheet with that title exists.
        '''
        sheets = self.getSheetList(spreadsheet_id)

        if not sheets:
            print('No data found.')
        for sheet in sheets:
            if sheet['properties']['title'] == sheet_title:
                return sheet['properties']['sheetId']
        print(f'Unable to find sheetId for sheet: {sheet_title}')

    def duplicateSheet(self, spreadsheet_id, source_sheet_id, new_sheet_title, insert_after_index):
        '''
        Duplicate an existing sheet within a google sheet.

        Parameters:
            spreadsheet_id (string): google sheet ID
            source_sheet_id (string): id of the sheet to duplicate
            new_sheet_title (string): title of the new sheet
            insert_after_index (int): Position where the new sheet should be inserted
        Returns:
            The batchUpdate API response. (Previously the response was computed
            but discarded; returning it is backward-compatible.)
        '''

        service = self.__connect(spreadsheet_id)

        requests = []
        requests.append({
            'duplicateSheet': {
                "sourceSheetId": source_sheet_id,
                "insertSheetIndex": insert_after_index,
                "newSheetName": new_sheet_title
            }
        })
        body = {
            'requests': requests
        }
        # Call the Sheets API
        response = \
            service.spreadsheets().batchUpdate(
                spreadsheetId=spreadsheet_id,
                body=body).execute()
        return response
145 |
146 | # if __name__ == '__main__':
147 | # main()
--------------------------------------------------------------------------------