├── accu_allergies_glances.PNG
├── apps
│   └── accu_allergies
│       └── accu_allergies.py
├── hacs.json
├── info.md
├── post_april22.jpg
└── readme.md

/accu_allergies_glances.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/simonhq/accu_allergies/07d3275b603e146a1c85b17cba0f313d60d1eeaa/accu_allergies_glances.PNG

--------------------------------------------------------------------------------
/apps/accu_allergies/accu_allergies.py:
--------------------------------------------------------------------------------
############################################################
#
# This class aims to get the Allergies information from AccuWeather
#
# written to be run from AppDaemon for a HASS or HASSIO install
#
# Written: 30/04/2020
# Updated: 26/06/2020
#   added postcode in addition to ID for some locations
############################################################

############################################################
#
# In the apps.yaml file you will need the following,
# updated for your file path, location details and the names of your flags
#
# accu_allergies:
#   module: accu_allergies
#   class: Get_Accu_Allergies
#   ACC_FILE: "./allergies"
#   ACC_FLAG: "input_boolean.get_allergies_data"
#   DEB_FLAG: "input_boolean.reset_allergies_sensor"
#   URL_ID: "21921"
#   URL_CITY: "canberra"
#   URL_COUNTRY: "au"
#   URL_LANG: "en"
#   URL_POSTCODE: ""
#   WEB_VER: "APRIL22"
#
# PRE APRIL 2022
# https://www.accuweather.com/en/au/canberra/21921/allergies-weather/21921
# https://www.accuweather.com/en/au/canberra/21921/cold-flu-weather/21921
# https://www.accuweather.com/en/au/canberra/21921/asthma-weather/21921
# https://www.accuweather.com/en/au/canberra/21921/arthritis-weather/21921
# https://www.accuweather.com/en/au/canberra/21921/migraine-weather/21921
# https://www.accuweather.com/en/au/canberra/21921/sinus-weather/21921
#
# APRIL 2022
# https://www.accuweather.com/en/au/canberra/21921/health-activities/21921
#
############################################################

# import the function libraries for beautiful soup and the app
from bs4 import BeautifulSoup
import datetime
import appdaemon.plugins.hass.hassapi as hass
import requests
import shelve
import urllib.parse

class Get_Accu_Allergies(hass.Hass):

    ACC_FLAG = ""
    DEB_FLAG = ""
    URL_LANG = ""
    URL_COUNTRY = ""
    URL_CITY = ""
    URL_ID = ""
    URL_POSTCODE = ""
    WEB_VER = ""

    payload = {}
    headers = {
        'User-Agent': 'Mozilla/5.0'
    }

    # url building
    # "https://www.accuweather.com/URL_LANG/URL_COUNTRY/URL_CITY/URL_POSTCODE/health-activities/URL_ID"
    url_base = "https://www.accuweather.com"

    # simple - asthma, arthritis, migraine, sinus (plus air quality)
    url_txt_setsA = [["/asthma-weather/", "asthma"], ["/arthritis-weather/", "arthritis"], ["/migraine-weather/", "migraine"], ["/sinus-weather/", "sinus"], ["/air-quality-index/", "air"]]
    url_txt_setsB = [["/health-activities/", "health"], ["/air-quality-index/", "air"]]
"ragweed"], ["/allergies-weather/", "?name=grass-pollen" , "grass"], ["/allergies-weather/", "?name=tree-pollen" , "tree"], ["/allergies-weather/", "?name=mold" , "mold"], ["/allergies-weather/", "?name=dust-dander" , "dust"], ["/cold-flu-weather/", "?name=common-cold" , "cold"], ["/cold-flu-weather/", "?name=flu" , "flu"]] 80 | url_txt_xtdB = [] 81 | 82 | icon_txt_set = {'Air Quality': 'air-purifier','Dust & Dander': 'weather-dust','Sinus Pressure': 'head-sync','Asthma': 'head-dots-horizontal', 83 | 'Migraine': 'head-alert','Arthritis': 'bone','Common Cold': 'head-snowflake','Flu': 'head-flash','Indoor Pests': 'bug', 84 | 'Outdoor Pests': 'spider','Mosquitos': 'bee','Outdoor Entertaining': 'party-popper','Lawn Mowing': 'mower', 85 | 'Composting': 'compost','Air Travel': 'airplane','Driving': 'car','Fishing': 'fish','Running': 'run', 86 | 'Golf': 'golf','Biking & Cycling': 'bike','Beach & Pool': 'beach','Stargazing': 'weather-night','Hiking': 'hiking', 87 | 'Tree Pollen': 'tree'} 88 | 89 | def cleanString(self, s): 90 | retstr = "" 91 | for chars in s: 92 | retstr += self.removeNonAscii(chars) 93 | return retstr 94 | 95 | def removeNonAscii(self, s): 96 | return ''.join(i for i in s if ord(i)<126 and ord(i)>31) 97 | 98 | # run to setup the system 99 | def initialize(self): 100 | #get the info for the system 101 | self.ACC_FILE = self.args["ACC_FILE"] 102 | self.ACC_FLAG = self.args["ACC_FLAG"] 103 | self.DEB_FLAG = self.args["DEB_FLAG"] 104 | self.URL_LANG = self.args["URL_LANG"] 105 | self.URL_COUNTRY = self.args["URL_COUNTRY"] 106 | self.URL_CITY = self.args["URL_CITY"] 107 | self.URL_ID = self.args["URL_ID"] 108 | 109 | try: 110 | self.WEB_VER = self.args["WEB_VER"] 111 | except: 112 | self.WEB_VER = "" 113 | 114 | #see if they have included a postcode value, if not, just use the ID value 115 | try: 116 | self.URL_POSTCODE = self.args["URL_POSTCODE"] 117 | except: 118 | self.URL_POSTCODE = self.URL_ID 119 | #check that the postcode is blank, and if so set it to the ID value 120 | if self.URL_POSTCODE == "": 121 | self.URL_POSTCODE = self.URL_ID 122 | 123 | #this supports the two website variations 124 | if self.WEB_VER == "APRIL22": 125 | self.url_txt_sets = self.url_txt_setsB 126 | self.url_txt_xtd = self.url_txt_xtdB 127 | else: 128 | self.url_txt_sets = self.url_txt_setsA 129 | self.url_txt_xtd = self.url_txt_xtdA 130 | 131 | #create the original sensors 132 | self.load_sensors() 133 | 134 | #set the listener for the update flag for getting the data 135 | self.listen_state(self.get_all_data, self.ACC_FLAG, new="on") 136 | #set the listener for update flag for updating the sensor from the files 137 | self.listen_state(self.set_acc_sensors, self.DEB_FLAG, new="on") 138 | 139 | # set to run each morning at 5.07am 140 | runtime = datetime.time(5,7,0) 141 | self.run_daily(self.daily_load_sensors, runtime) 142 | 143 | 144 | #get the information from each of the pages and write them into text files for reuse 145 | def get_all_data(self, entity, attribute, old, new, kwargs): 146 | #call the data builder 147 | self.get_html_data() 148 | #turn off the flag 149 | self.turn_off(self.ACC_FLAG) 150 | 151 | #request the website information 152 | def get_html_data(self): 153 | #build the url for the correct country and area 154 | start_url = self.url_base + "/" + self.URL_LANG + "/" + urllib.parse.quote(self.URL_COUNTRY) + "/" + urllib.parse.quote(self.URL_CITY) + "/" + self.URL_POSTCODE 155 | 156 | #for each of the basic pages (asthma, arthritis, migraine and sinus) 157 | for sets in 
    # request the website information
    def get_html_data(self):
        # build the url for the correct country and area
        start_url = self.url_base + "/" + self.URL_LANG + "/" + urllib.parse.quote(self.URL_COUNTRY) + "/" + urllib.parse.quote(self.URL_CITY) + "/" + self.URL_POSTCODE

        # for each of the basic pages (asthma, arthritis, migraine and sinus)
        for sets in self.url_txt_sets:
            # build the url for this allergy type
            data_url = start_url + sets[0] + self.URL_ID
            # call the function to get the information and put it in the data file
            self.get_data(data_url, sets[1])

        # for each of the multi-tier pages (allergies and cold/flu)
        for sets in self.url_txt_xtd:
            # build the url for this allergy type
            data_url = start_url + sets[0] + self.URL_ID + sets[1]
            # call the function to get the information and put it in the data file
            self.get_data(data_url, sets[2])

    # request the website information and write it to a file
    def get_data(self, url, txt):
        # request the html
        self.log("request " + url)
        data_from_website = self.get_html(url)
        # write the html into the local shelve file
        with shelve.open(self.ACC_FILE) as allergies_db:
            allergies_db[txt] = data_from_website
        # record when the data was sourced
        self.set_get_sensor()
        # update the sensor
        self.create_get_sensor()

    def set_get_sensor(self):
        # keep track of the last time the data was sourced
        tim = datetime.datetime.now()
        date_time = tim.strftime("%d/%m/%Y, %H:%M:%S")
        # add the date time to the save file
        with shelve.open(self.ACC_FILE) as allergies_db:
            allergies_db["updated"] = date_time

    def create_get_sensor(self):
        # get the last update date time from the save file
        with shelve.open(self.ACC_FILE) as allergies_db:
            date_time = allergies_db["updated"]
        # create the sensor
        self.set_state("sensor.acc_data_last_sourced", state=date_time, replace=True, attributes={"icon": "mdi:timeline-clock-outline", "friendly_name": "ACC Allergy Data last sourced"})

    # get the html from the website
    def get_html(self, url):
        # create the request for getting information from the accuweather website
        response = requests.request("GET", url, headers=self.headers, data=self.payload)
        # return the page html (as utf-8 bytes for BeautifulSoup)
        return response.text.encode('utf8')

    # call the processes to create the sensors
    def set_acc_sensors(self, entity, attribute, old, new, kwargs):
        # load all the sensors
        self.load_sensors()
        # turn off the flag
        self.turn_off(self.DEB_FLAG)

    # this loads on first run and on a restart of appdaemon
    def load_sensors(self):
        # if there is no current data file, flag a collection
        collect_flag = 0
        with shelve.open(self.ACC_FILE) as allergies_db:
            if "updated" not in allergies_db:
                collect_flag = 1

        if collect_flag == 1:
            self.get_html_data()
            self.log("no stored data found - requesting from the website")

        # create the sensors

        if self.WEB_VER == "APRIL22":
            self.get_vals(self.url_txt_sets[0][1])
            self.get_allergies_air_info(self.url_txt_sets[1][1])
        else:
            # pollens etc
            self.get_allergies_rag_info(self.url_txt_xtd[0][2])
            self.get_allergies_grass_info(self.url_txt_xtd[1][2])
            self.get_allergies_tree_info(self.url_txt_xtd[2][2])
            self.get_allergies_mold_info(self.url_txt_xtd[3][2])
            self.get_allergies_dust_info(self.url_txt_xtd[4][2])
            # cold and flu
            self.get_coldflu_cold_info(self.url_txt_xtd[5][2])
            self.get_coldflu_flu_info(self.url_txt_xtd[6][2])
            # air quality
            self.get_allergies_air_info(self.url_txt_sets[4][1])
            # asthma
            self.get_asthma_info(self.url_txt_sets[0][1])
            # arthritis
            self.get_arthritis_info(self.url_txt_sets[1][1])
            # migraine
            self.get_migraine_info(self.url_txt_sets[2][1])
            # sinus
            self.get_sinus_info(self.url_txt_sets[3][1])

        # update the last updated sensor
        self.create_get_sensor()

    # this runs each morning
    def daily_load_sensors(self, kwargs):
        # get data
        self.get_html_data()
        # load sensors
        self.load_sensors()

    # create a today sensor for each health index on the APRIL22 health-activities page
    def get_vals(self, txt):

        # open the file and read the health information
        with shelve.open(self.ACC_FILE) as allergies_db:
            html_info = allergies_db[txt]

        # parse the file for the html
        soup = BeautifulSoup(html_info, "html.parser")

        myvals = soup.find_all("div", "index-name")
        mytext = soup.find_all("div", "index-status-text")

        # create a hassio sensor for today for each index on the page
        for val, stat in zip(myvals, mytext):
            name = val.text.strip()
            senid = "sensor.acc_" + name.lower().replace(" ", "_").replace("&", "and") + "_today"
            self.log(senid)
            if name in self.icon_txt_set:
                ticon = 'mdi:' + self.icon_txt_set[name]
            else:
                ticon = 'mdi:air-purifier'
            self.set_state(senid, state=stat.text, replace=True, attributes={"icon": ticon, "friendly_name": name + " Today"})

    # get the info for air quality
    def get_allergies_air_info(self, txt):

        # open the file and read the air quality information
        with shelve.open(self.ACC_FILE) as allergies_db:
            html_info = allergies_db[txt]
        # parse the file for the html
        soup = BeautifulSoup(html_info, "html.parser")

        myvals = soup.find_all("div", "aq-number")
        mytext = soup.find_all("p", "category-text")
        mystate = soup.find_all("p", "statement")

        # create the hassio sensors for today and tomorrow for air quality
        if len(myvals) > 1:
            self.set_state("sensor.acc_air_today", state=myvals[0].text, replace=True, attributes={"icon": "mdi:air-purifier", "friendly_name": "Air Quality Today", "today_air_value": myvals[0].text + " - " + mytext[0].text, "today_air_phrase": mystate[0].text})
            self.set_state("sensor.acc_air_tomorrow", state=myvals[2].text, replace=True, attributes={"icon": "mdi:air-purifier", "friendly_name": "Air Quality Tomorrow", "tomorrow_air_value": myvals[2].text + " - " + mytext[2].text, "tomorrow_air_phrase": mystate[2].text})
        else:
            self.set_state("sensor.acc_air_today", state='Unknown', replace=True, attributes={"icon": "mdi:air-purifier", "friendly_name": "Air Quality Today", "today_air_value": 'Unknown', "today_air_phrase": 'Unknown'})
            self.set_state("sensor.acc_air_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:air-purifier", "friendly_name": "Air Quality Tomorrow", "tomorrow_air_value": 'Unknown', "tomorrow_air_phrase": 'Unknown'})

    # get the info for ragweed pollen
    def get_allergies_rag_info(self, txt):

        # open the file and read the allergies information
        with shelve.open(self.ACC_FILE) as allergies_db:
            html_info = allergies_db[txt]
        # parse the file for the html
        soup = BeautifulSoup(html_info, "html.parser")

        myvals = soup.find_all("div", "gauge")
myconds = soup.find_all("div", "cond") 322 | 323 | #create the hassio sensors for today and tomorrow for ragweed 324 | if(len(myvals) > 1): 325 | myvalseta = self.cleanString(myvals[0].text.split('>')) 326 | myvalsetb = self.cleanString(myvals[1].text.split('>')) 327 | self.set_state("sensor.acc_ragweed_pollen_today", state=myvalseta, replace=True, attributes={"icon": "mdi:clover", "friendly_name": "Ragweed Pollen Today", "today_ragweed_value": myvals[0].text , "today_ragweed_phrase": myconds[0].text }) 328 | self.set_state("sensor.acc_ragweed_pollen_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:clover", "friendly_name": "Ragweed Pollen Tomorrow", "tomorrow_ragweed_value": myvals[1].text , "tomorrow_ragweed_phrase": myconds[1].text }) 329 | else: 330 | self.set_state("sensor.acc_ragweed_pollen_today", state='Unknown', replace=True, attributes={"icon": "mdi:clover", "friendly_name": "Ragweed Pollen Today", "today_ragweed_value": 'Unknown' , "today_ragweed_phrase": 'Unknown' }) 331 | self.set_state("sensor.acc_ragweed_pollen_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:clover", "friendly_name": "Ragweed Pollen Tomorrow", "tomorrow_ragweed_value": 'Unknown' , "tomorrow_ragweed_phrase": 'Unknown' }) 332 | 333 | 334 | #get the info for pollens - ragweed, grass, tree, mold, dust and air quality 335 | def get_allergies_grass_info(self, txt): 336 | 337 | #open the file and read the allergies information 338 | with shelve.open(self.ACC_FILE) as allergies_db: 339 | html_info = allergies_db[txt] 340 | #parse the file for the hmtl 341 | soup = BeautifulSoup(html_info, "html.parser") 342 | 343 | myvals = soup.find_all("div", "gauge") 344 | myconds = soup.find_all("div", "cond") 345 | 346 | #create the hassio sensors for today and tomorrow for ragweed 347 | if(len(myvals) > 1): 348 | myvalseta = self.cleanString(myvals[0].text.split('>')) 349 | myvalsetb = self.cleanString(myvals[1].text.split('>')) 350 | self.set_state("sensor.acc_grass_pollen_today", state=myvalseta, replace=True, attributes={"icon": "mdi:barley", "friendly_name": "Grass Pollen Today", "today_grass_value": myvals[0].text , "today_grass_phrase": myconds[0].text }) 351 | self.set_state("sensor.acc_grass_pollen_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:barley", "friendly_name": "Grass Pollen Tomorrow", "tomorrow_grass_value": myvals[1].text , "tomorrow_grass_phrase": myconds[1].text }) 352 | else: 353 | self.set_state("sensor.acc_grass_pollen_today", state='Unknown', replace=True, attributes={"icon": "mdi:barley", "friendly_name": "Grass Pollen Today", "today_grass_value": 'Unknown' , "today_grass_phrase": 'Unknown' }) 354 | self.set_state("sensor.acc_grass_pollen_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:barley", "friendly_name": "Grass Pollen Tomorrow", "tomorrow_grass_value": 'Unknown' , "tomorrow_grass_phrase": 'Unknown' }) 355 | 356 | 357 | #get the info for pollens - ragweed, grass, tree, mold, dust and air quality 358 | def get_allergies_tree_info(self, txt): 359 | 360 | #open the file and read the allergies information 361 | with shelve.open(self.ACC_FILE) as allergies_db: 362 | html_info = allergies_db[txt] 363 | #parse the file for the hmtl 364 | soup = BeautifulSoup(html_info, "html.parser") 365 | 366 | myvals = soup.find_all("div", "gauge") 367 | myconds = soup.find_all("div", "cond") 368 | 369 | #create the hassio sensors for today and tomorrow for ragweed 370 | if(len(myvals) > 1): 371 | myvalseta = 
            myvalsetb = self.cleanString(myvals[1].text.split('>'))
            self.set_state("sensor.acc_tree_pollen_today", state=myvalseta, replace=True, attributes={"icon": "mdi:tree-outline", "friendly_name": "Tree Pollen Today", "today_tree_value": myvals[0].text, "today_tree_phrase": myconds[0].text})
            self.set_state("sensor.acc_tree_pollen_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:tree-outline", "friendly_name": "Tree Pollen Tomorrow", "tomorrow_tree_value": myvals[1].text, "tomorrow_tree_phrase": myconds[1].text})
        else:
            self.set_state("sensor.acc_tree_pollen_today", state='Unknown', replace=True, attributes={"icon": "mdi:tree-outline", "friendly_name": "Tree Pollen Today", "today_tree_value": 'Unknown', "today_tree_phrase": 'Unknown'})
            self.set_state("sensor.acc_tree_pollen_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:tree-outline", "friendly_name": "Tree Pollen Tomorrow", "tomorrow_tree_value": 'Unknown', "tomorrow_tree_phrase": 'Unknown'})

    # get the info for mold
    def get_allergies_mold_info(self, txt):

        # open the file and read the allergies information
        with shelve.open(self.ACC_FILE) as allergies_db:
            html_info = allergies_db[txt]
        # parse the file for the html
        soup = BeautifulSoup(html_info, "html.parser")

        myvals = soup.find_all("div", "gauge")
        myconds = soup.find_all("div", "cond")

        # create the hassio sensors for today and tomorrow for mold
        if len(myvals) > 1:
            myvalseta = self.cleanString(myvals[0].text.split('>'))
            myvalsetb = self.cleanString(myvals[1].text.split('>'))
            self.set_state("sensor.acc_mold_today", state=myvalseta, replace=True, attributes={"icon": "mdi:bacteria-outline", "friendly_name": "Mold Today", "today_mold_value": myvals[0].text, "today_mold_phrase": myconds[0].text})
            self.set_state("sensor.acc_mold_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:bacteria-outline", "friendly_name": "Mold Tomorrow", "tomorrow_mold_value": myvals[1].text, "tomorrow_mold_phrase": myconds[1].text})
        else:
            self.set_state("sensor.acc_mold_today", state='Unknown', replace=True, attributes={"icon": "mdi:bacteria-outline", "friendly_name": "Mold Today", "today_mold_value": 'Unknown', "today_mold_phrase": 'Unknown'})
            self.set_state("sensor.acc_mold_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:bacteria-outline", "friendly_name": "Mold Tomorrow", "tomorrow_mold_value": 'Unknown', "tomorrow_mold_phrase": 'Unknown'})

    # get the info for dust
    def get_allergies_dust_info(self, txt):

        # open the file and read the allergies information
        with shelve.open(self.ACC_FILE) as allergies_db:
            html_info = allergies_db[txt]
        # parse the file for the html
        soup = BeautifulSoup(html_info, "html.parser")

        myvals = soup.find_all("div", "gauge")
        myconds = soup.find_all("div", "cond")

        # create the hassio sensors for today and tomorrow for dust
        if len(myvals) > 1:
            myvalseta = self.cleanString(myvals[0].text.split('>'))
            myvalsetb = self.cleanString(myvals[1].text.split('>'))
            self.set_state("sensor.acc_dust_today", state=myvalseta, replace=True, attributes={"icon": "mdi:cloud-search-outline", "friendly_name": "Dust Today", "today_dust_value": myvals[0].text, "today_dust_phrase": myconds[0].text})
"Dust Today", "today_dust_value": myvals[0].text , "today_dust_phrase": myconds[0].text }) 420 | self.set_state("sensor.acc_dust_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:cloud-search-outline", "friendly_name": "Dust Tomorrow", "tomorrow_dust_value": myvals[1].text , "tomorrow_dust_phrase": myconds[1].text }) 421 | else: 422 | self.set_state("sensor.acc_dust_today", state='Unknown', replace=True, attributes={"icon": "mdi:cloud-search-outline", "friendly_name": "Dust Today", "today_dust_value": 'Unknown' , "today_dust_phrase": 'Unknown' }) 423 | self.set_state("sensor.acc_dust_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:cloud-search-outline", "friendly_name": "Dust Tomorrow", "tomorrow_dust_value": 'Unknown' , "tomorrow_dust_phrase": 'Unknown' }) 424 | 425 | #get the info for cold and flu 426 | def get_coldflu_cold_info(self, txt): 427 | 428 | #open the file and read the allergies information 429 | with shelve.open(self.ACC_FILE) as allergies_db: 430 | html_info = allergies_db[txt] 431 | #parse the file for the hmtl 432 | soup = BeautifulSoup(html_info, "html.parser") 433 | 434 | myvals = soup.find_all("div", "gauge") 435 | myconds = soup.find_all("div", "cond") 436 | 437 | #create the hassio sensors for today and tomorrow for cold 438 | if(len(myvals) > 1): 439 | myvalseta = self.cleanString(myvals[0].text.split('>')) 440 | myvalsetb = self.cleanString(myvals[1].text.split('>')) 441 | self.set_state("sensor.acc_common_cold_today", state=myvalseta, replace=True, attributes={"icon": "mdi:snowflake-alert", "friendly_name": "Common Cold Today", "today_common_value": myvals[0].text , "today_common_phrase": myconds[0].text }) 442 | self.set_state("sensor.acc_common_cold_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:snowflake-alert", "friendly_name": "Common Cold Tomorrow", "tomorrow_common_value": myvals[1].text , "tomorrow_common_phrase": myconds[1].text }) 443 | else: 444 | self.set_state("sensor.acc_common_cold_today", state='Unknown', replace=True, attributes={"icon": "mdi:snowflake-alert", "friendly_name": "Common Cold Today", "today_common_value": 'Unknown' , "today_common_phrase": 'Unknown' }) 445 | self.set_state("sensor.acc_common_cold_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:snowflake-alert", "friendly_name": "Common Cold Tomorrow", "tomorrow_common_value": 'Unknown' , "tomorrow_common_phrase": 'Unknown' }) 446 | 447 | 448 | def get_coldflu_flu_info(self, txt): 449 | 450 | #open the file and read the allergies information 451 | with shelve.open(self.ACC_FILE) as allergies_db: 452 | html_info = allergies_db[txt] 453 | #parse the file for the hmtl 454 | soup = BeautifulSoup(html_info, "html.parser") 455 | 456 | myvals = soup.find_all("div", "gauge") 457 | myconds = soup.find_all("div", "cond") 458 | 459 | #create the hassio sensors for today and tomorrow for cold 460 | if(len(myvals) > 1): 461 | myvalseta = self.cleanString(myvals[0].text.split('>')) 462 | myvalsetb = self.cleanString(myvals[1].text.split('>')) 463 | self.set_state("sensor.acc_flu_today", state=myvalseta, replace=True, attributes={"icon": "mdi:bacteria", "friendly_name": "Flu Today", "today_flu_value": myvals[0].text , "today_flu_phrase": myconds[0].text }) 464 | self.set_state("sensor.acc_flu_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:bacteria", "friendly_name": "Flu Tomorrow", "tomorrow_flu_value": myvals[1].text , "tomorrow_flu_phrase": myconds[1].text }) 465 | else: 466 | 
self.set_state("sensor.acc_flu_today", state='Unknown', replace=True, attributes={"icon": "mdi:bacteria", "friendly_name": "Flu Today", "today_flu_value": 'Unknown' , "today_flu_phrase": 'Unknown' }) 467 | self.set_state("sensor.acc_flu_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:bacteria", "friendly_name": "Flu Tomorrow", "tomorrow_flu_value": 'Unknown' , "tomorrow_flu_phrase": 'Unknown' }) 468 | 469 | 470 | #get the info for asthma 471 | def get_asthma_info(self, txt): 472 | 473 | #open the file and read the allergies information 474 | with shelve.open(self.ACC_FILE) as allergies_db: 475 | html_info = allergies_db[txt] 476 | #parse the file for the hmtl 477 | soup = BeautifulSoup(html_info, "html.parser") 478 | 479 | myvals = soup.find_all("div", "gauge") 480 | myconds = soup.find_all("div", "cond") 481 | 482 | #create the hassio sensors for today and tomorrow for asthma 483 | if(len(myvals) > 1): 484 | myvalseta = self.cleanString(myvals[0].text.split('>')) 485 | myvalsetb = self.cleanString(myvals[1].text.split('>')) 486 | self.set_state("sensor.acc_asthma_today", state=myvalseta, replace=True, attributes={"icon": "mdi:lungs", "friendly_name": "Asthma Today", "today_asthma_value": myvals[0].text , "today_asthma_phrase": myconds[0].text }) 487 | self.set_state("sensor.acc_asthma_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:lungs", "friendly_name": "Asthma Tomorrow", "tomorrow_asthma_value": myvals[1].text , "tomorrow_asthma_phrase": myconds[1].text }) 488 | else: 489 | self.set_state("sensor.acc_asthma_today", state='Unknown', replace=True, attributes={"icon": "mdi:lungs", "friendly_name": "Asthma Today", "today_asthma_value": 'Unknown' , "today_asthma_phrase": 'Unknown' }) 490 | self.set_state("sensor.acc_asthma_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:lungs", "friendly_name": "Asthma Tomorrow", "tomorrow_asthma_value": 'Unknown' , "tomorrow_asthma_phrase": 'Unknown' }) 491 | 492 | 493 | #get the info for arthritis 494 | def get_arthritis_info(self, txt): 495 | 496 | #open the file and read the allergies information 497 | with shelve.open(self.ACC_FILE) as allergies_db: 498 | html_info = allergies_db[txt] 499 | #parse the file for the hmtl 500 | soup = BeautifulSoup(html_info, "html.parser") 501 | 502 | myvals = soup.find_all("div", "gauge") 503 | myconds = soup.find_all("div", "cond") 504 | 505 | #create the hassio sensors for today and tomorrow for arthritis 506 | if(len(myvals) > 1): 507 | myvalseta = self.cleanString(myvals[0].text.split('>')) 508 | myvalsetb = self.cleanString(myvals[1].text.split('>')) 509 | self.set_state("sensor.acc_arthritis_today", state=myvalseta, replace=True, attributes={"icon": "mdi:bone", "friendly_name": "Arthritis Today", "today_arthritis_value": myvals[0].text , "today_arthritis_phrase": myconds[0].text }) 510 | self.set_state("sensor.acc_arthritis_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:bone", "friendly_name": "Arthritis Tomorrow", "tomorrow_arthritis_value": myvals[1].text , "tomorrow_arthritis_phrase": myconds[1].text }) 511 | else: 512 | self.set_state("sensor.acc_arthritis_today", state='Unknown', replace=True, attributes={"icon": "mdi:bone", "friendly_name": "Arthritis Today", "today_arthritis_value": 'Unknown' , "today_arthritis_phrase": 'Unknown' }) 513 | self.set_state("sensor.acc_arthritis_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:bone", "friendly_name": "Arthritis Tomorrow", "tomorrow_arthritis_value": 'Unknown' , 
"tomorrow_arthritis_phrase": 'Unknown' }) 514 | 515 | #get the info for migraine 516 | def get_migraine_info(self, txt): 517 | 518 | #open the file and read the allergies information 519 | with shelve.open(self.ACC_FILE) as allergies_db: 520 | html_info = allergies_db[txt] 521 | #parse the file for the hmtl 522 | soup = BeautifulSoup(html_info, "html.parser") 523 | 524 | myvals = soup.find_all("div", "gauge") 525 | myconds = soup.find_all("div", "cond") 526 | 527 | #create the hassio sensors for today and tomorrow for migraine 528 | if(len(myvals) > 1): 529 | myvalseta = self.cleanString(myvals[0].text.split('>')) 530 | myvalsetb = self.cleanString(myvals[1].text.split('>')) 531 | self.set_state("sensor.acc_migraine_today", state=myvalseta, replace=True, attributes={"icon": "mdi:head-flash", "friendly_name": "Migraine Today", "today_migraine_value": myvals[0].text , "today_migraine_phrase": myconds[0].text }) 532 | self.set_state("sensor.acc_migraine_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:head-flash", "friendly_name": "Migraine Tomorrow", "tomorrow_migraine_value": myvals[1].text , "tomorrow_migraine_phrase": myconds[1].text }) 533 | else: 534 | self.set_state("sensor.acc_migraine_today", state='Unknown', replace=True, attributes={"icon": "mdi:head-flash", "friendly_name": "Migraine Today", "today_migraine_value": 'Unknown' , "today_migraine_phrase": 'Unknown' }) 535 | self.set_state("sensor.acc_migraine_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:head-flash", "friendly_name": "Migraine Tomorrow", "tomorrow_migraine_value": 'Unknown' , "tomorrow_migraine_phrase": 'Unknown' }) 536 | 537 | 538 | #get the info for sinus 539 | def get_sinus_info(self, txt): 540 | 541 | #open the file and read the allergies information 542 | with shelve.open(self.ACC_FILE) as allergies_db: 543 | html_info = allergies_db[txt] 544 | #parse the file for the hmtl 545 | soup = BeautifulSoup(html_info, "html.parser") 546 | 547 | myvals = soup.find_all("div", "gauge") 548 | myconds = soup.find_all("div", "cond") 549 | 550 | #create the hassio sensors for today and tomorrow for sinus 551 | if(len(myvals) > 1): 552 | myvalseta = self.cleanString(myvals[0].text.split('>')) 553 | myvalsetb = self.cleanString(myvals[1].text.split('>')) 554 | self.set_state("sensor.acc_sinus_today", state=myvalseta, replace=True, attributes={"icon": "mdi:head-remove-outline", "friendly_name": "Sinus Today", "today_sinus_value": myvals[0].text , "today_sinus_phrase": myconds[0].text }) 555 | self.set_state("sensor.acc_sinus_tomorrow", state=myvalsetb, replace=True, attributes={"icon": "mdi:head-remove-outline", "friendly_name": "Sinus Tomorrow", "tomorrow_sinus_value": myvals[1].text , "tomorrow_sinus_phrase": myconds[1].text }) 556 | else: 557 | self.set_state("sensor.acc_sinus_today", state='Unknown', replace=True, attributes={"icon": "mdi:head-remove-outline", "friendly_name": "Sinus Today", "today_sinus_value": 'Unknown' , "today_sinus_phrase": 'Unknown' }) 558 | self.set_state("sensor.acc_sinus_tomorrow", state='Unknown', replace=True, attributes={"icon": "mdi:head-remove-outline", "friendly_name": "Sinus Tomorrow", "tomorrow_sinus_value": 'Unknown' , "tomorrow_sinus_phrase": 'Unknown' }) 559 | 560 | -------------------------------------------------------------------------------- /hacs.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "accu_weather", 3 | "render_readme": "True" 4 | } 
--------------------------------------------------------------------------------
/info.md:
--------------------------------------------------------------------------------

## AppDaemon configuration

You will also need to install Beautiful Soup (bs4) by adding `bs4` to the Python packages in the AppDaemon 4 configuration on the add-on panel.

```yaml
system_packages: []
python_packages:
  - bs4
init_commands: []
```

## App configuration

In the appdaemon/apps/apps.yaml file:

```yaml
accu_allergies:
  module: accu_allergies
  class: Get_Accu_Allergies
  ACC_FILE: "./allergies"
  ACC_FLAG: "input_boolean.get_allergies_data"
  DEB_FLAG: "input_boolean.reset_allergies_sensor"
  URL_ID: "21921"
  URL_CITY: "canberra"
  URL_COUNTRY: "au"
  URL_LANG: "en"
  URL_POSTCODE: ""
  WEB_VER: ""
```

key | optional | type | default | description
-- | -- | -- | -- | --
`module` | False | string | | `accu_allergies`
`class` | False | string | | `Get_Accu_Allergies`
`ACC_FILE` | False | string | | Path and name of the file used to store the downloaded html, to reduce the number of requests to the website
`ACC_FLAG` | False | string | | The `input_boolean` in HA that triggers downloading the information from the website
`DEB_FLAG` | False | string | | The `input_boolean` in HA that triggers refreshing the sensors from the stored html
`URL_ID` | False | string | | The ID in the AccuWeather page URL for the location you want information for
`URL_CITY` | False | string | | The city name in the AccuWeather page URL for the location you want information for
`URL_COUNTRY` | False | string | | The country code in the AccuWeather page URL for the location you want information for
`URL_LANG` | False | string | | The language code in the AccuWeather page URL for the location you want information for
`URL_POSTCODE` | True | string | | Some locations use a postcode as well as an ID in the AccuWeather page URL; this defaults to the `URL_ID` value if left blank
`WEB_VER` | True | string | | Some locations have transitioned to a new AccuWeather website template - use "APRIL22" if your area has the new template

## Sensors to be created

This app will create the following sensors:

* sensor.acc_data_last_sourced

Pre APRIL22

Sensors for today and tomorrow for each of the following types (24 in total):
ragweed pollen, grass pollen, tree pollen, mold, dust, air quality, common cold, flu, asthma, arthritis, migraine, sinus

Each sensor is a rating from 1 to 10.

The site itself holds 12 days of information for each of the 12 concepts, but this app only gets the current day and the next day.

Post APRIL22

Air quality for today and tomorrow, plus sensors for today only for each of the following types:

dust & dander, sinus pressure, asthma, grass pollen, ragweed pollen, tree pollen, mold, migraine, arthritis, common cold, flu,
indoor pests, outdoor pests, mosquitos, outdoor entertaining, lawn mowing, composting, air travel, driving, fishing, running,
golf, biking & cycling, beach & pool, stargazing, hiking

Each sensor is a rating from Low to Extreme.

--------------------------------------------------------------------------------
/post_april22.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/simonhq/accu_allergies/07d3275b603e146a1c85b17cba0f313d60d1eeaa/post_april22.jpg

--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
## NOTE - added automatic daily download

# AccuWeather Allergies
[![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg?style=for-the-badge)](https://github.com/custom-components/hacs)

_Creates Home Assistant sensors with the AccuWeather allergy level information for various sensor types_

## Lovelace Example

Pre APRIL22 changes

![Example of Allergies information in the Dashboard](https://github.com/simonhq/accu_allergies/blob/master/accu_allergies_glances.PNG)

APRIL22 changes

![Example of Allergies information in the Dashboard](https://github.com/simonhq/accu_allergies/blob/master/post_april22.jpg)
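The screenshots above appear to use the standard Lovelace glance card. As a minimal sketch (the entity names are a sample of the pre-APRIL22 sensors this app creates; pick whichever sensors you want to show), a card like the following produces a similar view:

```yaml
type: glance
title: Allergies
entities:
  - sensor.acc_grass_pollen_today
  - sensor.acc_tree_pollen_today
  - sensor.acc_ragweed_pollen_today
  - sensor.acc_mold_today
  - sensor.acc_dust_today
  - sensor.acc_air_today
```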
## Installation

This app is best installed using [HACS](https://github.com/custom-components/hacs), so that you can easily track and download updates.

Alternatively, you can download the `accu_allergies` directory from inside the `apps` directory here to your local `apps` directory, then add the configuration to enable the `accu_allergies` module.

## How it works

The [AccuWeather](https://www.accuweather.com/) site provides this information; this app simply scrapes the pages and makes the information available as sensors in HA.

As this is not time-critical information, the app only fetches it on a set schedule, once per day at 5.07am, but it also watches an `input_boolean` that you specify for when to update the sensors. You can of course automate when you want that `input_boolean` to turn on.

### To Run outside of the schedule

You will need to create an `input_boolean` entity to watch for when to update the sensors. When this `input_boolean` is turned on, whether manually or by an automation you create, the scraping process will run to create/update the sensors.

To reduce the number of requests to the website, this is a two-stage system: one `input_boolean` controls when the data is requested from the website, and a second controls when the sensors are refreshed from the stored html. This means a HA restart won't trigger html requests; it will just read from your saved data file from that morning.

Therefore you will need to chain the two `input_boolean` requests so that the second is offset by a minute from the first, allowing time for the data to be downloaded, as in the sketch below.
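For example (a minimal sketch - the boolean names match the app configuration shown below, while the trigger time and alias are just illustrations), you could define the two helpers and chain them with an automation like this:

```yaml
# configuration.yaml - the two flags the app watches
input_boolean:
  get_allergies_data:
    name: Get Allergies Data
  reset_allergies_sensor:
    name: Reset Allergies Sensor

# automations.yaml - request the data, then refresh the sensors a minute later
- alias: Update allergies sensors
  trigger:
    - platform: time
      at: "06:30:00"
  action:
    - service: input_boolean.turn_on
      target:
        entity_id: input_boolean.get_allergies_data
    - delay: "00:01:00"
    - service: input_boolean.turn_on
      target:
        entity_id: input_boolean.reset_allergies_sensor
```

The app turns each flag back off once it has finished the corresponding stage.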
## AppDaemon configuration

You will also need to install Beautiful Soup (bs4) by adding `bs4` to the Python packages in your AppDaemon configuration.

```yaml
system_packages: []
python_packages:
  - bs4
init_commands: []
```

## App configuration

```yaml
accu_allergies:
  module: accu_allergies
  class: Get_Accu_Allergies
  ACC_FILE: "./allergies"
  ACC_FLAG: "input_boolean.get_allergies_data"
  DEB_FLAG: "input_boolean.reset_allergies_sensor"
  URL_ID: "21921"
  URL_CITY: "canberra"
  URL_COUNTRY: "au"
  URL_LANG: "en"
  URL_POSTCODE: ""
  WEB_VER: "" # or use "APRIL22"
```

key | optional | type | default | description
-- | -- | -- | -- | --
`module` | False | string | | `accu_allergies`
`class` | False | string | | `Get_Accu_Allergies`
`ACC_FILE` | False | string | | Path and name of the file used to store the downloaded html, to reduce the number of requests to the website
`ACC_FLAG` | False | string | | The `input_boolean` in HA that triggers downloading the information from the website
`DEB_FLAG` | False | string | | The `input_boolean` in HA that triggers refreshing the sensors from the stored html
`URL_ID` | False | string | | The ID in the AccuWeather page URL for the location you want information for
`URL_CITY` | False | string | | The city name in the AccuWeather page URL for the location you want information for
`URL_COUNTRY` | False | string | | The country code in the AccuWeather page URL for the location you want information for
`URL_LANG` | False | string | | The language code in the AccuWeather page URL for the location you want information for
`URL_POSTCODE` | True | string | | Some locations use a postcode as well as an ID in the AccuWeather page URL; this defaults to the `URL_ID` value if left blank
`WEB_VER` | True | string | | Some locations have transitioned to a new AccuWeather website template - use "APRIL22" if your area has the new template

## Sensors to be created

This app will create the following sensors:

* sensor.acc_data_last_sourced

Pre APRIL22

Sensors for today and tomorrow for each of the following types (24 in total):
ragweed pollen, grass pollen, tree pollen, mold, dust, air quality, common cold, flu, asthma, arthritis, migraine, sinus

Each sensor is a rating from 1 to 10.

The site itself holds 12 days of information for each of the 12 concepts, but this app only gets the current day and the next day.

Post APRIL22

Air quality for today and tomorrow, plus sensors for today only for each of the following types:

dust & dander, sinus pressure, asthma, grass pollen, ragweed pollen, tree pollen, mold, migraine, arthritis, common cold, flu,
indoor pests, outdoor pests, mosquitos, outdoor entertaining, lawn mowing, composting, air travel, driving, fishing, running,
golf, biking & cycling, beach & pool, stargazing, hiking

Each sensor is a rating from Low to Extreme.

## Issues/Feature Requests

Please log any issues or feature requests in this GitHub repository for me to review.

--------------------------------------------------------------------------------