├── README-en.md ├── MOSMIX-Vorhersage.png ├── Wetterwarnung-JSON.png ├── forecast-chart-other.png ├── forecast-chart-belchertown.png ├── old ├── usr │ └── local │ │ └── bin │ │ ├── html2ent.ansi │ │ ├── wget-dwd │ │ ├── dwd-warnings │ │ ├── bbk-warnings │ │ └── dwd-cap-warnings └── README.md ├── Wettervorhersage-Warnungen-Fichtelberg.png ├── etc └── cron.hourly │ └── dwd ├── skins └── DWD │ └── geo │ └── README.md ├── install.py ├── bin └── user │ ├── weatherservicesutil.py │ ├── weatherservicesdb.py │ ├── wildfire.py │ └── weatherserviceshealth.py └── LICENSE /README-en.md: -------------------------------------------------------------------------------- 1 | README.md -------------------------------------------------------------------------------- /MOSMIX-Vorhersage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/roe-dl/weewx-DWD/HEAD/MOSMIX-Vorhersage.png -------------------------------------------------------------------------------- /Wetterwarnung-JSON.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/roe-dl/weewx-DWD/HEAD/Wetterwarnung-JSON.png -------------------------------------------------------------------------------- /forecast-chart-other.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/roe-dl/weewx-DWD/HEAD/forecast-chart-other.png -------------------------------------------------------------------------------- /forecast-chart-belchertown.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/roe-dl/weewx-DWD/HEAD/forecast-chart-belchertown.png -------------------------------------------------------------------------------- /old/usr/local/bin/html2ent.ansi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/roe-dl/weewx-DWD/HEAD/old/usr/local/bin/html2ent.ansi -------------------------------------------------------------------------------- /Wettervorhersage-Warnungen-Fichtelberg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/roe-dl/weewx-DWD/HEAD/Wettervorhersage-Warnungen-Fichtelberg.png -------------------------------------------------------------------------------- /old/README.md: -------------------------------------------------------------------------------- 1 | Files in this directory tree are outdated and should not be used. 2 | 3 | Die Dateien in diesem Verzeichnisbereich sind veraltet und sollten 4 | nicht mehr benutzt werden. 5 | -------------------------------------------------------------------------------- /etc/cron.hourly/dwd: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Daten beim Deutschen Wetterdienst herunterladen 3 | # Copyright (C) 2021, 2023 Johanna Roedenbeck 4 | # licensed under the terms of the General Public Licens (GPL) v3 5 | 6 | # Diese Datei ist unter /etc/cron.hourly zu speichern. Sie muss 7 | # ausfuehrbar sein. 
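# A minimal sketch of making this file executable after copying it there
# (path taken from the comment above):
#   chmod 755 /etc/cron.hourly/dwd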
8 | 9 | /usr/local/bin/wget-dwd 2>/dev/null 10 | /usr/local/bin/dwd-cap-warnings --weewx --resolution=city 2>/dev/null >/dev/null 11 | 12 | # Soll statt dwd-cap-warnings dwd-warnings verwendet werden, 13 | # hier das Kommentarzeichen entfernen und dafuer eines vor 14 | # die Zeile mit dwd-cap-warnings setzen 15 | #/usr/local/bin/dwd-warnings 2>/dev/null >/dev/null 16 | 17 | # Fuer Wettervorhersagen XXXXX durch den Stationscode 18 | # ersetzen. 19 | /usr/local/bin/dwd-mosmix --weewx --daily --hourly XXXXX 2>/dev/null >/dev/null 20 | 21 | exit 0 22 | -------------------------------------------------------------------------------- /skins/DWD/geo/README.md: -------------------------------------------------------------------------------- 1 | Dieses Verzeichnis enthält Dateien mit geopraphischen Koordinaten zur 2 | Verwendung mit den Radarkarten. 3 | 4 | Hinweis: Es muß der jeweils zugehörige Urheberrechtshinweis angebracht 5 | werden. 6 | 7 | This directory contains files of geographic coordinates. They can be used 8 | together with the radar maps. 9 | 10 | Don't forget to include the correct copyright notice. 11 | 12 | * `countrycoords.txt` 13 | 14 | nur Staatsgrenzen 15 | 16 | country borders only 17 | 18 | Copyright: "EuroGeographics" 19 | 20 | * `borders01.txt` 21 | 22 | Staatsgrenzen und Bundeslandgrenzen, geringere Auflösung 23 | 24 | country and state boundaries, 25 | 26 | Copyright: "OpenStreetMap, ODbL 1.0" 27 | 28 | * `borders0001.txt` 29 | 30 | Staatsgrenzen und Bundeslandgrenzen, höhere Auflösung 31 | 32 | country and state boundaries, 33 | 34 | Copyright: "OpenStreetMap, ODbL 1.0" 35 | 36 | * `erzgebirge-proj.txt` 37 | 38 | Wasserläufe, Talsperren und der Grenzabschnitt für das Westerzgebirge 39 | 40 | Copyright: "OpenStreetMap, ODbL 1.0" 41 | 42 | Diese Datei kann zusammen mit `map = Fichtelberg` bzw. bei Aufruf 43 | von der Kommandozeile `--image-size=Fichtelberg` benutzt werden. 44 | Sie gibt ein Beispiel, wie man bei kleineren Ausschnitten die 45 | Landschaft geeignet unterlegen kann. Im Gebirge sind die Täler 46 | ein wesentliches Landschafts- und Orientierungsmerkmal. Die 47 | Flüsse bzw. Bäche können den Verlauf der Täler auf einfache 48 | Weise ohne eine unübersichtliche Detailfülle markieren. 49 | -------------------------------------------------------------------------------- /old/usr/local/bin/wget-dwd: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Herunterladen von Dateien beim Deutschen Wetterdienst 3 | # Copyright (C) 2021 Johanna Roedenbeck 4 | # licensed under the terms of the General Public License (GPL) v3 5 | 6 | # vom Benutzer anzupassen 7 | 8 | # URLs der herunterzuladenden Dateien beim DWD 9 | # (muss ggf. an das eigene Bundesland angepasst werden) 10 | DWD_URL="https://opendata.dwd.de/weather/text_forecasts/html" 11 | DWD_BUNDESLAND="DWLG" 12 | DWD_MAP="https://www.dwd.de/DWD/wetter/wv_spez/hobbymet/wetterkarten/bwk_bodendruck_na_ana.png" 13 | DWD_MAP2="https://www.dwd.de/DWD/wetter/wv_spez/hobbymet/wetterkarten/bwk_bodendruck_weu_ana.png" 14 | DWD_WARN="https://www.dwd.de/DWD/warnungen/warnstatus/SchilderLZ.jpg" 15 | DWD_WARNJ="https://www.dwd.de/DWD/warnungen/warnapp/json/warnings.json" 16 | 17 | # Log-Datei 18 | LOG_FN="/var/log/wget-dwd.log" 19 | 20 | # Zielpfad zum Speichern der Dateien 21 | # (Das Verzeichnis muss vorher vom Benutzer angelegt werden.) 
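# A minimal sketch, assuming the default target path set below is kept:
#   mkdir -p /etc/weewx/skins/Belchertown/dwd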
22 | PTH="/etc/weewx/skins/Belchertown/dwd" 23 | 24 | # Ende des vom Benutzer anzupassenden Bereichs 25 | 26 | # Programm zum Herunterladen 27 | WGET="/usr/bin/wget" 28 | TOUCH="/usr/bin/touch" 29 | 30 | # Logdatei loeschen 31 | /bin/rm "$LOG_FN" 2>/dev/null 32 | 33 | # Herunterladen der Vorhersage-Dateien und Zeichensatz konvertieren 34 | for i in 50 51 52 53 54; do 35 | 36 | FN="VHDL${i}_${DWD_BUNDESLAND}_LATEST" 37 | $WGET -N -a "$LOG_FN" -O "$PTH/temp" "$DWD_URL/${FN}_html" 38 | if [ "$?" -eq 0 ]; then 39 | /usr/local/bin/html2ent.ansi <"$PTH/temp" >"$PTH/$FN.html" 40 | $TOUCH -r "$PTH/temp" "$PTH/$FN.html" 41 | rm "$PTH/temp" 42 | fi 43 | 44 | done 45 | 46 | # Herunterladen der uebrigen Dateien 47 | $WGET -N -a "$LOG_FN" -O "$PTH/${DWD_MAP##*/}" $DWD_MAP 48 | $WGET -N -a "$LOG_FN" -O "$PTH/${DWD_MAP2##*/}" $DWD_MAP2 49 | $WGET -N -a "$LOG_FN" -O "$PTH/${DWD_WARN##*/}" $DWD_WARN 50 | $WGET -N -a "$LOG_FN" -O "$PTH/${DWD_WARNJ##*/}" $DWD_WARNJ 51 | -------------------------------------------------------------------------------- /install.py: -------------------------------------------------------------------------------- 1 | # installer DWD 2 | # Copyright 2023 Johanna Roedenbeck 3 | # Distributed under the terms of the GNU Public License (GPLv3) 4 | 5 | # Caution! Not finished. 6 | 7 | """ 8 | This script is free software: you can redistribute it and/or modify 9 | it under the terms of the GNU General Public License as published by 10 | the Free Software Foundation, either version 3 of the License, or 11 | (at your option) any later version. 12 | 13 | This script is distributed in the hope that it will be useful, 14 | but WITHOUT ANY WARRANTY; without even the implied warranty of 15 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 16 | GNU General Public License for more details. 
17 | """ 18 | 19 | from weecfg.extension import ExtensionInstaller 20 | import os 21 | import os.path 22 | import stat 23 | import shutil 24 | 25 | def loader(): 26 | return DWDInstaller() 27 | 28 | class DWDInstaller(ExtensionInstaller): 29 | def __init__(self): 30 | super(DWDInstaller, self).__init__( 31 | version="0.x", 32 | name='weatherforecasts', 33 | description='Service to retrieve data from weather services', 34 | author="Johanna Roedenbeck", 35 | author_email="", 36 | data_services='user.weatherservices.DWDservice', 37 | config={ 38 | 'StdWXCalculate': { 39 | 'Calculations': { 40 | 'barometerDWD':'software, loop' 41 | } 42 | }, 43 | 'DeutscherWetterdienst': { 44 | }, 45 | 'WeatherServices': { 46 | 'path': '/etc/weewx/skins/Belchertown/dwd', 47 | 'current': { 48 | 'safe':'True' 49 | }, 50 | 'forecast': { 51 | 'icons':'replace_me', 52 | 'orientation':'h,v', 53 | '#show_obs_symbols':'True', 54 | '#show_obs_description':'True', 55 | '#show_placemark':'True' 56 | }, 57 | 'warning': { 58 | 'icons':'replace_me' 59 | }, 60 | 'Belchertown': { 61 | 'section':'Belchertown', 62 | 'warnings':'replace_me', 63 | 'forecast':'replace_me', 64 | '#include_advance_warings':'0', 65 | '#aqi_source':'replace_me', 66 | '#compass_lang':'replace_me' 67 | } 68 | } 69 | }, 70 | files=[('bin/user', [ 71 | 'bin/user/weatherservices.py', 72 | 'bin/user/weatherservicesutil.py', 73 | 'bin/user/weatherservicesdb.py', 74 | 'bin/user/weatherservicesradar.py', 75 | 'bin/user/weatherserviceshealth.py', 76 | 'bin/user/weathercodes.py', 77 | 'bin/user/wildfire.py', 78 | 'bin/user/capwarnings.py']),] 79 | ) 80 | 81 | def configure(self, engine): 82 | # path of the user directory 83 | print(engine.root_dict) 84 | user_root = engine.root_dict.get('USER_ROOT',engine.root_dict.get('USER_DIR')) 85 | if not user_root: 86 | print('user directory not found. 
Create links manually.') 87 | return False 88 | # path for system wide commands 89 | bin = '/usr/local/bin' 90 | # links to create and files to copy 91 | links = ['dwd-cap-warnings','bbk-warnings','msc-warnings'] 92 | # complete path of capwarnings.py 93 | capwarnings_fn = os.path.join(user_root,'capwarnings.py') 94 | # make capwarnings.py executable 95 | try: 96 | engine.logger.log("chmod u=rwx,g=rx,o=rx %s" % capwarnings_fn) 97 | except AttributeError: 98 | engine.printer.out("chmod u=rwx,g=rx,o=rx %s" % capwarnings_fn) 99 | if not engine.dry_run: 100 | os.chmod(capwarnings_fn,stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH) 101 | # create symbolic links 102 | for li in links: 103 | fn = os.path.join(bin,li) 104 | try: 105 | engine.logger.log("ln -s %s %s" % (capwarnings_fn,fn)) 106 | except AttributeError: 107 | engine.printer.out("ln -s %s %s" % (capwarnings_fn,fn)) 108 | if not engine.dry_run: 109 | try: 110 | os.symlink(capwarnings_fn,fn) 111 | except OSError as e: 112 | try: 113 | engine.logger.log("%s %s" % (e.__class__.__name__,e)) 114 | engine.logger.log("try setting the link by hand") 115 | except AttributeError: 116 | engine.printer.out("%s %s" % (e.__class__.__name__,e)) 117 | engine.printer.out("try setting the link by hand") 118 | # no change of the configration file 119 | return False 120 | -------------------------------------------------------------------------------- /old/usr/local/bin/dwd-warnings: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # Erzeugen von Warnmeldungen 3 | # Copyright (C) 2021 Johanna Roedenbeck 4 | # licensed under the terms of the General Public License (GPL) v3 5 | 6 | import json 7 | import time 8 | import configobj 9 | 10 | config = configobj.ConfigObj("/etc/weewx/weewx.conf") 11 | 12 | if 'DeutscherWetterdienst' in config and 'warning' in config['DeutscherWetterdienst']: 13 | # Bundeslaender und Landkreise, fuer die Warndaten 14 | # bereitgestellt werden sollen, aus weewx.conf lesen 15 | if 'states' in config['DeutscherWetterdienst']['warning']: 16 | states = config['DeutscherWetterdienst']['warning']['states'] 17 | if not isinstance(states,list): 18 | states=[states] 19 | else: 20 | states=[] 21 | counties=config['DeutscherWetterdienst']['warning']['counties'] 22 | ICON_PTH=config['DeutscherWetterdienst']['warning']['icons'] 23 | try: 24 | target_path=config['WeatherServices']['path'] 25 | except LookupError: 26 | target_path=config['DeutscherWetterdienst']['path'] 27 | else: 28 | # test only 29 | # vom Benutzer anzupassen 30 | states=['Sachsen','Thüringen'] 31 | counties={ 32 | 'Kreis Mittelsachsen - Tiefland':'DL', 33 | 'Stadt Döbeln':'DL', 34 | 'Stadt Leipzig':'L', 35 | 'Stadt Jena':'J', 36 | 'Stadt Dresden':'DD'} 37 | ICON_PTH="../dwd/warn_icons_50x50" 38 | target_path='/etc/weewx/skins/Belchertown-de/dwd' 39 | 40 | # Der DWD verwendet ganz offensichtlich nicht die nach ISO genormten 41 | # Abkuerzungen fuer Bundeslaender. 
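# (For example, the dict below uses 'NS' for Niedersachsen, where ISO 3166-2
# defines 'NI', and 'SA' for Sachsen-Anhalt, where ISO 3166-2 defines 'ST'.)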
42 | dwd_copy={ 43 | 'SN':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/sachsen/warnlage_sac_node.html', 44 | 'TH':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/thueringen/warnlage_thu_node.html', 45 | 'SA':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/sachen_anhalt/warnlage_saa_node.html', 46 | 'BB':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/berlin_brandenburg/warnlage_bb_node.html', 47 | 'MV':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/mecklenburg_vorpommern/warnlage_mv_node.html', 48 | 'NS':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/niedersachsen_bremen/warnlage_nds_node.html', 49 | 'HB':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/niedersachsen_bremen/warnlage_nds_node.html', 50 | 'HE':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/hessen/warnlage_hes_node.html', 51 | 'NRW':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/nordrhein_westfalen/warnlage_nrw_node.html', 52 | 'BY':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/bayern/warnlage_bay_node.html', 53 | 'SH':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/schleswig_holstein_hamburg/warnlage_shh_node.html', 54 | 'HH':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/schleswig_holstein_hamburg/warnlage_shh_node.html', 55 | 'RP':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/rheinland-pfalz_saarland/warnlage_rps_node.html', 56 | 'SL':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/rheinland-pfalz_saarland/warnlage_rps_node.html', 57 | 'BW':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/baden-wuerttemberg/warnlage_baw_node.html'} 58 | 59 | # Ende des vom Benutzer anzupassenden Bereichs 60 | 61 | dwd_level=( 62 | 'keine Warnung', # 0 no warning 63 | 'Vorinformation', # 1 preliminary info 64 | 'Wetterwarnung', # 2 minor 65 | 'markantes Wetter', # 3 moderate 66 | 'Unwetterwarnung', # 4 severe 67 | 'extremes Unwetter') # 5 extreme 68 | 69 | # Namensbestandteile der Warn-Icons 70 | dwd_warning_type=( 71 | 'gewitter', # 0 thunderstorm 72 | 'wind', # 1 wind/storm 73 | 'regen', # 2 rain 74 | 'schnee', # 3 snow 75 | 'nebel', # 4 fog 76 | 'frost', # 5 frost 77 | 'eis', # 6 ice 78 | 'tau', # 7 thawing 79 | 'hitze', # 8 heat 80 | 'uv') # 9 uv warning 81 | 82 | def dwd_warn_icon_file(type,level): 83 | if type==8 or type==9: 84 | return "warn_icons_%s.png" % dwd_warning_type[type] 85 | if level<2 or level>5: return None 86 | return "warn_icons_%s_%s.png" % (dwd_warning_type[type],level-1) 87 | 88 | def dwd_level_text(level): 89 | try: 90 | return dwd_level[level] 91 | except IndexError: 92 | if level==10: return 'Hitzewarnung' 93 | return None 94 | 95 | # read JSONP file and remove function call from it 96 | __x=None 97 | with open('%s/warnings.json' % target_path,encoding='utf-8') as file: 98 | __x=file.read().split('(',1) 99 | if __x[0]=="warnWetter.loadWarnings": 100 | __x=json.loads(__x[1][:-2]) 101 | #for __i in __x: 102 | # print(__i) 103 | 104 | if __x is not None: 105 | 106 | # initialize dict for all regions to create warnings for 107 | wwarn={counties[i]:[] for i in counties} 108 | #print("wwarn %s" % wwarn) 109 | 110 | if True: 111 | try: 112 | ts=__x['time'] 113 | except IndexError: 114 | ts=None 115 | try: 116 | wrn=__x['warnings'] 117 | except IndexError: 118 | wrn=None 119 | try: 120 | vrb=__x['vorabInformation'] 121 | except IndexError: 122 | vrb=None 123 | try: 124 | 
cpy=__x['copyright'] 125 | except IndexError: 126 | cpy=None 127 | 128 | region={} 129 | for __i in wrn: 130 | for __j in wrn[__i]: 131 | if __j['state'] in states: 132 | if __j['regionName'] not in region: 133 | region[__j['regionName']]=__j 134 | if __j['regionName'] in counties: 135 | if counties[__j['regionName']] not in wwarn: 136 | wwarn[counties[__j['regionName']]]=[] 137 | wwarn[counties[__j['regionName']]].append(__j) 138 | 139 | #for __i in region: 140 | # print("%s" % (__i,)) 141 | 142 | #for __i in wwarn: 143 | # print(__i) 144 | 145 | for __ww in wwarn: 146 | s="" 147 | r=None 148 | for idx,__i in enumerate(wwarn[__ww]): 149 | if r is None or r!=__i['regionName']: 150 | r=__i['regionName'] 151 | s+='

<p><strong>%s</strong></p>\n' % r
152 | 
153 | s+='<table><tr>\n'
154 | __icon_fn=dwd_warn_icon_file(__i['type'],__i['level'])
155 | if __icon_fn is not None:
156 | s+='<td><img src="%s/%s" /></td>\n' % (ICON_PTH,__icon_fn)
157 | wwarn[__ww][idx]["icon"] = "%s/%s" % (ICON_PTH,__icon_fn)
158 | __size=110 if int(__i['level'])>2 else 100
159 | s+='<td><p style="font-size:%s%%">%s</p></td></tr></table>\n' % (__size,__i['headline'])
160 | s='%s<p>gültig vom %s bis %s</p>\n' % (s,time.strftime("%d.%m. %H:%M",time.localtime(__i['start']/1000)),time.strftime("%d.%m. %H:%M",time.localtime(__i['end']/1000)))
161 | 
162 | 
163 | if 'description' in __i and __i['description']:
164 | s+="<p>%s</p>\n" % __i['description']
165 | if 'instruction' in __i and __i['instruction']:
166 | s+="<p>%s</p>\n" % __i['instruction']
167 | 
168 | wwarn[__ww][idx]["level_text"] = dwd_level_text(__i['level'])
169 | s+='<p>%s – %s  –  %s – %s</p>' % (__i['type'],__i['event'],__i['level'],dwd_level_text(__i['level']))
170 | 
171 | if s:
172 | s+='<p><a href="%s">Quelle: DWD</a></p>\n' % dwd_copy[wwarn[__ww][0]['stateShort']]
173 | else:
174 | s='<p>zur Zeit keine Warnungen</p>
' 175 | 176 | #print("--> %s" % __ww) 177 | #print(s) 178 | 179 | with open("%s/warn-%s.inc" % (target_path,__ww),"w") as file: 180 | file.write(s) 181 | 182 | with open("%s/warn-%s.json" % (target_path,__ww),"w") as file: 183 | json.dump(wwarn[__ww],file,indent=4) 184 | 185 | -------------------------------------------------------------------------------- /bin/user/weatherservicesutil.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # Copyright (C) 2022, 2023, 2024 Johanna Roedenbeck 3 | 4 | """ 5 | 6 | This program is free software: you can redistribute it and/or modify 7 | it under the terms of the GNU General Public License as published by 8 | the Free Software Foundation, either version 3 of the License, or 9 | (at your option) any later version. 10 | 11 | This program is distributed in the hope that it will be useful, 12 | but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | GNU General Public License for more details. 15 | 16 | You should have received a copy of the GNU General Public License 17 | along with this program. If not, see . 18 | 19 | """ 20 | 21 | VERSION = "0.x" 22 | 23 | import threading 24 | import configobj 25 | import requests 26 | from requests.auth import AuthBase 27 | import csv 28 | import io 29 | import zipfile 30 | import time 31 | import datetime 32 | import json 33 | import random 34 | import traceback 35 | 36 | if __name__ == '__main__': 37 | import sys 38 | sys.path.append('/usr/share/weewx') 39 | 40 | import __main__ 41 | if __name__ == '__main__' or __main__.__file__.endswith('weatherservices.py'): 42 | 43 | def logdbg(x): 44 | print('DEBUG',x) 45 | def loginf(x): 46 | print('INFO',x) 47 | def logerr(x): 48 | print('ERROR',x) 49 | 50 | else: 51 | 52 | try: 53 | # Test for new-style weewx logging by trying to import weeutil.logger 54 | import weeutil.logger 55 | import logging 56 | log = logging.getLogger("user.DWD.base") 57 | 58 | def logdbg(msg): 59 | log.debug(msg) 60 | 61 | def loginf(msg): 62 | log.info(msg) 63 | 64 | def logerr(msg): 65 | log.error(msg) 66 | 67 | except ImportError: 68 | # Old-style weewx logging 69 | import syslog 70 | 71 | def logmsg(level, msg): 72 | syslog.syslog(level, 'user.DWD.base: %s' % msg) 73 | 74 | def logdbg(msg): 75 | logmsg(syslog.LOG_DEBUG, msg) 76 | 77 | def loginf(msg): 78 | logmsg(syslog.LOG_INFO, msg) 79 | 80 | def logerr(msg): 81 | logmsg(syslog.LOG_ERR, msg) 82 | 83 | import weewx 84 | from weewx.engine import StdService 85 | import weeutil.weeutil 86 | import weewx.units 87 | 88 | # week day names 89 | WEEKDAY_SHORT = { 90 | 'de':['Mo','Di','Mi','Do','Fr','Sa','So'], 91 | 'en':['Mon','Tue','Wed','Thu','Fri','Sat','Sun'], 92 | 'fr':['lu','ma','me','je','ve','sa','di'], 93 | 'it':['lun.','mar.','mer.','gio.','ven.','sab.','dom.'], 94 | 'cs':['Po','Út','St','Čt','Pá','So','Ne'], 95 | 'cz':['Po','Út','St','Čt','Pá','So','Ne'], 96 | 'pl':['pon.','wt.','śr.','czw.','pt.','sob.','niedz.'], 97 | 'nl':['Ma','Di','Wo','Do','Vr','Za','Zo'], 98 | } 99 | 100 | WEEKDAY_LONG = { 101 | 'de':['Montag','Dienstag','Mittwoch','Donnerstag','Freitag','Sonnabend','Sonntag'], 102 | 'en':['Monday','Tuesday','Wednesday','Thursday','Friday','Saturday','Sunday'], 103 | 'fr':['lundi','mardi','mercredi','jeudi','vendredi','samedi','dimanche'], 104 | 'it':['lunedì','martedì','mercoledì','giovedì','venerdì','sabato','domenica'], 105 | 
'cs':['pondělí','úterý','středa','čtvrtek','pátek','sobota','neděle'], 106 | 'cz':['pondělí','úterý','středa','čtvrtek','pátek','sobota','neděle'], 107 | 'pl':['poniedziałek','wtorek','środa','czwartek','piątek','sobota','niedziela'], 108 | 'nl':['maansdag','dinsdag','woensdag','donderdag','vrijdag','zaterdag','zondag'], 109 | } 110 | 111 | HTTP_MONTH = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'] 112 | 113 | # Initialize default unit for the unit groups defined in this extension 114 | for _,ii in weewx.units.std_groups.items(): 115 | ii.setdefault('group_wmo_ww','byte') 116 | ii.setdefault('group_wmo_wawa','byte') 117 | ii.setdefault('group_wmo_W','byte') 118 | ii.setdefault('group_wmo_Wa','byte') 119 | ii.setdefault('group_coordinate','degree_compass') 120 | 121 | def http_timestamp_to_ts(s): 122 | """ convert HTTP time to Unix epoch timestamp 123 | 124 | example: Wed, 17 Jul 2024 12:13:14 GMT 125 | 126 | Args: 127 | s(str): timestamp string as found in HTTP headers 128 | 129 | Returns: 130 | int: timestamp or None in case of errors 131 | """ 132 | if not s: return None 133 | try: 134 | # split day of week 135 | x = s.split(',') 136 | # split the parts of date and time 137 | dt = x[1].strip().split(' ') 138 | # get the month number from abbreviated month name 139 | mon = HTTP_MONTH.index(dt[1])+1 140 | # hours since 0:0:0 UTC 141 | hr = dt[3].split(':') 142 | # return result 143 | return weeutil.weeutil.utc_to_ts( 144 | weeutil.weeutil.to_int(dt[2]), 145 | mon, 146 | weeutil.weeutil.to_int(dt[0]), 147 | weeutil.weeutil.to_int(hr[0])+weeutil.weeutil.to_int(hr[1])/60.0+weeutil.weeutil.to_int(hr[2])/3600.0 148 | ) 149 | except (ValueError,LookupError): 150 | return None 151 | 152 | def ts_to_http_timestamp(ts): 153 | """ convert Unix epoch timestamp to text as used in HTTP headers 154 | 155 | Args: 156 | ts(int): Unix epoch timestamp 157 | 158 | Returns: 159 | str: formatted timestamp 160 | """ 161 | x = time.gmtime(ts) 162 | return '%s, %02d %s %04d %02d:%02d:%02d GMT' % ( 163 | WEEKDAY_SHORT['en'][x.tm_wday], 164 | x.tm_mday,HTTP_MONTH[x.tm_mon-1],x.tm_year, 165 | x.tm_hour,x.tm_min,x.tm_sec) 166 | 167 | class KNMIAuth(AuthBase): 168 | def __init__(self, api_key): 169 | super(KNMIAuth,self).__init__() 170 | self.api_key = str(api_key) 171 | 172 | def __call__(self, r): 173 | r.headers['Authorization'] = self.api_key 174 | return r 175 | 176 | def wget_extended(url, log_success=False, log_failure=True, session=requests, if_modified_since=None, auth=None): 177 | """ download 178 | 179 | Args: 180 | url(str): URL to retrieve 181 | log_success(boolean): log in case of success or not 182 | log_failure(boolean): log in case of failure or not 183 | session(Session): http session 184 | if_modified_since(int): download only if newer than this timestamp 185 | 186 | Returns: 187 | tuple: Etag, Last-Modified, data received, status code 188 | """ 189 | elapsed = time.time() 190 | headers = {'User-Agent':'weewx-DWD'} 191 | if if_modified_since is not None: 192 | # add a If-Modified-Since header 193 | headers['If-Modified-Since'] = ts_to_http_timestamp(if_modified_since) 194 | try: 195 | reply = session.get(url, headers=headers, auth=auth, timeout=5) 196 | except requests.exceptions.Timeout: 197 | if log_failure: 198 | logerr('timeout downloading %s' % url) 199 | return (None,None,None,None) 200 | elapsed = time.time()-elapsed 201 | 202 | reply_url = reply.url.split('?')[0] 203 | 204 | if reply.status_code==200: 205 | # success 206 | if log_success: 207 | 
loginf('successfully downloaded %s in %.2f seconds' % (reply_url,elapsed)) 208 | return ( 209 | reply.headers.get('Etag'), 210 | http_timestamp_to_ts(reply.headers.get('Last-Modified')), 211 | reply.content, 212 | reply.status_code 213 | ) 214 | elif reply.status_code==304 and if_modified_since is not None: 215 | # not changed 216 | if log_success or log_failure: 217 | logdbg('skipped, %s was not changed since %s' % (reply_url,headers['If-Modified-Since'])) 218 | return ( 219 | reply.headers.get('Etag'), 220 | http_timestamp_to_ts(reply.headers.get('Last-Modified')), 221 | None, 222 | reply.status_code 223 | ) 224 | else: 225 | # failure 226 | if log_failure: 227 | logerr('error downloading %s: %s %s' % (reply_url,reply.status_code,reply.reason)) 228 | return (None,None,None,reply.status_code) 229 | 230 | def wget(url, log_success=False, log_failure=True, session=requests): 231 | """ download 232 | 233 | Args: 234 | url(str): URL to retrieve 235 | log_success(boolean): log in case of success or not 236 | log_failure(boolean): log in case of failure or not 237 | session(Session): http session 238 | 239 | Returns: 240 | bytes: data received or None in case of failure 241 | """ 242 | return wget_extended(url, log_success, log_failure, session)[2] 243 | 244 | class BaseThread(threading.Thread): 245 | 246 | def __init__(self, name, log_success=False, log_failure=True): 247 | super(BaseThread,self).__init__(name=name) 248 | self.log_success = log_success 249 | self.log_failure = log_failure 250 | self.log_sleeping = False 251 | self.evt = threading.Event() 252 | self.running = True 253 | self.query_interval = 300 254 | self.last_run_duration = 0 255 | 256 | def shutDown(self): 257 | """ request thread shutdown """ 258 | self.running = False 259 | loginf("thread '%s': shutdown requested" % self.name) 260 | self.evt.set() 261 | 262 | def get_data(self, ts): 263 | raise NotImplementedError 264 | 265 | def set_current_location(self, latitude, longitude): 266 | """ remember current location for mobile stations """ 267 | pass 268 | 269 | def getRecord(self): 270 | """ download and process data """ 271 | raise NotImplementedError 272 | 273 | def waiting_time(self): 274 | """ time to wait until the next data fetch """ 275 | return self.query_interval-time.time()%self.query_interval 276 | 277 | def random_time(self, waiting): 278 | """ do a little bit of load balancing 279 | 280 | let at least 10 seconds to ultimo to download and process 281 | data 282 | """ 283 | if waiting<=10: return 0.1-waiting 284 | w = waiting-10 285 | return -random.random()*(60 if w>60 else w)-10 286 | 287 | def run(self): 288 | """ thread loop """ 289 | loginf("thread '%s' starting" % self.name) 290 | try: 291 | while self.running: 292 | # time to to the next interval 293 | waiting = self.waiting_time() 294 | # do a little bit of load balancing 295 | waiting_r = self.random_time(waiting) 296 | waiting += waiting_r 297 | # wait 298 | if self.log_sleeping: 299 | loginf ("thread '%s': sleeping for %s seconds" % (self.name,waiting)) 300 | if waiting>0: 301 | if self.evt.wait(waiting): break 302 | # download and process data 303 | start_ts = time.thread_time_ns() 304 | self.getRecord() 305 | self.last_run_duration = (time.thread_time_ns()-start_ts)*1e-9 306 | if waiting_r<=0: 307 | if self.evt.wait(1-waiting_r): break 308 | except Exception as e: 309 | logerr("thread '%s': main loop %s - %s" % (self.name,e.__class__.__name__,e)) 310 | for ii in traceback.format_tb(e.__traceback__): 311 | for jj in ii.splitlines(): 312 | 
logerr("thread '%s': *** %s" % (self.name,jj.replace('\n',' ').strip())) 313 | finally: 314 | loginf("thread '%s' stopped" % self.name) 315 | 316 | def get_parameters(self, section_dict, replace_dict=dict()): 317 | parameters = dict() 318 | for i,j in section_dict.get('parameters',dict()).items(): 319 | if isinstance(j,list): 320 | k = ','.join([str(jj).replace(',','_') for jj in j]) 321 | else: 322 | k = j 323 | for to_replace, replace_by in replace_dict.items(): 324 | k = k.replace(to_replace, replace_by) 325 | k = k.replace('%','%25').replace("'",'%27').replace('/','%2F').replace(' ','%20').replace('<','%3C').replace('=','%3D').replace('>','%3E') 326 | parameters[i] = k 327 | return parameters 328 | 329 | 330 | if __name__ == '__main__': 331 | ts = http_timestamp_to_ts('Mon, 17 Jul 2024 12:13:14 GMT') 332 | print(ts) 333 | print(time.strftime('%Y-%m-%d %H:%M:%S %z',time.gmtime(ts))) 334 | url = 'https://opendata.dwd.de/weather/text_forecasts/html/VHDL50_DWLG_LATEST_html' 335 | reply = wget(url,True,True,if_modified_since=1721229741-10) 336 | print(reply[0],reply[1],time.strftime('%Y-%m-%d %H:%M:%S',time.gmtime(reply[1]))) 337 | print(reply[2].decode('iso8859-1')) 338 | -------------------------------------------------------------------------------- /bin/user/weatherservicesdb.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # Copyright (C) 2023, 2024 Johanna Roedenbeck 3 | 4 | """ 5 | 6 | This program is free software: you can redistribute it and/or modify 7 | it under the terms of the GNU General Public License as published by 8 | the Free Software Foundation, either version 3 of the License, or 9 | (at your option) any later version. 10 | 11 | This program is distributed in the hope that it will be useful, 12 | but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | GNU General Public License for more details. 15 | 16 | You should have received a copy of the GNU General Public License 17 | along with this program. If not, see . 18 | 19 | """ 20 | 21 | """ 22 | In order to display the readings provided by the weather services 23 | in diagrams or calculate aggregated values out of them, data have to 24 | saved into a database that WeeWX can process. For doing so, there 25 | are two things to consider: 26 | 27 | - Those data are subject to subsequent quality checks. So data 28 | already provided can change afterwards. 29 | 30 | - Additionally readings are provided with certain delay. 31 | 32 | - WeeWX requires all data to be of the same timestamp. 33 | 34 | For these reasons the readings are not saved by the methods of 35 | WeeWX but by the thread provided here. 36 | 37 | Unlike WeeWX this thread extends the database schema automatically 38 | if new observation types are seen. Please note, that WeeWX 39 | does not recognize them until restarted. 40 | 41 | Let this thread create the databases before referencing them 42 | in section `[DataBindings]` in weewx.conf. 
43 | """ 44 | 45 | VERSION = "0.x" 46 | 47 | import threading 48 | import configobj 49 | import os.path 50 | import sqlite3 51 | import time 52 | 53 | if __name__ == '__main__': 54 | import sys 55 | sys.path.append('/usr/share/weewx') 56 | 57 | import __main__ 58 | if __name__ == '__main__' or __main__.__file__.endswith('weatherservices.py'): 59 | 60 | def logdbg(x): 61 | print('DEBUG',x) 62 | def loginf(x): 63 | print('INFO',x) 64 | def logerr(x): 65 | print('ERROR',x) 66 | 67 | else: 68 | 69 | import weeutil.logger 70 | import logging 71 | log = logging.getLogger("user.DWD.db") 72 | 73 | def logdbg(msg): 74 | log.debug(msg) 75 | 76 | def loginf(msg): 77 | log.info(msg) 78 | 79 | def logerr(msg): 80 | log.error(msg) 81 | 82 | import weewx 83 | import weeutil.weeutil 84 | import weewx.units 85 | 86 | # deal with differences between python 2 and python 3 87 | try: 88 | # Python 3 89 | import queue 90 | except ImportError: 91 | # Python 2 92 | # noinspection PyUnresolvedReferences 93 | import Queue as queue 94 | 95 | def sqlstr(x): 96 | if x is None: return 'NULL' 97 | if isinstance(x,str): return "'%s'" % x.replace("'","_") 98 | return str(x) 99 | 100 | class DatabaseThread(threading.Thread): 101 | 102 | def __init__(self, name, db_queue, db_pth, log_success, log_failure): 103 | """ create thread to save readings to a SQLITE database 104 | 105 | Args: 106 | name (str): thread name for logging 107 | db_queue (queue.Queue): queue to receive data to save 108 | db_pth (str): target directory 109 | log_success (boolean): log successful operation 110 | log_failure (boolean): log unsuccessful operation 111 | 112 | Returns: 113 | nothing 114 | """ 115 | super(DatabaseThread,self).__init__(name=name) 116 | self.db_queue = db_queue 117 | self.db_pth = db_pth 118 | self.log_success = log_success 119 | self.log_failure = log_failure 120 | self.evt = threading.Event() 121 | self.running = True 122 | self.databases = dict() 123 | self.filenames = dict() 124 | logdbg("thread '%s': database path: %s" % (self.name,self.db_pth)) 125 | 126 | def shutDown(self): 127 | """ request thread shutdown """ 128 | self.running = False 129 | loginf("thread '%s': shutdown requested" % self.name) 130 | self.evt.set() 131 | 132 | def run(self): 133 | """ thread loop """ 134 | loginf("thread '%s' starting" % self.name) 135 | try: 136 | while self.running: 137 | try: 138 | reply = self.db_queue.get(timeout=1.5) 139 | except queue.Empty: 140 | continue 141 | self.process_data(reply[0],reply[1],reply[2]) 142 | #self.evt.wait(waiting) 143 | except Exception as e: 144 | logerr("thread '%s': main loop %s - %s" % (self.name,e.__class__.__name__,e)) 145 | finally: 146 | self.close_db() 147 | loginf("thread '%s' stopped" % self.name) 148 | 149 | def db_name(self, datasource, data): 150 | """ get database name out of provider and interval """ 151 | if datasource.lower() in {'poi','cdc','zamg','openmeteo','met','radolanhg','radolanwn','radolanrv'}: 152 | # non-varying interval 153 | try: 154 | interval = weewx.units.convert(data[0].get('interval'),'minute')[0] 155 | except (TypeError,ValueError,LookupError,ArithmeticError) as e: 156 | if self.log_failure: 157 | logerr("thread '%s': error converting interval %s %s" % (self.name,e.__class__.__name__)) 158 | return None 159 | file = 'weatherservices-readings-%s-%s' % (self.name, interval) 160 | else: 161 | # varying interval 162 | file = 'weatherservices-readings-%s' % datasource 163 | return file 164 | 165 | def open_create_db(self, dbname): 166 | """ open or create the database """ 167 
| if dbname not in self.databases: 168 | file = os.path.join(self.db_pth, '%s.sdb' % dbname) 169 | logdbg("thread '%s': database file %s" % (self.name,file)) 170 | try: 171 | self.databases[dbname] = sqlite3.connect(file) 172 | self.filenames[dbname] = file 173 | cur = self.databases[dbname].cursor() 174 | cur.execute('CREATE TABLE IF NOT EXISTS archive(`dateTime` INTEGER PRIMARY KEY NOT NULL, `usUnits` INTEGER NOT NULL, `interval` INTEGER)') 175 | except sqlite3.Error as e: 176 | if self.log_failure: 177 | logerr("thread '%s': could not open or create database %s %s - %s" % (self.name,file,e.__class__.__name__,e)) 178 | return self.databases.get(dbname) 179 | 180 | 181 | def close_db(self): 182 | """ close open databases """ 183 | for dbname in self.databases: 184 | try: 185 | self.databases[dbname].close() 186 | except sqlite3.Error as e: 187 | if self.log_failure: 188 | logerr("thread '%s': error closing database '%s' %s %s" % (self.name,self.filenames.get(dbname,'N/A'),e.__class__.__name__,e)) 189 | 190 | 191 | def check_and_add_columns(self, con, data, logtext): 192 | """ check if required columns exist """ 193 | required_columns = set() 194 | for el in data: 195 | required_columns.update([key for key in el if key not in {'dateTime','interval','usUnits'}]) 196 | logdbg("check_and_add_columns(): required_columns = %s" % required_columns) 197 | try: 198 | cur = con.cursor() 199 | res = cur.execute('SELECT * from archive') 200 | #reply = res.fetchone() 201 | present_columns = [key[0] for key in res.description] 202 | new_columns = set(required_columns).difference(present_columns) 203 | if not new_columns: 204 | # empty set --> no column to add, all columns present already 205 | logdbg("check_and_add_columns(): columns already present") 206 | return True 207 | for column in new_columns: 208 | val = data[-1][column] 209 | if isinstance(val,int): 210 | obstype = 'INT' 211 | elif isinstance(val,float): 212 | obstype = 'REAL' 213 | else: 214 | obstype = 'VARCHAR(255)' 215 | cur.execute('ALTER TABLE archive ADD COLUMN %s %s' % (column,obstype)) 216 | con.commit() 217 | if self.log_success: 218 | loginf("thread '%s', %s: successfully added columns %s to database" % (self.name,logtext,new_columns)) 219 | return True 220 | except sqlite3.Error as e: 221 | if self.log_failure: 222 | logerr("thread '%s', %s: error adding columns to database %s %s" % (self.name,logtext,e.__class__.__name__,e)) 223 | return False 224 | 225 | def update_data(self, con, data, logtext): 226 | """ insert or update 227 | 228 | Args: 229 | con(sqlite3.Connection): database connection 230 | data(list of dict): data to save 231 | 232 | Returns: 233 | nothing 234 | """ 235 | try: 236 | inserted = 0 237 | updated = 0 238 | cur = con.cursor() 239 | for el in data: 240 | if 'dateTime' in el: 241 | res = cur.execute('SELECT count(*) FROM archive WHERE `dateTime`=?',tuple((el['dateTime'],))) 242 | reply = res.fetchone() 243 | if reply and reply[0]: 244 | # There is a row for that timestamp in the database. 245 | colvals = ','.join(['`%s`=%s' % (key,sqlstr(val)) for key,val in el.items() if key not in {'dateTime','usUnits','interval',None}]) 246 | if not colvals: continue 247 | sql = 'UPDATE archive SET %s WHERE `dateTime`=%s' % (colvals,el['dateTime']) 248 | else: 249 | # There is no row for that timestamp in the database. 
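# A hypothetical example of the statement built below (columns and values
# depend on what was queued), e.g.:
#   INSERT INTO archive (dateTime,usUnits,interval,outTemp) VALUES (1721224800,16,10,28.5)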
250 | cols = ','.join([key for key in el if key]) 251 | vals = ','.join([sqlstr(el[key]) for key in el if key]) 252 | sql = 'INSERT INTO archive (%s) VALUES (%s)' % (cols,vals) 253 | if __name__ == '__main__': 254 | logdbg(sql) 255 | try: 256 | cur.execute(sql) 257 | if sql.startswith('INSERT'): 258 | inserted += 1 259 | else: 260 | updated += 1 261 | except sqlite3.Error as e: 262 | if self.log_failure: 263 | logerr("thread '%s', %s: error executing %s %s - %s" % (self.name,logtext,sql,e.__class__.__name__,e)) 264 | con.commit() 265 | if self.log_success: 266 | loginf("thread '%s', %s: Added %s record%s and updated %s record%s" % (self.name,logtext,inserted,'' if inserted==1 else 's',updated,'' if updated==1 else 's')) 267 | except sqlite3.Error as e: 268 | if self.log_failure: 269 | logerr("thread '%s', %s: error updating data %s - %s" % (self.name,logtext,e.__class__.__name__,e)) 270 | 271 | def process_data(self, datasource, prefix, data): 272 | """ process data 273 | 274 | Args: 275 | datasource(str): product name like CDC, POI, Radolan etc. 276 | prefix(str): observation type prefix 277 | data(list of dict of ValueTuple): data to convert 278 | 279 | Returns: 280 | nothing 281 | """ 282 | dbname = self.db_name(datasource, data) 283 | if not dbname: return 284 | con = self.open_create_db(dbname) 285 | x = self.convert(prefix, data) 286 | if prefix: 287 | logtxt = "prefix '%s'" % prefix 288 | elif datasource.startswith('Radolan'): 289 | logtxt = "product '%s'" % datasource[7:] 290 | else: 291 | logtxt = datasource 292 | if con and self.check_and_add_columns(con, x, logtxt): 293 | self.update_data(con, x, logtxt) 294 | 295 | def convert(self, prefix, data): 296 | """ convert data to the appropriate units and add prefix to the keys 297 | 298 | Args: 299 | prefix(str): observation type prefix 300 | data(list of dict of ValueTuple): data to convert 301 | 302 | Returns: 303 | list of dict: converted data 304 | """ 305 | new_data = [] 306 | for el in data: 307 | x = {'usUnits':weewx.METRIC} 308 | for key, val in el.items(): 309 | try: 310 | if key!='usUnits': 311 | new_val = weewx.units.convertStd(val,weewx.METRIC)[0] 312 | if key in {'dateTime','interval'}: 313 | new_key = key 314 | new_val = weeutil.weeutil.to_int(new_val) 315 | else: 316 | if prefix: 317 | new_key = prefix+key[0].upper()+key[1:] 318 | else: 319 | new_key = key 320 | if val[1] and val[2]: 321 | new_val = weeutil.weeutil.to_float(new_val) 322 | x[new_key] = new_val 323 | except (AttributeError,TypeError,ValueError,LookupError) as e: 324 | if self.log_failure: 325 | logerr("thread '%s': error converting %s %s %s - %s" % (self.name,prefix,key,e.__class__.__name__,e)) 326 | new_data.append(x) 327 | logdbg('convert(): %s' % new_data) 328 | return new_data 329 | 330 | 331 | def databasecreatethread(name, config_dict): 332 | """ create database thread 333 | 334 | Args: 335 | name (str): thread name 336 | config_dict (configobj.ConfigObj): configuration dict 337 | 338 | Returns: 339 | queue.Queue: queue reference to use with databaseput() 340 | threading.Thread: thread reference 341 | """ 342 | weewx_path = config_dict.get('WEEWX_ROOT') 343 | sqlite_path = config_dict.get('DatabaseTypes',configobj.ConfigObj()).get('SQLite',configobj.ConfigObj()).get('SQLITE_ROOT','.') 344 | if weewx_path: 345 | sqlite_path = os.path.join(weewx_path,sqlite_path) 346 | site_dict = weeutil.config.accumulateLeaves(config_dict.get('WeatherServices',configobj.ConfigObj()).get('current',configobj.ConfigObj())) 347 | log_success = 
weeutil.weeutil.to_bool(site_dict.get('log_success',True)) 348 | log_failure = weeutil.weeutil.to_bool(site_dict.get('log_failure',True)) 349 | save = weeutil.weeutil.to_bool(site_dict.get('save',True)) 350 | if save: 351 | q = queue.Queue(10) 352 | db = DatabaseThread(name,q,sqlite_path,log_success,log_failure) 353 | db.start() 354 | else: 355 | q = None 356 | db = None 357 | return q, db 358 | 359 | 360 | def databaseput(q, datasource, prefix, data): 361 | """ queue new data for saving 362 | 363 | Args: 364 | q (queue.Queue): queue to put data in 365 | prefix (str): column name prefix 366 | data (list of dict): data to save 367 | 368 | Returns: 369 | True in case of success, False otherwise 370 | """ 371 | if q: 372 | try: 373 | # data has to be a list of dicts 374 | data[0].get('dateTime') 375 | # append the new item to the queue 376 | q.put((datasource, prefix, data)) 377 | return True 378 | except queue.Full: 379 | # should not happen as long as the thread is alive 380 | pass 381 | except (AttributeError,TypeError,LookupError): 382 | # one or more parameters are not appropriate 383 | pass 384 | return False 385 | 386 | 387 | if __name__ == '__main__': 388 | 389 | q, db = databasecreatethread('DWD-dbtest',configobj.ConfigObj()) 390 | 391 | try: 392 | x = [] 393 | while True: 394 | x.append({ 395 | 'dateTime':(time.time(),'unix_epoch','group_time'), 396 | 'interval':(10,'minute','group_interval'), 397 | 'outTemp':(28.5,'degree_C','group_temperature'), 398 | 'outHumidity':(20.1,'degree_C','group_temperature'), 399 | }) 400 | if len(x)>3: 401 | del(x[0]) 402 | databaseput(q,'CDC','xx',x) 403 | print('---') 404 | time.sleep(5) 405 | except Exception as e: 406 | print('**MAIN**',e) 407 | except KeyboardInterrupt: 408 | print() 409 | print('**MAIN** CTRL-C pressed') 410 | 411 | db.shutDown() 412 | time.sleep(5) 413 | -------------------------------------------------------------------------------- /old/usr/local/bin/bbk-warnings: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # warnings of public dangers 3 | # Copyright (C) 2022 Johanna Roedenbeck 4 | # licensed under the terms of the General Public License (GPL) v3 5 | 6 | from __future__ import absolute_import 7 | from __future__ import print_function 8 | from __future__ import with_statement 9 | 10 | """ 11 | This script is free software: you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation, either version 3 of the License, or 14 | (at your option) any later version. 15 | 16 | This script is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 
20 | """ 21 | 22 | 23 | # Source: https://nina.api.bund.dev 24 | # Protocol description: http://docs.oasis-open.org/emergency/cap/v1.2/CAP-v1.2-os.pdf 25 | 26 | DEFAULT_BBK_URL = "https://warnung.bund.de/api31" 27 | 28 | import time 29 | import datetime 30 | import json 31 | import requests 32 | import configobj 33 | 34 | if __name__ == "__main__": 35 | import optparse 36 | import sys 37 | def loginf(x): 38 | print(x, file=sys.stderr) 39 | def logerr(x): 40 | print(x, file=sys.stderr) 41 | 42 | """ 43 | # not used here 44 | # from dwd-cap-warnings 45 | AGS_STATES = { 46 | '01':('SH','Schleswig-Holstein'), 47 | '02':('HH','Freie und Hansestadt Hamburg'), 48 | '03':('NS','Niedersachsen'), 49 | '04':('HB','Freie Hansestadt Bremen'), 50 | '05':('NRW','Nordrhein-Westfalen'), 51 | '06':('HE','Hessen'), 52 | '07':('RP','Rheinland-Pfalz'), 53 | '08':('BW','Baden-Württemberg'), 54 | '09':('BY','Freistaat Bayern'), 55 | '10':('SL','Saarland'), 56 | '11':('BB','Berlin'), 57 | '12':('BB','Brandenburg'), 58 | '13':('MV','Mecklenburg-Vorpommern'), 59 | '14':('SN','Freistaat Sachsen'), 60 | '15':('SA','Sachsen-Anhalt'), 61 | '16':('TH','Thüringen')} 62 | """ 63 | 64 | WARNING_SOURCES = ( 65 | 'katwarn', 66 | 'biwapp', 67 | 'mowas', 68 | 'dwd', 69 | 'lhp', 70 | 'police') 71 | 72 | CAP_SEVERITY = { 73 | 'Minor':2, 74 | 'Moderate':3, 75 | 'Severe':4, 76 | 'Extreme':5} 77 | 78 | bbk_level = ( 79 | 'keine Warnung', # 0 no warning 80 | 'Vorinformation', # 1 preliminary info 81 | 'leicht', # 2 minor 82 | 'mittel', # 3 moderate 83 | 'schwer', # 4 severe 84 | 'extrem') # 5 extreme 85 | dwd_level=( 86 | 'keine Warnung', # 0 no warning 87 | 'Vorinformation', # 1 preliminary info 88 | 'Wetterwarnung', # 2 minor 89 | 'markantes Wetter', # 3 moderate 90 | 'Unwetterwarnung', # 4 severe 91 | 'extremes Unwetter') # 5 extreme 92 | 93 | def bbk_level_text(level, isdwd=False): 94 | try: 95 | if isdwd: return dwd_level[level] 96 | return bbk_level[level] 97 | except IndexError: 98 | if level==10: return 'Hitzewarnung' 99 | return None 100 | 101 | def get_eventtype(evt, ii): 102 | return "*" 103 | 104 | CAP_CATEGORY = { 105 | 'Geo':{'de':'geophysikalisch','en':'geophysical'}, 106 | 'Met':{'de':'meteorologisch','en':'meteorological'}, 107 | 'Safety':{'de':'allgemeine Gefahren und öffentliche Sicherheit', 108 | 'en':'general emergency and public safety'}, 109 | 'Security':{'de':'Gesetzesdurchsetzung, militärische, regionale und lokale/private Sicherheit', 110 | 'en':'law enforcement, miltary, homeland and local/private security'}, 111 | 'Rescue':{'de':'Feuerbekämpfung und Sicherheit', 112 | 'en':'fire suppression and rescue'}, 113 | 'Health':{'de':'Medizin und öffentliche Gesundheit', 114 | 'en':'medical and public health'}, 115 | 'Env':{'de':'Umweltverschmutzung und andere Umweltgefahren', 116 | 'en':'pollution and other environmental'}, 117 | 'Transport':{'de':'öffentlicher und privater Verkehr', 118 | 'en':'public and private transportation'}, 119 | 'Infra':{'de':'Infrastruktur', # z.B. 
Telekommunikation 120 | 'en':'utility, telecommunication, other non-transport infrastructure'}, 121 | 'CBRNE':{'de':'chemische, biologische, radioaktive, nukleare oder explosive Bedrohung oder Attacke', 122 | 'en':'chemical, biological, radiological, nuclear or high-yield exlosive threat or attack'}, 123 | 'Other':{'de':'andere Ereignisse','en':'other events'} 124 | } 125 | 126 | def get_category_name(category, lang='de'): 127 | try: 128 | print(category) 129 | return CAP_CATEGORY[category][lang.lower()] 130 | except LookupError: 131 | pass 132 | try: 133 | return CAP_CATEGORY[category]['en'] 134 | except LookupError: 135 | pass 136 | if lang.lower()=='de': return 'unbekannt' 137 | return 'unknown' 138 | 139 | 140 | def tobool(x): 141 | """ convert text to boolean 142 | Copyright (C) Tom Keffer 143 | """ 144 | try: 145 | if x.lower() in ['true', 'yes', 'y']: 146 | return True 147 | elif x.lower() in ['false', 'no', 'n']: 148 | return False 149 | except AttributeError: 150 | pass 151 | try: 152 | return bool(int(x)) 153 | except (ValueError, TypeError): 154 | pass 155 | raise ValueError("Unknown boolean specifier: '%s'." % x) 156 | 157 | 158 | class BbkWarnings(object): 159 | 160 | def __init__(self, config_dict, verbose=False): 161 | 162 | base_dict = config_dict.get('DeutscherWetterdienst',dict()) 163 | # target path 164 | self.target_path = base_dict.get('path','.') 165 | # warning config data 166 | warn_dict = base_dict.get('BBK',dict()) 167 | self.filter_area = warn_dict.get('counties',dict()) 168 | self.icon_pth = warn_dict.get('icons','images') 169 | self.bbk_url = DEFAULT_BBK_URL 170 | # warn icons 171 | self.icon_pth = warn_dict.get('icons',self.bbk_url+'/appdata/gsb/eventCodes') 172 | self.logo_pth = warn_dict.get('logos',self.bbk_url+'/appdata/gsb/logos') 173 | # logging 174 | self.verbose = verbose 175 | self.log_success = tobool(warn_dict.get('log_success',base_dict.get('log_success',config_dict.get('log_success',False)))) 176 | self.log_failure = tobool(warn_dict.get('log_failure',base_dict.get('log_failure',config_dict.get('log_failure',False)))) 177 | if int(config_dict.get('debug',0))>0 or verbose: 178 | self.log_success = True 179 | self.log_failure = True 180 | self.verbose = True 181 | if __name__ == "__main__" and verbose: 182 | print('-- configuration data ----------------------------------') 183 | print('verbose: ',self.verbose) 184 | print('log success: ',self.log_success) 185 | print('log failure: ',self.log_failure) 186 | print('url: ',self.bbk_url) 187 | print('filter area: ',self.filter_area) 188 | print('target path: ',self.target_path) 189 | print('--------------------------------------------------------') 190 | 191 | 192 | @staticmethod 193 | def _mktime(timestring): 194 | """ convert CAP timestamp string to epoch time """ 195 | if not timestring: return None 196 | ti = datetime.datetime.strptime(timestring,'%Y-%m-%dT%H:%M:%S%z') 197 | #print(ti) 198 | return int(ti.timestamp()*1000) 199 | 200 | 201 | @staticmethod 202 | def compareARS(ars, pars): 203 | """ Is ars in pars? 
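Both arguments are ARS (Amtlicher Regionalschlüssel) values; pars may be a
comma-separated list. Trailing zeros are stripped, so a shortened key acts
as a prefix filter. Example (hypothetical pars value '14'), using the ARS
from the test configuration at the bottom of this file:
compareARS('146280000000', '14') returns True, because '14628' starts with '14'.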
""" 204 | if not pars: return True 205 | # remove '0' at the end of the string 206 | ars_str = ars.strip().rstrip('0') 207 | ars_len = len(ars_str) 208 | #print('QQQQQQQQQQ','ARS str',ars_str,'len',ars_len) 209 | # '000000000000' means 'whole country' 210 | if ars_len==0: return True 211 | # pars may be a list of ARS 212 | for ii in pars.split(','): 213 | # remove '0' at the end of the string 214 | vgl_str = ii.strip().rstrip('0') 215 | vgl_len = len(vgl_str) 216 | #print('QQQQQQQQQQ','ARS',ars_str,ars_len,'VGL',vgl_str,vgl_len,':',ars[0:min(vgl_len,ars_len)],'==',vgl_str[0:min(vgl_len,ars_len)]) 217 | # 218 | if vgl_len==0: return True 219 | # 220 | if ars[0:min(vgl_len,ars_len)]==vgl_str[0:min(vgl_len,ars_len)]: 221 | return True 222 | return False 223 | 224 | 225 | def wget(self, url): 226 | """ download from BBK """ 227 | headers={'User-Agent':'weewx-DWD'} 228 | reply = requests.get(url,headers=headers) 229 | 230 | if reply.status_code==200: 231 | if self.log_success or self.verbose: 232 | loginf('successfully downloaded %s' % reply.url) 233 | return json.loads(reply.content) 234 | else: 235 | if self.log_failure or self.verbose: 236 | loginf('error downloading %s: %s %s' % (reply.url,reply.status_code,reply.reason)) 237 | return None 238 | 239 | def get_logos(self): 240 | """ get the list of sender logos """ 241 | url = self.bbk_url + '/appdata/gsb/logos/logos.json' 242 | logos = self.wget(url) 243 | return {logo['senderId']:logo for logo in logos['logos']} 244 | 245 | 246 | def get_eventcodes(self): 247 | """ get the list of event codes """ 248 | url = self.bbk_url + '/appdata/gsb/eventCodes/eventCodes.json' 249 | evcs = self.wget(url) 250 | return {evc['eventCode']:evc['imageUrl'] for evc in evcs['eventCodes']} 251 | 252 | 253 | def get_list(self, ars): 254 | """ get list of active warnings for county ars """ 255 | if ars in WARNING_SOURCES: 256 | url = self.bbk_url + '/' + ars + '/mapData.json' 257 | else: 258 | url = self.bbk_url + '/dashboard/' + str(ars)[0:5] + '0000000.json' 259 | return self.wget(url) 260 | 261 | 262 | def get_warning(self, id): 263 | """ get warning data of warning id """ 264 | url = self.bbk_url + '/warnings/' + id +'.json' 265 | return self.wget(url) 266 | 267 | 268 | def get_warnings(self, location=None, include_dwd=False, lang='de'): 269 | 270 | # initialize dict for all regions to collect warnings for 271 | if location: 272 | try: 273 | wwarn = {i:[] for i in location} 274 | arss = {i:i for i in location} 275 | except LookupError: 276 | wwarn = {location:[]} 277 | arss = {location:location} 278 | else: 279 | wwarn = {self.filter_area[i]:[] for i in self.filter_area} 280 | arss = self.filter_area 281 | 282 | evcode = self.get_eventcodes() 283 | logos = self.get_logos() 284 | 285 | # remember downloaded warnings 286 | alerts = dict() 287 | 288 | for ars in arss: 289 | 290 | if self.verbose and __name__ == "__main__": 291 | print("++ dashboard data ++++++++++++++++++++++++++++++++++++++") 292 | 293 | warns = self.get_list(ars) 294 | 295 | if self.verbose: 296 | loginf("Regionalschlüssel ARS %s, %s Einträge" % (ars,len(warns))) 297 | 298 | if warns: 299 | 300 | for warn in warns: 301 | 302 | if self.verbose: 303 | loginf("Warn ID: %s" % warn.get('id')) 304 | loginf(warn) 305 | 306 | if warn['id'] in alerts: 307 | # this warning is already downloaded 308 | alert = alerts[warn['id']] 309 | if self.log_success or self.verbose: 310 | loginf('reuse warning %s' % warn['id']) 311 | else: 312 | # download warning 313 | alert = self.get_warning(warn['id']) 314 | 
alerts[warn['id']] = alert 315 | 316 | if 'sender' in alert: 317 | logo = logos.get(alert['sender']) 318 | if logo: 319 | if 'image' in logo: 320 | alert['sender_logo'] = self.logo_pth+'/'+logo.get('image') 321 | alert['sender_name'] = logo.get('name') 322 | for info_dict in alert.get('info',[]): 323 | if info_dict.get('language','')[0:2].lower()==lang: 324 | for jj in info_dict.get('area',dict()): 325 | alert['areas'] = jj.get('areaDesc') 326 | alert['event'] = info_dict['event'] 327 | alert['headline'] = info_dict['headline'] 328 | alert['description'] = info_dict['description'] 329 | alert['urgency'] = info_dict['urgency'] 330 | alert['severity'] = info_dict['severity'] 331 | alert['category'] = info_dict['category'] 332 | alert['categoryName'] = [get_category_name(ii) for ii in info_dict['category']] 333 | alert['certainty'] = info_dict['certainty'] 334 | if 'responseType' in info_dict: 335 | alert['responseType'] = info_dict['responseType'] 336 | # 337 | for ii in info_dict.get('eventcode',[]): 338 | vn = None 339 | vl = None 340 | for jj in ii: 341 | if jj.lower()=='valuename': 342 | vn = ii[jj] 343 | elif jj.lower()=='value': 344 | vl = ii[jj] 345 | if vn and vl is not None: 346 | alert['eventCode-'+str(vn)] = vl 347 | # 348 | alert['parameter'] = dict() 349 | for ii in info_dict.get('parameter',[]): 350 | vn = None 351 | vl = None 352 | for jj in ii: 353 | if jj.lower()=='valuename': 354 | vn = ii[jj] 355 | elif jj.lower()=='value': 356 | vl = ii[jj] 357 | try: 358 | alert['parameter'][vn] = vl 359 | except LookupError: 360 | pass 361 | # severity level 362 | if info_dict.get('event','')[:16]=='VORABINFORMATION': 363 | alert['level'] = 1 364 | else: 365 | alert['level'] = CAP_SEVERITY.get(info_dict.get('severity'),0) 366 | alert["level_text"] = bbk_level_text(alert['level'],alert.get('sender','')=="opendata@dwd.de") 367 | # event type 368 | alert['type'] = get_eventtype( 369 | info_dict.get('event'), 370 | alert.get('eventCode-II')) 371 | del alert['info'] 372 | # release time 373 | alert['released'] = BbkWarnings._mktime(warn.get('effective',warn.get('sent'))) 374 | # start time 375 | alert['start'] = BbkWarnings._mktime(warn.get('onset',warn.get('sent'))) 376 | # end time 377 | alert['end'] = BbkWarnings._mktime(warn.get('expires')) 378 | # 379 | for ii in alert.get('code',[]): 380 | if ii=='SILENT_UPDATE': 381 | alert['SILENT_UPDATE'] = True 382 | if ii=='PARTIAL_CLEAR': 383 | alert['PARTIAL_CLEAR'] = True 384 | if ii[:3]=='id:': 385 | alert['msgid'] = ii 386 | # warn icon 387 | evc = warn.get('payload',dict()).get('data',dict()).get('transKeys',dict()).get('event') 388 | if evc: 389 | alert['icon'] = self.icon_pth+'/'+evcode.get(evc,'unknown.png') 390 | else: 391 | alert['icon'] = self.icon_pth+'/'+'bbkicon.png' 392 | # "opendata@dwd.de" 393 | if alert.get('sender','')!="opendata@dwd.de" or include_dwd: 394 | pars = alert.get('parameter',dict()).get('warnVerwaltungsbereiche','') 395 | if BbkWarnings.compareARS(ars,pars): 396 | wwarn[arss[ars]].append(alert) 397 | 398 | if self.verbose and __name__ == "__main__": 399 | print("--") 400 | return wwarn 401 | 402 | 403 | def write_html(self, wwarn, dryrun): 404 | for __ww in wwarn: 405 | s = "" 406 | r = None 407 | for idx,val in enumerate(wwarn[__ww]): 408 | _region = val['areas'] 409 | val['regionName'] = _region 410 | if r is None or r!=_region: 411 | r = _region 412 | s+='

 %s \n' % r
413 |
414 | # alert message
415 | s+='\n'
416 | if val.get('icon'):
417 | s+='\n' % (val['icon'],val['event'])
418 | __size=110 if int(val['level'])>2 else 100
419 | s+=' %s %s \n' % (__size,val['headline'])
420 | if val['start'] and val['end']:
421 | s='%s gültig vom %s bis %s \n' % (s,time.strftime("%d.%m. %H:%M",time.localtime(val['start']/1000)),time.strftime("%d.%m. %H:%M",time.localtime(val['end']/1000)))
422 | elif val['start']:
423 | s='%s gültig seit %s \n' % (s,time.strftime("%d.%m. %H:%M",time.localtime(val['start']/1000)))
424 | elif val['end']:
425 | s='%s gültig bis %s \n' % (s,time.strftime("%d.%m. %H:%M",time.localtime(val['end']/1000)))
426 | s+='\n'
427 |
428 | if val.get('description'):
429 | s+=" %s \n" % val['description']
430 | if val.get('instruction'):
431 | s+=" %s \n" % val['instruction']
432 | if val.get('sender'):
433 | if 'sender_name' in val:
434 | sn = val['sender_name']+' ('+val['sender']+')'
435 | else:
436 | sn = val['sender']
437 | if val.get('sender_logo',''):
438 | lg = '%s ' % (val['sender_logo'],val['sender'])
439 | else:
440 | lg = ""
441 | s+=' Quelle: %s%s ' % (lg,sn)
442 |
443 | s+=' %s – %s  –  %s – %s  –  %s – %s  –  %s ' % (val.get('type',''),val.get('event',''),val.get('level',''),val.get('level_text',''),val.get('category',''),val.get('categoryName',''),val.get('identifier',''))
444 |
445 | if s:
446 | s += ' Herausgegeben vom BBK | Abgerufen am %s \n' % time.strftime('%d.%m.%Y %H:%M')
447 | else:
448 | s=' zur Zeit keine Warnungen 
' 449 | 450 | if dryrun: 451 | print("########################################") 452 | print("-- HTML -- bbk-%s.inc ------------------------------"%__ww) 453 | print(s) 454 | print("-- JSON -- bbk-%s.json -----------------------------"%__ww) 455 | print(json.dumps(wwarn[__ww],indent=4,ensure_ascii=False)) 456 | else: 457 | with open("%s/bbk-%s.inc" % (self.target_path,__ww),"w") as file: 458 | file.write(s) 459 | with open("%s/bbk-%s.json" % (self.target_path,__ww),"w") as file: 460 | json.dump(wwarn[__ww],file,indent=4) 461 | 462 | if __name__ == "__main__": 463 | 464 | usage = """Usage: %prog [options] [ARS] 465 | 466 | If no ARS is specified, ARSs are read from config. 467 | """ 468 | 469 | epilog = None 470 | 471 | # Create a command line parser: 472 | parser = optparse.OptionParser(usage=usage, epilog=epilog) 473 | 474 | # options 475 | parser.add_option("--config", dest="config_path", type=str, 476 | metavar="CONFIG_FILE", 477 | default=None, 478 | help="Use configuration file CONFIG_FILE.") 479 | parser.add_option("--weewx", action="store_true", 480 | help="Read config from weewx.conf.") 481 | parser.add_option("--lang", dest="lang", type=str, 482 | metavar="ISO639", 483 | default='de', 484 | help="Alert language. Default 'de'") 485 | 486 | group = optparse.OptionGroup(parser,"Output and logging options") 487 | group.add_option("--dry-run", action="store_true", 488 | default=False, 489 | help="Print what would happen but do not do it. Default is False") 490 | group.add_option("-v","--verbose", action="store_true", 491 | default=False, 492 | help="Verbose output") 493 | group.add_option("--include-dwd", action="store_true", 494 | default=False, 495 | help="include DWD warnings. Default is False. Use dwd-cap-warnings to get DWD warnings.") 496 | parser.add_option_group(group) 497 | 498 | # commands 499 | group = optparse.OptionGroup(parser,"Commands") 500 | group.add_option("--list-logos", action="store_true", 501 | help="list logos") 502 | group.add_option("--list-eventcodes", action="store_true", 503 | help="list event codes") 504 | parser.add_option_group(group) 505 | 506 | (options, args) = parser.parse_args() 507 | 508 | if options.weewx: 509 | config_path = "/etc/weewx/weewx.conf" 510 | else: 511 | config_path = options.config_path 512 | 513 | if config_path: 514 | print("Using configuration file %s" % config_path) 515 | config = configobj.ConfigObj(config_path) 516 | else: 517 | # test only 518 | print("Using test configuration") 519 | # vom Benutzer anzupassen 520 | config = {'DeutscherWetterdienst':{'BBK':{'counties':{'146280000000':'XX'}}}} 521 | config = {'DeutscherWetterdienst':{'BBK':{'counties':{'083110000000':'XX'}}}} 522 | 523 | if len(args)==0: args = None 524 | 525 | bbk = BbkWarnings(config,verbose=options.verbose) 526 | """ 527 | warns = bbk.get_list('146280000000') 528 | for warn in warns: 529 | print(warn['id']) 530 | print(bbk.get_warning(warn['id'])) 531 | break 532 | """ 533 | if options.list_logos: 534 | x = bbk.get_logos() 535 | print(json.dumps(x,indent=4,ensure_ascii=False)) 536 | elif options.list_eventcodes: 537 | x = bbk.get_eventcodes() 538 | print(json.dumps(x,indent=4,ensure_ascii=False)) 539 | else: 540 | wwarn = bbk.get_warnings(location=args,include_dwd=options.include_dwd,lang=options.lang) 541 | bbk.write_html(wwarn,options.dry_run) 542 | -------------------------------------------------------------------------------- /bin/user/wildfire.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 
| # Copyright (C) 2023 Johanna Roedenbeck 3 | 4 | """ 5 | 6 | This program is free software: you can redistribute it and/or modify 7 | it under the terms of the GNU General Public License as published by 8 | the Free Software Foundation, either version 3 of the License, or 9 | (at your option) any later version. 10 | 11 | This program is distributed in the hope that it will be useful, 12 | but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | GNU General Public License for more details. 15 | 16 | You should have received a copy of the GNU General Public License 17 | along with this program. If not, see . 18 | 19 | """ 20 | 21 | """ 22 | The german forest administrations issue a wildfire danger level 23 | (Waldbrandgefahrenstufe), based on an index (Waldbrandgefahrenindex) 24 | calculated by the German Weather Service (DWD) and on the forest 25 | characteristics (Waldbrandgefahrenklasse). They issue it once a day. 26 | 27 | There are 5 levels: 28 | 1 - very low danger 29 | 2 - low danger 30 | 3 - moderate danger 31 | 4 - high danger 32 | 5 - extremly high danger 33 | 34 | Along with those levels, certain restrictions to forest access 35 | apply. 36 | 37 | Eastern german forest administrations use an icon showing a squirrel 38 | to represent information about wildfire protection. This 'wildfire 39 | squirrel' is used in different colors here to display the wildfire 40 | danger level in effect. 41 | 42 | Configuration keys: 43 | 44 | * `enable` - enable or disable, optional, default True 45 | * `provider` - data provider, actually `Sachsenforst` only, 46 | mandatory 47 | * `server_url` - URL to fetch data from, mandatory 48 | * `area` - area code, mandatory 49 | * `api_key` - API key, mandatory 50 | * `fetch_time` - time of day, when data is issued by the provider, 51 | mandatory; in UTC, if value ends with `UTC` otherwise 52 | in local time (example "03:00 UTC", "06:30") 53 | If data is not available at that time, retries 54 | take place every 5 minutes. 55 | * `file` - unique file name part to use when creating files, 56 | mandatory 57 | * `log_sleeping` - log remaining sleeping time before going to sleep, 58 | optional, default False 59 | 60 | derived from higher levels of the configuration or set here: 61 | 62 | * `log_success` - log successful operation, optional, default False 63 | * `log_failure` - log failed operation, optional, default True 64 | * `path` - path to write files to, mandatory 65 | 66 | Example configuration: 67 | 68 | ``` 69 | log_success = True 70 | log_failure = True 71 | ... 72 | [WeatherServices] 73 | path = '/etc/weewx/skins/Belchertown/dwd' 74 | ... 75 | [[forecast]] 76 | ... 77 | [[[AREA10]]] 78 | provider = WBSProvider 79 | server_url = 'https://wbs.example.com/wbs.php' 80 | area = 10 81 | api_key = 'asdfghjkl1234567' 82 | fetch_time = 04:30 UTC 83 | file = XX 84 | log_sleeping = True 85 | ``` 86 | 87 | Actually Saxony is supported only. 88 | 89 | Please note, you need a contract (free of charge) to display the 90 | wildfire danger level on your website. Contact the respective 91 | forest administration for more information. 92 | 93 | If you want to add another provider, define a new class based on 94 | class WildfireThread, defining provider_name(), provider_url(), 95 | get_url(), and process_data() functions. Then append a reference 96 | to that class to providers_dict. 
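
A minimal sketch of such a provider is shown below for illustration only. The
class name `ExampleForstThread`, the query parameters and the reply fields
`wbs`, `region` and `generated` are assumptions, not part of this extension;
a real provider has to match the actual API of its server.

```python
# Hypothetical sketch only; it would be placed below class WildfireThread
# in this module. Reply fields and the URL scheme are assumptions.
class ExampleForstThread(WildfireThread):

    @property
    def provider_name(self):
        return 'Example Forest Administration'

    @property
    def provider_url(self):
        return 'https://wbs.example.com'

    def get_url(self):
        # build the request URL from the configured server_url, area and api_key
        return '%s?id=%s&key=%s' % (self.server_url, self.wildfire_area, self.api_key)

    def process_data(self, reply, now):
        # convert the provider's JSON reply into the internal dict structure
        data = {'Issuer': self.provider_name, 'ProductID': 'WBS', 'id': self.wildfire_area}
        try:
            wbs = int(reply['wbs'])
        except (LookupError, TypeError, ValueError):
            wbs = None
        issued = now  # or parse reply.get('generated') if the provider reports it
        data['name'] = reply.get('region', '')
        data['wbs'] = wbs
        data['sent'] = issued
        data['released'] = issued
        data['text'] = LEVELTEXT[wbs] if wbs in {1, 2, 3, 4, 5} else LEVELTEXT[0]
        data['color'] = LEVELCOLOR[wbs] if wbs in {1, 2, 3, 4, 5} else LEVELCOLOR[0]
        data['description'] = ''
        data['instruction'] = INSTRUCTIONTEXT[wbs-1] if wbs in {1, 2, 3, 4, 5} else ''
        data['day'] = time.strftime('%d.%m.', time.localtime(now))
        return data, issued

# register the new provider so that create_thread() can find it
providers_dict['Example'] = ExampleForstThread
```

With such a registration, a `[[[...]]]` subsection as in the example
configuration above, using `provider = Example`, should start the new thread.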
97 | 98 | https://www.dwd.de/DE/leistungen/waldbrandgef/waldbrandgef.html 99 | """ 100 | 101 | VERSION = "0.x" 102 | 103 | import threading 104 | import configobj 105 | import requests 106 | import datetime 107 | import json 108 | import random 109 | import time 110 | import os 111 | import os.path 112 | 113 | if __name__ == '__main__': 114 | 115 | import sys 116 | import __main__ 117 | sys.path.append('/usr/share/weewx') 118 | x = os.path.dirname(os.path.abspath(os.path.dirname(__main__.__file__))) 119 | if x not in sys.path: 120 | sys.path.append(x) 121 | 122 | def logdbg(x): 123 | print('DEBUG',x) 124 | #def loginf(x): 125 | # print('INFO',x) 126 | #def logerr(x): 127 | # print('ERROR',x) 128 | 129 | import weeutil.logger 130 | weeutil.logger.setup('wildfire',dict()) 131 | import logging 132 | log = logging.getLogger("user.DWD.wildfire") 133 | def loginf(msg): 134 | log.info(msg) 135 | def logerr(msg): 136 | log.error(msg) 137 | 138 | else: 139 | 140 | import weeutil.logger 141 | import logging 142 | log = logging.getLogger("user.DWD.wildfire") 143 | 144 | def logdbg(msg): 145 | log.debug(msg) 146 | 147 | def loginf(msg): 148 | log.info(msg) 149 | 150 | def logerr(msg): 151 | log.error(msg) 152 | 153 | from user.weatherservicesutil import wget, BaseThread 154 | import weeutil.weeutil # startOfDay, archiveDaySpan 155 | import weeutil.config # accumulateLeaves 156 | 157 | # RAL3000: #A72920 158 | 159 | WILDFIRESQUIRREL = """ 160 | wildfire squirrel, symbol for wildfire protection information 161 | 162 | 168 | 174 | 178 | 191 | 192 | 193 | """ 194 | 195 | # N/A 1 2 3 4 5 196 | LEVELCOLOR = ['#808080','#ffffcd','#ffd879','#ff8c39','#e9161d','#7f0126'] 197 | 198 | LEVELTEXT = [ 199 | 'unbekannt', # N/A 200 | 'sehr geringe Gefahr', # 1 201 | 'geringe Gefahr', # 2 202 | 'mittlere Gefahr', # 3 203 | 'hohe Gefahr', # 4 204 | 'sehr hohe Gefahr' # 5 205 | ] 206 | 207 | INSTRUCTIONTEXT = [ 208 | # 1 Sehr geringe Gefahr 209 | '', 210 | # 2 Geringe Gefahr 211 | """

Erhöhte Umsicht und Vorsicht, um Zündquellen zu vermeiden
212 | 213 |   • Rauchverbot beachten.
214 |   • Vorsicht beim Parken.
215 | 216 | """,
217 | # 3 Mittlere Gefahr
218 | """ Die Situation wird kritisch und bedarf bewußter Einschränkungen.
219 | 220 |   • Rauchverbot strikt einhalten.
221 |   • Öffentliche Feuerstellen und Grillplätze im und am Wald sollten nicht genutzt werden.
222 |   • Erhöhte Vorsicht beim Parken. Nicht auf vegetationsbedeckten Flächen parken.
223 | 224 | """,
225 | # 4 Hohe Gefahr
226 | """ Aktiver Brandschutz des Waldes durch äußerste Vorsicht und weitere Einschränkungen
227 | 228 |   • Parkplätze und touristische Einrichtungen können behördlich gesperrt sein. Sperrungen beachten.
229 |   • Rauchverbot strikt einhalten.
230 |   • Öffentliche Feuerstellen und Grillplätze im und am Wald dürfen nicht genutzt werden.
231 |   • Wege nicht verlassen.
232 |   • Hohe Vorsicht beim Parken. Nicht auf vegetationsbedeckten Flächen parken.
233 | 234 | """,
235 | # Sehr hohe Gefahr
236 | """ Maximaler Schutz des Waldes vor Bränden
237 | 238 |   • Behörden und Waldbesitzer können den Wald aus Brandschutzgründen sperren. Sperrungen beachten.
239 |   • Rauchverbot strikt einhalten.
240 |   • Wald meiden.
241 |   • Bei unvermeidlichem Aufenthalt im oder am Wald Wege nicht verlassen.
242 |   • Öffentliche Feuerstellen und Grillplätze im und am Wald dürfen nicht genutzt werden.
243 |   • Nur auf öffentlichen, freigegebenen und vegetationslosen Parkplätzen parken.
244 |
245 | """ 246 | ] 247 | 248 | ############################################################################## 249 | # General wildfire danger level fetching thread # 250 | ############################################################################## 251 | 252 | class WildfireThread(BaseThread): 253 | 254 | @property 255 | def provider_name(self): 256 | raise NotImplementedError 257 | 258 | @property 259 | def provider_url(self): 260 | raise NotImplementedError 261 | 262 | def __init__(self, name, conf_dict, archive_interval): 263 | # get logging configuration 264 | log_success = weeutil.weeutil.to_bool(conf_dict.get('log_success',False)) 265 | log_failure = weeutil.weeutil.to_bool(conf_dict.get('log_failure',True)) 266 | # initialize thread 267 | super(WildfireThread,self).__init__(name='DWD-WBS-'+name,log_success=log_success,log_failure=log_failure) 268 | # archive interval 269 | self.query_interval = weeutil.weeutil.to_int(archive_interval) 270 | # fetch time (example: "03:00 UTC", "06:30") 271 | fetch_time = conf_dict.get('fetch_time','').upper().strip() 272 | self.fetch_time_utc = fetch_time.endswith('UTC') 273 | fetch_time = ''.join([i for i in fetch_time if i in '0123456789:']) 274 | self.fetch_time = 0 275 | j = 3600 276 | for i in fetch_time.split(':'): 277 | self.fetch_time += int(i)*j 278 | j /= 60 279 | # log sleeping time or not 280 | self.log_sleeping = weeutil.weeutil.to_bool(conf_dict.get('log_sleeping',False)) 281 | # server data 282 | self.server_url = conf_dict.get('server_url') 283 | self.wildfire_area = conf_dict.get('area') 284 | self.api_key = conf_dict.get('api_key') 285 | # path and file name for HTML and JSON files 286 | self.target_path = conf_dict.get('path','.') 287 | self.filename = conf_dict.get('file','') 288 | self.bootstrapmodal = weeutil.weeutil.to_bool(conf_dict.get('Bootstrap_modal',True)) 289 | # log config at start 290 | loginf("thread '%s': provider '%s', fetch time %s %s, area %s" % (self.name,self.provider_name,self.fetch_time,'UTC' if self.fetch_time_utc else 'local time',self.wildfire_area)) 291 | 292 | self.lock = threading.Lock() 293 | 294 | self.init_data() 295 | self.last_newday_ts = 0 296 | self.wildfire_area_name = '' 297 | 298 | 299 | def init_data(self): 300 | self.last_data_ts = 0 301 | self.data = dict() 302 | 303 | 304 | def get_data(self, ts): 305 | """ get buffered data """ 306 | today_ts = weeutil.weeutil.startOfArchiveDay(ts) 307 | try: 308 | self.lock.acquire() 309 | if today_ts>self.last_data_ts: 310 | # data is outdated 311 | self.init_data() 312 | interval = 1 313 | data = self.data 314 | finally: 315 | self.lock.release() 316 | if __name__ == '__main__': 317 | print('get_data()',data) 318 | data = { 319 | 'wildfireDangerLevel':(data.get('wbs'),'byte','group_data'), 320 | 'wildfireDangerLevelIssued':(data.get('released'),'unix_epoch','group_datetime'), 321 | 'wildfireDangerLevelText':(data.get('text'),None,None), 322 | 'wildfireDangerLevelArea':(data.get('name'),None,None), 323 | 'wildfireDangerLevelColor':(data.get('color'),None,None) 324 | } 325 | return data,interval 326 | 327 | 328 | def is_fetch_time_reached(self): 329 | """ check if fetch time is reached """ 330 | now = time.time() 331 | today = weeutil.weeutil.archiveDaySpan(now) 332 | if self.fetch_time_utc: 333 | # UTC 334 | reference_time = today[0]-today[0]%86400+self.fetch_time 335 | if reference_time<=today[0]: 336 | reference_time += 86400 337 | countdown = reference_time-now 338 | else: 339 | # local time 340 | now_tuple = time.localtime(now) 341 | now_time_of_day 
= now_tuple.tm_hour*3600+now_tuple.tm_min*60+now_tuple.tm_sec 342 | countdown = self.fetch_time-now_time_of_day 343 | return now, today, countdown 344 | 345 | 346 | def get_url(self): 347 | """ get URL to fetch data """ 348 | raise NotImplementedError 349 | 350 | 351 | def getRecord(self): 352 | """ download and process data 353 | 354 | called at the beginning of the new day and from the scheduled 355 | time on until success 356 | 357 | Please note that additional calls are possible due to 358 | interruptions of the sleeping function within the base thread. 359 | """ 360 | if __name__ == '__main__': 361 | print('WildfireThread.getRecord()') 362 | now, today, countdown = self.is_fetch_time_reached() 363 | # Check if all is done for today 364 | if self.last_data_ts>today[0]: 365 | # Data for today already received. Nothing to do. 366 | return 367 | # Check if fetch time is reached 368 | fetch_time_reached = countdown<=60 369 | # 370 | if fetch_time_reached: 371 | # fetch data 372 | try: 373 | reply = wget(self.get_url(), 374 | log_success=self.log_success, 375 | log_failure=self.log_failure) 376 | if reply is None: return 377 | reply = json.loads(reply) 378 | if self.log_success: 379 | loginf("thread '%s': got %s" % (self.name,reply)) 380 | except Exception as e: 381 | if self.log_failure: 382 | logerr("thread '%s': wget %s - %s" % (self.name,e.__class__.__name__,e)) 383 | return 384 | # process data 385 | try: 386 | data, issued = self.process_data(reply, now) 387 | except Exception as e: 388 | if self.log_failure: 389 | logerr("thread '%s': process data %s - %s" % (self.name,e.__class__.__name__,e)) 390 | return 391 | if not data or not issued: 392 | # no valid data received 393 | return 394 | self.last_data_ts = issued 395 | if issued and 'name' in data: 396 | self.wildfire_area_name = data['name'] 397 | else: 398 | # Check if day change is already processed 399 | if self.last_newday_ts>today[0]: 400 | # Day change is already processed and fetch time is not 401 | # reached. Nothing to do. 402 | return 403 | self.last_newday_ts = now 404 | data = { 405 | 'Issuer':self.provider_name, 406 | 'id':self.wildfire_area, 407 | 'name': self.wildfire_area_name, # region 408 | 'sent': now, 409 | 'wbs': None, 410 | 'released': None, 411 | 'text': 'noch nicht veröffentlicht', 412 | 'description':'', 413 | 'instruction':'', 414 | 'color': LEVELCOLOR[0], 415 | 'day': time.strftime('%d.%m.',time.localtime(now)) 416 | } 417 | data['fetch_time_reached'] = fetch_time_reached 418 | data['processed'] = now 419 | data['start'] = today[0] 420 | data['end'] = today[1] 421 | try: 422 | self.lock.acquire() 423 | self.data = data 424 | finally: 425 | self.lock.release() 426 | self.write_html(({self.filename:[data]},'de'),self.target_path,False) 427 | if self.bootstrapmodal: 428 | self.write_html_bootstrap_modal(({self.filename:[data]},'de'),self.target_path,False) 429 | 430 | 431 | def waiting_time(self): 432 | """ time to wait until the next fetch """ 433 | if self.last_data_ts==0: return 0 434 | now, today, countdown = self.is_fetch_time_reached() 435 | if today[0]>self.last_data_ts: 436 | # new day 437 | if countdown<=0: 438 | # If data is outdated, wait to the end of the current archive 439 | # interval. 
440 | waiting = self.query_interval-now%self.query_interval 441 | else: 442 | # in the morning wait to the next schedule 443 | waiting = countdown 444 | else: 445 | # Otherwise wait to the beginning of the new day 446 | waiting = today[1]-now+self.query_interval 447 | if __name__ == '__main__': 448 | print('waiting_time()',time.strftime('%H:%M:%S'),'countdown',countdown,'waiting',waiting) 449 | return waiting 450 | 451 | 452 | def random_time(self, waiting): 453 | """ do a little bit of load balancing 454 | 455 | let at least 10 seconds to ultimo to download an process 456 | data 457 | """ 458 | return -random.random()*60 459 | 460 | def process_data(self, reply, now): 461 | """ convert reply to internal structure """ 462 | raise NotImplementedError 463 | 464 | 465 | def write_html(self, wwarn, target_path, dryrun): 466 | """ create HTML and JSON file """ 467 | lang = wwarn[1] 468 | wwarn = wwarn[0] 469 | for __ww,data_list in wwarn.items(): 470 | s = '' 471 | r = None 472 | for data in data_list: 473 | _region = data['name'] 474 | # if a new region starts, set a caption 475 | if r is None or r!=_region: 476 | r = _region 477 | s+='

 %s \n' % r
478 |
479 | valid_on = time.strftime('%d.%m.%Y',time.localtime((data['start']+data['end'])/2))
480 | wbs = data.get('wbs')
481 | color = data.get('color',LEVELCOLOR[wbs]) if wbs in {1,2,3,4,5} else LEVELCOLOR[0]
482 | s += '\n'
483 | s += '\n'
484 | s += '\n%s\n' % (color,WILDFIRESQUIRREL)
485 | s += '%s\n' % (wbs if wbs else '?',)
486 | s += '\n'
487 | s += 'Waldbrandgefahrenstufe %s %s\n' % (data.get('wbs',''),110 if wbs else 100,data.get('text','nicht verfügbar'))
488 | s += ' gültig am %s' % valid_on
489 | s += '\n'
490 | s += '\n'
491 | s += data.get('instruction','')
492 | if not s:
493 | s += ' keine Angaben verfügbar '
494 | s += ' Waldbrandgefahrenstufe ausgegeben vom %s 
\n' % (self.provider_url,self.provider_name) 495 | if dryrun: 496 | print("########################################") 497 | print("-- HTML -- wbs-%s.inc -------------------------------"%__ww) 498 | print(s) 499 | print("-- JSON -- wbs-%s.json ------------------------------"%__ww) 500 | print(json.dumps(data,indent=4,ensure_ascii=False)) 501 | else: 502 | fn = os.path.join(target_path,"wbs-%s.inc" % __ww) 503 | fn_tmp = '%s.tmp' % fn 504 | with open(fn_tmp,"w") as file: 505 | file.write(s) 506 | os.rename(fn_tmp,fn) 507 | fn = os.path.join(target_path,"wbs-%s.json" % __ww) 508 | fn_tmp = '%s.tmp' % fn 509 | with open(fn_tmp,"w") as file: 510 | json.dump(data_list,file,indent=4,ensure_ascii=False) 511 | os.rename(fn_tmp,fn) 512 | 513 | def write_html_bootstrap_modal(self, wwarn, target_path, dryrun): 514 | """ create link and modal window for Bootstrap framework """ 515 | lang = wwarn[1] 516 | wwarn = wwarn[0] 517 | for __ww,data_list in wwarn.items(): 518 | s_link = '' 519 | s_modal = '' 520 | for data in data_list: 521 | valid_on = time.strftime('%d.%m.%Y',time.localtime((data['start']+data['end'])/2)) 522 | wbs = data.get('wbs') 523 | color = data.get('color',LEVELCOLOR[wbs]) if wbs in {1,2,3,4,5} else LEVELCOLOR[0] 524 | linkname = 'wbs%s' % int(data.get('start',0)*1000) 525 | s_link += '\n' 539 | # modal dialog 540 | if wbs and wbs!=1: 541 | s_modal += '\n' % linkname 542 | s_modal += '\n' 567 | s_modal += '\n' % linkname 568 | if dryrun: 569 | print("########################################") 570 | print("-- HTML -- wbs-%s-link.inc --------------------------"%__ww) 571 | print(s_link) 572 | print("-- HTML -- wbs-%s-modal.inc -------------------------"%__ww) 573 | print(s_modal) 574 | else: 575 | with open(os.path.join(target_path,"wbs-%s-link.inc" % __ww),"w") as file: 576 | file.write(s_link) 577 | with open(os.path.join(target_path,"wbs-%s-modal.inc" % __ww),"w") as file: 578 | file.write(s_modal) 579 | 580 | ############################################################################## 581 | # Provider Sachsenforst # 582 | ############################################################################## 583 | 584 | class SachsenforstThread(WildfireThread): 585 | 586 | @property 587 | def provider_name(self): 588 | return 'Staatsbetrieb Sachsenforst' 589 | 590 | @property 591 | def provider_url(self): 592 | return 'https://www.mais.de/php/sachsenforst.php' 593 | 594 | def get_url(self): 595 | """ get URL to fetch data """ 596 | return str(self.server_url)+'?id='+str(self.wildfire_area)+'&key='+str(self.api_key) 597 | 598 | 599 | def process_data(self, reply, now): 600 | """ convert received data to dict """ 601 | data = { 602 | 'Issuer':self.provider_name, 603 | 'ProductID':'WBS', 604 | 'id':self.wildfire_area 605 | } 606 | if reply: 607 | data['name'] = reply.get('region','') 608 | if time.strftime('%d.%m.%Y')==reply['date']: 609 | try: 610 | wbs = int(reply['wbs']) 611 | except (LookupError,TypeError,ValueError): 612 | wbs = None 613 | try: 614 | issued = time.strptime(reply['generated'],'%d.%m.%Y %H:%M') 615 | #issued = time.mktime(issued[0:8]+(0,)) 616 | issued = time.mktime(issued) 617 | if reply['wbs']==0: issued = None 618 | except (LookupError,ValueError,TypeError,ArithmeticError): 619 | issued = None 620 | data['sent'] = issued 621 | data['released'] = issued # effective 622 | data['description'] = '' 623 | try: 624 | data['instruction'] = INSTRUCTIONTEXT[wbs-1] 625 | except (LookupError,ValueError,TypeError,ArithmeticError): 626 | data['instruction'] = '' 627 | data['wbs'] = 
wbs 628 | data['color'] = reply.get('color',LEVELCOLOR[wbs if wbs in {1,2,3,4,5} else 0]) 629 | data['text'] = reply.get('text','') 630 | data['day'] = reply.get('date','')[0:6] 631 | else: 632 | # got out of day data 633 | issued = None 634 | else: 635 | issued = None 636 | return data, issued 637 | 638 | ############################################################################## 639 | 640 | providers_dict = { 641 | 'Sachsenforst': SachsenforstThread 642 | } 643 | 644 | 645 | 646 | def create_thread(thread_name,config_dict,archive_interval): 647 | """ create wildfire thread """ 648 | prefix = config_dict.get('prefix','id'+thread_name) 649 | provider = config_dict.get('provider') 650 | wildfire_area = config_dict.get('area') 651 | if provider and provider in providers_dict: 652 | conf_dict = weeutil.config.accumulateLeaves(config_dict) 653 | conf_dict['prefix'] = prefix 654 | conf_dict['area'] = wildfire_area 655 | if weeutil.weeutil.to_bool(conf_dict.get('enable',True)): 656 | thread = dict() 657 | thread['datasource'] = 'WBS' 658 | thread['prefix'] = prefix 659 | thread['thread'] = providers_dict[provider](thread_name,conf_dict,archive_interval) 660 | thread['thread'].start() 661 | return thread 662 | return None 663 | 664 | 665 | 666 | if __name__ == '__main__': 667 | 668 | conf_dict = configobj.ConfigObj("wildfire.conf") 669 | print('create wildfire thread') 670 | wildfire = SachsenforstThread('wildfiretest',conf_dict,300) 671 | print('about to start thread') 672 | wildfire.start() 673 | print('started') 674 | try: 675 | while True: 676 | time.sleep(300-time.time()%300+15) 677 | data, interval = wildfire.get_data(time.time()-15) 678 | print(json.dumps(data,indent=4,ensure_ascii=False)) 679 | except Exception as e: 680 | print('**MAIN**',e) 681 | except KeyboardInterrupt: 682 | print() 683 | print('**MAIN** CTRL-C pressed') 684 | wwarn = ({'Test':[]},'de') 685 | now = time.time() 686 | for i in range(5): 687 | x = { 688 | 'name':'Testregion', 689 | 'wbs':i+1, 690 | 'start':now+i*86400, 691 | 'end':now+i*86400, 692 | 'instruction':INSTRUCTIONTEXT[i], 693 | 'text':LEVELTEXT[i+1], 694 | 'color':LEVELCOLOR[i+1], 695 | } 696 | wwarn[0]['Test'].append(x) 697 | wildfire.write_html(wwarn,'.',False) 698 | if wildfire.bootstrapmodal: 699 | wildfire.write_html_bootstrap_modal(wwarn,'.',False) 700 | for i in wwarn[0]['Test']: 701 | wildfire.write_html(({'Test-%s' % i['wbs']:[i]},'de'),'.',False) 702 | if wildfire.bootstrapmodal: 703 | wildfire.write_html_bootstrap_modal(({'Test-%s' % i['wbs']:[i]},'de'),'.',False) 704 | wildfire.shutDown() 705 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. 
We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". 
"Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. 
For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 
204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 
268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. 
But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 
387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. 
You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. 
"Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 
564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 
628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | -------------------------------------------------------------------------------- /old/usr/local/bin/dwd-cap-warnings: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # Erzeugen von Warnmeldungen 3 | # Copyright (C) 2021,2022 Johanna Roedenbeck 4 | # licensed under the terms of the General Public License (GPL) v3 5 | 6 | from __future__ import absolute_import 7 | from __future__ import print_function 8 | from __future__ import with_statement 9 | 10 | """ 11 | This script is free software: you can redistribute it and/or modify 12 | it under the terms of the GNU General Public License as published by 13 | the Free Software Foundation, either version 3 of the License, or 14 | (at your option) any later version. 15 | 16 | This script is distributed in the hope that it will be useful, 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 19 | GNU General Public License for more details. 20 | """ 21 | 22 | """ 23 | Ein -Element DARF ein oder mehrere -Elemente haben. Jedes - 24 | Element stellt eine Warnung in einer Sprache dar. 
Sind mehrere -Elemente in 25 | einem -Block enthalten, so MUSS jedes -Element die gleiche Warnung 26 | (die gleiche Information) in einer anderen Sprache darstellen. Jedes -Element 27 | DARF ein oder mehrere -Elemente haben. 28 | """ 29 | 30 | import json 31 | import time 32 | import datetime 33 | import configobj 34 | import os.path 35 | import requests 36 | import csv 37 | import io 38 | import urllib.parse 39 | from email.utils import formatdate 40 | import html.parser 41 | import zipfile 42 | 43 | if __name__ == "__main__": 44 | import optparse 45 | import sys 46 | def loginf(x): 47 | print(x, file=sys.stderr) 48 | def logerr(x): 49 | print(x, file=sys.stderr) 50 | 51 | DEFAULT_DWD_WARNCELLID_URL = "https://www.dwd.de/DE/leistungen/opendata/help/warnungen/cap_warncellids_csv.csv?__blob=publicationFile&v=3" 52 | DEFAULT_DWD_CAP_URL = "https://opendata.dwd.de/weather/alerts/cap" 53 | 54 | # Der DWD verwendet ganz offensichtlich nicht die nach ISO genormten 55 | # Abkuerzungen fuer Bundeslaender. 56 | dwd_copy={ 57 | 'SN':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/sachsen/warnlage_sac_node.html', 58 | 'TH':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/thueringen/warnlage_thu_node.html', 59 | 'SA':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/sachen_anhalt/warnlage_saa_node.html', 60 | 'BB':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/berlin_brandenburg/warnlage_bb_node.html', 61 | 'MV':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/mecklenburg_vorpommern/warnlage_mv_node.html', 62 | 'NS':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/niedersachsen_bremen/warnlage_nds_node.html', 63 | 'HB':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/niedersachsen_bremen/warnlage_nds_node.html', 64 | 'HE':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/hessen/warnlage_hes_node.html', 65 | 'NRW':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/nordrhein_westfalen/warnlage_nrw_node.html', 66 | 'BY':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/bayern/warnlage_bay_node.html', 67 | 'SH':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/schleswig_holstein_hamburg/warnlage_shh_node.html', 68 | 'HH':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/schleswig_holstein_hamburg/warnlage_shh_node.html', 69 | 'RP':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/rheinland-pfalz_saarland/warnlage_rps_node.html', 70 | 'SL':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/rheinland-pfalz_saarland/warnlage_rps_node.html', 71 | 'BW':'https://www.dwd.de/DE/wetter/warnungen_aktuell/warnlagebericht/baden-wuerttemberg/warnlage_baw_node.html'} 72 | AGS_STATES = { 73 | '01':('SH','Schleswig-Holstein'), 74 | '02':('HH','Freie und Hansestadt Hamburg'), 75 | '03':('NS','Niedersachsen'), 76 | '04':('HB','Freie Hansestadt Bremen'), 77 | '05':('NRW','Nordrhein-Westfalen'), 78 | '06':('HE','Hessen'), 79 | '07':('RP','Rheinland-Pfalz'), 80 | '08':('BW','Baden-Württemberg'), 81 | '09':('BY','Freistaat Bayern'), 82 | '10':('SL','Saarland'), 83 | '11':('BB','Berlin'), 84 | '12':('BB','Brandenburg'), 85 | '13':('MV','Mecklenburg-Vorpommern'), 86 | '14':('SN','Freistaat Sachsen'), 87 | '15':('SA','Sachsen-Anhalt'), 88 | '16':('TH','Thüringen')} 89 | 90 | # Codes from warnings.json 91 | 92 | dwd_level=( 93 | 'keine Warnung', # 0 no warning 94 | 'Vorinformation', # 1 preliminary info 95 | 'Wetterwarnung', # 2 
minor 96 | 'markantes Wetter', # 3 moderate 97 | 'Unwetterwarnung', # 4 severe 98 | 'extremes Unwetter') # 5 extreme 99 | 100 | # Namensbestandteile der Warn-Icons 101 | dwd_warning_type=( 102 | 'gewitter', # 0 thunderstorm 103 | 'wind', # 1 wind/storm 104 | 'regen', # 2 rain 105 | 'schnee', # 3 snow 106 | 'nebel', # 4 fog 107 | 'frost', # 5 frost 108 | 'eis', # 6 ice 109 | 'tau', # 7 thawing 110 | 'hitze', # 8 heat 111 | 'uv') # 9 uv warning 112 | 113 | def dwd_warn_icon_file(type,level): 114 | if type is None: return None 115 | if type==8 or type==9: 116 | return "warn_icons_%s.png" % dwd_warning_type[type] 117 | if level==1: 118 | return "warn_icons_%s_pre.png" % dwd_warning_type[type] 119 | if level<2 or level>5: return None 120 | return "warn_icons_%s_%s.png" % (dwd_warning_type[type],level-1) 121 | 122 | def dwd_level_text(level): 123 | try: 124 | return dwd_level[level] 125 | except IndexError: 126 | if level==10: return 'Hitzewarnung' 127 | return None 128 | 129 | # Codes from CAP 130 | 131 | CAP_SEVERITY = { 132 | 'Minor':2, 133 | 'Moderate':3, 134 | 'Severe':4, 135 | 'Extreme':5} 136 | 137 | CAP_II_CAPTION = [ 138 | 'Warnungen', 139 | 'Küsten-Warnungen', 140 | 'Hochsee-Warnungen', 141 | 'Medizin-Meteorologische Warnungen', 142 | 'Vorabinformationen Unwetter'] 143 | CAP_II = [ 144 | #II,type,c,name 145 | # Warnungen 146 | (22,5,0,'Frost'), 147 | (24,6,0,'Glätte'), 148 | (31,0,0,'Gewitter'), 149 | (33,0,0,'starkes Gewitter'), 150 | (34,0,0,'starkes Gewitter'), 151 | (36,0,0,'starkes Gewitter'), 152 | (38,0,0,'starkes Gewitter'), 153 | (40,0,0,'schweres Gewitter mit Orkanböen'), 154 | (41,0,0,'schweres Gewitter mit extremen Orkanböen'), 155 | (42,0,0,'schweres Gewitter mit heftigem Starkregen'), 156 | (44,0,0,'schweres Gewitter mit Orkanböen und heftigem Starkregen'), 157 | (45,0,0,'schweres Gewitter mit extremen Orkanböen und heftigem Starkregen'), 158 | (46,0,0,'schweres Gewitter mit heftigem Starkregen und Hagel'), 159 | (48,0,0,'schweres Gewitter mit Orkanböen, heftigem Starkregen und Hagel'), 160 | (49,0,0,'schweres Gewitter mit extremen Orkanböen, heftigem Starkregen und Hagel'), 161 | (51,1,0,'Windböen'), 162 | (52,1,0,'Sturmböen'), 163 | (53,1,0,'schwere Sturmböen'), 164 | (54,1,0,'orkanartige Böen'), 165 | (55,1,0,'Orkanböen'), 166 | (56,1,0,'extreme Orkanböen'), 167 | (57,1,0,'Starkwind'), 168 | (58,1,0,'Sturm'), 169 | (59,4,0,'Nebel'), 170 | (61,2,0,'Starkregen'), 171 | (62,2,0,'heftiger Starkregen'), 172 | (63,2,0,'Dauerregen'), 173 | (64,2,0,'ergiebiger Dauerregen'), 174 | (65,2,0,'extrem ergiebiger Dauerregen'), 175 | (66,2,0,'extrem heftiger Starkregen'), 176 | (70,3,0,'leichter Schneefall'), 177 | (71,3,0,'Schneefall'), 178 | (72,3,0,'starker Schneefall'), 179 | (73,3,0,'extrem starker Schneefall'), 180 | (74,3,0,'Schneeverwehung'), 181 | (75,3,0,'starke Scheeverwehung'), 182 | (76,3,0,'extrem starke Schneeverwehung'), 183 | (79,5,0,'Leiterseilschwingungen'), 184 | (82,5,0,'strenger Frost'), 185 | (84,6,0,'Glätte'), 186 | (85,6,0,'Glatteis'), 187 | (87,6,0,'Glatteis'), 188 | (88,7,0,'Tauwetter'), 189 | (89,7,0,'starkes Tauwetter'), 190 | (90,0,0,'Gewitter'), 191 | (91,0,0,'starkes Gewitter'), 192 | (92,0,0,'schweres Gewitter'), 193 | (93,0,0,'extremes Gewitter'), 194 | (95,0,0,'schweres Gewitter mit extremem, heftigem Starkregen und Hagel'), 195 | (96,0,0,'extremes Gewitter mit Orkanböen, extrem heftigem Starkregen und Hagel'), 196 | (98,None,0,'Test-Warnung'), 197 | (99,None,0,'Test-Unwetterwarnung'), 198 | # Küsten-Warnungen 199 | (11,1,1,'Böen'), 200 | 
(12,1,1,'Wind'), 201 | (13,1,1,'Sturm'), 202 | # Hochsee-Warnungen 203 | (14,1,2,'Starkwind'), 204 | (15,1,2,'Sturm'), 205 | (16,1,2,'schwerer Sturm'), 206 | # Medizin-Meteorologische Warnungen 207 | (246,9,3,'UV-Index'), 208 | (247,8,3,'starke Hitze'), 209 | (248,8,3,'extreme Hitze'), 210 | # Vorabinformationen Unwetter 211 | (40,0,4,'Vorabinformation schweres Gewitter'), 212 | (55,1,4,'Vorabinformation Orkanböen'), 213 | (65,2,4,'Vorabinformation heftiger/ergiebiger Regen'), 214 | (75,3,4,'Vorabinformation starker Schneefall/Schneeverwehung'), 215 | (85,6,4,'Vorabinformation Glatteis'), 216 | (89,7,4,'Vorabinformation starkes Tauwetter'), 217 | (99,None,4,'Test-Vorabinformation Unwetter')] 218 | 219 | CAP_II_TYPE = { ii[0]:ii[1] for ii in CAP_II if ii[2]!=4 and ii[1] is not None } 220 | 221 | CAP_EVENT = { 222 | 'FROST':(5,22), 223 | 'GLÄTTE':(6,24), 224 | 'GLATTEIS':(6,None), 225 | 'GEWITTER':(0,31), 226 | 'WINDBÖEN':(1,51), 227 | # 'STURM':(1,58), 228 | 'NEBEL':(4,59), 229 | 'TEST-WARNUNG':(None,98), 230 | 'TEST-UNWETTERWARNUNG':(None,99), 231 | # Vorabinformation Unwetter 232 | 'VORABINFORMATION SCHWERES GEWITTER':(0,40), 233 | 'VORABINFORMATION ORKANBÖEN':(1,55), 234 | 'VORABINFORMATION HEFTIGER / ERGIEBIGER REGEN':(2,65), 235 | 'VORABINFORMATION STARKER SCHNEEFALL / SCHNEEVERWEHUNG':(3,75), 236 | 'VORABINFORMATION GLATTEIS':(6,85), 237 | 'VORABINFORMATION STARKES TAUWETTER':(7,89), 238 | 'TEST-VORABINFORMATION UNWETTER':(None,99), 239 | # Küsten-Warnungen 240 | 'BÖEN':(1,11), 241 | 'WIND':(1,12), 242 | 'STURM':(1,13), 243 | # Medizin-Meteorologische-Warnungen 244 | 'UV-INDEX':(9,246), 245 | 'STARKE HITZE':(8,247), 246 | 'EXTREME HITZE':(8,248) 247 | } 248 | 249 | # räumliche Auflösung 250 | DWD_CAP_URL_RES = { 251 | 'county': 'DISTRICT', 252 | 'city': 'COMMUNEUNION', 253 | 'Landkreis': 'DISTRICT', 254 | 'Gemeinde': 'COMMUNEUNION'} 255 | 256 | # Aktualisierungsstrategie und Aktualisierungsregeln 257 | # update strategy and update rules 258 | DWD_CAP_URL_UPDATE = { 259 | 'cell': { 260 | 'dwd': 'DWD', 261 | 'neutral': 'CELLS'}, 262 | 'event': { 263 | 'neutral': 'EVENT', 264 | None: 'EVENT'}} 265 | 266 | # Kompletter Warnstatus oder Differenzmeldungen 267 | # status or difference messages 268 | DWD_CAP_URL_STATUS_DIFFERENCE = { 269 | False: 'STAT', 270 | True: 'DIFF'} 271 | 272 | def get_eventtype_from_cap(capevent,eventtypeii): 273 | """ get JSON event type from CAP event and ii """ 274 | try: 275 | if capevent in CAP_EVENT: return CAP_EVENT[capevent][0] 276 | if 'GEWITTER' in capevent: return 0 277 | if 'STURM' in capevent: return 1 278 | if 'REGEN' in capevent: return 2 279 | if 'SCHNEEFALL' in capevent: return 3 280 | if 'FROST' in capevent: return 5 281 | if 'TAUWETTER' in capevent: return 7 282 | eventtypeii = int(eventtypeii) 283 | if eventtypeii in CAP_II_TYPE: return CAP_II_TYPE[eventtypeii] 284 | except Exception: 285 | pass 286 | return None 287 | 288 | def get_cap_url(resolution, strategy, rule, diff): 289 | """ compose URL for CAP files """ 290 | try: 291 | return DEFAULT_DWD_CAP_URL+'/'+DWD_CAP_URL_RES[resolution]+'_'+DWD_CAP_URL_UPDATE[strategy][rule]+'_'+DWD_CAP_URL_STATUS_DIFFERENCE[diff] 292 | except Exception: 293 | return None 294 | 295 | 296 | ############################################################################## 297 | # extract file names from directory listing # 298 | ############################################################################## 299 | 300 | class CapDirParser(html.parser.HTMLParser): 301 | 302 | def __init__(self, lang): 303 | 
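    # The directory listing served by opendata.dwd.de is plain HTML.
    # handle_starttag() below keeps only those href targets that end in
    # '.zip' and whose two letters right before the extension (e.g. 'DE')
    # match the requested language code, so get_files() returns the names
    # of the CAP archives available in that language.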
super(CapDirParser,self).__init__() 304 | self.lang = lang 305 | self.files = [] 306 | 307 | def handle_starttag(self, tag, attrs): 308 | """ process HTML start tags """ 309 | if tag=='a': 310 | for i in attrs: 311 | if i[0]=='href': 312 | file_lang = (i[1][-6:-4]).lower() 313 | if file_lang==self.lang and i[1][-4:].lower()=='.zip': 314 | self.files.append(i[1]) 315 | 316 | def get_files(self): 317 | """ get the list of file names found and close parser """ 318 | self.close() 319 | return self.files 320 | 321 | 322 | ############################################################################## 323 | # parse CAP file # 324 | ############################################################################## 325 | 326 | # Note: Strictly speaking CAP files are XML, not HTML. But for our 327 | # purpose a simple HTML parser is sufficient. 328 | 329 | # Note: There is one and only one alert per file. 330 | 331 | class CAPParser(html.parser.HTMLParser): 332 | 333 | # tags that require special handling 334 | TAGTYPE = { 335 | # tag sub multiple 336 | # section times 337 | 'alert': (True, False), 338 | 'info': (True, False), 339 | 'eventcode':(True, True), 340 | 'area': (True, True), 341 | 'geocode': (True, True), 342 | 'parameter':(True, True), 343 | 'code': (False, True)} 344 | # default: False False 345 | 346 | def __init__(self, log_tags=False): 347 | super(CAPParser,self).__init__() 348 | self.log_tags = log_tags 349 | self.lvl = 0 350 | self.tags = [] 351 | self.cap = dict() 352 | self.ar = [self.cap] 353 | 354 | def _is_dict(self, tag): 355 | """ Is this tag a sub-section? """ 356 | return self.TAGTYPE.get(tag,(False,False))[0] 357 | 358 | def _is_array(self, tag): 359 | """ Is this tag allowed multiple times? """ 360 | return self.TAGTYPE.get(tag,(False,False))[1] 361 | 362 | def handle_starttag(self, tag, attrs): 363 | """ handle start tag """ 364 | if self.log_tags: 365 | print(self.lvl,self.tags,'start',tag,attrs) 366 | self.tags.append(tag) 367 | self.lvl+=1 368 | if self._is_array(tag): 369 | if tag not in self.ar[-1]: 370 | self.ar[-1][tag] = [] 371 | if self._is_dict(tag): 372 | self.ar[-1][tag].append(dict()) 373 | self.ar.append(self.ar[-1][tag][-1]) 374 | elif self._is_dict(tag): 375 | self.ar[-1][tag] = dict() 376 | self.ar.append(self.ar[-1][tag]) 377 | 378 | def handle_endtag(self, tag): 379 | """ handle end tag """ 380 | del self.tags[-1] 381 | self.lvl-=1 382 | if self._is_dict(tag): 383 | del self.ar[-1] 384 | if self.log_tags: 385 | print(self.lvl,self.tags,'end',tag) 386 | 387 | def handle_data(self, data): 388 | """ handle data between tags """ 389 | if len(self.tags)>0: 390 | tag = self.tags[-1] 391 | if self._is_array(tag): 392 | if self._is_dict(tag): 393 | pass 394 | else: 395 | self.ar[-1][tag].append(data) 396 | pass 397 | elif self._is_dict(tag): 398 | if not data.isspace(): 399 | self.ar[-1]['@'] = data 400 | else: 401 | self.ar[-1][tag] = data 402 | if self.log_tags: 403 | print(self.lvl,self.tags,'data',data) 404 | 405 | ############################################################################## 406 | # Class for processing warnings in CAP files # 407 | ############################################################################## 408 | 409 | class DwdWarnings(object): 410 | 411 | def __init__(self, config_dict, verbose=False): 412 | 413 | # target path 414 | try: 415 | self.target_path = config_dict['WeatherServices']['path'] 416 | except LookupError: 417 | self.target_path = config_dict['DeutscherWetterdienst']['path'] 418 | # warning config data 419 | 
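    # Illustrative weewx.conf fragment matching the look-ups below; the
    # section and option names are taken from this code, the values are
    # examples only:
    #
    #   [DeutscherWetterdienst]
    #       path = /path/to/output          # where warn-<name>.inc/.json are written
    #       [[warning]]
    #           resolution = city           # or: county
    #           states = Sachsen, Thüringen
    #           icons = ../dwd/warn_icons_50x50
    #           [[[cities]]]                # or [[[counties]]] for resolution=county
    #               "Stadt Döbeln" = DL
    #               "Stadt Leipzig" = L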
warn_dict = config_dict['DeutscherWetterdienst']['warning'] 420 | # Bundeslaender und Landkreise, fuer die Warndaten 421 | # bereitgestellt werden sollen, aus weewx.conf lesen 422 | self.resolution = warn_dict.get('resolution','city') 423 | self.states = warn_dict.get('states',[]) 424 | if not isinstance(self.states,list): self.states=[self.states] 425 | _area = DWD_CAP_URL_RES.get(self.resolution,'COMMUNEUNION' if 'cities' in warn_dict else 'DISTRICT') 426 | if _area=='DISTRICT': 427 | self.filter_area = warn_dict.get('counties',dict()) 428 | elif _area=='COMMUNEUNION': 429 | self.filter_area = warn_dict.get('cities',dict()) 430 | # warn icons 431 | self.icon_pth = warn_dict['icons'] 432 | # source urls 433 | self.dwd_status_url = warn_dict.get('dwd_status_url',get_cap_url(self.resolution,'cell','neutral',False)) 434 | self.dwd_diff_url = warn_dict.get('dwd_diff_url',get_cap_url(self.resolution,'cell','neutral',True)) 435 | # logging 436 | self.verbose = verbose 437 | self.log_success = warn_dict.get('log_success',config_dict['DeutscherWetterdienst'].get('log_success',config_dict.get('log_success',False))) 438 | self.log_failure = warn_dict.get('log_failure',config_dict['DeutscherWetterdienst'].get('log_failure',config_dict.get('log_failure',False))) 439 | if int(config_dict.get('debug',0))>0 or verbose: 440 | self.log_success = True 441 | self.log_failure = True 442 | self.verbose = True 443 | if __name__ == "__main__" and verbose: 444 | print('-- configuration data ----------------------------------') 445 | print('log success: ',self.log_success) 446 | print('log failure: ',self.log_failure) 447 | print('status url: ',self.dwd_status_url) 448 | print('diff msgs url:',self.dwd_diff_url) 449 | print('filter area: ',self.filter_area) 450 | print('target path: ',self.target_path) 451 | print('--------------------------------------------------------') 452 | 453 | 454 | def download_warncellids(self, dryrun=False): 455 | 456 | # Path to store the file 457 | fn = os.path.join(self.target_path,'warncellids.csv') 458 | 459 | if os.path.exists(fn): 460 | mtime = os.path.getmtime(fn) 461 | mtime_str = formatdate(mtime,False,True) 462 | else: 463 | mtime = 0 464 | mtime_str = None 465 | if self.verbose: 466 | loginf('warncellids.csv mtime %s %s' % (mtime,mtime_str)) 467 | 468 | # Without specifying a user agent the server sends the error 469 | # message 403 470 | headers={'User-Agent':'weewx-DWD'} 471 | # If the file is not changed we need not download it again 472 | if mtime_str: headers['If-Modified-Since'] = mtime_str 473 | 474 | reply = requests.get(DEFAULT_DWD_WARNCELLID_URL,headers=headers) 475 | if self.verbose: 476 | loginf('warncellids URL %s' % reply.url) 477 | 478 | if reply.status_code==200: 479 | if self.log_success or self.verbose: 480 | loginf('warncellids successfully downloaded') 481 | if dryrun: 482 | print(reply.text) 483 | else: 484 | with open(fn,'w',encoding='utf-8') as f: 485 | f.write(reply.text) 486 | 487 | elif reply.status_code==304: 488 | if self.log_success or self.verbose: 489 | loginf('warncellids.csv is already up to date') 490 | else: 491 | if self.log_failure or self.verbose: 492 | logerr('error downloading warncellids: %s %s' % (reply.status_code,reply.reason)) 493 | 494 | with open(fn,'r',encoding='utf-8') as f: 495 | f.read(3) 496 | wcids = csv.DictReader(f,delimiter=';') 497 | print(wcids) 498 | #for i in wcids: print(i) 499 | 500 | def dir(self, diff, lang='de'): 501 | 502 | if diff: 503 | url = self.dwd_diff_url 504 | else: 505 | url = self.dwd_status_url 506 
| 507 | if self.verbose: 508 | loginf('about to download zip file list from %s' % url) 509 | 510 | reply = requests.get(url) 511 | 512 | if reply.status_code==200: 513 | if self.log_success or self.verbose: 514 | loginf('zip file list successfully downloaded') 515 | parser = CapDirParser(lang) 516 | parser.feed(reply.text) 517 | return parser.get_files() 518 | else: 519 | if self.log_failure or self.verbose: 520 | logerr('error downloading zip file list from %s: %s %s' % (reply.url,reply.status_code,reply.reason)) 521 | return None 522 | 523 | def download_zip(self, diff, file_name): 524 | 525 | if diff: 526 | url = self.dwd_diff_url 527 | else: 528 | url = self.dwd_status_url 529 | 530 | url = url+'/'+file_name 531 | 532 | if self.verbose: 533 | loginf('about to download %s' % url) 534 | 535 | headers={'User-Agent':'weewx-DWD'} 536 | reply = requests.get(url,headers=headers) 537 | 538 | if reply.status_code==200: 539 | if self.log_success or self.verbose: 540 | loginf('successfully downloaded %s' % reply.url) 541 | return zipfile.ZipFile(io.BytesIO(reply.content),'r') 542 | else: 543 | if self.log_failure or self.verbose: 544 | loginf('error downloading %s: %s %s' % (reply.url,reply.status_code,reply.reason)) 545 | return None 546 | 547 | def process_xml(self, zz, name, log_tags=False): 548 | """ get file 'name' out of zip file 'zipfile' and 549 | convert to dict() """ 550 | if self.verbose: 551 | loginf('processing CAP file %s' % name) 552 | xmltext = zz.read(name).decode(encoding='utf-8') 553 | parser = CAPParser(log_tags) 554 | parser.feed(xmltext) 555 | cap_dict = parser.cap 556 | parser.close() 557 | if self.verbose: 558 | loginf('CAP file %s processed' % name) 559 | return cap_dict 560 | 561 | def _area_filter(self, tag, val): 562 | try: 563 | reply = [] 564 | if tag=='area': 565 | for ii in val: 566 | try: 567 | if ii['areadesc'] in self.filter_area: 568 | wcid = None 569 | for jj in ii.get('geocode',[]): 570 | if jj.get('valuename','')=='WARNCELLID': 571 | wcid = jj.get('value') 572 | try: 573 | ags = wcid[-8:] 574 | state = AGS_STATES[ags[:2]] 575 | except Exception: 576 | state = [None,None] 577 | try: 578 | alt = int(float(ii['altitude'])*0.3048) 579 | except Exception: 580 | alt = None 581 | try: 582 | cie = int(float(ii['ceiling'])*0.3048) 583 | except Exception: 584 | cie = None 585 | reply.append((ii['areadesc'], 586 | wcid,alt,cie, 587 | state[0],state[1], 588 | self.filter_area[ii['areadesc']])) 589 | except Exception as e: 590 | if self.verbose: 591 | logerr(e) 592 | return reply 593 | except Exception as e: 594 | if self.verbose: 595 | logerr(e) 596 | return [] 597 | 598 | @staticmethod 599 | def _mktime(timestring): 600 | """ convert CAP timestamp string to epoch time """ 601 | ti = datetime.datetime.strptime(timestring,'%Y-%m-%dT%H:%M:%S%z') 602 | #print(ti) 603 | return int(ti.timestamp()*1000) 604 | 605 | def get_warnings(self, diff, filename, lang='de', log_tags=False): 606 | if self.verbose: 607 | loginf('processing file %s' % filename) 608 | # initialize dict for all regions to collect warnings for 609 | wwarn={self.filter_area[i]:dict() for i in self.filter_area} 610 | # download CAP file 611 | zz = self.download_zip(diff,filename) 612 | # process alerts included in the CAP file 613 | for name in zz.namelist(): 614 | # read file out of zip file and convert to dict 615 | cap_dict = self.process_xml(zz,name,log_tags) 616 | # process dict 617 | for lvl1 in cap_dict: 618 | # 619 | #print('lvl1',lvl1,'--------------') 620 | # search the alert for area references we 
are interested in 621 | areas = [] 622 | lvl1_dict = cap_dict[lvl1] 623 | info_dict = dict() 624 | if isinstance(lvl1_dict,dict): 625 | # items in 'alert' that are dicts 626 | for lvl2 in lvl1_dict: 627 | # 628 | #print('lvl2',lvl2,'------------') 629 | # There may be one or more sections, one for 630 | # each language 631 | if lvl2=='info': 632 | try: 633 | if lvl1_dict[lvl2]['language'][0:2].lower()==lang: 634 | info_dict = lvl1_dict[lvl2] 635 | # search section for sections 636 | for lvl3 in info_dict: 637 | ar = self._area_filter(lvl3,info_dict[lvl3]) 638 | if ar: areas.extend(ar) 639 | except Exception: 640 | pass 641 | else: 642 | pass 643 | else: 644 | # items in 'alert' that are not dicts 645 | pass 646 | # If the alert applies to areas we are interested in, 647 | # the variable areas contains the the area references. 648 | # Otherwise it is an empty array. 649 | if areas and lvl1_dict.get('status','')!='Test': 650 | areas.sort(key=lambda x:x[0]) 651 | altitude = (areas[0][2],areas[0][3]) 652 | for ii in areas: 653 | if (ii[2],ii[3])!=altitude: 654 | altitude = None 655 | break 656 | alert = { 657 | 'identifier':lvl1_dict.get('identifier'), 658 | 'sender':lvl1_dict.get('sender'), 659 | 'sent':DwdWarnings._mktime(lvl1_dict.get('sent')), 660 | 'status':lvl1_dict.get('status'), 661 | 'msgType':lvl1_dict.get('msgtype'), 662 | 'source':lvl1_dict.get('source'), 663 | 'scope':lvl1_dict.get('scope'), 664 | 'regionName':[ii[0] for ii in areas], 665 | 'altitudeRange':altitude, 666 | 'areas':areas, 667 | 'description':info_dict.get('description',''), 668 | 'event':info_dict.get('event',''), 669 | 'headline':info_dict.get('headline',''), 670 | 'instruction':info_dict.get('instruction',''), 671 | 'category':info_dict.get('category'), 672 | 'responseType':info_dict.get('responsetype'), 673 | 'urgency':info_dict.get('urgency'), 674 | 'severity':info_dict.get('severity')} 675 | # release time 676 | alert['released'] = DwdWarnings._mktime(info_dict.get('effective')) 677 | # start time 678 | alert['start'] = DwdWarnings._mktime(info_dict.get('onset')) 679 | # end time 680 | alert['end'] = DwdWarnings._mktime(info_dict.get('expires')) 681 | # 682 | for ii in info_dict.get('eventcode',[]): 683 | try: 684 | alert['eventCode-'+ii['valuename']] = ii['value'] 685 | except Exception: 686 | pass 687 | # 688 | alert['parameter'] = dict() 689 | for ii in info_dict.get('parameter',[]): 690 | try: 691 | alert['parameter'][ii['valuename']] = ii['value'] 692 | except Exception: 693 | pass 694 | # 695 | for ii in lvl1_dict.get('code',[]): 696 | if ii=='SILENT_UPDATE': 697 | alert['SILENT_UPDATE'] = True 698 | if ii=='PARTIAL_CLEAR': 699 | alert['PARTIAL_CLEAR'] = True 700 | if ii[:3]=='id:': 701 | alert['msgid'] = ii 702 | # severity level 703 | if info_dict.get('event','')[:16]=='VORABINFORMATION': 704 | alert['level'] = 1 705 | else: 706 | alert['level'] = CAP_SEVERITY.get(info_dict.get('severity'),0) 707 | alert["level_text"] = dwd_level_text(alert['level']) 708 | # event type 709 | alert['type'] = get_eventtype_from_cap( 710 | info_dict.get('event'), 711 | alert.get('eventCode-II')) 712 | # warn icon 713 | try: 714 | alert['icon'] = self.icon_pth+'/'+dwd_warn_icon_file(alert['type'],alert['level']) 715 | except Exception: 716 | pass 717 | # 718 | _areas = dict() 719 | for ii in areas: _areas[ii[-1]] = True 720 | _region = ', '.join([ii[0] for ii in areas]) 721 | for ii in _areas: 722 | if _region not in wwarn[ii]: 723 | wwarn[ii][_region] = [] 724 | wwarn[ii][_region].append(alert) 725 | 
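                    # Shape of the structure assembled above (illustrative and
                    # shortened; keys as set in 'alert' above):
                    #   wwarn['DL'] = { 'Stadt Döbeln': [ {
                    #       'identifier': ..., 'sent': ..., 'start': ..., 'end': ...,
                    #       'event': 'WINDBÖEN', 'headline': ..., 'description': ...,
                    #       'level': 2, 'level_text': 'Wetterwarnung',
                    #       'type': 1, 'icon': '<icons>/warn_icons_wind_1.png',
                    #       ... } ] }
                    # where 'DL' is the short name configured for the city or
                    # county; the intermediate region key is flattened away below.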
#print(json.dumps(alert,indent=4,ensure_ascii=False)) 726 | 727 | # The sub-dictionary for regions was included for the purpose 728 | # of sorting only. Now it is removed to get the right 729 | # data structure. 730 | for __ww in wwarn: 731 | x = [] 732 | for ii in wwarn[__ww]: x.extend(wwarn[__ww][ii]) 733 | wwarn[__ww] = x 734 | 735 | if self.verbose: 736 | loginf('file %s processed' % filename) 737 | #print(json.dumps(wwarn,indent=4,ensure_ascii=False)) 738 | return wwarn 739 | 740 | def write_html(self, wwarn, dryrun): 741 | for __ww in wwarn: 742 | s = "" 743 | stateShort = "" 744 | r = None 745 | for idx,val in enumerate(wwarn[__ww]): 746 | 747 | # get the state (Bundesland) out of the AGS code 748 | try: 749 | _states = list({(i[4],i[5]) for i in val['areas'] if i[-1]==__ww}) 750 | if len(_states)!=1: raise Exception 751 | stateShort = _states[0][0] 752 | val['stateShort'] = stateShort 753 | val['state'] = _states[0][1] 754 | except Exception: 755 | stateShort = None 756 | 757 | # list of warning regions that the alert applies to 758 | _region = ', '.join([i[0] for i in val['areas'] if i[-1]==__ww]) 759 | _region = _region.replace('Stadt ','').replace('Gemeinde ','') 760 | val['regionName'] = _region 761 | # if a new region starts, set a caption 762 | if r is None or r!=_region: 763 | r = _region 764 | s+='

%s

\n' % r 765 | 766 | # alert message 767 | s+='\n' 768 | if val.get('icon'): 769 | s+='\n' % (val['icon'],val['event']) 770 | __size=110 if int(val['level'])>2 else 100 771 | s+='\n
%s

%s

\n' % (__size,val['headline']) 772 | s='%s

gültig vom %s bis %s\n' % (s,time.strftime("%d.%m. %H:%M",time.localtime(val['start']/1000)),time.strftime("%d.%m. %H:%M",time.localtime(val['end']/1000))) 773 | 774 | if val.get('altitudeRange'): 775 | altitude = val['altitudeRange'] 776 | if altitude[0]>0 and altitude[1]>=3000: 777 | s += '
für Höhen ab %s m\n' % altitude[0] 778 | elif altitude[0]<=0 and altitude[1]<3000: 779 | s += '
für Höhen bis %s m\n' % altitude[1] 780 | elif altitude[0]>0 and altitude[1]<3000: 781 | s += '
für Höhen von %s m bis %s m\n' % altitude 782 | 783 | s+='

\n' 784 | 785 | if val.get('description'): 786 | s+="

%s

\n" % val['description'] 787 | if val.get('instruction'): 788 | s+="

%s

\n" % val['instruction'] 789 | 790 | s+='

%s – %s  –  %s – %s  –  II=%s – %s

' % (val['type'],val['event'],val['level'],val['level_text'],val.get('eventCode-II',''),val.get('eventCode-GROUP','')) 791 | 792 | if s: 793 | s+='

Quelle: DWD

\n' % dwd_copy.get(stateShort,"https://www.wettergefahren.de") 794 | else: 795 | s='

zur Zeit keine Warnungen

' 796 | 797 | if dryrun: 798 | print("########################################") 799 | print("-- HTML -- warn-%s.inc ------------------------------"%__ww) 800 | print(s) 801 | print("-- JSON -- warn-%s.json -----------------------------"%__ww) 802 | print(json.dumps(wwarn[__ww],indent=4,ensure_ascii=False)) 803 | else: 804 | with open("%s/warn-%s.inc" % (self.target_path,__ww),"w") as file: 805 | file.write(s) 806 | with open("%s/warn-%s.json" % (self.target_path,__ww),"w") as file: 807 | json.dump(wwarn[__ww],file,indent=4) 808 | 809 | if __name__ == "__main__": 810 | 811 | usage = """Usage: %prog [options] [zip_file_name [CAP_file_name]] 812 | 813 | Without an option from the commands group HTML and JSON files are 814 | created and saved according to the configuration.""" 815 | 816 | epilog = None 817 | 818 | # Create a command line parser: 819 | parser = optparse.OptionParser(usage=usage, epilog=epilog) 820 | 821 | # options 822 | parser.add_option("--config", dest="config_path", type=str, 823 | metavar="CONFIG_FILE", 824 | default=None, 825 | help="Use configuration file CONFIG_FILE.") 826 | parser.add_option("--weewx", action="store_true", 827 | help="Read config from weewx.conf.") 828 | parser.add_option("--diff", action="store_true", 829 | help="Use diff files instead of status files.") 830 | parser.add_option("--resolution", dest="resolution", type=str, 831 | metavar="VALUE", 832 | default=None, 833 | help="Overwrite configuration setting for resolution. Possible values are 'county' and 'city'.") 834 | parser.add_option("--lang", dest="lang", type=str, 835 | metavar="ISO639", 836 | default='de', 837 | help="Alert language. Default 'de'") 838 | 839 | group = optparse.OptionGroup(parser,"Output and logging options") 840 | group.add_option("--dry-run", action="store_true", 841 | help="Print what would happen but do not do it. Default is False.") 842 | group.add_option("--log-tags", action="store_true", 843 | help="Log tags while parsing the XML file.") 844 | group.add_option("-v","--verbose", action="store_true", 845 | help="Verbose output") 846 | parser.add_option_group(group) 847 | 848 | # commands 849 | group = optparse.OptionGroup(parser,"Commands") 850 | group.add_option("--get-warncellids", dest="warncellids", action="store_true", 851 | help="Download warn cell ids file.") 852 | group.add_option("--list-ii", dest="lsii", action="store_true", 853 | help="List defined II event codes") 854 | group.add_option("--list-zip", dest="lszip", action="store_true", 855 | help="Download and display zip file list") 856 | group.add_option("--list-cap", dest="lscap", action="store_true", 857 | help="List CAP files within a zip file. Requires zip file name as argument") 858 | group.add_option("--print-cap", dest="printcap", action="store_true", 859 | help="Convert one CAP file to JSON and print the result. 
Requires zip file name and CAP file name as arguments") 860 | parser.add_option_group(group) 861 | 862 | (options, args) = parser.parse_args() 863 | 864 | if options.weewx: 865 | config_path = "/etc/weewx/weewx.conf" 866 | else: 867 | config_path = options.config_path 868 | 869 | 870 | if config_path: 871 | print("Using configuration file %s" % config_path) 872 | config = configobj.ConfigObj(config_path) 873 | else: 874 | # test only 875 | print("Using test configuration") 876 | # vom Benutzer anzupassen 877 | states=['Sachsen','Thüringen'] 878 | counties={ 879 | 'Kreis Mittelsachsen - Tiefland':'DL', 880 | 'Stadt Leipzig':'L', 881 | 'Stadt Jena':'J', 882 | 'Stadt Dresden':'DD'} 883 | cities={ 884 | 'Stadt Döbeln':'DL', 885 | 'Stadt Leipzig':'L', 886 | 'Stadt Jena':'J', 887 | 'Stadt Dresden':'DD'} 888 | ICON_PTH="../dwd/warn_icons_50x50" 889 | target_path='.' 890 | 891 | config = { 892 | 'log_success':True, 893 | 'log_failure':True, 894 | 'DeutscherWetterdienst': { 895 | 'path':target_path, 896 | 'warning': { 897 | #'dwd_status_url': get_cap_url('city','cell','neutral',False), 898 | #'dwd_diff_url': get_cap_url('city','cell','neutral',True), 899 | 'icons': ICON_PTH, 900 | 'states' : states, 901 | 'counties': counties, 902 | 'cities': cities }}} 903 | 904 | if options.resolution: 905 | config['DeutscherWetterdienst']['warning']['resolution'] = options.resolution 906 | 907 | dwd = DwdWarnings(config,options.verbose) 908 | 909 | if options.lsii: 910 | c = -1 911 | for ii in CAP_II: 912 | if c!=ii[2]: 913 | if c>=0: print("") 914 | c = ii[2] 915 | print(CAP_II_CAPTION[c]) 916 | print(" II | type | c | event") 917 | print("---:|-----:|--:|--------------------------------------------------------------") 918 | print("%3s | %4s | %1s | %s" % (ii[0],ii[1],ii[2],ii[3])) 919 | elif options.warncellids: 920 | dwd.download_warncellids(options.dryrun) 921 | elif options.lszip: 922 | ff = dwd.dir(options.diff,options.lang) 923 | print('\n'.join(ff)) 924 | elif options.lscap: 925 | if len(args)>0: 926 | zz = dwd.download_zip(False,args[0]) 927 | cc = zz.namelist() 928 | print(len(cc),' files in ',args[0]) 929 | print('\n'.join(cc)) 930 | else: 931 | print("requires 1 file argument") 932 | elif options.printcap: 933 | if len(args)>=2: 934 | zz = dwd.download_zip(False,args[0]) 935 | cap_dict = dwd.process_xml(zz,args[1]) 936 | print(json.dumps(cap_dict,indent=2,ensure_ascii=False)) 937 | else: 938 | print("requires 2 file arguments") 939 | else: 940 | if len(args)>0: 941 | zfn = args[0] 942 | else: 943 | ff = dwd.dir(options.diff,options.lang) 944 | zfn = ff[-1] 945 | wwarn = dwd.get_warnings(options.diff,zfn,lang=options.lang,log_tags=options.log_tags) 946 | dwd.write_html(wwarn,options.dry_run) 947 | 948 | -------------------------------------------------------------------------------- /bin/user/weatherserviceshealth.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # Copyright (C) 2024 Johanna Roedenbeck 3 | 4 | """ 5 | 6 | This program is free software: you can redistribute it and/or modify 7 | it under the terms of the GNU General Public License as published by 8 | the Free Software Foundation, either version 3 of the License, or 9 | (at your option) any later version. 10 | 11 | This program is distributed in the hope that it will be useful, 12 | but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | GNU General Public License for more details. 
15 | 16 | You should have received a copy of the GNU General Public License 17 | along with this program. If not, see . 18 | 19 | """ 20 | 21 | """ 22 | 23 | Note: If you start WeeWX between 11:00 and 12:00 no actual data 24 | will be available. 25 | 26 | """ 27 | 28 | import json 29 | import configobj 30 | import time 31 | import threading 32 | import os 33 | import os.path 34 | import random 35 | import copy 36 | 37 | import __main__ 38 | if __name__ == '__main__': 39 | import optparse 40 | import sys 41 | sys.path.append('/usr/share/weewx') 42 | x = os.path.dirname(os.path.abspath(os.path.dirname(__main__.__file__))) 43 | if x not in sys.path: 44 | sys.path.append(x) 45 | 46 | if __name__ == '__main__' or __main__.__file__.endswith('weatherservices.py'): 47 | 48 | def logdbg(x): 49 | print('DEBUG health',x) 50 | def loginf(x): 51 | print('INFO health',x) 52 | def logerr(x): 53 | print('ERROR health',x) 54 | 55 | else: 56 | 57 | try: 58 | # Test for new-style weewx logging by trying to import weeutil.logger 59 | import weeutil.logger 60 | import logging 61 | log = logging.getLogger("user.DWD.health") 62 | 63 | def logdbg(msg): 64 | log.debug(msg) 65 | 66 | def loginf(msg): 67 | log.info(msg) 68 | 69 | def logerr(msg): 70 | log.error(msg) 71 | 72 | except ImportError: 73 | # Old-style weewx logging 74 | import syslog 75 | 76 | def logmsg(level, msg): 77 | syslog.syslog(level, 'user.DWD.base: %s' % msg) 78 | 79 | def logdbg(msg): 80 | logmsg(syslog.LOG_DEBUG, msg) 81 | 82 | def loginf(msg): 83 | logmsg(syslog.LOG_INFO, msg) 84 | 85 | def logerr(msg): 86 | logmsg(syslog.LOG_ERR, msg) 87 | 88 | import weeutil.weeutil 89 | import weewx.units 90 | import weewx.accum 91 | from user.weatherservicesutil import wget, BaseThread, WEEKDAY_LONG 92 | 93 | ACCUM_STRING = { 'accumulator':'firstlast','extractor':'last' } 94 | 95 | NEG_NEG_SYMBOL = (2.2,"-25 -25 110 50", 96 | """ 97 | 99 | 100 | 102 | """) 103 | NEG_SYMBOL = (1.0,"-25 -25 50 50", 104 | """ 105 | 107 | """) 108 | NEUTRAL_SYMBOL = (1.0,"-25 -25 50 50", 109 | """ 110 | 112 | """) 113 | POS_SYMBOL = (1.0,"-25 -25 50 50", 114 | """ 115 | 117 | """) 118 | SVG_START = """ 124 | """ 125 | SVG_END = """ 126 | """ 127 | VAL_SYMBOLS = { 128 | 'geringe Gefährdung': NEG_SYMBOL, 129 | 'hohe Gefährdung': NEG_NEG_SYMBOL, 130 | 'kein Einfluss': NEUTRAL_SYMBOL, 131 | 'positiver Einfluss': POS_SYMBOL, 132 | } 133 | 134 | def symbol(val, height): 135 | if val not in VAL_SYMBOLS: return val 136 | sym = VAL_SYMBOLS[val] 137 | width = height*sym[0] 138 | return '%s%s%s%s' % (SVG_START % (width,height,sym[1]),val,sym[2],SVG_END) 139 | 140 | OK_SYMBOL = """ 144 | """ 145 | THERMO_TEXT = """ 148 | %s 149 | 150 | """ 151 | THERMO_SMILEY = """ 152 | 153 | 154 | 157 | """ 158 | 159 | THERMO_SYMBOLS = { 160 | 'extreme Kältereize':-4, 161 | 'starke Kältereize':-3, 162 | 'mäßige Kältereize':-2, 163 | 'schwache Kältereize':-1, 164 | 'keine':0, 165 | 'schwache Wärmebelastung':1, 166 | 'mäßige Wärmebelastung':2, 167 | 'starke Wärmebelastung':3, 168 | 'extreme Wärmebelastung':4, 169 | } 170 | 171 | THERMO_COLORS = [ 172 | '#0000ff', # -4 173 | '#006eff', # -3 174 | '#00cdff', # -2 175 | '#82ffff', # -1 176 | '#3ea72d', # 0 177 | '#f9e814', # 1 178 | '#f18b00', # 2 179 | '#e53210', # 3 180 | '#b567a4', # 4 181 | ] 182 | 183 | def thermometer(x, y, color, value): 184 | v = round(value*0.12+0.206264,6) 185 | return """ 188 | 191 | 192 | """ % (color,x,y,-v,v,x,y) 193 | 194 | def thermalstress_symbol(val, height): 195 | if isinstance(val,int): 196 | v = val 197 | else: 198 | if 
val not in THERMO_SYMBOLS: return val 199 | v = THERMO_SYMBOLS[val] 200 | if v<=-4: 201 | col = THERMO_COLORS[-4+4] 202 | elif v>=4: 203 | col = THERMO_COLORS[4+4] 204 | else: 205 | col = THERMO_COLORS[int(round(v,0))+4] 206 | """ 207 | if v<-1.5: 208 | v = thermometer(6,21.5,col,16.6666666666) 209 | w = thermometer(19,21.5,col,16.66666666666666) 210 | """ 211 | if v<-0.5: 212 | r = 40/v/v if v>-3.65 else 3 213 | v = thermometer(5,21.5,col,16.66666666666) 214 | w = THERMO_SMILEY % (col,r,r) 215 | elif v<=0.5: 216 | v = thermometer(6,21.5,col,50) 217 | w = OK_SYMBOL 218 | else: 219 | r = 40/v/v if v<3.65 else 3 220 | v = thermometer(5,21.5,col,83.33333333333) 221 | # w = THERMO_TEXT % (0.5*height,col,v) 222 | w = THERMO_SMILEY % (col,r,r) 223 | """ 224 | elif v<=2.5: 225 | v = thermometer(6,21.5,col,83.33333333333) 226 | w = thermometer(19,21.5,col,83.3333333333333) 227 | else: 228 | v = '%s%s%s' % ( 229 | thermometer(5,21.5,col,83.33333333333), 230 | thermometer(12.5,21.5,col,83.33333333333), 231 | thermometer(20,21.5,col,83.3333333333333) 232 | ) 233 | """ 234 | return '%s%s%s%s%s' % (SVG_START % (height,height,'0 0 25 25'),val,v,w,SVG_END) 235 | 236 | 237 | class DwdHealthThread(BaseThread): 238 | 239 | BASE_URL = 'https://opendata.dwd.de/climate_environment/health' 240 | 241 | ALERTS_URL = { 242 | 'biowetter':'alerts/biowetter.json', 243 | 'thermal':'alerts/gt.json', 244 | 'pollen':'alerts/s31fg.json', 245 | 'uvi':'alerts/uvi.json' 246 | } 247 | 248 | TIMESPANS1 = [ 249 | 'today', 250 | 'tomorrow', 251 | 'dayafter_to' 252 | ] 253 | 254 | TIMESPANS2 = [ 255 | 'today_morning', 256 | 'today_afternoon', 257 | 'tomorrow_morning', 258 | 'tomorrow_afternoon', 259 | 'dayafter_to_morning', 260 | 'dayafter_to_afternoon', 261 | ] 262 | 263 | BIOWETTER_OBS = { 264 | 'biowetterValidFrom':('unix_epoch','group_time'), 265 | 'biowetterValidTo':('unix_epoch','group_time'), 266 | 'biowetterIssued':('unix_epoch','group_time'), 267 | 'biowetterNextUpdate':('unix_epoch','group_time'), 268 | 'biowetterValue':(None,None), 269 | 'biowetterExpectedThermalStress':('count','group_count'), 270 | } 271 | 272 | POLLEN_OBS = { 273 | 'pollenValidFrom':('unix_epoch','group_time'), 274 | 'pollenValidTo':('unix_epoch','group_time'), 275 | 'pollenIssued':('unix_epoch','group_time'), 276 | 'pollenNextUpdate':('unix_epoch','group_time'), 277 | } 278 | 279 | POLLEN_TYPES = [ 280 | 'Hasel', 281 | 'Erle', 282 | 'Esche', 283 | 'Birke', 284 | 'Graeser', 285 | 'Roggen', 286 | 'Beifuss', 287 | 'Ambrosia', 288 | ] 289 | 290 | POLLEN_COLORS = [ 291 | '#3ea72d', # 0 292 | '#dafac7', # 0.5 293 | '#fee391', # 1 294 | '#fec44e', # 1.5 295 | '#fe9929', # 2 296 | '#f03b20', # 2.5 297 | '#bd0026', # 3 298 | ] 299 | 300 | @property 301 | def provider_name(self): 302 | return 'DWD' 303 | 304 | @property 305 | def provider_url(self): 306 | return 'https://www.dwd.de' 307 | 308 | @classmethod 309 | def is_provided(cls, model): 310 | return model in cls.ALERTS_URL 311 | 312 | def __init__(self, name, conf_dict, archive_interval): 313 | # get logging configuration 314 | log_success = weeutil.weeutil.to_bool(conf_dict.get('log_success',False)) 315 | log_failure = weeutil.weeutil.to_bool(conf_dict.get('log_failure',True)) 316 | # initialize thread 317 | super(DwdHealthThread,self).__init__(name='DWD-Health-'+name,log_success=log_success,log_failure=log_failure) 318 | # archive interval 319 | self.query_interval = weeutil.weeutil.to_int(archive_interval) 320 | # log sleeping time or not 321 | self.log_sleeping = 
weeutil.weeutil.to_bool(conf_dict.get('log_sleeping',False)) 322 | # config 323 | self.model = conf_dict.get('model') 324 | if self.model: self.model = self.model.lower() 325 | self.url = '%s/%s' % (DwdHealthThread.BASE_URL,DwdHealthThread.ALERTS_URL[self.model]) 326 | self.area = conf_dict.get('area') 327 | self.target_path = conf_dict.get('path','.') 328 | self.filename = '%s-%s' % (self.model,conf_dict.get('file',self.area)) 329 | self.data = [] 330 | self.tab = (dict(),dict()) 331 | loginf("thread '%s': area '%s', URL '%s'" % (self.name,self.area,self.url)) 332 | self.last_update = 0 333 | self.next_update = 0 334 | self.area_name = '' 335 | self.lock = threading.Lock() 336 | # register observation types and accumulators 337 | prefix = conf_dict.get('prefix','') 338 | _accum = dict() 339 | if self.model=='biowetter': 340 | obs_dict = DwdHealthThread.BIOWETTER_OBS 341 | elif self.model=='pollen': 342 | obs_dict = DwdHealthThread.POLLEN_OBS 343 | for plant in DwdHealthThread.POLLEN_TYPES: 344 | obs_dict['pollen%sValue' % plant] = ('count','group_count') 345 | obs_dict['pollen%sText' % plant] = (None,None) 346 | else: 347 | obs_dict = None 348 | if obs_dict: 349 | for key, obs in obs_dict.items(): 350 | obstype = key 351 | obsgroup = obs[1] 352 | #print(prefix,obstype,obsgroup) 353 | if obsgroup: 354 | # number variable 355 | if prefix: 356 | weewx.units.obs_group_dict.setdefault(prefix+obstype[0].upper()+obstype[1:],obsgroup) 357 | else: 358 | weewx.units.obs_group_dict.setdefault(obstype,obsgroup) 359 | else: 360 | # string variable 361 | if prefix: 362 | _accum[prefix+obstype[0].upper()+obstype[1:]] = ACCUM_STRING 363 | else: 364 | _accum[obstype] = ACCUM_STRING 365 | if _accum: 366 | weewx.accum.accum_dict.maps.append(_accum) 367 | # HTML config 368 | self.show_placemark = weeutil.weeutil.to_bool( 369 | conf_dict.get('show_placemark',True) 370 | ) 371 | self.plusminus_icon_size = weeutil.weeutil.to_int( 372 | conf_dict.get('plusminus_icon_size',20) 373 | ) 374 | self.thermalstress_icon_size = weeutil.weeutil.to_int( 375 | conf_dict.get('thermalstress_icon_size', self.plusminus_icon_size*2) 376 | ) 377 | # orientation of the HTML table 378 | orientation = conf_dict.get('orientation','h,v') 379 | if not isinstance(orientation,list): 380 | if orientation.lower()=='both': orientation = 'h,v' 381 | orientation = orientation.split(',') 382 | orientation = [ii[0].lower() for ii in orientation] 383 | self.horizontal_table = 'h' in orientation 384 | self.vertical_table = 'v' in orientation 385 | # classes to include in and surronding
tag 386 | self.horizontal_table_classes = conf_dict.get( 387 | 'horizontal_table_classes', 388 | 'dwd%stable table-striped' % self.model 389 | ) 390 | self.horizontal_div_classes = conf_dict.get( 391 | 'horizontal_div_classes', 392 | 'dwd%s-horizontal' % self.model 393 | ) 394 | self.horizontal_main_effect_td_classes = conf_dict.get( 395 | 'horizontal_main_effect_td_classes', 396 | 'records-header' 397 | ) 398 | self.vertical_table_classes = conf_dict.get( 399 | 'vertical_table_classes', 400 | 'dwd%stable table-striped' % self.model 401 | ) 402 | self.vertical_div_classes = conf_dict.get( 403 | 'vertical_div_classes', 404 | 'dwd%s-vertical' % self.model 405 | ) 406 | # visibility according to viewport size 407 | class_hidden = conf_dict.get('class_hidden','hidden-xs') 408 | class_visible = conf_dict.get('class_visible','visible-xs-block') 409 | if self.horizontal_table and self.vertical_table: 410 | # both tables are included, so we need to set visibility 411 | self.horizontal_div_classes = ((self.horizontal_div_classes+' ') if self.horizontal_div_classes else '')+class_hidden 412 | self.vertical_div_classes = ((self.vertical_div_classes+' ') if self.vertical_div_classes else '')+class_visible 413 | # test output 414 | if __name__ == "__main__": 415 | print('obs_group_dict') 416 | print('') 417 | print(weewx.units.obs_group_dict) 418 | print('accum_dict') 419 | print(weewx.accum.accum_dict) 420 | print('') 421 | print('log_success',self.log_success) 422 | print('log_failure',self.log_failure) 423 | print('log_sleeping',self.log_sleeping) 424 | print('model',self.model) 425 | print('filename',self.filename) 426 | 427 | def get_data(self, ts): 428 | """ get actual biometeorologic forecast data for the given 429 | timestamp 430 | 431 | Note: This method is called by another thread. So lock 432 | internal data from changing. 433 | """ 434 | data = dict() 435 | try: 436 | self.lock.acquire() 437 | # If data is already received, timestamps of actual and next 438 | # release are always available, independent of the time data 439 | # is requested for. 
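        # Each entry stored in 'data' below is a (value, unit, unit group)
        # triple in the layout of weewx.units.ValueTuple, e.g. (illustrative):
        #   data['biowetterLastUpdate'] = (1700000000, 'unix_epoch', 'group_time')
        # The unit and group let the consuming WeeWX service apply the usual
        # unit conversion and formatting.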
440 | data['%sLastUpdate' % self.model] = ( 441 | self.last_update if self.last_update else None, 442 | 'unix_epoch', 443 | 'group_time' 444 | ) 445 | data['%sNextUpdate' % self.model] = ( 446 | self.next_update if self.next_update else None, 447 | 'unix_epoch', 448 | 'group_time' 449 | ) 450 | # Look for data for the requested timestamp 451 | for ii in self.data: 452 | if ts<=ii[1] and ts>ii[0]: 453 | data.update(ii[2]) 454 | finally: 455 | self.lock.release() 456 | return data,5 457 | 458 | def convert_timestamp(self, val): 459 | """ convert timestamp to unix_epoch """ 460 | if val is None: return None 461 | val = str(val) 462 | if 'T' in val: 463 | val = val.split('T') 464 | else: 465 | val = val.split(' ') 466 | if len(val)<1: return None 467 | dt = val[0].split('-') 468 | if len(val)<2: 469 | ti = [0,0,0] 470 | else: 471 | ti = val[1].split(':') 472 | try: 473 | return time.mktime(( 474 | int(dt[0]), # year 475 | int(dt[1]), # month 476 | int(dt[2]), # day 477 | int(ti[0]), # hour 478 | int(ti[1]), # minute 479 | int(ti[2]) if len(ti)>2 else 0, # second 480 | -1, 481 | -1, 482 | -1 483 | )) 484 | except (TypeError,OverflowError,ValueError) as e: 485 | logerr("thread '%s': convert timestamp %s - %s" % (self.name,e.__class__.__name__,e)) 486 | return None 487 | 488 | def process_pollen(self, zone, name, author, last_update, next_update, now, legend): 489 | """ pollen forecast """ 490 | lang = 'de' 491 | data = [] 492 | tab = dict() 493 | timespans = dict() 494 | # name of the area data is valid for 495 | if zone.get('partregion_id',-1)==-1: 496 | area_name = zone.get('region_name',zone['region_id']) 497 | else: 498 | area_name = zone.get('partregion_name',zone['partregion_id']) 499 | # process legend 500 | legend_dict = dict() 501 | a = dict() 502 | b = dict() 503 | for ii in legend: 504 | if ii.startswith('id'): 505 | no = ii.split('_')[0] 506 | if ii.endswith('desc'): 507 | b[no] = legend[ii] 508 | else: 509 | a[no] = legend[ii] 510 | for ii in a: 511 | legend_dict[a[ii]] = b.get(ii) 512 | del a 513 | del b 514 | # test output 515 | if __name__ == "__main__": 516 | print('Legende:') 517 | print(legend_dict) 518 | # initialize timespans (today, tomorrow, day after tomorrow) 519 | dt = last_update 520 | for ii in range(3): 521 | start, end = weeutil.weeutil.archiveDaySpan(dt) 522 | wday = WEEKDAY_LONG[lang][time.localtime(start).tm_wday] 523 | dd = time.strftime('%d.%m.',time.localtime(start)) 524 | ti = None 525 | data.append((start,end,{ 526 | 'pollenIssued':(last_update,'unix_epoch','group_time'), 527 | 'pollenValidFrom':(start,'unix_epoch','group_time'), 528 | 'pollenValidTo':(end,'unix_epoch','group_time'), 529 | })) 530 | if end>=now: 531 | timespans[(wday,dd,ti)] = None 532 | dt = end+3600 533 | # process data 534 | for plant in zone.get('Pollen',[]): 535 | for idx,timespan in enumerate(DwdHealthThread.TIMESPANS1): 536 | if timespan in zone['Pollen'][plant]: 537 | val = zone['Pollen'][plant][timespan] 538 | if val.isdigit(): 539 | val_f = float(val) 540 | else: 541 | val_f = val.split('-') 542 | try: 543 | val_f = float(val_f[0])+0.5 544 | except (ValueError,OverflowError): 545 | val_f = None 546 | data[idx][2]['pollen'+plant+'Value'] = (val_f,None,None) 547 | data[idx][2]['pollen'+plant+'Text'] = (legend_dict.get(val),None,None) 548 | wday = WEEKDAY_LONG[lang][time.localtime(data[idx][0]).tm_wday] 549 | dt = time.strftime('%d.%m.',time.localtime(data[idx][0])) 550 | ti = None 551 | if end>=now: 552 | if plant not in tab: 553 | tab[plant] = dict() 554 | if (wday,dt,ti) not in 
tab[plant]: 555 | tab[plant][(wday,dt,ti)] = dict() 556 | tab[plant][(wday,dt,ti)] = {'value':val_f,'effect':legend_dict.get(val)} 557 | return data, (tab, timespans), area_name 558 | 559 | def process_bio(self, zone, name, author, last_update, next_update, now): 560 | """ process bioweather data """ 561 | lang = 'de' 562 | data = [] 563 | tab = dict() 564 | timespans = dict() 565 | # name of the area data is valid for 566 | area_name = zone.get('name',zone['id']) 567 | # process data 568 | for timespan in DwdHealthThread.TIMESPANS2: 569 | if timespan in zone: 570 | #loginf(zone[timespan]) 571 | forecast = zone[timespan] 572 | dt = forecast['date'] 573 | ti = forecast['name'] 574 | val = forecast['value'] 575 | val_list = str(val).split('-') 576 | try: 577 | if val_list[2]=='00': 578 | thermalstress = 0 579 | elif val_list[2].startswith('w'): 580 | thermalstress = int(val_list[2][1:]) 581 | elif val_list[2].startswith('k'): 582 | thermalstress = -int(val_list[2][1:]) 583 | else: 584 | raise ValueError("unknown heat stress %s" % val_list[2]) 585 | except (ValueError,TypeError): 586 | thermalstress = None 587 | #print(name,dt,ti,val) 588 | if ti.startswith('1'): 589 | start = self.convert_timestamp('%sT0:0:0' % dt) 590 | end = self.convert_timestamp('%sT12:0:0' % dt) 591 | wday = WEEKDAY_LONG[lang][time.localtime(end).tm_wday] 592 | dt = time.strftime('%d.%m.',time.localtime(end)) 593 | else: 594 | start = self.convert_timestamp('%sT12:0:0' % dt) 595 | end = self.convert_timestamp('%sT24:0:0' % dt) 596 | wday = WEEKDAY_LONG[lang][time.localtime(start).tm_wday] 597 | dt = time.strftime('%d.%m.',time.localtime(start)) 598 | if end>=now: 599 | timespans[(wday,dt,ti)] = val 600 | _data = { 601 | 'biowetterIssued':(last_update,'unix_epoch','group_time'), 602 | 'biowetterValidTo':(end,'unix_epoch','group_time'), 603 | 'biowetterValidFrom':(start,'unix_epoch','group_time'), 604 | 'biowetterValue':(val,None,None), 605 | 'biowetterExpectedThermalStress':(thermalstress,'count','group_count'), 606 | } 607 | for effect in forecast['effect']: 608 | #print(effect['name'],effect['value']) 609 | if end>=now: 610 | if effect['name'] not in tab: 611 | tab[effect['name']] = dict() 612 | if (wday,dt,ti) not in tab[effect['name']]: 613 | tab[effect['name']][(wday,dt,ti)] = dict() 614 | tab[effect['name']][(wday,dt,ti)]['effect'] = effect['value'] 615 | for subeffect in effect.get('subeffect',[]): 616 | nm = subeffect['name'] 617 | vl = subeffect['value'] 618 | #print('%-40s: %s' % (nm,vl)) 619 | if end>=now: 620 | nmm = '* %s' % nm 621 | if nmm not in tab: 622 | tab[nmm] = dict() 623 | if (wday,dt,ti) not in tab[nmm]: 624 | tab[nmm][(wday,dt,ti)] = dict() 625 | tab[nmm][(wday,dt,ti)]['effect'] = vl 626 | for recomm in forecast['recomms']: 627 | #print(recomm['name'],recomm['value']) 628 | if end>=now: 629 | if recomm['name'] not in tab: 630 | tab[recomm['name']] = dict() 631 | if (wday,dt,ti) not in tab[recomm['name']]: 632 | tab[recomm['name']][(wday,dt,ti)] = dict() 633 | tab[recomm['name']][(wday,dt,ti)]['recomm'] = recomm['value'] 634 | #print('') 635 | data.append((start,end,_data)) 636 | return data, (tab, timespans), area_name 637 | 638 | def process_uvi(self, zone, name, author, last_update, next_update, now, forecast_day): 639 | """ process bioweather data """ 640 | lang = 'de' 641 | data = [] 642 | tab = dict() 643 | timespans = dict() 644 | # name of the area data is valid for 645 | area_name = zone.get('city','') 646 | # forecast start timestamp 647 | start_timestamp = 
648 |         # process data
649 |         for idx, timespan in enumerate(DwdHealthThread.TIMESPANS1):
650 |             if timespan in zone.get('forecast',dict()):
651 |                 val = zone['forecast'][timespan]
652 |                 dt = start_timestamp+idx*86400
653 |                 start, end = weeutil.weeutil.archiveDaySpan(dt)
654 |                 dt = time.localtime(dt)
655 |                 wday = WEEKDAY_LONG[lang][dt.tm_wday]
656 |                 dt = time.strftime('%d.%m.',dt)
657 |                 ti = None
658 |                 timespans[(wday,dt,ti)] = val
659 |                 data.append((start,end,{
660 |                     'uviforecastIssued':(last_update,'unix_epoch','group_time'),
661 |                     'uviforecastValidTo':(end,'unix_epoch','group_time'),
662 |                     'uviforecastValidFrom':(start,'unix_epoch','group_time'),
663 |                     'uviforecastValue':(val,'uv_index','group_uv')
664 |                 }))
665 |         return data, (tab, timespans), area_name
666 | 
667 |     def write_html(self, tabtimespans, area_name, last_update, now):
668 |         if tabtimespans:
669 |             tab = tabtimespans[0]
670 |             timespans = tabtimespans[1]
671 |             colwidth = 100/(len(timespans)+1)
672 |             s = ''
673 |             if self.show_placemark:
674 |                 s += '<p>%s</p>\n' % area_name
675 |             #if self.horizontal_div_classes:
676 |             #    s += '<div class="%s">\n' % self.horizontal_div_classes
677 |             s += '<table class="%s">' % self.horizontal_table_classes
678 |             s += '<tr>'
679 |             s += '<th width="%s%%"></th>' % colwidth
680 |             timespansvalue = False
681 |             for ii,val in timespans.items():
682 |                 s += '<th width="%s%%">%s<br />%s' % (colwidth,ii[0],ii[1])
683 |                 if ii[2]: s += '<br />%s' % ii[2].replace('Tageshälfte','Tages&shy;hälfte')
684 |                 s += '</th>'
685 |                 if val is not None: timespansvalue = True
686 |             s += '</tr>'
687 |             s += '<tbody>'
688 |             if timespansvalue:
689 |                 s += '<tr><td>Wert</td>'
690 |                 for _,ii in timespans.items():
691 |                     s += '<td>%s</td>' % ii
692 |                 s += '</tr>'
693 |             for ii in tab:
694 |                 vertical_align = 'middle'
695 |                 for jj in timespans:
696 |                     if jj in tab[ii] and 'recomm' in tab[ii][jj] and tab[ii][jj]['recomm']!='keine':
697 |                         vertical_align = 'top'
698 |                         break
699 |                 if ii.startswith('*') or self.model!='biowetter':
700 |                     s += '<tr><td style="vertical-align:%s">%s</td>' % (
701 |                         vertical_align,
702 |                         ii
703 |                     )
704 |                 else:
705 |                     s += '<tr><td class="%s" colspan="%s">%s</td></tr>' % (
706 |                         self.horizontal_main_effect_td_classes,
707 |                         len(timespans)+1,
708 |                         ii
709 |                     )
710 |                     s += '<tr><td style="vertical-align:%s">%s</td>' % (
711 |                         vertical_align,
712 |                         'Insgesamt'
713 |                     )
714 |                 for jj in timespans:
715 |                     s += '<td style="vertical-align:%s">' % vertical_align
716 |                     if jj in tab[ii]:
717 |                         if self.model=='pollen':
718 |                             col = tab[ii][jj].get('value')
719 |                             if col is not None:
720 |                                 if col>3: col = 3
721 |                                 if col<0: col = 0
722 |                                 tcl = '#ffffff' if col<0.25 or col>1.75 else '#000000'
723 |                                 col = DwdHealthThread.POLLEN_COLORS[int(round(col*2.0,0))]
724 |                                 s += ('<span style="color:%s;background-color:%s">&nbsp;%s&nbsp;</span> ' % (tcl,col,tab[ii][jj]['value'])).replace('.',',')
725 |                         effect = tab[ii][jj].get('effect','')
726 |                         if effect=='geringe Gefährdung':
727 |                             col = '#ffd879'
728 |                         elif effect=='hohe Gefährdung':
729 |                             col = '#e53210'
730 |                         elif effect=='positiver Einfluss':
731 |                             col = '#7cb5ec'
732 |                         else:
733 |                             col = ''
734 |                         if self.model=='biowetter':
735 |                             if ii=='Thermische Belastung':
736 |                                 effect = thermalstress_symbol(effect,self.thermalstress_icon_size)
737 |                             else:
738 |                                 effect = symbol(effect,self.plusminus_icon_size)
739 |                         if self.model=='pollen':
740 |                             s += ''
749 |                     s += '</td>'
750 |                 s += '</tr>'
751 |             s += '</tbody>'
752 |             s += '</table>\n'
753 |             #if self.horizontal_div_classes:
754 |             #    s += '</div>\n'
755 |             if self.model=='biowetter':
756 |                 # danger
757 |                 s += '<ul>'
758 |                 for ii in ('Legende:','hohe Gefährdung','geringe Gefährdung','kein Einfluss','positiver Einfluss'):
759 |                     sym = symbol(ii,self.plusminus_icon_size)
760 |                     txt = ii if sym==ii else '%s %s' % (sym,ii)
761 |                     s += '<li>%s</li>' % txt
762 |                 s += '</ul>'
763 |                 # heat stress
764 |                 s += '<ul>'
765 |                 for ii in ('Wärmebelastung:','keine','schwach','mäßig','stark','extrem'):
766 |                     sym = thermalstress_symbol(ii+'e Wärmebelastung' if ii not in ('Wärmebelastung:','keine') else ii,self.plusminus_icon_size*2)
767 |                     txt = ii if sym==ii else '%s %s' % (sym,ii)
768 |                     s += '<li>%s</li>' % txt
769 |                 s += '</ul>'
770 |                 # cold stress
771 |                 s += '<ul>'
772 |                 for ii in ('Kältebelastung:','keine','schwach','mäßig','stark','extrem'):
773 |                     sym = thermalstress_symbol(ii+'e Kältereize' if ii not in ('Kältebelastung:','keine') else ii,self.plusminus_icon_size*2)
774 |                     txt = ii if sym==ii else '%s %s' % (sym,ii)
775 |                     s += '<li>%s</li>' % txt
776 |                 s += '</ul>'
777 |             elif self.model=='pollen':
778 |                 s += '<ul>'
779 |                 s += '<li>Belastung:</li>'
780 |                 for idx, col in enumerate(DwdHealthThread.POLLEN_COLORS):
781 |                     txt = ('keine','keine bis gering','gering','gering bis mittel','mittel','mittel bis hoch','hoch')[idx]
782 |                     tcl = '#ffffff' if idx==0 or idx>3 else '#000000'
783 |                     s += ('<li><span style="background-color:%s;color:%s">&nbsp;%3.1f&nbsp;</span> %s</li>' % (col,tcl,idx*0.5,txt)).replace('.',',')
784 |                 s += '</ul>'
785 |             s += '<p>herausgegeben vom <a href="%s">%s</a> am %s | Vorhersage erstellt am %s</p>' % (
786 |                 self.provider_url,self.provider_name,
787 |                 time.strftime('%d.%m.%Y %H:%M',time.localtime(last_update)),
788 |                 time.strftime('%d.%m.%Y %H:%M',time.localtime(now))
789 |             )
790 |             try:
791 |                 fn = os.path.join(self.target_path,'health-%s.inc' % self.filename)
792 |                 fn_tmp = '%s.tmp' % fn
793 |                 with open(fn_tmp,'wt') as f:
794 |                     f.write(s)
795 |                 os.rename(fn_tmp,fn)
796 |             except OSError as e:
797 |                 if self.log_failure:
798 |                     logerr("thread '%s': cannot write .inc file %s - %s" % (self.name,e.__class__.__name__,e))
799 |             """
800 |             try:
801 |                 fn = os.path.join(self.target_path,'health-%s.json' % self.filename)
802 |                 fn_tmp = '%s.tmp' % fn
803 |                 with open(fn_tmp,'wt') as f:
804 |                     json.dump(tab,f,indent=4,ensure_ascii=False)
805 |                 os.rename(fn_tmp,fn)
806 |             except (OSError,TypeError,RecursionError,ValueError) as e:
807 |                 if self.log_failure:
808 |                     logerr("thread '%s': cannot write .json file %s - %s" % (self.name,e.__class__.__name__,e))
809 |             """
810 | 
811 |     def getRecord(self):
812 |         """ download and process data """
813 |         if __name__ == "__main__":
814 |             print('getRecord() start')
815 |         try:
816 |             reply = wget(self.url,
817 |                 log_success=self.log_success,
818 |                 log_failure=self.log_failure)
819 |             now = time.time()
820 |             reply = json.loads(reply)
821 |         except Exception as e:
822 |             if self.log_failure:
823 |                 logerr("thread '%s': wget %s - %s" % (self.name,e.__class__.__name__,e))
824 |             return
825 |         data = None
826 |         try:
827 |             last_update = self.convert_timestamp(reply.get('last_update'))
828 |             next_update = self.convert_timestamp(reply.get('next_update'))
829 |             if self.model=='biowetter':
830 |                 for zone in reply['zone']:
831 |                     if zone['id']==self.area:
832 |                         data, tabtimespans, area_name = self.process_bio(zone,reply.get('name'),reply.get('author'),last_update,next_update,now)
833 |                         break
834 |             elif self.model=='pollen':
835 |                 area1 = int(self.area)
836 |                 if (area1%10)!=0:
837 |                     # subregion
838 |                     area2 = area1
839 |                     area1 -= area1%10
840 |                 else:
841 |                     # main region
842 |                     area2 = -1
843 |                 #print('area',area1,area2)
844 |                 for zone in reply['content']:
845 |                     if (zone['region_id']==area1 and
846 |                         zone['partregion_id']==area2):
847 |                         data, tabtimespans, area_name = self.process_pollen(zone,reply.get('name'),reply.get('sender'),last_update,next_update,now,reply.get('legend'))
848 |                         break
849 |             elif self.model=='uvi':
850 |                 for zone in reply['content']:
851 |                     if zone['city']==self.area:
852 |                         data, tabtimespans, area_name = self.process_uvi(zone,reply.get('name'),reply.get('sender'),last_update,next_update,now,reply.get('forecast_day'))
853 |                         break
854 |         except Exception as e:
855 |             if self.log_failure:
856 |                 logerr("thread '%s': process %s - %s" % (self.name,e.__class__.__name__,e))
857 |         # If new data could be obtained, update the cache.
858 |         if data:
859 |             try:
860 |                 self.write_html(tabtimespans, area_name, last_update, now)
861 |             except Exception as e:
862 |                 if self.log_failure:
863 |                     logerr("thread '%s': write HTML %s - %s" % (self.name,e.__class__.__name__,e))
864 |             data.sort()
865 |             try:
866 |                 self.lock.acquire()
867 |                 self.last_update = last_update
868 |                 self.next_update = next_update
869 |                 self.area_name = area_name
870 |                 x = None
871 |                 for i in self.data:
872 |                     if i[1]==data[0][0]:
873 |                         x = i
874 |                         break
875 |                 if x:
876 |                     self.data = [x] + data
877 |                 else:
878 |                     self.data = data
879 |                 self.tab = tabtimespans
880 |             finally:
881 |                 self.lock.release()
882 |             #loginf("getRecord %s" % ','.join(['(%s,%s)' % (i[0],i[1]) for i in self.data]))
883 | 
884 |     def waiting_time(self):
885 |         now = time.time()
886 |         # If it is after the time the next update is scheduled for,
887 |         # fetch data at the end of the current archive interval.
888 |         if now>=self.next_update:
889 |             return super(DwdHealthThread,self).waiting_time()
890 |         # At the beginning of the next day there is no new data, but
891 |         # the HTML table has to be rewritten.
892 |         eod = weeutil.weeutil.archiveDaySpan(now)[1]+self.query_interval
893 |         # noon of the day in case it is in future
894 |         mid = eod-43200
895 |         if mid60 else w)-60
916 | 
917 | 
918 | def is_provided(provided, model):
919 |     if provided.lower()=='dwd':
920 |         return DwdHealthThread.is_provided(model.lower())
921 |     return False
922 | 
923 | def create_thread(thread_name,config_dict,archive_interval):
924 |     """ create health thread """
925 |     prefix = config_dict.get('prefix','')
926 |     provider = config_dict.get('provider')
927 |     model = config_dict.get('model')
928 |     if provider=='DWD' and DwdHealthThread.is_provided(model):
929 |         conf_dict = weeutil.config.accumulateLeaves(config_dict)
930 |         conf_dict['prefix'] = prefix
931 |         conf_dict['model'] = model
932 |         if weeutil.weeutil.to_bool(conf_dict.get('enable',True)):
933 |             #loginf(conf_dict)
934 |             thread = dict()
935 |             thread['datasource'] = 'Health'+model
936 |             thread['prefix'] = prefix
937 |             thread['thread'] = DwdHealthThread(thread_name,conf_dict,archive_interval)
938 |             thread['thread'].start()
939 |             return thread
940 |     return None
941 | 
942 | 
943 | if __name__ == "__main__":
944 | 
945 |     print('Weatherservices Health start')
946 |     conf = configobj.ConfigObj("health.conf")
947 |     dwd = create_thread(conf['model'],conf,300)
948 |     if not dwd:
949 |         print('could not create thread')
950 |     try:
951 |         while True:
952 |             time.sleep(300-time.time()%300+15)
953 |             data, interval = dwd['thread'].get_data(time.time()-15)
954 |             print(json.dumps(data,indent=4,ensure_ascii=False))
955 |     except Exception as e:
956 |         print('**MAIN**',e)
957 |     except KeyboardInterrupt:
958 |         print()
959 |         print('**MAIN** CTRL-C pressed')
960 |     dwd['thread'].shutDown()
961 |     exit(0)
962 | 
--------------------------------------------------------------------------------