├── .gitignore ├── LICENSE ├── Pre_Weather ├── GetData.py ├── GetModel.py ├── Main.py ├── Model.pkl ├── ProcessData.py ├── Write.py ├── weather_test.csv ├── weather_train_train.csv └── weather_train_valid.csv ├── README.md └── result.jpg /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | __pycache__/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Eritque arcus 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
# -*- coding: utf-8 -*-
# @Time: 2020/12/16
# @Author: Eritque arcus
# @File: GetData.py
# Purpose: fetch the raw HTML of a page (used to scrape weather data).


class GetData:
    """Minimal HTTP GET helper with a built-in browser-like header set."""

    def __init__(self, url, header=""):
        """
        :param url: the URL to fetch
        :param header: optional dict of request headers; when left as the
                       empty-string default, a built-in browser-like header
                       set pinned to www.meteomanz.com is used
        """
        self.url = url
        if header == "":
            # Default headers mimic mobile Chrome; Host is pinned to the
            # site this project scrapes.
            self.headers = {
                'Connection': 'Keep-Alive',
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,'
                          '*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
                'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
                'Accept-Encoding': 'gzip, deflate',
                'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, '
                              'like Gecko) Chrome/87.0.4280.66 Mobile Safari/537.36 ',
                'Host': 'www.meteomanz.com'
            }
        else:
            self.headers = header

    def Get(self):
        """
        :return: raw response body (bytes) of a GET request to self.url
        """
        # Imported lazily so the module can be imported (and the header
        # logic tested) without urllib3 installed.
        import urllib3
        http = urllib3.PoolManager()
        return http.request('GET', self.url, headers=self.headers).data
# -*- coding: utf-8 -*-
# @Time: 2020/12/16
# @Author: Eritque arcus
# @File: GetModel.py
from sklearn.ensemble import RandomForestRegressor
import joblib
from sklearn.metrics import mean_absolute_error
from ProcessData import ProcessData


def GetModel(a="Model.pkl"):
    """Train a random-forest weather model, persist it, and report MAE.

    :param a: file name the fitted model is dumped to (joblib format)
    :return: [score, X_test] where
             score  -- mean absolute error on the validation split
             X_test -- imputed feature rows for forecasting the coming days
    """
    # Preprocessed, imputed train/valid/test splits from ProcessData.
    [X_train, X_valid, y_train, y_valid, X_test] = ProcessData()
    # Random forest regressor; random_state pinned for reproducibility.
    # (An earlier XGBoost variant was abandoned and has been removed.)
    model = RandomForestRegressor(random_state=0, n_estimators=1001)
    model.fit(X_train, y_train)
    # Evaluate on the held-out validation window.
    preds = model.predict(X_valid)
    score = mean_absolute_error(y_valid, preds)
    # Persist the fitted model so Main.py can reload it without retraining.
    joblib.dump(model, a)
    return [score, X_test]
# -*- coding: utf-8 -*-
# @Time: 2020/12/16
# @Author: Eritque arcus
# @File: Main.py
import joblib
import datetime as DT
from GetModel import GetModel
import matplotlib.pyplot as plt


def main():
    """Train the forecast model, print the next 6 days, and plot them."""
    # Train + persist the model; returns [validation MAE, test features].
    r = GetModel()
    print("MAE:", r[0])
    # Reload from disk to exercise the saved-model workflow.
    model = joblib.load('Model.pkl')
    # Predict from the scraped test window.
    preds = model.predict(r[1])
    # BUG FIX: the banner used to claim 7 days while the loop below prints
    # offsets 1..6 only (six days; preds[0] is skipped as in the original).
    print("未来6天预测")
    all_ave_t = []
    all_high_t = []
    all_low_t = []
    all_rainfall = []
    today = DT.datetime.now()  # hoisted: loop-invariant
    for a in range(1, 7):
        # Each prediction row: [ave_t, max_t, min_t, rainfall, wind speed]
        # -- assumed from the CSV column order; TODO confirm against Write.py.
        time = (today + DT.timedelta(days=a)).date()
        print(time.year, '/', time.month, '/', time.day,
              ': 平均气温', preds[a][0],
              '最高气温', preds[a][1],
              '最低气温', preds[a][2],
              "降雨量", preds[a][3],
              "风力", preds[a][4])
        all_ave_t.append(preds[a][0])
        all_high_t.append(preds[a][1])
        all_low_t.append(preds[a][2])
        all_rainfall.append(preds[a][3])
    temp = {"ave_t": all_ave_t, "high_t": all_high_t,
            "low_t": all_low_t, "rainfall": all_rainfall}
    # Temperature chart: three series on one figure.
    plt.plot(range(1, 7), temp["ave_t"], color="green", label="ave_t")
    plt.plot(range(1, 7), temp["high_t"], color="red", label="high_t")
    plt.plot(range(1, 7), temp["low_t"], color="blue", label="low_t")
    plt.legend()  # show legend
    plt.ylabel("Temperature(°C)")
    plt.xlabel("day")
    plt.show()
    # Rainfall chart on its own figure.
    plt.plot(range(1, 7), temp["rainfall"], color="black", label="rainfall")
    plt.legend()
    plt.ylabel("mm")
    plt.xlabel("day")
    plt.show()


if __name__ == "__main__":
    main()
# -*- coding: utf-8 -*-
# @Time: 2020/12/16
# @Author: Eritque arcus
# @File: ProcessData.py
from Write import write
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.impute import SimpleImputer
import seaborn as sns
import matplotlib.pyplot as plt


def ProcessData():
    """Scrape weather windows, split them, and impute missing values.

    :return:
        [X_train  -- training features,
         X_valid  -- validation features,
         y_train  -- training targets,
         y_valid  -- validation targets,
         imputed_X_test -- test features for forecasting]
    """
    # Scrape three date windows into CSVs. [1, 1] means "one year back";
    # e.g. write([1, 1], [15, 0], ...) takes last year's 15-day window
    # ending on today's date.
    write([1, 1], [15, 0], "weather_train_train.csv")
    write([1, 1], [0, 15], "weather_train_valid.csv")
    write([0, 0], [15, 0], "weather_test.csv")
    # Load the scraped data; Time column becomes a datetime index.
    X_test = pd.read_csv("weather_test.csv", index_col="Time", parse_dates=True)
    X = pd.read_csv("weather_train_train.csv", index_col="Time", parse_dates=True)
    y = pd.read_csv("weather_train_valid.csv", index_col="Time", parse_dates=True)

    # 80/20 train/validation split, seeded for reproducibility.
    X_train, X_valid, y_train, y_valid = train_test_split(
        X, y, train_size=0.8, test_size=0.2, random_state=0)

    # Impute missing values with column means.
    # BUG FIX: the original reused one imputer and re-fit it on each data
    # set in sequence, so correctness depended on call order and the test
    # set was filled from its own statistics. Separate imputers are now
    # fit on the training splits only, and validation/test are filled
    # with training statistics.
    x_imputer = SimpleImputer()
    y_imputer = SimpleImputer()
    imputed_X_train = pd.DataFrame(x_imputer.fit_transform(X_train),
                                   columns=X_train.columns)
    imputed_X_valid = pd.DataFrame(x_imputer.transform(X_valid),
                                   columns=X_valid.columns)
    imputed_y_train = pd.DataFrame(y_imputer.fit_transform(y_train),
                                   columns=y_train.columns)
    imputed_y_valid = pd.DataFrame(y_imputer.transform(y_valid),
                                   columns=y_valid.columns)
    # BUG FIX: keep the test frame's column names too (the original
    # returned it with a bare RangeIndex of columns, which makes
    # model.predict emit feature-name warnings).
    imputed_X_test = pd.DataFrame(x_imputer.transform(X_test),
                                  columns=X_test.columns)

    # Return the split, imputed data sets.
    return [imputed_X_train, imputed_X_valid, imputed_y_train,
            imputed_y_valid, imputed_X_test]
# -*- coding: utf-8 -*-
# @Time: 2020/12/16
# @Author: Eritque arcus
# @File: Write.py
from calendar import isleap
import re
from bs4 import BeautifulSoup
from GetData import GetData
import datetime as DT
import csv


def a(t):
    """Replace the site's ' - ' missing-value marker with '0'.

    NOTE(review): currently unused; kept for backward compatibility.
    """
    return t.replace(" - ", "0")


def write(years, b, c):
    """Scrape a date window from meteomanz.com and write it to a CSV file.

    :param years: [years back for the start date, years back for the end date]
    :param b: [days before today for the start date,
               days after today for the end date]
    :param c: output CSV file name
    :return: None
    """
    # BUG FIX: open via a context manager so the handle is closed even if
    # scraping or parsing raises (the original leaked it on error).
    with open(c, 'w', encoding='utf-8', newline='') as f:
        csv_writer = csv.writer(f)
        # Header row for the scraped columns.
        csv_writer.writerow(["Time", "Ave_t", "Max_t", "Min_t", "Prec",
                             "SLpress", "Winddir", "Windsp", "Cloud"])
        today = DT.datetime.today()
        # Whether the current year is a leap year; used below to realign
        # the window when the target year's February differs in length.
        st = isleap(today.year)
        # Window start (b[0] days ago) and end (b[1] days ahead).
        week_ago = (today - DT.timedelta(days=b[0])).date()
        week_pre = (today + DT.timedelta(days=b[1])).date()
        # Shift by one day when the window touches February and the target
        # year's leap status differs from the current year's.
        if week_ago.month + week_pre.month == 3 or week_ago.month + week_pre.month == 5:
            if week_ago.month == 2 and not st == isleap(today.year - years[0]):
                if st:
                    # This year is a leap year, the target year is not: -1.
                    week_ago -= DT.timedelta(days=1)
                else:
                    # This year is not a leap year, the target year is: +1.
                    week_ago += DT.timedelta(days=1)
            if week_pre.month == 2 and not st == isleap(today.year - years[1]):
                if st:
                    week_pre -= DT.timedelta(days=1)
                else:
                    week_pre += DT.timedelta(days=1)
        # Station id: Guangzhou 59287 (Qingdao would be 54857).
        # (Renamed from `id`, which shadowed the builtin.)
        station_id = "59287"
        # Build the query URL for the chosen station and date window.
        url = ("http://www.meteomanz.com/sy2?l=1&cou=2250&ind=" + station_id
               + "&d1=" + str(week_ago.day).zfill(2)
               + "&m1=" + str(week_ago.month).zfill(2)
               + "&y1=" + str(week_ago.year - years[0])
               + "&d2=" + str(week_pre.day).zfill(2)
               + "&m2=" + str(week_pre.month).zfill(2)
               + "&y2=" + str(week_pre.year - years[1]))
        # Show the URL the data set is fetched from.
        print(url)
        g = GetData(url).Get()
        # Parse the page and walk every table row.
        soup = BeautifulSoup(g, "html5lib")
        tb = soup.find(name="tbody")
        past_tr = tb.find_all(name="tr")
        for tr in past_tr:
            text = tr.find_all(name="td")
            flag = False
            for i in range(0, len(text)):
                if i == 0:
                    text[i] = text[i].a.string
                    # Site quirk: a bogus day-zero row (e.g. 00/11/2020)
                    # appears for each month and must be dropped.
                    if "00/" in text[i]:
                        flag = True
                elif i == 8:
                    # Cloud cover is rendered as "n/8"; keep just n.
                    text[i] = text[i].string.replace("/8", "")
                elif i == 5:
                    # Strip the pressure unit.
                    text[i] = text[i].string.replace(" Hpa", "")
                elif i == 6:
                    # Strip degree sign / compass letters from wind dir.
                    text[i] = re.sub(u"[º(.*?|N|W|E|S)]", "", text[i].string)
                else:
                    text[i] = text[i].string
                # Crude fallback: missing ("-") or trace ("Tr") values
                # become "2" (empirically gave MAE=3.6021).
                text[i] = "2" if text[i] == "-" else text[i]
                text[i] = "2" if text[i] == "Tr" else text[i]
            # Keep only the first 9 columns (matches the header row).
            text = text[0:9]
            # Write the row unless it was the bogus day-zero entry.
            if not flag:
                csv_writer.writerow(text)
27/02/2020,22.6,29.0,16.2,0.0,1019.1,131,8,4 17 | 26/02/2020,22.4,29.0,15.8,0.0,1018.1,326,6,4 18 | 25/02/2020,20.7,27.2,14.2,2,1018.0,351,6,5 19 | -------------------------------------------------------------------------------- /Pre_Weather/weather_train_valid.csv: -------------------------------------------------------------------------------- 1 | Time,Ave_t,Max_t,Min_t,Prec,SLpress,Winddir,Windsp,Cloud 2 | 3 | 26/03/2020,24.3,27.7,20.9,3.9,1013.0,114,8,8 4 | 25/03/2020,24.7,28.5,20.9,0.7,1013.6,145,8,7 5 | 24/03/2020,25.0,30.0,20.0,0.0,1014.6,135,8,6 6 | 23/03/2020,25.0,30.0,20.0,0.0,1014.0,176,7,7 7 | 22/03/2020,24.4,29.1,19.7,0.0,1013.8,205,5,4 8 | 21/03/2020,21.6,25.1,18.0,0.0,1014.6,124,7,8 9 | 20/03/2020,19.8,21.6,18.0,1.5,1015.1,80,4,8 10 | 19/03/2020,20.1,22.0,18.2,25.4,1015.5,11,5,8 11 | 18/03/2020,19.8,22.0,17.6,7.5,1015.1,116,5,8 12 | 17/03/2020,21.1,24.9,17.3,1.7,1018.1,35,5,8 13 | 16/03/2020,19.2,24.9,13.5,0.0,1019.4,88,8,8 14 | 15/03/2020,19.4,25.4,13.5,0.0,1019.4,346,6,8 15 | 14/03/2020,21.1,25.4,16.7,0.0,1019.5,341,13,5 16 | 13/03/2020,20.8,24.7,16.8,8.2,1016.2,25,12,8 17 | 12/03/2020,17.6,22.3,13.0,2,1014.8,91,5,8 18 | 11/03/2020,18.4,23.8,13.0,2,1017.7,77,4,7 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PYWeatherReport 2 | ## 简介 3 | 一个用python机器学习(ml)做的广州地区的简易天气预报 4 | 5 | 模型是用的是sklearn的随机树森林(randomForest) 6 | 7 | 在Pre_Weather文件夹下用 python Main.py 命令运行 8 | 9 | 训练数据来源于[http://www.meteomanz.com/](http://www.meteomanz.com/) 10 | 11 | ## 如何使用 12 | 13 | 直接用python运行`pre_weather/Main.py`,就会在控制台输出预测的数据 14 | ``` 15 | python pre_weather/Main.py 16 | ``` 17 | 或 18 | 19 | 在你的python代码里用`joblib`导入生成的模型,然后输入你的数据进行预测 20 | 21 | (PS: 因为模型的训练用的数据日期和你预测数据的日期有关,所以不建议直接用使用非当天训练的模型进行预测,误差可能偏大) 22 | 23 | 如以下代码(在Main.py的11行): 24 | ``` 25 | import joblib 26 | 27 | # 读取保存的模型 28 | model = joblib.load('Model.pkl') 29 | 30 | # 最终预测结果 31 | 
preds = model.predict(r[1]) 32 | ``` 33 | 其中,`r[1]`是预测数据 34 | 35 | 或 36 | 37 | 参考`Main.py`,自己写一个符合你需求启动文件 38 | ## 系列教程 39 | 40 | [机器学习参考篇: python+sklearn+kaggle机器学习](https://blog.csdn.net/qq_40832960/article/details/109260388) 41 | 42 | [用python+sklearn(机器学习)实现天气预报 准备](https://blog.csdn.net/qq_40832960/article/details/111146467) 43 | 44 | [用python+sklearn(机器学习)实现天气预报数据 数据](https://blog.csdn.net/qq_40832960/article/details/111182425) 45 | 46 | [用python+sklearn(机器学习)实现天气预报 模型和使用](https://blog.csdn.net/qq_40832960/article/details/111238926) 47 | 48 | > 2020/12/16 49 | 50 | 优化和修复代码,增加数据可视化显示 51 |  52 | 53 | > 2020/12/7 54 | 55 | 优化和修复代码 56 | 57 | 增加模型保存和填充缺失数据 58 | 59 | > 2020/11/25 60 | 61 | 优化了代码 62 | 63 | 计划未来改模型为RGBoost或用tensorflow来降低MAE,同时提高数据多元化 64 | 65 | MAE优化到3.6021665834173815 66 | 67 | 把丢失值取为手动平均值2 68 | 69 |  70 | 71 | > 2020/11/24 72 | 73 | 模型是用的决策树森林 74 | 75 | 训练数据来源于[http://www.meteomanz.com/](http://www.meteomanz.com/) 76 | 77 | MAE目前是3.604,未来我会尽可能继续优化 78 | 79 |  80 | 81 | -------------------------------------------------------------------------------- /result.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Nambers/PYWeatherForecast/0a5964e15d2899e0f53a42cd2b7514e03a1a266e/result.jpg --------------------------------------------------------------------------------