├── 1.png
├── 10.png
├── 11.png
├── 12.png
├── 13.png
├── 14.png
├── 15.png
├── 16.png
├── 17.png
├── 18.png
├── 2.png
├── 3.png
├── 44444444444444444444444.png
├── 5.png
├── 6.png
├── 7.png
├── 8.png
├── 9.png
├── JedisUtil.java
├── README.md
├── ReduceByKeySortRddDemo.scala
└── crawler.py

/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/1.png
--------------------------------------------------------------------------------
/10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/10.png
--------------------------------------------------------------------------------
/11.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/11.png
--------------------------------------------------------------------------------
/12.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/12.png
--------------------------------------------------------------------------------
/13.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/13.png
--------------------------------------------------------------------------------
/14.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/14.png
--------------------------------------------------------------------------------
/15.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/15.png
--------------------------------------------------------------------------------
/16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/16.png
--------------------------------------------------------------------------------
/17.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/17.png
--------------------------------------------------------------------------------
/18.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/18.png
--------------------------------------------------------------------------------
/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/2.png
--------------------------------------------------------------------------------
/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/3.png
--------------------------------------------------------------------------------
/44444444444444444444444.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/44444444444444444444444.png
--------------------------------------------------------------------------------
/5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/5.png
--------------------------------------------------------------------------------
/6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/6.png
--------------------------------------------------------------------------------
/7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/7.png
--------------------------------------------------------------------------------
/8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/8.png
--------------------------------------------------------------------------------
/9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bysj2022NB/traffic_predict_nb2099_bigdata888/13424899d65c68c68d0bb2fe7758bae0f93b73a1/9.png
--------------------------------------------------------------------------------
/JedisUtil.java:
--------------------------------------------------------------------------------
package com.bigdata.storm.kafka.util;

import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

/**
 * @program: storm-kafka-api-demo
 * @description: Redis utility class
 * @author: 小毕
 * @company: 清华大学深圳研究生院
 * @create: 2019-08-22 17:23
 */
public class JedisUtil {

    /* Shared Redis connection pool, lazily initialized */
    private static JedisPool pool;

    /**
     * Returns the Redis connection pool, creating it on first use.
     * Synchronized so that concurrent callers cannot create two pools.
     */
    public static synchronized JedisPool getPool() {
        if (pool == null) {
            // Configure the Jedis connection pool
            JedisPoolConfig jedisPoolConfig = new JedisPoolConfig();
            // Maximum number of connections
            jedisPoolConfig.setMaxTotal(20);
            // Maximum number of idle connections
            jedisPoolConfig.setMaxIdle(5);
            // Host, port, and connection timeout in milliseconds
            pool = new JedisPool(jedisPoolConfig, "node03.hadoop.com", 6379, 3000);
        }
        return pool;
    }

    /** Borrows a connection from the pool; callers should close it to return it. */
    public static Jedis getConnection() {
        return getPool().getResource();
    }
}
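
JedisUtil builds one shared JedisPool and hands out connections from it. Since the rest of the project is Python-first, a minimal sketch of the same pooling pattern with redis-py follows; redis-py is not used anywhere in this repo, so this is an illustrative equivalent, not project code, and the host, port, and pool sizes simply mirror the Java class.

```python
# Illustrative Python counterpart of JedisUtil using redis-py (assumption:
# redis-py is NOT part of this repo). Host/port/pool sizes mirror the Java class.
import redis

# Module-level pool, created once per process (mirrors the static JedisPool)
_pool = redis.ConnectionPool(host='node03.hadoop.com', port=6379,
                             max_connections=20, socket_timeout=3)

def get_connection() -> redis.Redis:
    """Returns a client backed by the shared pool, like JedisUtil.getConnection()."""
    return redis.Redis(connection_pool=_pool)

if __name__ == '__main__':
    # Hypothetical smoke test, mirroring the commented-out main in JedisUtil
    conn = get_connection()
    conn.set('hello', 'world')
    print(conn.get('hello'))
```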
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Computer Science Graduation Project (Deluxe Edition): Python + Spark Smart City Traffic Big Data, Traffic Flow Prediction, Traffic Crawler, Subway Passenger Flow Analysis, Big Data Graduation Project, Deep Learning, Machine Learning

## Terms
### The full package is available for a fee (thesis, PPT, source code, SQL scripts, and virtual machine image)
### Please star this repo before adding me as a friend, and note that you are requesting the paid source code via GitHub
### My QQ: 2877135669 or 1679232425

### Or WeChat: bysj2023nb

### Notes on adding me on QQ (some visitors have worn me out):
1. When adding me, please use the note format "git traffic big data latest".
2. This avoids wasting both of our time.



Demo videos on Bilibili:

https://www.bilibili.com/video/BV1fR4y1B7EK/?spm_id_from=333.999.0.0

https://www.bilibili.com/video/BV1pd4y1n7Xa/?spm_id_from=333.999.0.0

# A few more words:

This project is polished enough to sail through the defense from start to finish and leave your advisor with nothing to pick at.

## Tech Stack (Overview)
Python crawler, Python, MySQL, machine learning, Spark, Vue, ECharts, Spring Boot, SMS service,

Spark, Hadoop

# Tech Stack (Detailed)

Frontend: Vue.js, WebSocket, ECharts, Element UI

Backend: Spring Boot + MyBatis

Big data components: Spark

Machine learning / deep learning: linear regression prediction, KNN, CNN, etc.

Database: MySQL

Crawler: Python crawler

Third-party API: Alibaba Cloud SMS

## Highlights

Prediction algorithms

Large-screen dashboard

Crawler

Front-end/back-end separation

## Additional Notes
Suitable for big data graduation projects, data analysis projects, and crawler-based computer science graduation projects.



# Features

1. Portal system

(1) Road traffic flow query

(2) Road traffic flow prediction

(3) Road status information publishing

(4) Road status information viewing



2. Dashboard statistics system

(1) Top 10 roads by traffic flow

(2) Cumulative road traffic volume

(3) Road traffic volume for a given time period

Note: the statistics dimensions are limited for now; more may be added later.



3. Admin system

(1) Personal profile management

(2) User management

(3) Road information management



Role descriptions

4. User roles

(1) The system has three roles: system administrator, traffic administrator, and ordinary user.

(2) The system administrator has the highest privileges and manages traffic administrators and ordinary users.

(3) Traffic administrators can query road traffic flow, view traffic flow prediction results, publish road status information, view road status information, and view the dashboard statistics system.

(4) Ordinary users can query road traffic flow, view road status information, and view the dashboard statistics system.



# Screenshots

![](5.png)

![6](6.png)

![7](7.png)

![](1.png)

![2](2.png)

![3](3.png)

![8](8.png)

![9](9.png)

![10](10.png)

![11](11.png)

![12](12.png)

![13](13.png)

![14](14.png)

![15](15.png)

![16](16.png)

![17](17.png)

![18](18.png)

![44444444444444444444444](44444444444444444444444.png)

# Demo Videos

https://www.bilibili.com/video/BV1fR4y1B7EK/?spm_id_from=333.999.0.0

https://www.bilibili.com/video/BV1pd4y1n7Xa/?spm_id_from=333.999.0.0
--------------------------------------------------------------------------------
/ReduceByKeySortRddDemo.scala:
--------------------------------------------------------------------------------
package com.bigdata.spark.reducebykey_sort

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @program: spark-api-demo
 * @description: Demo of reduceByKey aggregation followed by a sort on values
 * @author: 小毕
 * @company: 清华大学深圳研究生院
 * @create: 2019-09-02 18:00
 */
object ReduceByKeySortRddDemo {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("ReduceByKeySortRddDemo")
      .setMaster("local")
    val sc = new SparkContext(conf)
    val rdd1 = sc.parallelize(List(("tom", 1), ("jerry", 3), ("kitty", 2), ("shuke", 1)))
    val rdd2 = sc.parallelize(List(("jerry", 2), ("tom", 3), ("shuke", 2), ("kitty", 5)))
    val rdd3 = rdd1.union(rdd2)
    // Aggregate the counts by key
    val rdd4 = rdd3.reduceByKey(_ + _)
    rdd4.collect.foreach(println)
    // Sort by value in descending order: swap (k, v) to (v, k), sortByKey, swap back
    val rdd5 = rdd4.map(t => (t._2, t._1)).sortByKey(false).map(t => (t._2, t._1))
    rdd5.collect.foreach(println)
    sc.stop()
  }
}
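
The Scala demo sorts by value with the swap-sort-swap idiom: map (k, v) to (v, k), call sortByKey, then map back. RDDs also expose sortBy, which expresses the same thing directly. Below is a minimal PySpark sketch of the same pipeline; PySpark fits the project's declared Python + Spark stack, but this exact script is not in the repo, so treat it as an illustrative translation.

```python
# PySpark sketch of the reduceByKey + sort-by-value pattern from the Scala demo.
# Assumes a local Spark installation; not part of this repo.
from pyspark import SparkConf, SparkContext

conf = SparkConf().setAppName("ReduceByKeySortDemo").setMaster("local")
sc = SparkContext(conf=conf)

rdd1 = sc.parallelize([("tom", 1), ("jerry", 3), ("kitty", 2), ("shuke", 1)])
rdd2 = sc.parallelize([("jerry", 2), ("tom", 3), ("shuke", 2), ("kitty", 5)])

# Aggregate the counts by key
counts = rdd1.union(rdd2).reduceByKey(lambda a, b: a + b)

# sortBy replaces the swap-sort-swap dance used in the Scala version
for pair in counts.sortBy(lambda kv: kv[1], ascending=False).collect():
    print(pair)

sc.stop()
```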
--------------------------------------------------------------------------------
/crawler.py:
--------------------------------------------------------------------------------
# coding=utf-8
from bs4 import BeautifulSoup
import requests
import random
import pymysql

links = []
hea = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36'
}
urls = [
    "https://www.chinanews.com/china.shtml",       # Domestic
    "https://www.chinanews.com/society.shtml",     # Society
    "https://www.chinanews.com/compatriot.shtml",  # Hong Kong/Macau
    "https://www.chinanews.com/wenhua.shtml",      # Culture
    "https://www.chinanews.com/world.shtml",       # International
    "https://www.chinanews.com/cj/gd.shtml",       # Finance
    "https://www.chinanews.com/sports.shtml",      # Sports
    "https://www.chinanews.com/huaren.shtml"       # Overseas Chinese
]
# Open the database connection
db = pymysql.connect(host='127.0.0.1', user='root', password='123456', port=3396, db='news_recommendation_system')
# Get a cursor for executing statements
cursor = db.cursor()

def main():
    baseurl = 'https://www.chinanews.com/taiwan.shtml'  # Page to crawl
    # deleteDate()
    # 1. Crawl the listing page and collect the article links
    getLink(baseurl)
    # 2. Follow each link, extract the article, and save it to the database
    getInformationAndSave()
    # 3. Close the database connection
    db.close()

def getInformationAndSave():
    for link in links:
        url = "https://www.chinanews.com" + link[1]
        cur_html = requests.get(url, headers=hea)
        cur_html.encoding = "utf8"
        soup = BeautifulSoup(cur_html.text, 'html.parser')
        # Extract the title
        title = soup.find('h1').text.strip()
        # Extract the publication time and the source
        tr = soup.find('div', class_='left-t').text.split()
        time = tr[0] + tr[1]
        recourse = tr[2]
        # Extract the article body
        cont = soup.find('div', class_="left_zw")
        content = cont.text.strip()
        print(link[0] + "---" + title + "---" + time + "---" + recourse + "---" + url)
        saveDate(title, content, time, recourse, url)

def deleteDate():
    sql = "DELETE FROM news"
    try:
        # Execute the SQL statement and commit
        cursor.execute(sql)
        db.commit()
    except Exception:
        # Roll back on error
        db.rollback()

def saveDate(title, content, time, recourse, url):
    try:
        # Parameterized query: the driver escapes quotes inside the article text,
        # which the original '%s'-formatted string would have broken on
        cursor.execute(
            "INSERT INTO news(news_title, news_content, type_id, news_creatTime, news_recourse, news_link) "
            "VALUES (%s, %s, %s, %s, %s, %s)",
            (title, content, random.randint(1, 8), time, recourse, url))
        db.commit()
        print("Insert succeeded")
    except Exception:
        db.rollback()
        print("Insert failed")

def getLink(baseurl):
    html = requests.get(baseurl, headers=hea)
    html.encoding = 'utf8'
    soup = BeautifulSoup(html.text, 'html.parser')
    for item in soup.select('div.content_list > ul > li'):
        # Skip list items that are not article entries
        if item.a is None:
            continue
        news_type = item.div.text[1:3]  # Category label
        link = item.div.next_sibling.next_sibling.a['href']
        links.append([news_type, link])

if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
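
The INSERT in saveDate implies a news table with six business columns. A hypothetical sketch of that table follows: the column names come straight from the code, but the types, lengths, and primary key are assumptions, since the real schema ships in the project's paid SQL script.

```python
# Hypothetical DDL for the `news` table, inferred from the INSERT in crawler.py.
# Column names match the code; types and the primary key are assumptions.
import pymysql

NEWS_DDL = """
CREATE TABLE IF NOT EXISTS news (
    news_id        INT AUTO_INCREMENT PRIMARY KEY,
    news_title     VARCHAR(255),
    news_content   TEXT,
    type_id        INT,
    news_creatTime VARCHAR(64),
    news_recourse  VARCHAR(128),
    news_link      VARCHAR(512)
)
"""

def create_news_table():
    # Connection settings copied from crawler.py
    db = pymysql.connect(host='127.0.0.1', user='root', password='123456',
                         port=3396, db='news_recommendation_system')
    try:
        with db.cursor() as cursor:
            cursor.execute(NEWS_DDL)
        db.commit()
    finally:
        db.close()

if __name__ == '__main__':
    create_news_table()
```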