COVID-19 Data Visualization Project Based on Python + Flask + ECharts


1. Crawling the epidemic data
Once we have some basic crawling skills, we could scrape the data ourselves from the health commission websites around the country, but some of those sites use very sophisticated anti-crawler measures that call for equally specialized countermeasures. Alternatively, we can crawl the final aggregated data directly from the major platforms, for example:
Baidu
Tencent
and others. The key fields of the crawled data (cumulative and daily-added confirmed, suspected, cured and dead counts) can be seen in the code below.
# Crawl and process the Tencent epidemic data
import requests
import json
import time

# Returns the historical data and the current day's detailed data
def get_tencent_data():
    url1 = "https://view.inews.qq.com/g2/getOnsInfo?name=disease_h5"
    url2 = "https://view.inews.qq.com/g2/getOnsInfo?name=disease_other"
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36'
    }
    r1 = requests.get(url1, headers=headers)
    r2 = requests.get(url2, headers=headers)
    # Parse the JSON strings into dictionaries
    res1 = json.loads(r1.text)
    res2 = json.loads(r2.text)
    data_all1 = json.loads(res1["data"])
    data_all2 = json.loads(res2["data"])
    # Historical data
    history = {}
    for i in data_all2["chinaDayList"]:
        ds = "2020." + i["date"]
        tup = time.strptime(ds, "%Y.%m.%d")  # parse the date
        ds = time.strftime("%Y-%m-%d", tup)  # reformat the date, otherwise inserting into the database fails (the column is a datetime)
        confirm = i["confirm"]
        suspect = i["suspect"]
        heal = i["heal"]
        dead = i["dead"]
        history[ds] = {"confirm": confirm, "suspect": suspect, "heal": heal, "dead": dead}
    for i in data_all2["chinaDayAddList"]:
        ds = "2020." + i["date"]
        tup = time.strptime(ds, "%Y.%m.%d")  # parse the date
        ds = time.strftime("%Y-%m-%d", tup)  # reformat the date to match the datetime column
        confirm = i["confirm"]
        suspect = i["suspect"]
        heal = i["heal"]
        dead = i["dead"]
        history[ds].update({"confirm_add": confirm, "suspect_add": suspect, "heal_add": heal, "dead_add": dead})
    # Current day's detailed data
    details = []
    update_time = data_all1["lastUpdateTime"]
    data_country = data_all1["areaTree"]  # list of countries
    data_province = data_country[0]["children"]  # provinces of China
    for pro_infos in data_province:
        province = pro_infos["name"]  # province name
        for city_infos in pro_infos["children"]:
            city = city_infos["name"]
            confirm = city_infos["total"]["confirm"]
            confirm_add = city_infos["today"]["confirm"]
            heal = city_infos["total"]["heal"]
            dead = city_infos["total"]["dead"]
            details.append([update_time, province, city, confirm, confirm_add, heal, dead])
    return history, details

his, de = get_tencent_data()
print(his)
print(de)
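To make the mapping onto the database tables in the next step easier to follow, this is the shape of the two return values (a sketch derived from the code above; the date shown is only a placeholder):

# history: one entry per day, keyed by the reformatted date string
# history["2020-01-28"] == {"confirm": ..., "suspect": ..., "heal": ..., "dead": ...,
#                           "confirm_add": ..., "suspect_add": ..., "heal_add": ..., "dead_add": ...}
# details: one row per city
# [update_time, province, city, confirm, confirm_add, heal, dead]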
2. Storing the epidemic data
Data storage: first create a database named cov.
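For example, using standard MySQL syntax (the character set matches the table definitions below):

CREATE DATABASE cov DEFAULT CHARACTER SET utf8mb4;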
Then, inside the cov database, create two tables: history stores the aggregated daily totals, and details stores the detailed per-city data for each day.
CREATE TABLE `history` (
  `ds` datetime NOT NULL COMMENT 'date',
  `confirm` int(11) DEFAULT NULL COMMENT 'cumulative confirmed',
  `confirm_add` int(11) DEFAULT NULL COMMENT 'newly confirmed that day',
  `suspect` int(11) DEFAULT NULL COMMENT 'currently suspected',
  `suspect_add` int(11) DEFAULT NULL COMMENT 'newly suspected that day',
  `heal` int(11) DEFAULT NULL COMMENT 'cumulative cured',
  `heal_add` int(11) DEFAULT NULL COMMENT 'newly cured that day',
  `dead` int(11) DEFAULT NULL COMMENT 'cumulative deaths',
  `dead_add` int(11) DEFAULT NULL COMMENT 'new deaths that day',
  PRIMARY KEY (`ds`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `details` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `update_time` datetime DEFAULT NULL COMMENT 'last update time of the data',
  `province` varchar(50) DEFAULT NULL COMMENT 'province',
  `city` varchar(50) DEFAULT NULL COMMENT 'city',
  `confirm` int(11) DEFAULT NULL COMMENT 'cumulative confirmed',
  `confirm_add` int(11) DEFAULT NULL COMMENT 'newly confirmed that day',
  `heal` int(11) DEFAULT NULL COMMENT 'cumulative cured',
  `dead` int(11) DEFAULT NULL COMMENT 'cumulative deaths',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
Next, store the crawled data into the database using the pymysql library.
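Below is a minimal sketch of that storage step, reusing get_tencent_data() from part 1 (it assumes the crawler code is in the same file). The connection parameters (host, user, password) and the helper names get_conn, close_conn, insert_history and update_details are illustrative assumptions, not taken verbatim from the original project:

import pymysql

# Open a connection to the cov database; the credentials here are placeholders.
def get_conn():
    conn = pymysql.connect(host="127.0.0.1",
                           user="root",
                           password="123456",
                           db="cov",
                           charset="utf8mb4")
    return conn, conn.cursor()

def close_conn(conn, cursor):
    cursor.close()
    conn.close()

# Insert the per-day national totals returned by get_tencent_data() into the history table
def insert_history():
    history, _ = get_tencent_data()
    conn, cursor = get_conn()
    sql = ("INSERT INTO history "
           "(ds, confirm, confirm_add, suspect, suspect_add, heal, heal_add, dead, dead_add) "
           "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)")
    for ds, day in history.items():
        cursor.execute(sql, (ds, day.get("confirm"), day.get("confirm_add"),
                             day.get("suspect"), day.get("suspect_add"),
                             day.get("heal"), day.get("heal_add"),
                             day.get("dead"), day.get("dead_add")))
    conn.commit()
    close_conn(conn, cursor)

# Insert the latest per-city rows into the details table
def update_details():
    _, details = get_tencent_data()
    conn, cursor = get_conn()
    sql = ("INSERT INTO details "
           "(update_time, province, city, confirm, confirm_add, heal, dead) "
           "VALUES (%s,%s,%s,%s,%s,%s,%s)")
    for row in details:
        cursor.execute(sql, row)
    conn.commit()
    close_conn(conn, cursor)

insert_history()
update_details()

Note that history uses ds as its primary key, so running insert_history() twice for the same dates raises a duplicate-key error; in practice you would clear the table first or switch the statement to INSERT ... ON DUPLICATE KEY UPDATE.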
