# -*- coding: utf-8 -*-
import time,datetime, json, requests,pymysql
import pandas as pd
import traceback
from selenium.webdriver import Chrome, ChromeOptions
import sys
# ----------------数据库连接、关闭------------------------
#连接数据库
def get_conn():
    """Open a connection to the local `cov` MySQL database.

    Returns:
        (connection, cursor): a live pymysql connection and a cursor
        created on it. Caller is responsible for releasing both via
        close_conn().
    """
    conn = pymysql.Connect(
        host='localhost',
        port=3306,
        user='root',
        passwd='123456',
        db='cov',
        charset='utf8',
    )
    # Hand back the connection together with a fresh cursor on it.
    return conn, conn.cursor()
#关闭连接
def close_conn(connect, cursor):
    """Release database resources opened by get_conn().

    The cursor is closed *before* the connection: a cursor belongs to
    its connection, so it must be released first (the original code
    closed the connection first, leaving the cursor to be closed on a
    dead connection).

    Args:
        connect: pymysql connection, may be falsy/None.
        cursor:  cursor obtained from `connect`, may be falsy/None.
    """
    if cursor:
        cursor.close()
    if connect:
        connect.close()
# ----------------爬取数据------------------------
# 抓取腾讯疫情国内每日实时详细各省市和中国每日历史数据
def get_tencent_data():
    """Fetch Tencent COVID data: today's per-city detail plus the
    nationwide daily history series.

    Returns:
        (history, details):
            history -- dict keyed by 'YYYY-MM-DD'; each value holds the
                cumulative national figures and (merged in) the daily
                increments for that date.
            details -- list of per-city rows:
                [update_time, province, city, nowConfirm, confirm,
                 confirm_add, suspect, heal, dead, dead_rate, heal_rate]
    """
    # Millisecond timestamp acts as a cache-buster query parameter.
    ts = int(time.time() * 1000)
    url1 = 'https://view.inews.qq.com/g2/getOnsInfo?name=disease_h5&callback=&_=%d' % ts
    url2 = 'https://view.inews.qq.com/g2/getOnsInfo?name=disease_other&callback=&_=%d' % ts
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36'
    }
    # BUG FIX: requests.get's second positional argument is `params`,
    # not `headers` — the original sent the UA dict as query parameters
    # and never set the header. Pass it via the keyword argument.
    r1 = requests.get(url1, headers=headers)
    r2 = requests.get(url2, headers=headers)
    # The API nests a JSON string inside the JSON envelope's "data" field.
    res1 = json.loads(r1.text)
    res2 = json.loads(r2.text)
    data_all1 = json.loads(res1["data"])
    data_all2 = json.loads(res2["data"])

    # --- today's per-city details -------------------------------------
    details = []
    update_time = data_all1["lastUpdateTime"]
    # areaTree[0] is China; its children are the provinces.
    data_province = data_all1["areaTree"][0]["children"]
    for pro_infos in data_province:
        province = pro_infos["name"]
        for city_infos in pro_infos["children"]:
            total = city_infos["total"]
            details.append([
                update_time, province, city_infos["name"],
                total["nowConfirm"], total["confirm"],
                city_infos["today"]["confirm"],
                total["suspect"], total["heal"], total["dead"],
                total["deadRate"], total["healRate"],
            ])

    # --- national history: cumulative series --------------------------
    history = {}
    for day_infos in data_all2["chinaDayList"]:
        # "y" + "m.d" -> 'YYYY-MM-DD' so MySQL accepts it as a DATE.
        tup = time.strptime(day_infos["y"] + "." + day_infos["date"], "%Y.%m.%d")
        ds = time.strftime("%Y-%m-%d", tup)
        history[ds] = {
            "confirm": day_infos["confirm"],
            "suspect": day_infos["suspect"],
            "heal": day_infos["heal"],
            "dead": day_infos["dead"],
            "importedCase": day_infos["importedCase"],
            "noInfect": day_infos["noInfect"],
            "localConfirm": day_infos["localConfirm"],
            "nowConfirm": day_infos["nowConfirm"],
            "nowSevere": day_infos["nowSevere"],
            "dead_rate": day_infos["deadRate"],
            "heal_rate": day_infos["healRate"],
        }

    # --- national history: daily increments, merged into `history` ----
    for day_infos in data_all2["chinaDayAddList"]:
        tup = time.strptime(day_infos["y"] + "." + day_infos["date"], "%Y.%m.%d")
        ds = time.strftime("%Y-%m-%d", tup)
        # setdefault guards against a date present only in the "add"
        # list (the original `history[ds].update` would KeyError).
        history.setdefault(ds, {}).update({
            "confirm_add": day_infos["confirm"],
            "suspect_add": day_infos["suspect"],
            "heal_add": day_infos["heal"],
            "dead_add": day_infos["dead"],
            "importedCase_add": day_infos["importedCase"],
            "noInfect_add": day_infos["infect"],
            "localConfirm_add": day_infos["localConfirmadd"],
            "dead_rate_add": day_infos["deadRate"],
            "heal_rate_add": day_infos["healRate"],
        })
    return history, details
# 抓取各省从2020到2021的每日历史数据(无市区)
def get_province_history_data():
    """Fetch per-province daily history for 2020-2021 (no city detail).

    Returns:
        list of rows:
        [date, province, province_code, confirm, confirm_add,
         nowConfirm, nowConfirm_add, suspect, suspect_add,
         heal, heal_add, dead, dead_add, nowSevere, nowMidSevere]
    """
    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36"
    }
    url = "http://111.231.75.86:8000/api/provinces/CHN/daily/"
    response = requests.get(url=url, headers=headers)
    rows = []
    for rec in json.loads(response.text):
        # dateId is an integer like 20200131 -> 'YYYY-MM-DD'.
        day = datetime.datetime.strptime(str(rec["dateId"]), '%Y%m%d').strftime('%Y-%m-%d')
        rows.append([
            day,
            rec["provinceName"],
            rec["provinceCode"],
            rec["confirmedCount"],
            rec["confirmedIncr"],
            rec["currentConfirmedCount"],
            rec["currentConfirmedIncr"],
            rec["suspectedCount"],
            rec["suspectedCountIncr"],
            rec["curedCount"],
            rec["curedIncr"],
            rec["deadCount"],
            rec["deadIncr"],
            rec["highDangerCount"],
            rec["midDangerCount"],
        ])
    return rows
# 抓取本土风险划分数据
def get_localrisk_data():
    """Fetch domestic per-city risk-grade classification data.

    Returns:
        list of rows:
        [date, province, city, nowConfirm, confirm, confirm_add,
         heal, dead, grade]
    """
    url = 'https://view.inews.qq.com/g2/getOnsInfo?name=disease_other&callback=&_=%d' % int(time.time() * 1000)
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36'
    }
    # BUG FIX: requests.get's second positional argument is `params`,
    # not `headers` — pass the UA dict via the keyword argument so the
    # header is actually sent.
    r = requests.get(url, headers=headers)
    res = json.loads(r.text)
    # Payload is a JSON string nested inside the envelope's "data" field.
    data_all = json.loads(res["data"])
    locallist = []
    for local in data_all["statisGradeCityDetail"]:
        # 'syear' + 'm/d' -> 'YYYY-MM-DD' so MySQL accepts it as a DATE.
        tup = time.strptime(str(local["syear"]) + "/" + local["date"], "%Y/%m/%d")
        ds = time.strftime("%Y-%m-%d", tup)
        locallist.append([
            ds, local["province"], local["city"],
            local["nowConfirm"], local["confirm"], local["confirmAdd"],
            local["heal"], local["dead"], local["grade"],
        ])
    return locallist
#抓取全球各国以及美国各洲最新的数据
def get_global_country_latest_data():
url = 'https://view.inews.qq.com/g2/getOnsInfo?name=disease_foreign&callback=&_=%d' % int(time.time() * 1000)
url2 = "https://api.inews.qq.com/newsqa/v1/automation/foreign/country/ranklist"
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36'
}
s = requests.session()
s.keep_alive = False
requests.DEFAULT_RETRIES = 5
# 各国各城市数据
details = []
america = []
#获取美国数据
r1 = requests.get(url, headers)
res1 = json.loads(r1.text)
data_all = json.loads(res1["data"])
# 获取全球数据
r2 = requests.post(url=url2, headers=headers)
res2 = json.loads(r2.text)
# print(res["data"])
for infos in res2["data"]:
ds = infos["y"] + "." + infos["date"]
country = infos["name"]
continent = infos["continent"]
nowConfirm = infos["nowConfirm"]
没有合适的资源?快使用搜索试试~ 我知道了~
温馨提示
【资源说明】 1、该资源内项目代码都是经过测试运行成功,功能正常的情况下才上传的,请放心下载使用。 2、适用人群:主要针对计算机相关专业(如计科、信息安全、数据科学与大数据技术、人工智能、通信、物联网、数学、电子信息等)的同学或企业员工下载使用,具有较高的学习借鉴价值。 3、不仅适合小白学习实战练习,也可作为大作业、课程设计、毕设项目、初期项目立项演示等,欢迎下载,互相学习,共同进步! 基于Python+Flask+Echarts的全国疫情监控系统源码+项目说明(疫情数据收集通过网络爬虫技术爬取实时疫情、网站的搭建是基于Flask 框架,在此基础上搭配 Echarts 来将数据库中的数据映射成图表形式,实现数据可视化).zip
资源推荐
资源详情
资源评论
收起资源包目录
基于Python+Flask+Echarts的全国疫情监控系统源码+项目说明(疫情数据收集通过网络爬虫技术爬取实时疫情).zip (507个子文件)
common.css 10KB
vaccinations-news-data.csv 1.25MB
yq-news-data.csv 628KB
weiboComments.csv 248KB
cumulative-covid-vaccinations.csv 222KB
covid-vaccination-doses-per-capita.csv 208KB
people-vaccinated-covid.csv 200KB
share-people-vaccinated-covid.csv 188KB
people-fully-vaccinated-covid.csv 144KB
share-people-fully-vaccinated-covid.csv 135KB
weibo-words-data.csv 10KB
yq-words-data.csv 7KB
weibo-evaluation-results.csv 2KB
vaccinations-words-data.csv 709B
loading.gif 701B
demo.html 21KB
index.html 9KB
index.html 8KB
worldvaccine.html 7KB
global.html 6KB
america.html 5KB
worldtrend.html 2KB
americatrend.html 2KB
history.html 2KB
protrend.html 2KB
covid19-system.iml 742B
bg.jpg 252KB
world_new.js 1.21MB
world.js 987KB
echarts.min.js 951KB
echarts-gl.min.js 598KB
echarts-wordcloud3.min.js 125KB
china.js 117KB
xinjiang.js 87KB
sichuan.js 84KB
jquery.js 82KB
heilongjiang.js 78KB
guangdong.js 72KB
yunnan.js 62KB
neimenggu.js 58KB
moment.js 52KB
zhejiang.js 51KB
xizang.js 51KB
shandong.js 51KB
liaoning.js 50KB
chongqing.js 48KB
gansu.js 48KB
guangxi.js 47KB
hunan.js 46KB
qinghai.js 44KB
fujian.js 44KB
jilin.js 42KB
hebei.js 40KB
hubei.js 39KB
henan.js 37KB
america_charts.js 36KB
guizhou.js 33KB
jiangxi.js 33KB
shanxi1.js 32KB
area_charts.js 32KB
anhui.js 32KB
hainan.js 30KB
taiwan.js 30KB
vaccine_charts.js 29KB
jiangsu.js 24KB
shanxi.js 24KB
beijing.js 22KB
mapworld.js 20KB
ecStat.min.js 16KB
history_controller.js 16KB
echarts-wordcloud.min.js 16KB
world_charts.js 15KB
tianjin.js 14KB
ningxia.js 14KB
xianggang.js 13KB
shanghai.js 13KB
china-main-city-map.js 11KB
worldvaccine_controller.js 10KB
mapamerica.js 8KB
controller.js 8KB
layui.js 7KB
mapchina.js 7KB
america_controller.js 6KB
trend_controller.js 6KB
global_controller.js 6KB
aomen.js 3KB
mapvaccineworld.js 3KB
share.js 1KB
world-country-history.json 17.04MB
world-series.json 6.53MB
america-provinces-history.json 6.04MB
word-china.json 2.46MB
china-province-series.json 2.04MB
america-history.json 249KB
441000.json 220KB
500100.json 95KB
542300.json 94KB
150700.json 91KB
542400.json 88KB
USA.json 86KB
共 507 条
- 1
- 2
- 3
- 4
- 5
- 6
资源评论
龙年行大运
- 粉丝: 1155
- 资源: 3822
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
最新资源
资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
点击此处反馈
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功