Analysis of historical weather data collected with Python

Time: 2021-09-26 06:51:30

The goal is to analyze the trend of historical weather.

The data has to be collected first (a small plotting sketch follows the collection script below).

Code:
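
The script below inserts into a MySQL table named weather. The original article does not show the table definition, so here is a minimal assumed sketch that matches the column names used in the INSERT statement (the column types and the id column are guesses, not from the original):

# -*- coding: utf-8 -*-
# Assumed schema sketch: column names follow the INSERT statement in the
# collection script below; the types are assumptions.
import MySQLdb

db = MySQLdb.connect('localhost','root','liao1234','liao',charset='utf8')
cursor = db.cursor()
cursor.execute("""
    create table if not exists weather(
        id int auto_increment primary key,
        old_date varchar(20),
        hight varchar(10),
        low varchar(10),
        weather varchar(50),
        wind varchar(50),
        wind_power varchar(50)
    ) default charset=utf8
""")
db.commit()
db.close()

With the table in place, the collection script itself: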

#-*- coding:utf-8 -*-
# Scrape historical weather pages from lishi.tianqi.com, then save the rows
# to a MySQL table and an Excel workbook.
import requests
import random
import MySQLdb
import xlwt
from bs4 import BeautifulSoup
user_agent=['Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.87 Safari/537.36',
    'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
    'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)',
    ]
# Request headers; a random User-Agent from the pool above is picked for each run.
headers={
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate, sdch',
'Accept-Language': 'zh-CN,zh;q=0.8',
'User-Agent': user_agent[random.randint(0,5)]}
 
# Excel workbook for the exported data; row 0 holds the column headers.
myfile=xlwt.Workbook()
wtable=myfile.add_sheet(u"历史天气",cell_overwrite_ok=True)
wtable.write(0,0,u"日期")
wtable.write(0,1,u"最高温度")
wtable.write(0,2,u"最低温度")
wtable.write(0,3,u"天气")
wtable.write(0,4,u"风向")
wtable.write(0,5,u"风力")
 
db = MySQLdb.connect('localhost','root','liao1234','liao',charset='utf8')
cursor = db.cursor()
 
index = requests.get("http://lishi.tianqi.com/binjianqu/index.html",headers=headers)
html_index = index.text
index_soup = BeautifulSoup(html_index,"html.parser")
i = 1
# Each link on the index page points to one month of historical data.
for href in index_soup.find("div",class_="tqtongji1").find_all("a"):
  print(href.attrs["href"])
 
 
  url = href.attrs["href"]
  r = requests.get(url,headers = headers)
  html = r.text
  #print html
  soup = BeautifulSoup(html,"html.parser")
  ss = []
  s = []
  # Each <li> in the statistics table is one cell; every six cells form one day's row.
  for tag in soup.find("div",class_="tqtongji2").find_all("li"):
    print(tag.string)
    s.append(tag.string)
    if len(s) == 6:
      ss.append(s)
      s = []
  flag = 0
  for s in ss:
    if flag == 0:
      # The first row is the table header; skip it.
      flag = 1
      continue
    else:
      sql = "insert into weather(old_date,hight,low,weather,wind,wind_power) values('%s','%s','%s','%s','%s','%s')"%(s[0],s[1],s[2],s[3],s[4],s[5])
      cursor.execute(sql)
      wtable.write(i,0,s[0])
      wtable.write(i,1,s[1])
      wtable.write(i,2,s[2])
      wtable.write(i,3,s[3])
      wtable.write(i,4,s[4])
      wtable.write(i,5,s[5])
      i += 1
myfile.save("weather.xls")
db.commit()
db.close()
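
With the data collected, the trend analysis itself can be as simple as plotting the daily highs and lows. Below is a minimal sketch, assuming pandas, xlrd and matplotlib are installed and that weather.xls was produced by the script above (none of this code appears in the original article):

# -*- coding: utf-8 -*-
# Assumed analysis sketch: read the exported workbook and plot the temperature trend.
import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_excel("weather.xls", sheet_name=u"历史天气")

# The scraped cells are text; pull out the leading number and parse the dates defensively.
for col in (u"最高温度", u"最低温度"):
    df[col] = pd.to_numeric(df[col].astype(str).str.extract(r"(-?\d+)")[0], errors="coerce")
df[u"日期"] = pd.to_datetime(df[u"日期"], errors="coerce")
df = df.sort_values(u"日期")

plt.plot(df[u"日期"], df[u"最高温度"], label="daily high")
plt.plot(df[u"日期"], df[u"最低温度"], label="daily low")
plt.legend()
plt.xlabel("date")
plt.ylabel("temperature (C)")
plt.show()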

That is all the content of this article on analyzing historical weather data collected with Python. I hope it gives you a useful reference, and I hope you will continue to support 服务器之家.

Original article: https://blog.csdn.net/qq1124794084/article/details/54174340