import random
def getHtml(url):
    """POST to *url* and return the parsed page as a BeautifulSoup object.

    The target server sometimes drops the connection mid-crawl
    (http.client.RemoteDisconnected), apparently because it recognizes a
    repeated User-Agent as a crawler.  To work around that, a User-Agent
    is picked at random from a pool on every attempt, and the request is
    retried up to three times on failure.

    Args:
        url: Endpoint to POST the fixed column-listing payload to.

    Returns:
        BeautifulSoup: the document parsed with 'html.parser' on success.
        list: an empty list when all three attempts fail (kept for
            backward compatibility with existing callers).
    """
    user_agent_list = [
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
        "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
        "Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999",
        "Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)"
    ]
    # Form payload expected by the site's column-listing endpoint.
    # 'columnid' 1861 corresponds to the "key focus" (重点关注) column;
    # 'webname' is the percent-encoded site name ("济南市人民政府").
    data = {
        'col': 1,
        'webid': '1',
        'path': 'http://www.jinan.gov.cn/',
        'columnid': '1861',
        'sourceContentType': '1',
        'unitid': '543813',
        'webname': '%E6%B5%8E%E5%8D%97%E5%B8%82%E4%BA%BA%E6%B0%91%E6%94%BF%E5%BA%9C',
        'permissiontype': 0
    }
    # Retry up to three times on timeout / dropped connection.
    for _ in range(3):
        # Rotate the User-Agent on every attempt, so a header the server
        # has already flagged does not doom the remaining retries.
        header = {'User-Agent': random.choice(user_agent_list)}
        try:
            with requests.post(url=url, data=data, headers=header,
                               stream=True, timeout=20) as rep:
                # The page declares utf-8; set it explicitly so rep.text
                # is decoded correctly instead of coming out garbled.
                rep.encoding = 'utf-8'
                return BeautifulSoup(rep.text, 'html.parser')
        except (requests.exceptions.RequestException, ValueError):
            continue
    # All attempts failed; empty list preserved for existing callers.
    return []