from urllib.request import Request, ProxyHandler
from urllib.request import build_opener
from bs4 import BeautifulSoup
import redis

urlfront = "http://www.xicidaili.com"
url = "http://www.xicidaili.com/nn/1"
# Redis connection (not used below yet; kept for parity with the original script).
r = redis.Redis(host='127.0.0.1', port=6379, db=0)


# Fetch the whole page.
def get_allcode(url):
    """Fetch *url* through a hard-coded HTTPS proxy and return the page HTML.

    Returns:
        The response body decoded as UTF-8 (a ``str``).

    Raises:
        urllib.error.URLError: if the proxy or the target site is unreachable.
    """
    # Route the request through a proxy IP so the scrape hides our own address.
    proxy = {'https': '110.73.0.45:8123'}
    proxy_support = ProxyHandler(proxy)
    opener = build_opener(proxy_support)
    # Set an HTTP User-Agent header to mimic a real browser, so the site
    # does not reject the automated request.
    opener.addheaders = [
        ('User-agent',
         'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6')]
    # Use a context manager so the HTTP response is always closed
    # (the original leaked it and also shadowed the module-level redis `r`).
    with opener.open(url) as response:
        html = response.read().decode("UTF-8")
    return html


# Extract proxy entries from the HTML with BeautifulSoup.
def find_ip(s):
    """Parse the proxy-list HTML *s* and print each proxy as ``ip:port``.

    Assumes the xicidaili table layout: rows with class "odd", where
    cell 1 holds the IP and cell 2 holds the port — TODO confirm if the
    site markup changes.
    """
    soup = BeautifulSoup(s, 'html.parser')
    odd_rows = soup.find_all(name="tr", class_="odd")
    for row in odd_rows:
        cells = row.find_all("td")
        print("%s:%s" % (cells[1].get_text(), cells[2].get_text()))


if __name__ == "__main__":
    find_ip(get_allcode(url))