Python: download proxies from xicidaili and check their validity with coroutines


Checking whether proxy IPs are valid, using coroutines

from gevent import monkey
monkey.patch_all()   # patch the standard library before requests is imported

import random
import time

import gevent
import requests
from bs4 import BeautifulSoup
from gevent.queue import Queue

work = Queue()    # proxies waiting to be tested
youxiao_ip = []   # proxies that passed the check ("youxiao" = valid)

def ipput(proxies):
    work.put_nowait(proxies)

def crawler():
    # Each worker keeps pulling proxies off the queue until it is empty.
    while not work.empty():
        proxies = work.get_nowait()
        testip(proxies)

def spiders():
    tasks_list = []
    for x in range(4):  # spawn 4 worker coroutines
        task = gevent.spawn(crawler)
        tasks_list.append(task)
    gevent.joinall(tasks_list)

url = 'http://www.xicidaili.com/nn/'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
}

def get_ip_list(url, headers):
    # Scrape the first page of the high-anonymity list; each table row yields "ip:port".
    web_data = requests.get(url, headers=headers)
    soup = BeautifulSoup(web_data.text, 'lxml')
    ips = soup.find_all('tr')
    ip_list = []
    for i in range(1, len(ips)):   # skip the header row
        tds = ips[i].find_all('td')
        if len(tds) >= 3:          # guard against rows without IP/port cells
            ip_list.append(tds[1].text + ':' + tds[2].text)
    return ip_list

def get_random_ip(ip_list):
    # Enqueue every candidate so the worker coroutines have a full queue to
    # drain, then return one proxy chosen at random.
    proxy_list = []
    for ip in ip_list:
        proxies = {'http': 'http://' + ip}
        proxy_list.append(proxies)
        ipput(proxies)
    return random.choice(proxy_list)

def testip(proxies):
    headers = {
        # Not strictly required for this check; included only for demonstration.
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
    }
    try:
        # icanhazip.com echoes back the IP it sees, so a successful response
        # also shows which address the proxy exposes.
        res = requests.get(url='http://icanhazip.com/', timeout=8, proxies=proxies, headers=headers)
        bs = BeautifulSoup(res.text, 'html.parser')
        print(proxies, 'valid proxy', res.status_code, bs)
        if proxies not in youxiao_ip:
            youxiao_ip.append(proxies)
        return proxies
    except requests.RequestException:
        print(proxies, 'invalid proxy', time.localtime())

if __name__ == '__main__':
    # get_ip_list(url, headers) returns a list of strings like "42.84.226.65:8888",
    # one for every proxy on the first page of the xicidaili high-anonymity list.
    ip_list = get_ip_list(url, headers=headers)
    proxies = get_random_ip(ip_list)   # also fills the work queue
    print('Start checking proxy IPs')
    spiders()
    print('\n\nValid proxies:')
    for ip in youxiao_ip:
        print(ip)
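After spiders() returns, youxiao_ip holds every proxy that answered within the timeout. Below is a minimal sketch of how that pool might be consumed afterwards; fetch_via_proxy and the retry count are illustrative assumptions, not part of the script above:

import random
import requests

def fetch_via_proxy(target_url, valid_proxies, retries=3):
    # Try up to `retries` random proxies from the validated pool; a proxy can
    # die between validation and use, so each failure just moves on to the next.
    for _ in range(retries):
        proxies = random.choice(valid_proxies)
        try:
            return requests.get(target_url, proxies=proxies, timeout=8)
        except requests.RequestException:
            continue
    return None

res = fetch_via_proxy('http://icanhazip.com/', youxiao_ip)
if res is not None:
    print(res.text)

Picking a random proxy per request spreads the load across the pool; a round-robin over youxiao_ip would work just as well.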