scrapy中设置动态IP代理

在scrapy项目中写入一个脚本文件

思路:

1、建立def crawl_ips():方法,爬取西刺免费代理IP。

2、将获取的ip存放在数据库中,对其进行判断分析,剔除无效ip

3、在middlewares文件中获取保存好的有效ip

爬取ip代理脚本
import requests
from scrapy.selector import Selector
import MySQLdb

# Module-level MySQL connection and cursor, shared by crawl_ips() and GetIP.
# BUG FIX: the keyword argument was misspelled "charest"; MySQLdb.connect
# expects "charset" and raises on unknown keyword arguments.
conn = MySQLdb.connect(
    host="192.168.0.104",
    user="root",
    passwd="spiderliu",
    db="article_spider",
    charset="utf8",
)
cursor = conn.cursor()

# Set up the ip-proxy pool; an item-style model could also be used here.
def crawl_ips():
    """Crawl the xicidaili free-proxy listing pages and persist each proxy
    as (ip, port, proxy_type, speed) into the proxy_ip table.

    BUG FIXES vs. the original:
    - header key was "UserAgent"; the real HTTP header is "User-Agent"
    - the parse/insert code sat outside the page loop, so only the last
      fetched page was ever processed
    - the INSERT wrote the speed value into the proxy_type column and a
      literal string into speed; it also built SQL with str.format
      (injection-prone) — now a parameterized query in correct column order
    - `speed` was left unbound (NameError) when the speed cell was empty;
      it now defaults to 0.0
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36"
    }
    for i in range(3473):
        # Fetch one listing page via requests.
        re = requests.get("http://www.xicidaili.com/nn/{0}".format(i), headers=headers)
        # Parse the page with scrapy's Selector.
        selector = Selector(text=re.text)
        # All table rows; the first one is the header row, so skip it.
        all_trs = selector.css("#ip_list tr")
        ip_list = []
        for tr in all_trs[1:]:
            # Speed is rendered as "<seconds>秒" in the bar's title attribute.
            speed = 0.0
            speed_strs = tr.css(".bar::attr(title)").extract()
            if speed_strs and speed_strs[0]:
                speed = float(speed_strs[0].split("秒")[0])
            all_texts = tr.css("td::text").extract()
            ip = all_texts[0]
            port = all_texts[1]
            proxy_type = all_texts[5]
            ip_list.append((ip, port, proxy_type, speed))
        for ip_info in ip_list:
            # Parameterized INSERT: values bound by the driver, columns in order.
            cursor.execute(
                "insert into proxy_ip(ip,port,proxy_type,speed) VALUES(%s,%s,%s,%s)",
                (ip_info[0], ip_info[1], ip_info[2], ip_info[3]),
            )
            conn.commit()

class GetIP(object):
    """Serve a random *working* proxy from the proxy_ip table, validating
    candidates on the fly and pruning dead entries.

    Relies on the module-level ``conn``/``cursor`` MySQL handles.
    """

    def delete_ip(self, ip):
        """Remove an invalid ip from the database.

        BUG FIXES: ``commit()`` belongs to the connection, not the cursor
        (the original ``cursor.commit()`` raises AttributeError), and the
        DELETE is now parameterized instead of str.format-built.
        """
        cursor.execute("delete from proxy_ip where ip=%s", (ip,))
        conn.commit()
        return True

    # Validate a fetched ip/port pair.
    def judge_ip(self, ip, port):
        """Return True when the proxy answers a test request with a 2xx
        status; otherwise delete it from the table and return False.

        BUG FIXES: the test URL lacked a scheme ("www.baidu.com" makes
        requests raise MissingSchema before the proxy is even tried);
        ``self.delete_ip()`` was called without its required ``ip``
        argument (TypeError); and the request had no timeout, so a dead
        proxy could hang forever.
        """
        http_url = "http://www.baidu.com"
        proxy_url = "https://{0}:{1}".format(ip, port)
        try:
            proxy_dict = {
                "http": proxy_url,
                "https": proxy_url,
            }
            response = requests.get(http_url, proxies=proxy_dict, timeout=10)
        except Exception:
            print("无效ip和端口")
            self.delete_ip(ip)
            return False
        else:
            code = response.status_code
            if 200 <= code < 300:
                print("有效的ip")
                return True
            else:
                print("无效")
                self.delete_ip(ip)
                return False

    # Fetch one random proxy from the database.
    def get_random_ip(self):
        """Pick a random row; recurse until a working proxy is found.
        Returns ``None`` when the table is empty.

        BUG FIX: the column name was misspelled "prot" in the SELECT.
        """
        random_sql = """
            SELECT ip,port FROM proxy_ip
            ORDER BY RAND()
            LIMIT 1
        """
        cursor.execute(random_sql)
        for ip_info in cursor.fetchall():
            ip = ip_info[0]
            port = ip_info[1]
            if self.judge_ip(ip, port):
                return "http://{0}:{1}".format(ip, port)
            else:
                return self.get_random_ip()

#print(crawl_ips())
if __name__ == "__main__":
    # Ad-hoc manual check: pull one validated proxy from the database.
    GetIP().get_random_ip()

middlewares文件中获取脚本中的ip
class RandomProxyMiddleware(object):
    """Scrapy downloader middleware that routes every outgoing request
    through a randomly chosen, validated proxy."""

    def process_request(self, request, spider):
        # Attach a fresh proxy via the standard `proxy` request.meta key.
        proxy_source = GetIP()
        request.meta["proxy"] = proxy_source.get_random_ip()

猜你喜欢

转载自blog.csdn.net/lx516109011/article/details/83572910