import random
import urllib.request

# Demo: making an HTTP request through an IP proxy with urllib.
#
# Local proxy (no authentication):
# proxy = {'http': '169.154.131.127:808'}
#
# Other proxies:
# proxy = {'http': '121.42.167.160:3128'}
#
# Authenticated proxy format: {'scheme': 'user:password@proxy_ip:port'}
proxy = {'http': 'User1:[email protected]:808'}  # with password

# Example of picking a random proxy from a pool:
# proxy_list = [{}, {}, {}]
# proxy = random.choice(proxy_list)

# Build an opener that routes requests through the proxy.
proxy_handler = urllib.request.ProxyHandler(proxy)
opener = urllib.request.build_opener(proxy_handler)

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36"}
url = 'http://www.baidu.com/'
req = urllib.request.Request(url, headers=headers)
# BUG FIX: the original called urllib.request.urlopen(req), which uses the
# global default opener and silently ignores the proxy configured above.
# The request must go through the custom opener for the proxy to be used.
response = opener.open(req)
print(response.read().decode('utf-8'))
Web Scraping (14): Using an IP Proxy
猜你喜欢
Reposted from blog.csdn.net/yx1179109710/article/details/80920704
今日推荐
周排行