最近爬虫项目接触得多,自己整理一下requests的接口
基本用法
- requests.get(url, params=params, headers=headers)
  get请求,参数字典通过params上传(拼接到URL查询字符串,不是data)
- requests.post(url, data=data, headers=headers)
  post请求,参数字典通过data作为请求体上传
- 请求返回值 Response Content
# Send a GET request and read the response body as decoded text.
resp = requests.get('https://api.github.com/events')
print(resp.text)
- 改变返回文本编码(赋值即可,如 response.encoding = 'utf-8')
- 获得二进制返回内容
from io import BytesIO

from PIL import Image

# Read the response body as raw bytes, then build an image object
# from the in-memory byte stream with Pillow.
resp = requests.get('https://api.github.com/events')
print(resp.content)
img = Image.open(BytesIO(resp.content))
- 获得原始返回内容
# Get the raw urllib3 response object. stream=True is REQUIRED here:
# without it requests consumes the body eagerly and raw.read() returns
# nothing useful.
response = requests.get('https://api.github.com/events', stream=True)
print(response.raw)           # urllib3 HTTPResponse object
print(response.raw.read(10))  # first 10 raw (undecoded) bytes
- 上传一个Multipart-Encoded文件
# Upload a multipart-encoded file. Open the file inside a `with` block
# so the handle is closed even if the request raises (the original
# leaked the open file object).
url = 'https://httpbin.org/post'
with open('report.xls', 'rb') as f:
    files = {'file': f}
    r = requests.post(url, files=files)
- 状态码
# Inspect the numeric HTTP status code of a response.
resp = requests.get('https://httpbin.org/get')
resp.status_code
- 抛出异常状态码
response.raise_for_status()
- 获得存储的cookies
# Read a cookie the server set on this response via the cookies jar.
r = requests.get(url)
r.cookies['example_cookie_name']
- 发送cookies
# Send cookies with a request via the `cookies` keyword argument.
url = 'https://httpbin.org/cookies'
r = requests.get(url, cookies={'cookies_are': 'working'})
- 更精准的使用cookies
# Fine-grained cookie control with RequestsCookieJar: each cookie is
# scoped to a domain and path, and only cookies matching the request
# URL are actually sent.
cookie_jar = requests.cookies.RequestsCookieJar()
cookie_jar.set('tasty_cookie', 'yum', domain='httpbin.org', path='/cookies')
cookie_jar.set('gross_cookie', 'blech', domain='httpbin.org', path='/elsewhere')
resp = requests.get('https://httpbin.org/cookies', cookies=cookie_jar)
resp.text