基于爬虫的快手网页版批量取关操作。
因为以前玩快手,看到有趣的就关注,所以我的账号积累了将近一千个关注。而快手的网页版和移动版都不支持批量取关,只能一个一个取关,很是麻烦。所以我用刚学的爬虫,爬取了我的关注列表,每次可以批量取关 30 个。(因为关注列表的更新机制是 Ajax 请求,每一页都需要动态加载。)代码如下。
import requests
import json
def un_follow(touid):
    """Unfollow a single user via Kuaishou's visionFollow GraphQL mutation.

    Args:
        touid: id of the user to unfollow (ftype=2 / followSource=1 are the
            parameters the web client sends for an unfollow action).

    Returns:
        The decoded JSON response from the server.

    Relies on module-level ``url`` and ``headers``. Prints a status line.
    """
    # Renamed from `json` — the original shadowed the stdlib json module
    # inside this function.
    payload = {
        "operationName": "visionFollow",
        "query": "此处填写自己专属的query",
        "variables": {"touid": touid, "ftype": 2, "followSource": 1},
    }
    response = requests.post(url=url, headers=headers, json=payload)
    json_data = response.json()
    # Only claim success when the HTTP status is OK and the GraphQL
    # response carries no errors; the original printed unconditionally.
    if response.ok and not json_data.get("errors"):
        print('取关成功')
    else:
        print('取关失败:', json_data)
    return json_data
def get_id():
    """Fetch one page of the account's followed users and return their ids.

    Posts a visionProfileUserList GraphQL query (first page: empty
    ``pcursor``) using module-level ``url`` and ``headers``.

    Returns:
        list of user ids (one page, ~30 entries) extracted from the
        ``data.visionProfileUserList.fols`` array of the response.
    """
    payload = {
        "operationName": "visionProfileUserList",
        "query": "此处填写自己专属的query",
        "variables": {"ftype": 1, "pcursor": ""},
    }
    # Pass json= and let requests serialize the body — consistent with
    # un_follow; the original json.dumps()'d by hand and used data=.
    response = requests.post(url=url, headers=headers, json=payload)
    data_json = response.json()
    fols = data_json['data']['visionProfileUserList']['fols']
    # Comprehension replaces the manual append loop.
    return [fol['user_id'] for fol in fols]
# Shared GraphQL endpoint used by both un_follow() and get_id().
url = 'https://www.kuaishou.com/graphql'
headers = {
    "Content-Type": "application/json",
    # BUG FIX: the original dict literal was missing the comma after the
    # Cookie entry, which is a SyntaxError in Python.
    "Cookie": "此处填写自己账号专属的cookie",
    "Origin": "https://www.kuaishou.com",
    "Referer": "https://www.kuaishou.com/profile/3xigqm5vbacpwfk",
    "User-Agent": "此处填写专属的UA",
}

# Fetch one page of followed users, then unfollow each of them.
user_ids = get_id()
print(user_ids)
for user in user_ids:
    un_follow(user)