Crawling Zhihu
Crawling Zhihu articles
import requests
import re
# save_data is a self-defined helper that stores the scraped data in a SQLite database
from zhihu import save_data

if __name__ == '__main__':
    # Required request headers: the cookie and user-agent come from a logged-in browser session
    headers = {
        "cookie": '_zap=8d6f3b3d-0e65-4d7f-a873-ece51a405d4f; _xsrf=ZxTB6ynFDnNlfAkRnMPkNAt1VNdGPS5M; d_c0="AEAaOr'
                  'TkxRKPToKa9y0yLTdMvLytAxnqclg=|1615298044"; tst=r; q_c1=2a801a3d647941989180bd24d6f7253a|16152'
                  '98578000|1615298578000; captcha_ticket_v2="2|1:0|10:1615299659|17:captcha_ticket_v2|244:eyJhcH'
                  'BpZCI6IjIwMTIwMzEzMTQiLCJyZXQiOjAsInRpY2tldCI6InQwM1pGZ19JSlJlaFhnUTN4bVBhX09ndmJ3ODlvd0xFWGxU'
                  'SERqZ1NXUUlKVVNkMldNNjY3T1Z0dHlLbHlvZldsN2YyQWlMb0VmSi1lX0dIb3pSQnE5RXprajF3RmpqUkxQcVRyY1M1b1U'
                  '3WkhFZUdNUE1nT3FldEEqKiIsInJhbmRzdHIiOiJAaVZqIn0=|73e944fcfb21ef59611eb4cd77caf8cbc500177e86770'
                  '4e90d7e38912fe2fef2"; r_cap_id="Y2U1ZWQzNGI0Njc3NDg2OGJjODRmMDQ2NzY5MWM2NjA=|1615299664|48a559f'
                  'f5ca92e6585ec0fc9d1b4ce9b1657caab"; cap_id="OTVjNzlkMWU4ZmI2NDUxM2IzYmI3NGVkNTRjMTdiNzU=|161529'
                  '9664|8760658a078bc63c5854c454d4cfad7f3927f79e"; l_cap_id="ZDk5MGRhZjZkZTc1NGY0OTk2N2E2ZjU3ODAyM'
                  'zFkMzg=|1615299664|efe74ed0e1fe56324622f70916c5a0cd60141867"; captcha_session_v2="2|1:0|10:16152'
                  '99682|18:captcha_session_v2|88:V0RSU0c1T2RiRmlTbklKRGxYU1JRNXREWUNMTDlVdGZ3NmF0ZVJ6Y29jWlA5U2ky'
                  'UHJTd1I1dW1BMHRkalpTYg==|5a14058529bed60459f5d318b4e97cce6e10fcf5502e0ef38d11fbcd586e2803"; z_c'
                  '0="2|1:0|10:1615299734|4:z_c0|92:Mi4xT3VESUdRQUFBQUFBUUJvNnRPVEZFaWNBQUFDRUFsVk5sUkZ2WUFBOU1WclF'
                  '2a2RMMUxZYWRiSDFoaXNuMWhXZmlB|affee2d55ed7677781e8fd764eac197039b2e9cc1e9a893ca7070ff92478b522"; '
                  'Hm_lvt_98beee57fd2ef70ccdd5ca52b9740c49=1615298045,1615299638,1615343424,1615462348; Hm_lpvt_98b'
                  'eee57fd2ef70ccdd5ca52b9740c49=1615462348; SESSIONID=YiBh2urZh9QBAtNAVh8vcAoYhyWIZP6FIpI6DHOdmtc;'
                  ' JOID=UVsSAUj_6hLXtYmQCv96jAIFbFUcpodVvof85lWX00eQ9f3eTzcUX7a1jZEOMkkFC_vvfkH_R1NBtHA5O_hRUWg=; '
                  'osd=VlodAUj46x3XtY6RBf96iwMKbFUbp4hVvoD96VWX1Eaf9f3ZTjgUX7G0gpEONUgKC_vof07_R1RAu3A5PPleUWg=; KL'
                  'BRSID=57358d62405ef24305120316801fd92a|1615462354|1615462345',
        "user-agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
                      '(KHTML, like Gecko) Chrome/89.0.4389.82 Safari/537.36',
    }
    # URL of the recommendation-feed API to request
    url = 'https://www.zhihu.com/api/v3/feed/topstory/recommend?session_token=02e86205df19ff3b4949dd184dfa456e&desktop=' \
          'true&page_number=3&limit=6&action=down&after_id=11&ad_interval=-1'
    # Fetch the response and decode it as JSON
    response = requests.get(url=url, headers=headers).json()
    # Pull the entries out of the JSON
    data = response['data']
    author = list()
    content = list()
    title = list()
    for dic in data:
        # Videos and articles sit at different places in the JSON, so on a
        # KeyError just skip the entry and move on to the next one
        try:
            author_copy = dic['target']['question']['author']['name']
            title_copy = dic['target']['question']['title']
            content_copy = dic['target']['content']
            author.append(author_copy)
            title.append(title_copy)
            content.append(content_copy)
        except KeyError:
            pass
    # The content field still carries plenty of HTML tags; this regex keeps
    # only the Chinese characters and Chinese punctuation
    p = re.compile(r'[\u3002\uff1b\uff0c\uff1a\u201c\u201d\uff08\uff09\u3001\uff1f\u300a\u300b\u4e00-\u9fa5]', re.M)
    content1 = list()
    for i in content:
        content_sum = p.findall(i)
        content_sum = ''.join(content_sum)
        content1.append(content_sum)
    # Hand the data over to the database
    for i in range(len(content1)):
        save_data(author[i], title[i], content1[i])
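The regex step is worth seeing in isolation: the character class matches common full-width punctuation plus the CJK Unified Ideographs range \u4e00-\u9fa5, so HTML tags, Latin text, and digits all disappear. A minimal sketch of the same filter (the sample HTML string is made up for illustration):

import re

# Same character class as in the scraper above
p = re.compile(r'[\u3002\uff1b\uff0c\uff1a\u201c\u201d\uff08\uff09\u3001\uff1f\u300a\u300b\u4e00-\u9fa5]', re.M)

sample = '<p>你好，<b>世界</b>！This part is dropped.</p>'
# findall returns the matching characters one by one; join them back together
print(''.join(p.findall(sample)))  # prints 你好，世界 (the full-width ！ is not in the class)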
Below is the method for saving the data:
import sqlite3

# A method that saves one scraped record to the database -- study it and adapt it to your own needs
def save_data(name, title, text):
    # Connect to the database (data.db is created on first use)
    conn = sqlite3.connect('data.db')
    # Create the table on the first run so the insert below cannot fail
    conn.execute("create table if not exists zhihu(name text, title text, text text)")
    # Insert the record; a parameterised query will not break on quotes inside the content
    conn.execute("insert into zhihu(name, title, text) values (?, ?, ?)", (name, title, text))
    conn.commit()
    conn.close()
    print("article by %s saved successfully" % name)
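A quick way to confirm that rows really land in data.db is to save one record and read everything back. A minimal sketch, assuming save_data lives in zhihu.py as the main script above expects (the sample strings are placeholders):

import sqlite3
from zhihu import save_data

# Save one record, then read the whole table back
save_data('某作者', '某标题', '这里是正文')

conn = sqlite3.connect('data.db')
for row in conn.execute("select name, title, text from zhihu"):
    print(row)
conn.close()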
