爬虫-斗鱼直播（Web scraper for Douyu live-stream listings）

from selenium import webdriver
import time
# lxml is used by parse_html() below to run XPath queries over the page source.
from lxml import etree

# Shared headless browser used by every function in this script.
# NOTE(review): PhantomJS is deprecated and removed from Selenium 4+; the
# executable path is machine-specific ("老师发的文件" = course material folder)
# and will need adjusting on any other machine.
driver = webdriver.PhantomJS(executable_path=r'F:\老师发的文件\系统班第四阶段\day923\phantomjs-2.1.1-windows\bin\phantomjs.exe')
def request_html(url):
    """Load *url* in the shared PhantomJS driver and dump artifacts to disk.

    Side effects only (no return value): saves a screenshot to ``douyu.png``
    and the rendered page source to ``douyu.html`` in the working directory.

    :param url: page to navigate to.
    """
    driver.get(url=url)
    # Crude wait for the JS-rendered room list; a WebDriverWait on the list
    # container would be more reliable, but 3s matches the original behavior.
    time.sleep(3)
    driver.save_screenshot('douyu.png')
    with open('douyu.html', 'w', encoding='utf-8') as f:
        f.write(driver.page_source)


def _first_text(node, xp):
    """Return the first text hit of XPath *xp* under *node*, or '' if absent.

    ``xpath()`` returns a list; the original code indexed ``[0]`` blindly and
    crashed with IndexError whenever Douyu's markup changed for one room card.
    """
    hits = node.xpath(xp)
    return hits[0] if hits else ''


def parse_html(response):
    """Parse one page of Douyu's room list and print each room's info.

    :param response: HTML text (typically ``driver.page_source``).
    :return: the parsed lxml element tree, so the caller can inspect
             pagination state (the "下一页" button) on the same tree.
    """
    tree = etree.HTML(response)
    li_list = tree.xpath('//div[@class="layout-Module-container layout-Cover ListContent"]/ul/li')
    for li in li_list:
        # 'category' instead of 'type' — the original shadowed the builtin.
        category = _first_text(li, './/div[@class="DyListCover-content"]/div[1]/span/text()')
        title = _first_text(li, './/div[@class="DyListCover-content"]/div[1]/h3/text()')
        hot = _first_text(li, './/div[@class="DyListCover-content"]/div[2]/span/text()')
        username = _first_text(li, './/div[@class="DyListCover-content"]/div[2]/h2/text()')
        print(category, '--', title, '--', hot, '--', username)
    return tree
if __name__ == '__main__':
    url = 'https://www.douyu.com/directory/all'
    # Establish the connection and dump the first page's artifacts.
    request_html(url)
    # Fetch and parse the first page of results.
    response = driver.page_source
    tree = parse_html(response)
    page = 1
    try:
        # Click "next page" until the pager reports the last page.
        while True:
            print('==============={}==================='.format(page))
            # Guard the lookup: if the pager is missing entirely, stop rather
            # than crash with IndexError (the original indexed [0] blindly).
            disabled = tree.xpath('//li[@title="下一页"]/@aria-disabled')
            if not disabled or disabled[0] == 'true':
                break
            # BUG FIX: the original passed ' dy-Pagination-next' with a
            # leading space; the class-name locator requires a bare single
            # class name and rejects compound/space-containing values.
            driver.find_element_by_class_name('dy-Pagination-next').click()
            time.sleep(3)  # wait for the next page to render
            response = driver.page_source
            tree = parse_html(response)
            page += 1
    finally:
        # Always release the PhantomJS process, even if a click/parse fails;
        # the original leaked the browser process on every run.
        driver.quit()


本文来自互联网用户投稿,文章观点仅代表作者本人,不代表本站立场,不承担相关法律责任。如若转载,请注明出处。 如若内容造成侵权/违法违规/事实不符,请点击【内容举报】进行投诉反馈!

相关文章

立即
投稿

微信公众账号

微信扫一扫加关注

返回
顶部