1.加载cookies,加载headers,进行模拟登录
@加载headers的方法
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from fake_useragent import UserAgent
from selenium import webdriver
# Method 1: pick a random User-Agent per session via fake_useragent.
ua = UserAgent()
desired_capabilities = DesiredCapabilities.PHANTOMJS.copy()
desired_capabilities["phantomjs.page.settings.userAgent"] = ua.random
# NOTE(review): PhantomJS_Path is not defined anywhere in this snippet --
# it must be bound to the phantomjs executable path before this line runs.
driver = webdriver.PhantomJS(executable_path=PhantomJS_Path,desired_capabilities = desired_capabilities)
# Method 2: pin a fixed User-Agent string (here: mobile Chrome on Android).
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"]=( "Mozilla/5.0 (Linux; Android 5.1.1; Nexus 6 Build/LYZ28E) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.23 Mobile Safari/537.36" )
@等待时间
@睡眠等待(最不好的方法)
time.sleep(10)  # sleep() 必须传入秒数参数,例如固定等待 10 秒
@显式等待
显式等待是你在代码中定义等待一定条件发生后再进一步执行你的代码。 最糟糕的案例是使用time.sleep(),它将条件设置为等待一个确切的时间段。 这里有一些方便的方法让你只等待需要的时间。WebDriverWait结合ExpectedCondition 是实现的一种方式。
# Explicit wait: block until a named condition holds (or the 10 s timeout
# expires), rather than sleeping for a fixed period.
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

browser = webdriver.Firefox()
browser.get("http://somedomain/url_that_delays_loading")
try:
    # Poll until the element with this id is present in the DOM.
    waiter = WebDriverWait(browser, 10)
    element = waiter.until(
        EC.presence_of_element_located((By.ID, "myDynamicElement"))
    )
finally:
    # Always release the browser, even if the wait times out.
    browser.quit()
@隐式等待
隐式等待是在尝试发现某个元素的时候,如果没能立刻发现,就等待固定长度的时间。默认设置是0秒。一旦设置了隐式等待时间,它的作用范围就是Webdriver对象实例的整个生命周期。
# Implicit wait: whenever an element lookup misses, retry for up to 10 s.
# The setting lasts for the whole lifetime of this WebDriver instance.
from selenium import webdriver

browser = webdriver.Firefox()
browser.implicitly_wait(10)  # seconds
browser.get("http://somedomain/url_that_delays_loading")
myDynamicElement = browser.find_element_by_id("myDynamicElement")
发现一个小问题,知道了为啥我的selenium+PhantomJS,一直登录不上了,原因是因为我没加载完全就结束操作了,注意一定要设置好等待时间,等待加载。
在middleware内集成selenium
class intergration_selenium(object):
    """Scrapy downloader middleware that fetches lagou pages through Selenium.

    NOTE(review): the original spelling "intergration" is kept because
    external settings files may reference this class by name.
    """

    def __init__(self):
        # Build the driver once here. Creating it per-request would hand each
        # call a different window handle and a fresh, logged-out session.
        self.driver = self.driver_handle()
        self.count = 1  # screenshot counter (used by the commented-out debug code)

    def process_request(self, request, spider):
        """Render ``request.url`` with the shared driver for the lagou spider.

        Returning an HtmlResponse short-circuits Scrapy's own downloader;
        returning None (all other spiders) lets normal downloading proceed.
        """
        if spider.name != "lagou_BaseSpider":
            return None
        self.driver.get(request.url)
        # TODO(review): replace the fixed sleep with WebDriverWait on a
        # concrete element -- a flat 8 s is slow and still potentially racy.
        time.sleep(8)
        print('正在访问:', request.url)
        # self.driver.get_screenshot_as_file('{0}_.png'.format(self.count))
        # self.count += 1
        return HtmlResponse(
            url=self.driver.current_url,
            body=self.driver.page_source,
            encoding='utf-8',
            request=request,
        )

    def driver_handle(
        self,
        executable_path=r'E:\PhantomJS\phantomjs-2.1.1-windows\phantomjs-2.1.1-windows\bin\phantomjs.exe',
    ):
        """Log in to lagou with PhantomJS and return the authenticated driver.

        ``executable_path`` defaults to the original hard-coded location so
        existing callers are unaffected, but can now be overridden.
        """
        lagou_login_url = r'https://passport.lagou.com/login/login.html'
        desired_capabilities = DesiredCapabilities.PHANTOMJS.copy()
        ua = UserAgent()
        desired_capabilities["phantomjs.page.settings.userAgent"] = ua.random
        driver = webdriver.PhantomJS(
            executable_path=executable_path,
            desired_capabilities=desired_capabilities)
        driver.get(lagou_login_url)
        # send_keys() returns None, so the original `name = ...` and
        # `password = ...` assignments were dead stores; dropped here.
        driver.find_element_by_xpath('//input[@placeholder="请输入常用手机号/邮箱"]').send_keys('******')
        driver.find_element_by_xpath('//input[@placeholder="请输入密码"]').send_keys('******')
        driver.find_element_by_xpath('//input[@type="submit"]').click()
        # Give the post-login redirect time to finish before handing the
        # driver back; otherwise the session cookies may not be set yet.
        time.sleep(10)
        return driver
注意这句,导出4个值,分别为url,html,encoding,request,直接返回HtmlResponse可以令scrapy不再调用downloader
return HtmlResponse(url=self.driver.current_url,body=self.driver.page_source,encoding='utf-8',request=request)