I am trying to create a method that will change the user agent used by urllib2.build_opener().
This is what I have so far:
Crawler.py
import urllib, urllib2, cookielib
from bs4 import BeautifulSoup
import urlopener
import re, os

class Crawler():
    def __init__(self):
        # Web site that contains all the browser headers
        self.url = 'http://somewebsite'
        self.opener = urlopener.opener()
        self.web_page = self.opener.open(self.url)
        self.soup = BeautifulSoup(self.web_page.read())

    def current_browser(self):
        try:
            web_page = self.opener.open(self.url)
            soup = BeautifulSoup(web_page.read())
            return soup.find(id='uas_textfeld').string
        except urllib2.HTTPError:
            print 'ERROR'
urlopener.py:
import cookielib
import urllib, urllib2
import linecache, random

cj = cookielib.CookieJar()
useragent = 'Mozilla/5.0 (BlackBerry; U; BlackBerry 9850; en-US) AppleWebKit/534.11+ (KHTML, like Gecko) Version/7.0.0.115 Mobile Safari/534.11+'

def opener():
    # Process handlers
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    opener.addheaders = [
        ('User-Agent', useragent),
        ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
        ('Accept-Language', 'en-gb,en;q=0.5'),
        ('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.7'),
        ('Keep-Alive', '115'),
        ('Connection', 'keep-alive'),
        ('Cache-Control', 'max-age=0'),
    ]
    return opener

# Randomly change browser
def browser_change(f_path):
    # f_path is a path to the file that contains browsers
    # To get the file, uncomment the next lines
    #c = Crawler()
    #c.get_to_the_mobile_browser_list()
    f = open(f_path, 'r+')
    count = 0
    for line in f.xreadlines(): count += 1
    br_num = random.randint(1, count)
    useragent = linecache.getline(f_path, br_num)
    return opener()
This is how I test Crawler.py:
c = Crawler()
print 'Current Browser :\n', c.current_browser()
f_path = '/home/vor/mob_brows.txt'
opener = urlopener.browser_change(f_path)  # The problem is right here!!!!!
b = Crawler()
print 'New Browser:\n', b.current_browser()
In my output, the current browser and the new browser are the same:
Current Browser :
Mozilla/5.0 (BlackBerry; U; BlackBerry 9850; en-US) AppleWebKit/534.11+ (KHTML, like Gecko) Version/7.0.0.115 Mobile Safari/534.11+
New Browser:
Mozilla/5.0 (BlackBerry; U; BlackBerry 9850; en-US) AppleWebKit/534.11+ (KHTML, like Gecko) Version/7.0.0.115 Mobile Safari/534.11+
The file mob_brows.txt contains lines like this:
Mozilla/5.0 (Linux; U; Android 2.3.3; zh-tw; HTC_Pyramid Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1
Mozilla/5.0 (Linux; U; Android 2.3.3; zh-tw; HTC_Pyramid Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari
Mozilla/5.0 (Linux; U; Android 2.3.3; zh-tw; HTC Pyramid Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1
Mozilla/5.0 (Linux; U; Android 2.3.3; ko-kr; LG-LU3000 Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1
Mozilla/5.0 (Linux; U; Android 2.3.3; en-us; HTC_DesireS_S510e Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile
Modify opener to accept the user agent as a parameter...
def opener(user_agent):
    # Process handlers
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    opener.addheaders = [
        ('User-Agent', user_agent),
        # snip...
    ]
    return opener
Then build a list of openers, each with a different user agent string...
# this could be nicer, but demonstrates the point
openers = [opener(agent) for agent in open('your_f_path')]
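Note that iterating over the file yields each line with a trailing newline, which would end up inside the User-Agent header. A minimal variant that strips it (assuming your_f_path is the same one-user-agent-per-line file as mob_brows.txt) could look like this:

# Strip trailing whitespace/newlines and skip blank lines
with open('your_f_path') as ua_file:
    openers = [opener(line.strip()) for line in ua_file if line.strip()]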
Then use choice from the random module to pick an opener wherever you currently assign self.opener = urlopener.opener() in the Crawler class.
from random import choice
use_to_open = choice(openers)
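One way to wire that into the class (a sketch, assuming you pass the chosen opener into Crawler instead of building it inside __init__):

from random import choice

class Crawler():
    def __init__(self, opener):
        # The opener (and therefore the User-Agent header) is chosen by the caller
        self.url = 'http://somewebsite'
        self.opener = opener
        self.web_page = self.opener.open(self.url)
        self.soup = BeautifulSoup(self.web_page.read())

# Each Crawler instance now gets a randomly chosen User-Agent
c = Crawler(choice(openers))

This avoids the problem in the original code, where reassigning useragent inside browser_change only rebinds a local variable and never changes what opener() uses.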
Your code is wrong, you will have to rework it.
This is how I made my web crawler:
https://github.com/mouuff/MouCrawler/blob/master/moucrawler.py