# convenience wrapper for urllib2 & friends

import cookielib
import json
import urllib
import urllib2
import urlparse

# quote, HTTPError, and URLError are imported for re-export to calling code
from urllib import quote, quote_plus as _quote_plus
from urllib2 import HTTPError, URLError

from lxml import etree, html

# user-agent strings to identify as; some sites only cooperate with browsers
ua_skybot = 'Skybot/1.0 http://github.com/rmmh/skybot'
ua_firefox = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.6) ' \
             'Gecko/20070725 Firefox/2.0.0.6'
ua_internetexplorer = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'

# module-level cookie jar shared by every open(cookies=True) call
jar = cookielib.CookieJar()


def get(*args, **kwargs):
    return open(*args, **kwargs).read()
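
# usage sketch (URL illustrative, not from the original module): fetch a
# page body as a string; extra keyword arguments become query parameters.
#     body = get('http://example.com/', q='test')  # GET /?q=test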


def get_html(*args, **kwargs):
    return html.fromstring(get(*args, **kwargs))


def get_xml(*args, **kwargs):
    return etree.fromstring(get(*args, **kwargs))


def get_json(*args, **kwargs):
    return json.loads(get(*args, **kwargs))
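
# usage sketches (URLs illustrative): fetch and parse in one step.
#     doc = get_html('http://example.com/')            # lxml html element
#     links = doc.xpath('//a/@href')
#     feed = get_xml('http://example.com/feed.xml')    # lxml etree element
#     data = get_json('http://example.com/api', id=5)  # decoded json value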


def open(url, query_params=None, user_agent=None, referer=None, post_data=None,
         get_method=None, cookies=False, **kwargs):
    # note: shadows the builtin open() within this module
    if query_params is None:
        query_params = {}

    if user_agent is None:
        user_agent = ua_skybot

    # any extra keyword arguments become query parameters
    query_params.update(kwargs)

    url = prepare_url(url, query_params)

    request = urllib2.Request(url, post_data)

    if get_method is not None:
        # override the HTTP verb (e.g. 'HEAD' or 'DELETE')
        request.get_method = lambda: get_method

    request.add_header('User-Agent', user_agent)

    if referer is not None:
        request.add_header('Referer', referer)

    if cookies:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    else:
        opener = urllib2.build_opener()

    return opener.open(request)
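
# usage sketch (URL illustrative): returns a file-like response object.
#     resp = open('http://example.com/search', q='test', cookies=True)
#     body = resp.read()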


def prepare_url(url, queries):
    if queries:
        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)

        # merge the new parameters into any query string already on the url
        query = dict(urlparse.parse_qsl(query))
        query.update(queries)
        query = urllib.urlencode(dict((to_utf8(key), to_utf8(value))
                                      for key, value in query.iteritems()))

        url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))

    return url
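
# usage sketch (URL illustrative): new parameters are merged into any
# existing query string.
#     prepare_url('http://example.com/search?a=1', {'b': 2})
#     # -> 'http://example.com/search?a=1&b=2' (parameter order may vary)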


def to_utf8(s):
    if isinstance(s, unicode):
        return s.encode('utf8', 'ignore')
    else:
        return str(s)
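
# usage sketch: unicode is encoded to utf-8, anything else is stringified.
#     to_utf8(u'caf\xe9')  # -> 'caf\xc3\xa9'
#     to_utf8(5)           # -> '5'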


def quote_plus(s):
    # urllib.quote_plus that also tolerates unicode input
    return _quote_plus(to_utf8(s))
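
# usage sketch:
#     quote_plus(u'a b\xe9')  # -> 'a+b%C3%A9'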


def unescape(s):
    # text_content() strips tags and decodes entities; blank input would
    # make fromstring() raise, so pass it through unchanged
    if not s.strip():
        return s
    return html.fromstring(s).text_content()
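
# usage sketch: strips markup and decodes entities.
#     unescape('a &amp; b')  # -> 'a & b'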