-cookiejar = cookielib.CookieJar()
-urlopener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar))
-def _urlopen(url, referrer=None):
- req = urllib2.Request(url)
- req.add_header("User-Agent", USER_AGENT)
- if referrer:
- req.add_header("Referer", referrer)
- return urlopener.open(req)
-
-def urlopen(url, max_age):
-### print url
- if not os.path.isdir(CACHE_DIR):
- os.makedirs(CACHE_DIR)
-
- if max_age <= 0:
- return _urlopen(url)
-
- filename = hashlib.md5(url).hexdigest()
- filename = os.path.join(CACHE_DIR, filename)
- if os.path.exists(filename):
- file_age = int(time.time()) - os.path.getmtime(filename)
- if file_age < max_age:
- return open(filename)
-
- src = _urlopen(url)
- dst = open(filename, "wb")
- try:
- shutil.copyfileobj(src, dst)
- except Exception, e:
- try:
- os.unlink(filename)
- except OSError:
- pass
- raise e
- src.close()
- dst.close()
-
- return open(filename)
-
-def grab_text(url, max_age):
- f = urlopen(url, max_age)
- text = f.read().decode("utf-8")
- f.close()
- return text
-
-def grab_html(url, max_age):
- f = urlopen(url, max_age)
- doc = html.parse(f, html.HTMLParser(encoding="utf-8", recover=True))
- f.close()
def ensure_scheme(url):
    """Return *url* unchanged if it already has a scheme, else default to http.

    The previous urlparse/urlunparse round-trip dropped the ``//``
    authority separator for bare URLs: ``"example.com/x"`` parses with
    ``netloc=""`` and ``path="example.com/x"``, and ``urlunparse`` only
    emits ``//`` when a netloc is present, yielding the broken
    ``"http:example.com/x"``.  Prepending the scheme textually avoids
    that lossy round-trip.
    """
    parts = urllib.parse.urlparse(url)
    if parts.scheme:
        return url
    # Scheme-relative URLs ("//host/path") already carry the separator.
    if url.startswith("//"):
        return "http:" + url
    return "http://" + url
+
# One shared Session so cookies and pooled connections persist across
# grab_text()/grab_html() calls.
http_session = requests.Session()
# Present a mainstream browser User-Agent; some sites serve different
# (or no) content to unrecognised clients.
http_session.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:21.0) Gecko/20100101 Firefox/21.0"
+
def grab_text(url):
    """Fetch *url* via the shared session and return the body as decoded text."""
    logging.debug("grab_text(%r)", url)
    req = requests.Request("GET", url)
    prepared = http_session.prepare_request(req)
    resp = http_session.send(prepared)
    return resp.text
+
def grab_html(url):
    """Fetch *url* via the shared session and parse the body as HTML.

    NOTE(review): the visible chunk ends right after ``response.close()``;
    the ``return doc`` presumably follows past this view — confirm.
    """
    logging.debug("grab_html(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    # stream=True defers the body download so lxml can read straight from
    # the raw socket file object (response.raw) below.
    response = http_session.send(request, stream=True)
    # recover=True lets lxml tolerate the malformed markup common on real
    # pages; the body is assumed to be UTF-8 — TODO confirm upstream.
    doc = lxml.html.parse(response.raw, lxml.html.HTMLParser(encoding="utf-8", recover=True))
    response.close()