-import python2_compat
-
import hashlib
-import http.cookiejar
+import io
import json
import logging
import lxml.etree
import lxml.html
import os
import re
+import requests
+import requests_cache
import shutil
import signal
import subprocess
import time
import urllib.parse
-import urllib.request
try:
level = logging.INFO if os.environ.get("DEBUG", None) is None else logging.DEBUG,
)
-CACHE_DIR = os.path.join(
+CACHE_FILE = os.path.join(
os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache")),
- "webdl"
+ "webdl",
+ "requests_cache"
)
+os.makedirs(os.path.dirname(CACHE_FILE), exist_ok=True)
-USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:21.0) Gecko/20100101 Firefox/21.0"
+requests_cache.install_cache(CACHE_FILE, backend='sqlite', expire_after=3600)
class Node(object):
import sbs
sbs.fill_nodes(root_node)
- import brightcove
- brightcove.fill_nodes(root_node)
+ import ten
+ ten.fill_nodes(root_node)
return root_node
parts[0] = "http"
return urllib.parse.urlunparse(parts)
-cookiejar = http.cookiejar.CookieJar()
-urlopener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cookiejar))
-def _urlopen(url, referrer=None):
- url = ensure_scheme(url)
- req = urllib.request.Request(url)
- req.add_header("User-Agent", USER_AGENT)
- if referrer:
- req.add_header("Referer", referrer)
- return urlopener.open(req)
-
-def urlopen(url, max_age):
- logging.debug("urlopen(%r, %r)", url, max_age)
-
- if not os.path.isdir(CACHE_DIR):
- os.makedirs(CACHE_DIR)
-
- if max_age <= 0:
- return _urlopen(url)
-
- filename = hashlib.md5(url.encode("utf-8")).hexdigest()
- filename = os.path.join(CACHE_DIR, filename)
- if os.path.exists(filename):
- file_age = int(time.time()) - os.path.getmtime(filename)
- if file_age < max_age:
- logging.debug("loading from cache: %s", filename)
- return open(filename, "rb")
-
- logging.debug("downloading: %s -> %s", url, filename)
- src = _urlopen(url)
- dst = open(filename, "wb")
- try:
- shutil.copyfileobj(src, dst)
- except Exception as e:
- try:
- os.unlink(filename)
- except OSError:
- pass
- raise e
- src.close()
- dst.close()
-
- return open(filename, "rb")
-
-def grab_text(url, max_age):
- f = urlopen(url, max_age)
- text = f.read().decode("utf-8")
- f.close()
- return text
-
-def grab_html(url, max_age):
- f = urlopen(url, max_age)
- doc = lxml.html.parse(f, lxml.html.HTMLParser(encoding="utf-8", recover=True))
- f.close()
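+# One shared session so every fetch sends the same User-Agent and reuses connections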
+http_session = requests.Session()
+http_session.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:21.0) Gecko/20100101 Firefox/21.0"
+
+def grab_text(url):
+ logging.debug("grab_text(%r)", url)
+ request = http_session.prepare_request(requests.Request("GET", url))
+ response = http_session.send(request)
+ return response.text
+
+def grab_html(url):
+ logging.debug("grab_html(%r)", url)
+ request = http_session.prepare_request(requests.Request("GET", url))
+ response = http_session.send(request, stream=True)
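+    # Buffer the downloaded bytes in a file-like object for lxml's parser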
+ doc = lxml.html.parse(io.BytesIO(response.content), lxml.html.HTMLParser(encoding="utf-8", recover=True))
+ response.close()
return doc
-def grab_xml(url, max_age):
- f = urlopen(url, max_age)
- doc = lxml.etree.parse(f, lxml.etree.XMLParser(encoding="utf-8", recover=True))
- f.close()
+def grab_xml(url):
+ logging.debug("grab_xml(%r)", url)
+ request = http_session.prepare_request(requests.Request("GET", url))
+ response = http_session.send(request, stream=True)
+ doc = lxml.etree.parse(io.BytesIO(response.content), lxml.etree.XMLParser(encoding="utf-8", recover=True))
+ response.close()
return doc
-def grab_json(url, max_age, skip_assignment=False, skip_function=False):
- f = urlopen(url, max_age)
- text = f.read().decode("utf-8")
-
- if skip_assignment:
- pos = text.find("=")
- text = text[pos+1:]
-
- elif skip_function:
- pos = text.find("(")
- rpos = text.rfind(")")
- text = text[pos+1:rpos]
-
- doc = json.loads(text)
- f.close()
- return doc
+def grab_json(url):
+ logging.debug("grab_json(%r)", url)
+ request = http_session.prepare_request(requests.Request("GET", url))
+ response = http_session.send(request)
+ return response.json()
def exec_subprocess(cmd):
logging.debug("Executing: %s", cmd)
raise Exception("You must install ffmpeg or libav-tools")
+def find_streamlink():
+ for streamlink in ["streamlink", "livestreamer"]:
+ if check_command_exists([streamlink, "--help"]):
+ return streamlink
+
+ raise Exception("You must install streamlink or livestreamer")
+
def get_duration(filename):
ffprobe = find_ffprobe()
]
output = subprocess.check_output(cmd).decode("utf-8")
for line in output.split("\n"):
- if line.startswith("duration="):
- return float(line.split("=")[1]) # ffprobe
- if re.match(R'^[0-9.]*$', line):
- return float(line) # avprobe
+        m = re.search(R"([0-9]+)", line)
+        if m:
+            return int(m.group(1))
+
- raise Exception("Unable to determine video duration of " + filename)
+    logging.debug("Falling back to full decode to find duration: %s", filename)
+
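+    # Decode the stream to ffmpeg's null muxer and take the last "time=" progress stamp as the duration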
+ ffmpeg = find_ffmpeg()
+ cmd = [
+ ffmpeg,
+ "-i", filename,
+ "-vn",
+ "-f", "null", "-",
+ ]
+ output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")
+ duration = None
+ for line in re.split(R"[\r\n]", output):
+ m = re.search(R"time=([0-9:]*)\.", line)
+ if not m:
+ continue
+        hours, minutes, seconds = m.group(1).split(":")
+        # ffmpeg prints the duration as it reads the file, we want the last one
+        duration = int(hours) * 3600 + int(minutes) * 60 + int(seconds)
+
+ if duration:
+ return duration
+ else:
+ raise Exception("Unable to determine video duration of " + filename)
def check_video_durations(flv_filename, mp4_filename):
flv_duration = get_duration(flv_filename)
"-bsf:a", "aac_adtstoasc",
"-acodec", "copy",
"-vcodec", "copy",
+ "-y",
outfile,
]
if not exec_subprocess(cmd):
def download_hds(filename, video_url, pvswf=None):
+ streamlink = find_streamlink()
+
filename = sanify_filename(filename)
logging.info("Downloading: %s", filename)
param = video_url
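+    # -f makes streamlink overwrite any partially written output file from an earlier attempt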
cmd = [
- "livestreamer",
+ streamlink,
+ "-f",
"-o", filename,
param,
"best",
return False
def download_hls(filename, video_url):
+ streamlink = find_streamlink()
+
filename = sanify_filename(filename)
video_url = "hlsvariant://" + video_url
logging.info("Downloading: %s", filename)
cmd = [
- "livestreamer",
+ streamlink,
+ "-f",
+ "-o", filename,
+ video_url,
+ "best",
+ ]
+ if exec_subprocess(cmd):
+ return convert_to_mp4(filename)
+ else:
+ return False
+
+def download_mpd(filename, video_url):
+ streamlink = find_streamlink()
+
+ filename = sanify_filename(filename)
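+    # The dash:// prefix forces streamlink to treat the URL as an MPEG-DASH manifest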
+ video_url = "dash://" + video_url
+ logging.info("Downloading: %s", filename)
+
+ cmd = [
+ streamlink,
+ "-f",
"-o", filename,
video_url,
"best",