# webdl - common.py
import python2_compat

import hashlib
import http.cookiejar
import json
import logging
import lxml.etree
import lxml.html
import os
import re
import shutil
import signal
import subprocess
import time
import urllib.parse
import urllib.request


try:
    import autosocks
    autosocks.try_autosocks()
except ImportError:
    pass


logging.basicConfig(
    format = "%(levelname)s %(message)s",
    level = logging.INFO if os.environ.get("DEBUG", None) is None else logging.DEBUG,
)

CACHE_DIR = os.path.join(
    os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache")),
    "webdl"
)

USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:21.0) Gecko/20100101 Firefox/21.0"


class Node(object):
    def __init__(self, title, parent=None):
        self.title = title
        if parent:
            parent.children.append(self)
        self.parent = parent
        self.children = []
        self.can_download = False

    def get_children(self):
        if not self.children:
            self.fill_children()
        return self.children

    def fill_children(self):
        pass

    def download(self):
        raise NotImplementedError()
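
# A minimal sketch of how a site module might extend Node (this
# EpisodeNode is hypothetical; the real subclasses live in iview.py,
# sbs.py and friends):
#
#   class EpisodeNode(Node):
#       def __init__(self, title, parent, video_url):
#           Node.__init__(self, title, parent)
#           self.can_download = True
#           self.video_url = video_url
#
#       def download(self):
#           return download_hls(self.title + ".ts", self.video_url)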


def load_root_node():
    root_node = Node("Root")

    import iview
    iview.fill_nodes(root_node)

    import sbs
    sbs.fill_nodes(root_node)

    import brightcove
    brightcove.fill_nodes(root_node)

    return root_node
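
# Usage sketch, assuming the tree is consumed with a depth-first walk:
#
#   def walk(node):
#       for child in node.get_children():
#           if child.can_download:
#               child.download()
#           else:
#               walk(child)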

valid_chars = frozenset("-_.()!@#%^ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
def sanify_filename(filename):
    filename = "".join(c for c in filename if c in valid_chars)
    assert len(filename) > 0
    return filename
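
# For example, sanify_filename("Nine News: 6pm/Late") drops the ':' and
# '/' characters and returns "Nine News 6pmLate".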

def ensure_scheme(url):
    parts = urllib.parse.urlparse(url)
    if parts.scheme:
        return url
    parts = list(parts)
    parts[0] = "http"
    return urllib.parse.urlunparse(parts)
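
# e.g. a protocol-relative URL gains a scheme:
#   ensure_scheme("//example.com/stream") == "http://example.com/stream"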

cookiejar = http.cookiejar.CookieJar()
urlopener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cookiejar))
def _urlopen(url, referrer=None):
    url = ensure_scheme(url)
    req = urllib.request.Request(url)
    req.add_header("User-Agent", USER_AGENT)
    if referrer:
        req.add_header("Referer", referrer)
    return urlopener.open(req)

def urlopen(url, max_age):
    logging.debug("urlopen(%r, %r)", url, max_age)

    if not os.path.isdir(CACHE_DIR):
        os.makedirs(CACHE_DIR)

    if max_age <= 0:
        return _urlopen(url)

    # Cache files are keyed by the MD5 of the URL
    filename = hashlib.md5(url.encode("utf-8")).hexdigest()
    filename = os.path.join(CACHE_DIR, filename)
    if os.path.exists(filename):
        file_age = int(time.time()) - os.path.getmtime(filename)
        if file_age < max_age:
            logging.debug("loading from cache: %s", filename)
            return open(filename, "rb")

    logging.debug("downloading: %s -> %s", url, filename)
    src = _urlopen(url)
    dst = open(filename, "wb")
    try:
        shutil.copyfileobj(src, dst)
    except Exception as e:
        # Don't leave a half-written cache file behind
        try:
            os.unlink(filename)
        except OSError:
            pass
        raise e
    src.close()
    dst.close()

    return open(filename, "rb")
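
# Usage sketch (the URL and the one-hour lifetime are illustrative):
#
#   f = urlopen("http://example.com/tv/index.html", 3600)
#   data = f.read()
#   f.close()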

def grab_text(url, max_age):
    f = urlopen(url, max_age)
    text = f.read().decode("utf-8")
    f.close()
    return text

def grab_html(url, max_age):
    f = urlopen(url, max_age)
    doc = lxml.html.parse(f, lxml.html.HTMLParser(encoding="utf-8", recover=True))
    f.close()
    return doc

def grab_xml(url, max_age):
    f = urlopen(url, max_age)
    doc = lxml.etree.parse(f, lxml.etree.XMLParser(encoding="utf-8", recover=True))
    f.close()
    return doc

def grab_json(url, max_age, skip_assignment=False, skip_function=False):
    f = urlopen(url, max_age)
    text = f.read().decode("utf-8")

    if skip_assignment:
        # e.g. a response like: var data = {...}
        pos = text.find("=")
        text = text[pos+1:]

    elif skip_function:
        # e.g. a JSONP response like: callback({...})
        pos = text.find("(")
        rpos = text.rfind(")")
        text = text[pos+1:rpos]

    doc = json.loads(text)
    f.close()
    return doc
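
# e.g. (sketch) an endpoint wrapped as jsonCallback({"duration": 60})
# can be fetched with grab_json(url, 3600, skip_function=True).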

def exec_subprocess(cmd):
    logging.debug("Executing: %s", cmd)
    try:
        p = subprocess.Popen(cmd)
        ret = p.wait()
        if ret != 0:
            logging.error("%s exited with error code: %s", cmd[0], ret)
            return False
        else:
            return True
    except OSError as e:
        logging.error("Failed to run: %s -- %s", cmd[0], e)
    except KeyboardInterrupt:
        logging.info("Cancelled: %s", cmd)
        try:
            p.terminate()
            p.wait()
        except KeyboardInterrupt:
            p.send_signal(signal.SIGKILL)
            p.wait()
    return False
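
# Returns True on a zero exit status and False otherwise; a second
# Ctrl+C escalates from terminate() to SIGKILL.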


def check_command_exists(cmd):
    try:
        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        return True
    except Exception:
        return False

def find_ffmpeg():
    for ffmpeg in ["avconv", "ffmpeg"]:
        if check_command_exists([ffmpeg, "--help"]):
            return ffmpeg

    raise Exception("You must install ffmpeg or libav-tools")

def find_ffprobe():
    for ffprobe in ["avprobe", "ffprobe"]:
        if check_command_exists([ffprobe, "--help"]):
            return ffprobe

    raise Exception("You must install ffmpeg or libav-tools")

def get_duration(filename):
    ffprobe = find_ffprobe()

    cmd = [
        ffprobe,
        filename,
        "-show_format_entry", "duration",
        "-v", "quiet",
    ]
    output = subprocess.check_output(cmd).decode("utf-8")
    for line in output.split("\n"):
        if line.startswith("duration="):
            return float(line.split("=")[1]) # ffprobe
        if re.match(r"^[0-9.]+$", line):
            return float(line) # avprobe

    raise Exception("Unable to determine video duration of " + filename)
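
# The loop accepts either form: a "duration=" line (ffprobe) or a line
# containing only the number itself (avprobe). The regex requires at
# least one character so blank output lines are skipped.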

def check_video_durations(flv_filename, mp4_filename):
    flv_duration = get_duration(flv_filename)
    mp4_duration = get_duration(mp4_filename)

    if abs(flv_duration - mp4_duration) > 1:
        logging.error(
            "The duration of %s is suspicious, did the remux fail? Expected %s == %s",
            mp4_filename, flv_duration, mp4_duration
        )
        return False

    return True

def remux(infile, outfile):
    logging.info("Converting %s to mp4", infile)

    ffmpeg = find_ffmpeg()
    cmd = [
        ffmpeg,
        "-i", infile,
        # aac_adtstoasc repackages ADTS AAC audio for the MP4 container;
        # audio and video streams are copied, not re-encoded
        "-bsf:a", "aac_adtstoasc",
        "-acodec", "copy",
        "-vcodec", "copy",
        outfile,
    ]
    if not exec_subprocess(cmd):
        return False

    if not check_video_durations(infile, outfile):
        return False

    os.unlink(infile)
    return True

def convert_to_mp4(filename):
    with open(filename, "rb") as f:
        fourcc = f.read(4)
    basename, ext = os.path.splitext(filename)

    # Some servers serve FLV data under a .mp4 name; the FLV magic bytes
    # give it away
    if ext == ".mp4" and fourcc == b"FLV\x01":
        os.rename(filename, basename + ".flv")
        ext = ".flv"
        filename = basename + ext

    if ext in (".flv", ".ts"):
        filename_mp4 = basename + ".mp4"
        return remux(filename, filename_mp4)

    return ext == ".mp4"
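
# e.g. "show.flv" is remuxed to "show.mp4" and the original is deleted
# on success; a genuine .mp4 file is left untouched.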


def download_hds(filename, video_url, pvswf=None):
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    video_url = "hds://" + video_url
    if pvswf:
        # pvswf (player-verification SWF) is appended so livestreamer can
        # perform SWF verification for this stream
        param = "%s pvswf=%s" % (video_url, pvswf)
    else:
        param = video_url

    cmd = [
        "livestreamer",
        "-o", filename,
        param,
        "best",
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def download_hls(filename, video_url):
    filename = sanify_filename(filename)
    video_url = "hlsvariant://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        "livestreamer",
        "-o", filename,
        video_url,
        "best",
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def download_http(filename, video_url):
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    cmd = [
        "curl",
        "--fail", "--retry", "3",
        "-o", filename,
        video_url,
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def natural_sort(l, key=None):
    ignore_list = ["a", "the"]
    def key_func(k):
        if key is not None:
            k = key(k)
        k = k.lower()
        newk = []
        for c in re.split("([0-9]+)", k):
            c = c.strip()
            if c.isdigit():
                # zero-pad numbers so that "2" sorts before "10"
                newk.append(c.zfill(5))
            else:
                for subc in c.split():
                    if subc not in ignore_list:
                        newk.append(subc)
        return newk

    return sorted(l, key=key_func)
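
# For example, leading articles are ignored and numbers compare
# numerically:
#   natural_sort(["Episode 10", "Episode 2", "The Episode 1"])
#   == ["The Episode 1", "Episode 2", "Episode 10"]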

def append_to_qs(url, params):
    r = list(urllib.parse.urlsplit(url))
    qs = urllib.parse.parse_qs(r[3])
    for k, v in params.items():
        if v is not None:
            qs[k] = v
        elif k in qs:
            # a value of None deletes the parameter
            del qs[k]
    r[3] = urllib.parse.urlencode(sorted(qs.items()), True)
    url = urllib.parse.urlunsplit(r)
    return url
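
# e.g. parameters can be added, replaced or removed in a single call:
#   append_to_qs("http://example.com/api?b=2", {"a": "1", "b": None})
#   == "http://example.com/api?a=1"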