import hashlib
import io
import json
import logging
import lxml.etree
import lxml.html
import os
import re
import requests
import requests_cache
import shutil
import signal
import subprocess
import time
import urllib.parse


try:
    import autosocks
    autosocks.try_autosocks()
except ImportError:
    pass


logging.basicConfig(
    format="%(levelname)s %(message)s",
    level=logging.INFO if os.environ.get("DEBUG", None) is None else logging.DEBUG,
)

CACHE_FILE = os.path.join(
    os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache")),
    "webdl",
    "requests_cache"
)
if not os.path.isdir(os.path.dirname(CACHE_FILE)):
    os.makedirs(os.path.dirname(CACHE_FILE))

requests_cache.install_cache(CACHE_FILE, backend='sqlite', expire_after=3600)

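# Note: with the sqlite backend, requests_cache stores responses in
# CACHE_FILE + ".sqlite"; deleting that file resets the one-hour cache.
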

class Node(object):
    def __init__(self, title, parent=None):
        self.title = title
        if parent:
            parent.children.append(self)
        self.parent = parent
        self.children = []
        self.can_download = False

    def get_children(self):
        if not self.children:
            self.fill_children()
        return self.children

    def fill_children(self):
        pass

    def download(self):
        # Subclasses that set can_download must override this
        raise NotImplementedError


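# Illustrative sketch only: the site modules (iview, sbs, ten) are expected
# to subclass Node, lazily populating self.children in fill_children() and
# overriding download() on leaf nodes. The names below are hypothetical.
#
#   class EpisodeNode(Node):
#       def __init__(self, title, parent, video_url):
#           Node.__init__(self, title, parent)
#           self.can_download = True
#           self.video_url = video_url
#
#       def download(self):
#           return download_hls(self.title + ".ts", self.video_url)
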
def load_root_node():
    root_node = Node("Root")

    import iview
    iview.fill_nodes(root_node)

    import sbs
    sbs.fill_nodes(root_node)

    import ten
    ten.fill_nodes(root_node)

    return root_node

valid_chars = frozenset("-_.()!@#%^ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
def sanify_filename(filename):
    filename = "".join(c for c in filename if c in valid_chars)
    assert len(filename) > 0
    return filename

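# For example, sanify_filename("Doctor Who: 1/2") returns "Doctor Who 12":
# characters outside valid_chars (here ":" and "/") are silently dropped.
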
def ensure_scheme(url):
    parts = urllib.parse.urlparse(url)
    if parts.scheme:
        return url
    parts = list(parts)
    parts[0] = "http"
    return urllib.parse.urlunparse(parts)

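# For example, ensure_scheme("//www.example.com/feed") returns
# "http://www.example.com/feed"; URLs that already carry a scheme
# are returned unchanged.
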
http_session = requests.Session()
http_session.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:21.0) Gecko/20100101 Firefox/21.0"

def grab_text(url):
    logging.debug("grab_text(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(request)
    return response.text

def grab_html(url):
    logging.debug("grab_html(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(request, stream=True)
    doc = lxml.html.parse(io.BytesIO(response.content), lxml.html.HTMLParser(encoding="utf-8", recover=True))
    response.close()
    return doc

def grab_xml(url):
    logging.debug("grab_xml(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(request, stream=True)
    doc = lxml.etree.parse(io.BytesIO(response.content), lxml.etree.XMLParser(encoding="utf-8", recover=True))
    response.close()
    return doc

def grab_json(url):
    logging.debug("grab_json(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(request)
    return response.json()

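# Illustrative usage (hypothetical URL). Because requests_cache is installed
# before http_session is created, these helpers transparently reuse cached
# responses for up to an hour:
#
#   doc = grab_xml("https://example.com/feed.xml")
#   for title in doc.xpath("//item/title/text()"):
#       print(title)
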
def exec_subprocess(cmd):
    logging.debug("Executing: %s", cmd)
    try:
        p = subprocess.Popen(cmd)
        ret = p.wait()
        if ret != 0:
            logging.error("%s exited with error code: %s", cmd[0], ret)
            return False
        else:
            return True
    except OSError as e:
        logging.error("Failed to run: %s -- %s", cmd[0], e)
        return False
    except KeyboardInterrupt:
        logging.info("Cancelled: %s", cmd)
        try:
            p.terminate()
            p.wait()
        except KeyboardInterrupt:
            p.send_signal(signal.SIGKILL)
            p.wait()
        return False


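# For example, exec_subprocess(["ffmpeg", "-version"]) returns True on a zero
# exit status and False on a non-zero exit, a missing binary, or Ctrl-C.
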
def check_command_exists(cmd):
    try:
        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        return True
    except Exception:
        return False

def find_ffmpeg():
    if check_command_exists(["ffmpeg", "--help"]):
        return "ffmpeg"

    if check_command_exists(["avconv", "--help"]):
        logging.warning("Detected libav-tools! ffmpeg is recommended")
        return "avconv"

    raise Exception("You must install ffmpeg or libav-tools")

def find_ffprobe():
    if check_command_exists(["ffprobe", "--help"]):
        return "ffprobe"

    if check_command_exists(["avprobe", "--help"]):
        logging.warning("Detected libav-tools! ffmpeg is recommended")
        return "avprobe"

    raise Exception("You must install ffmpeg or libav-tools")

def find_streamlink():
    if check_command_exists(["streamlink", "--help"]):
        return "streamlink"

    if check_command_exists(["livestreamer", "--help"]):
        logging.warning("Detected livestreamer! streamlink is recommended")
        return "livestreamer"

    raise Exception("You must install streamlink or livestreamer")

def get_duration(filename):
    ffprobe = find_ffprobe()

    cmd = [
        ffprobe,
        filename,
        "-show_format_entry", "duration",
        "-v", "quiet",
    ]
    output = subprocess.check_output(cmd).decode("utf-8")
    for line in output.split("\n"):
        m = re.search(r"([0-9]+)", line)
        if not m:
            continue
        duration = m.group(1)
        if duration.isdigit():
            return int(duration)

    logging.debug("Falling back to full decode to find duration: %s", filename)

    ffmpeg = find_ffmpeg()
    cmd = [
        ffmpeg,
        "-i", filename,
        "-vn",
        "-f", "null", "-",
    ]
    output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")
    duration = None
    for line in re.split(r"[\r\n]", output):
        m = re.search(r"time=([0-9:]*)\.", line)
        if not m:
            continue
        [hours, minutes, seconds] = m.group(1).split(":")
        # ffmpeg prints the elapsed time as it reads the file; we want the last one
        duration = int(hours) * 3600 + int(minutes) * 60 + int(seconds)

    if duration:
        return duration
    else:
        raise Exception("Unable to determine video duration of " + filename)

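# For example, a file that ffmpeg reports as time=01:23:45.67 yields
# 1*3600 + 23*60 + 45 = 5025 seconds.
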
def check_video_durations(flv_filename, mp4_filename):
    flv_duration = get_duration(flv_filename)
    mp4_duration = get_duration(mp4_filename)

    if abs(flv_duration - mp4_duration) > 1:
        logging.error(
            "The duration of %s is suspicious, did the remux fail? Expected %s == %s",
            mp4_filename, flv_duration, mp4_duration
        )
        return False

    return True

def remux(infile, outfile):
    logging.info("Converting %s to mp4", infile)

    ffmpeg = find_ffmpeg()
    cmd = [
        ffmpeg,
        "-i", infile,
        "-bsf:a", "aac_adtstoasc",
        "-acodec", "copy",
        "-vcodec", "copy",
        "-y",
        outfile,
    ]
    if not exec_subprocess(cmd):
        return False

    if not check_video_durations(infile, outfile):
        return False

    os.unlink(infile)
    return True

def convert_to_mp4(filename):
    with open(filename, "rb") as f:
        fourcc = f.read(4)
    basename, ext = os.path.splitext(filename)

    if ext == ".mp4" and fourcc == b"FLV\x01":
        os.rename(filename, basename + ".flv")
        ext = ".flv"
        filename = basename + ext

    if ext in (".flv", ".ts"):
        filename_mp4 = basename + ".mp4"
        return remux(filename, filename_mp4)

    return ext == ".mp4"


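# For example, convert_to_mp4("show.flv") remuxes to "show.mp4" and removes
# the original on success. A file named "show.mp4" returns True without
# remuxing, unless its first bytes are the b"FLV\x01" signature, in which
# case it is renamed to .flv and remuxed too.
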
def download_hds(filename, video_url, pvswf=None):
    streamlink = find_streamlink()

    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    video_url = "hds://" + video_url
    if pvswf:
        param = "%s pvswf=%s" % (video_url, pvswf)
    else:
        param = video_url

    cmd = [
        streamlink,
        "-f",
        "-o", filename,
        param,
        "best",
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def download_hls(filename, video_url):
    streamlink = find_streamlink()

    filename = sanify_filename(filename)
    video_url = "hlsvariant://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        streamlink,
        "-f",
        "-o", filename,
        video_url,
        "best",
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def download_mpd(filename, video_url):
    streamlink = find_streamlink()

    filename = sanify_filename(filename)
    video_url = "dash://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        streamlink,
        "-f",
        "-o", filename,
        video_url,
        "best",
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def download_http(filename, video_url):
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    cmd = [
        "curl",
        "--fail", "--retry", "3",
        "-o", filename,
        video_url,
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def natural_sort(l, key=None):
    ignore_list = ["a", "the"]
    def key_func(k):
        if key is not None:
            k = key(k)
        k = k.lower()
        newk = []
        for c in re.split("([0-9]+)", k):
            c = c.strip()
            if c.isdigit():
                newk.append(c.zfill(5))
            else:
                for subc in c.split():
                    if subc not in ignore_list:
                        newk.append(subc)
        return newk

    return sorted(l, key=key_func)

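# For example (leading articles are ignored and numbers are zero-padded):
#   natural_sort(["Episode 10", "The Pilot", "Episode 2"])
#   -> ["Episode 2", "Episode 10", "The Pilot"]
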
def append_to_qs(url, params):
    r = list(urllib.parse.urlsplit(url))
    qs = urllib.parse.parse_qs(r[3])
    for k, v in params.items():
        if v is not None:
            qs[k] = v
        elif k in qs:
            del qs[k]
    r[3] = urllib.parse.urlencode(sorted(qs.items()), True)
    url = urllib.parse.urlunsplit(r)
    return url

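# For example (a None value deletes the key, other values overwrite it):
#   append_to_qs("http://example.com/page?a=1&b=2", {"b": "3", "a": None})
#   -> "http://example.com/page?b=3"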