webdl / common.py
import hashlib
import io
import json
import logging
import lxml.etree
import lxml.html
import os
import re
import requests
import requests_cache
import shutil
import signal
import subprocess
import sys
import time
import urllib.parse

USER_AGENT = "Mozilla/5.0 (X11; Linux x86_64; rv:74.0) Gecko/20100101 Firefox/74.0"

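# Optionally route all HTTP traffic through a SOCKS proxy when the
# autosocks helper module is available.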
try:
    import autosocks
    autosocks.try_autosocks()
except ImportError:
    pass


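# Log to stdout (rather than stderr); set the DEBUG environment variable
# for verbose output.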
logging.basicConfig(
    format="%(levelname)s %(message)s",
    level=logging.INFO if os.environ.get("DEBUG", None) is None else logging.DEBUG,
    stream=sys.stdout,
)

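# Cache HTTP responses in an sqlite database under $XDG_CACHE_HOME/webdl,
# expiring entries after an hour.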
CACHE_FILE = os.path.join(
    os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache")),
    "webdl",
    "requests_cache"
)
if not os.path.isdir(os.path.dirname(CACHE_FILE)):
    os.makedirs(os.path.dirname(CACHE_FILE))

requests_cache.install_cache(CACHE_FILE, backend='sqlite', expire_after=3600)


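# A Node is one entry in the browsable tree of shows and episodes. Children
# are filled lazily by fill_children() the first time they are requested.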
class Node(object):
    def __init__(self, title, parent=None):
        self.title = title
        if parent:
            parent.children.append(self)
        self.parent = parent
        self.children = []
        self.can_download = False

    def get_children(self):
        if not self.children:
            self.fill_children()
            self.children = natural_sort(self.children, key=lambda node: node.title)
        return self.children

    def fill_children(self):
        pass

    def download(self):
        raise NotImplementedError()


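# Each site-specific module attaches its own subtree of Nodes underneath
# the shared root.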
def load_root_node():
    root_node = Node("Root")

    import iview
    iview.fill_nodes(root_node)

    import sbs
    sbs.fill_nodes(root_node)

    import ten
    ten.fill_nodes(root_node)

    return root_node

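# Strip any characters that are unsafe in filenames. Illustrative example:
#   sanify_filename("Spicks & Specks: S01E01") == "Spicks  Specks S01E01"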
valid_chars = frozenset("-_.()!@#%^ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
def sanify_filename(filename):
    filename = "".join(c for c in filename if c in valid_chars)
    assert len(filename) > 0
    return filename

def ensure_scheme(url):
    parts = urllib.parse.urlparse(url)
    if parts.scheme:
        return url
    parts = list(parts)
    parts[0] = "http"
    return urllib.parse.urlunparse(parts)

http_session = requests.Session()
http_session.headers["User-Agent"] = USER_AGENT

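# The grab_* helpers fetch a URL through the shared (cached) session and
# parse the response as text, HTML, XML or JSON respectively.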
def grab_text(url):
    logging.debug("grab_text(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(request)
    return response.text

def grab_html(url):
    logging.debug("grab_html(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(request, stream=True)
    doc = lxml.html.parse(io.BytesIO(response.content), lxml.html.HTMLParser(encoding="utf-8", recover=True))
    response.close()
    return doc

def grab_xml(url):
    logging.debug("grab_xml(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(request, stream=True)
    doc = lxml.etree.parse(io.BytesIO(response.content), lxml.etree.XMLParser(encoding="utf-8", recover=True))
    response.close()
    return doc

def grab_json(url):
    logging.debug("grab_json(%r)", url)
    request = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(request)
    return response.json()

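# Run an external command, returning True on a zero exit status. A first
# Ctrl-C asks the child to terminate; a second one kills it outright.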
def exec_subprocess(cmd):
    logging.debug("Executing: %s", cmd)
    try:
        p = subprocess.Popen(cmd)
        ret = p.wait()
        if ret != 0:
            logging.error("%s exited with error code: %s", cmd[0], ret)
            return False
        else:
            return True
    except OSError as e:
        logging.error("Failed to run: %s -- %s", cmd[0], e)
    except KeyboardInterrupt:
        logging.info("Cancelled: %s", cmd)
        try:
            p.terminate()
            p.wait()
        except KeyboardInterrupt:
            p.send_signal(signal.SIGKILL)
            p.wait()
    return False


def check_command_exists(cmd):
    try:
        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        return True
    except Exception:
        return False

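# Prefer ffmpeg/ffprobe; fall back to the equivalent libav-tools commands
# when only those are installed.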
def find_ffmpeg():
    if check_command_exists(["ffmpeg", "--help"]):
        return "ffmpeg"

    if check_command_exists(["avconv", "--help"]):
        logging.warning("Detected libav-tools! ffmpeg is recommended")
        return "avconv"

    raise Exception("You must install ffmpeg or libav-tools")

def find_ffprobe():
    if check_command_exists(["ffprobe", "--help"]):
        return "ffprobe"

    if check_command_exists(["avprobe", "--help"]):
        logging.warning("Detected libav-tools! ffmpeg is recommended")
        return "avprobe"

    raise Exception("You must install ffmpeg or libav-tools")

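# Find a file's duration in seconds: first ask ffprobe for the container
# metadata, and if that yields nothing, decode the whole file with ffmpeg
# and take the last progress timestamp it prints.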
def get_duration(filename):
    ffprobe = find_ffprobe()

    cmd = [
        ffprobe,
        filename,
        "-show_format_entry", "duration",
        "-v", "quiet",
    ]
    output = subprocess.check_output(cmd).decode("utf-8")
    for line in output.split("\n"):
        m = re.search(r"([0-9]+)", line)
        if not m:
            continue
        duration = m.group(1)
        if duration.isdigit():
            return int(duration)

    logging.debug("Falling back to full decode to find duration: %s", filename)

    ffmpeg = find_ffmpeg()
    cmd = [
        ffmpeg,
        "-i", filename,
        "-vn",
        "-f", "null", "-",
    ]
    output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")
    duration = None
    for line in re.split(r"[\r\n]", output):
        m = re.search(r"time=([0-9:]*)\.", line)
        if not m:
            continue
        [h, m, s] = m.group(1).split(":")
        # ffmpeg prints the duration as it reads the file, we want the last one
        duration = int(h) * 3600 + int(m) * 60 + int(s)

    if duration:
        return duration
    else:
        raise Exception("Unable to determine video duration of " + filename)

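# Sanity-check a remux by comparing input and output durations, allowing up
# to one second of difference for rounding.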
def check_video_durations(flv_filename, mp4_filename):
    flv_duration = get_duration(flv_filename)
    mp4_duration = get_duration(mp4_filename)

    if abs(flv_duration - mp4_duration) > 1:
        logging.error(
            "The duration of %s is suspicious, did the remux fail? Expected %s == %s",
            mp4_filename, flv_duration, mp4_duration
        )
        return False

    return True

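# Repackage a download into an mp4 container without re-encoding: the audio
# and video streams are copied as-is, and the aac_adtstoasc bitstream filter
# rewrites ADTS audio headers into the form MP4 expects.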
def remux(infile, outfile):
    logging.info("Converting %s to mp4", infile)

    ffmpeg = find_ffmpeg()
    cmd = [
        ffmpeg,
        "-i", infile,
        "-bsf:a", "aac_adtstoasc",
        "-acodec", "copy",
        "-vcodec", "copy",
        "-y",
        outfile,
    ]
    if not exec_subprocess(cmd):
        return False

    if not check_video_durations(infile, outfile):
        return False

    os.unlink(infile)
    return True

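# Some servers hand back FLV data under a .mp4 name, so sniff the first four
# bytes (b"FLV\x01" is the FLV magic) before deciding whether to remux.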
def convert_to_mp4(filename):
    with open(filename, "rb") as f:
        fourcc = f.read(4)
    basename, ext = os.path.splitext(filename)

    if ext == ".mp4" and fourcc == b"FLV\x01":
        os.rename(filename, basename + ".flv")
        ext = ".flv"
        filename = basename + ext

    if ext in (".flv", ".ts"):
        filename_mp4 = basename + ".mp4"
        return remux(filename, filename_mp4)

    return ext == ".mp4"


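# The download_* helpers drive streamlink (or curl for plain HTTP) and then
# normalise the result to an mp4 file.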
def download_hds(filename, video_url, pvswf=None):
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    video_url = "hds://" + video_url
    if pvswf:
        param = "%s pvswf=%s" % (video_url, pvswf)
    else:
        param = video_url

    cmd = [
        "streamlink",
        "--force",
        "--output", filename,
        param,
        "best",
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def download_hls(filename, video_url):
    filename = sanify_filename(filename)
    video_url = "hlsvariant://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        "streamlink",
        "--http-header", "User-Agent=" + USER_AGENT,
        "--force",
        "--output", filename,
        video_url,
        "best",
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def download_mpd(filename, video_url):
    filename = sanify_filename(filename)
    video_url = "dash://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        "streamlink",
        "--force",
        "--output", filename,
        video_url,
        "best",
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

def download_http(filename, video_url):
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    cmd = [
        "curl",
        "--fail", "--retry", "3",
        "-o", filename,
        video_url,
    ]
    if exec_subprocess(cmd):
        return convert_to_mp4(filename)
    else:
        return False

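# Sort titles the way a human would: leading articles ("a", "the") are
# ignored and digit runs compare numerically. Illustrative example:
#   natural_sort(["Episode 10", "The Episode 1", "Episode 2"])
#   == ["The Episode 1", "Episode 2", "Episode 10"]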
def natural_sort(l, key=None):
    ignore_list = ["a", "the"]
    def key_func(k):
        if key is not None:
            k = key(k)
        k = k.lower()
        newk = []
        for c in re.split("([0-9]+)", k):
            c = c.strip()
            if c.isdigit():
                newk.append(c.zfill(5))
            else:
                for subc in c.split():
                    if subc not in ignore_list:
                        newk.append(subc)
        return newk

    return sorted(l, key=key_func)

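# Merge params into a URL's query string; a value of None deletes that key.
# Illustrative example (hypothetical URL):
#   append_to_qs("http://example.com/?a=1", {"b": "2", "a": None})
#   == "http://example.com/?b=2"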
def append_to_qs(url, params):
    r = list(urllib.parse.urlsplit(url))
    qs = urllib.parse.parse_qs(r[3])
    for k, v in params.items():
        if v is not None:
            qs[k] = v
        elif k in qs:
            del qs[k]
    r[3] = urllib.parse.urlencode(sorted(qs.items()), True)
    url = urllib.parse.urlunsplit(r)
    return url