# webdl — common.py (source: code.delx.au / webdl)
# Last change: fixed a missing import.
1 import hashlib
2 import io
3 import json
4 import logging
5 import lxml.etree
6 import lxml.html
7 import os
8 import re
9 import requests
10 import requests_cache
11 import shutil
12 import signal
13 import subprocess
14 import time
15 import urllib.parse
16
17 USER_AGENT = "Mozilla/5.0 (X11; Linux x86_64; rv:74.0) Gecko/20100101 Firefox/74.0"
18
19 try:
20 import autosocks
21 autosocks.try_autosocks()
22 except ImportError:
23 pass
24
25
# Log level is INFO unless the DEBUG environment variable is set (any value).
logging.basicConfig(
    format = "%(levelname)s %(message)s",
    level = logging.INFO if os.environ.get("DEBUG", None) is None else logging.DEBUG,
)

# Cache HTTP responses under XDG_CACHE_HOME (default ~/.cache/webdl).
CACHE_FILE = os.path.join(
    os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache")),
    "webdl",
    "requests_cache"
)
# exist_ok avoids the check-then-create race of the old isdir() + makedirs() pair.
os.makedirs(os.path.dirname(CACHE_FILE), exist_ok=True)

# Transparently cache all requests traffic for one hour.
requests_cache.install_cache(CACHE_FILE, backend='sqlite', expire_after=3600)
40
41
class Node(object):
    """A node in the menu tree of downloadable shows.

    Subclasses override fill_children() to populate the tree lazily and
    download() to fetch the media for leaf nodes (can_download is True).
    """

    def __init__(self, title, parent=None):
        self.title = title
        # Register with the parent immediately so the tree stays consistent.
        if parent:
            parent.children.append(self)
        self.parent = parent
        self.children = []
        self.can_download = False

    def get_children(self):
        """Return child nodes, filling and sorting them on first access."""
        if not self.children:
            self.fill_children()
            self.children = natural_sort(self.children, key=lambda node: node.title)
        return self.children

    def fill_children(self):
        """Hook for subclasses to lazily populate self.children."""
        pass

    def download(self):
        # Was `raise NotImplemented`, which raises a TypeError because
        # NotImplemented is not an exception class; use the proper exception.
        raise NotImplementedError
62
63
def load_root_node():
    """Build and return the root of the show tree with all site backends attached."""
    root_node = Node("Root")

    # Import lazily so a broken backend module only fails when actually loading.
    for module_name in ("iview", "sbs", "ten"):
        backend = __import__(module_name)
        backend.fill_nodes(root_node)

    return root_node
77
# Whitelist of characters considered safe for output filenames.
valid_chars = frozenset("-_.()!@#%^ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")

def sanify_filename(filename):
    """Drop every character outside the whitelist; the result must be non-empty."""
    cleaned = "".join(filter(valid_chars.__contains__, filename))
    assert len(cleaned) > 0
    return cleaned
83
def ensure_scheme(url):
    """Return *url* unchanged if it has a scheme, otherwise default to http."""
    parsed = urllib.parse.urlparse(url)
    if parsed.scheme:
        return url
    return parsed._replace(scheme="http").geturl()
91
# Shared HTTP session so connection pooling and cookies persist across requests.
http_session = requests.Session()
# Present a browser User-Agent; some sites refuse default client strings.
http_session.headers["User-Agent"] = USER_AGENT
94
def grab_text(url):
    """Fetch *url* and return the decoded response body as text."""
    logging.debug("grab_text(%r)", url)
    prepared = http_session.prepare_request(requests.Request("GET", url))
    return http_session.send(prepared).text
100
def grab_html(url):
    """Fetch *url* and return it parsed as a lenient lxml HTML tree."""
    logging.debug("grab_html(%r)", url)
    prepared = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(prepared, stream=True)
    try:
        parser = lxml.html.HTMLParser(encoding="utf-8", recover=True)
        return lxml.html.parse(io.BytesIO(response.content), parser)
    finally:
        response.close()
108
def grab_xml(url):
    """Fetch *url* and return it parsed as a lenient lxml XML tree."""
    logging.debug("grab_xml(%r)", url)
    prepared = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(prepared, stream=True)
    try:
        parser = lxml.etree.XMLParser(encoding="utf-8", recover=True)
        return lxml.etree.parse(io.BytesIO(response.content), parser)
    finally:
        response.close()
116
def grab_json(url):
    """Fetch *url* and return the response body decoded as JSON."""
    logging.debug("grab_json(%r)", url)
    prepared = http_session.prepare_request(requests.Request("GET", url))
    return http_session.send(prepared).json()
122
def exec_subprocess(cmd):
    """Run *cmd* (argv list) and wait for it.

    Returns True on exit status 0, False on a non-zero status, a failed
    launch, or user cancellation (Ctrl-C).  On cancellation the child is
    terminated, escalating to SIGKILL on a second Ctrl-C.
    """
    logging.debug("Executing: %s", cmd)
    p = None  # guard: KeyboardInterrupt can arrive before Popen returns
    try:
        p = subprocess.Popen(cmd)
        ret = p.wait()
        if ret != 0:
            logging.error("%s exited with error code: %s", cmd[0], ret)
            return False
        return True
    except OSError as e:
        logging.error("Failed to run: %s -- %s", cmd[0], e)
        # Was an implicit None return; make the boolean contract explicit.
        return False
    except KeyboardInterrupt:
        logging.info("Cancelled: %s", cmd)
        if p is not None:
            try:
                p.terminate()
                p.wait()
            except KeyboardInterrupt:
                p.send_signal(signal.SIGKILL)
                p.wait()
        return False
144
145
def check_command_exists(cmd):
    """Return True when *cmd* (argv list) can be launched and exits cleanly."""
    try:
        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    except Exception:
        return False
    return True
152
def find_ffmpeg():
    """Return the name of an available transcoder binary (ffmpeg or avconv).

    Raises Exception when neither tool is installed.
    """
    if check_command_exists(["ffmpeg", "--help"]):
        return "ffmpeg"

    if check_command_exists(["avconv", "--help"]):
        # logging.warn is deprecated; logging.warning is the supported spelling.
        logging.warning("Detected libav-tools! ffmpeg is recommended")
        return "avconv"

    raise Exception("You must install ffmpeg or libav-tools")
162
def find_ffprobe():
    """Return the name of an available media prober (ffprobe or avprobe).

    Raises Exception when neither tool is installed.
    """
    if check_command_exists(["ffprobe", "--help"]):
        return "ffprobe"

    if check_command_exists(["avprobe", "--help"]):
        # logging.warn is deprecated; logging.warning is the supported spelling.
        logging.warning("Detected libav-tools! ffmpeg is recommended")
        return "avprobe"

    raise Exception("You must install ffmpeg or libav-tools")
172
def get_duration(filename):
    """Return the duration of a media file in whole seconds.

    Tries a fast ffprobe metadata query first; if that yields nothing,
    falls back to a full ffmpeg decode of the file.  Raises Exception
    when the duration cannot be determined either way.
    """
    ffprobe = find_ffprobe()

    cmd = [
        ffprobe,
        filename,
        "-show_format_entry", "duration",
        "-v", "quiet",
    ]
    output = subprocess.check_output(cmd).decode("utf-8")
    for line in output.split("\n"):
        # Only the integer part of the duration is wanted.
        match = re.search(r"([0-9]+)", line)
        if not match:
            continue
        duration = match.group(1)
        if duration.isdigit():
            return int(duration)

    # Bug fix: the old message was "…: %s % filename" with the placeholder
    # and argument accidentally inside the string literal, so the filename
    # was never interpolated.
    logging.debug("Falling back to full decode to find duration: %s", filename)

    ffmpeg = find_ffmpeg()
    cmd = [
        ffmpeg,
        "-i", filename,
        "-vn",
        "-f", "null", "-",
    ]
    output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")
    duration = None
    for line in re.split(r"[\r\n]", output):
        match = re.search(r"time=([0-9:]*)\.", line)
        if not match:
            continue
        # Renamed h/m/s: the old code reused `m` for both the regex match
        # and the minutes field, shadowing it mid-loop.
        hours, minutes, seconds = match.group(1).split(":")
        # ffmpeg prints the duration as it reads the file, we want the last one
        duration = int(hours) * 3600 + int(minutes) * 60 + int(seconds)

    if duration:
        return duration
    else:
        raise Exception("Unable to determine video duration of " + filename)
215
def check_video_durations(flv_filename, mp4_filename):
    """Sanity-check a remux: both files must have (near) identical durations.

    A difference of more than one second suggests the conversion was
    truncated; log an error and return False in that case.
    """
    flv_duration = get_duration(flv_filename)
    mp4_duration = get_duration(mp4_filename)

    if abs(flv_duration - mp4_duration) <= 1:
        return True

    logging.error(
        "The duration of %s is suspicious, did the remux fail? Expected %s == %s",
        mp4_filename, flv_duration, mp4_duration
    )
    return False
228
def remux(infile, outfile):
    """Losslessly repackage *infile* into the mp4 container *outfile*.

    Streams are copied (no re-encode).  On success the source file is
    deleted and True is returned; on failure the source is kept and
    False is returned.
    """
    logging.info("Converting %s to mp4", infile)

    cmd = [
        find_ffmpeg(),
        "-i", infile,
        "-bsf:a", "aac_adtstoasc",
        "-acodec", "copy",
        "-vcodec", "copy",
        "-y",
        outfile,
    ]
    # Both steps must succeed before the original file is removed.
    if not (exec_subprocess(cmd) and check_video_durations(infile, outfile)):
        return False

    os.unlink(infile)
    return True
250
def convert_to_mp4(filename):
    """Ensure the downloaded file ends up as an mp4.

    FLV data wearing an .mp4 name (detected via the file magic) is renamed
    first; .flv and .ts files are then remuxed.  Returns True when the
    final file is an mp4, False otherwise.
    """
    with open(filename, "rb") as f:
        fourcc = f.read(4)
    basename, ext = os.path.splitext(filename)

    # Some servers hand back FLV data under an .mp4 name; fix the name first.
    if ext == ".mp4" and fourcc == b"FLV\x01":
        ext = ".flv"
        filename = basename + ext
        os.rename(basename + ".mp4", filename)

    if ext not in (".flv", ".ts"):
        return ext == ".mp4"

    return remux(filename, basename + ".mp4")
266
267
def download_hds(filename, video_url, pvswf=None):
    """Download an HDS stream via streamlink, then remux it to mp4.

    *pvswf* is the player-verification SWF URL required by some servers.
    Returns True on success, False otherwise.
    """
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    video_url = "hds://" + video_url
    param = "%s pvswf=%s" % (video_url, pvswf) if pvswf else video_url

    cmd = [
        "streamlink",
        "--force",
        "--output", filename,
        param,
        "best",
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
289
def download_hls(filename, video_url):
    """Download an HLS stream (best variant) via streamlink, then remux to mp4."""
    filename = sanify_filename(filename)
    video_url = "hlsvariant://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        "streamlink",
        "--http-header", "User-Agent=" + USER_AGENT,
        "--force",
        "--output", filename,
        video_url,
        "best",
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
307
def download_mpd(filename, video_url):
    """Download a DASH (MPD) stream via streamlink, then remux it to mp4."""
    filename = sanify_filename(filename)
    video_url = "dash://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        "streamlink",
        "--force",
        "--output", filename,
        video_url,
        "best",
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
324
def download_http(filename, video_url):
    """Download a plain HTTP file with curl, then remux it to mp4 if needed."""
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    cmd = [
        "curl",
        "--fail", "--retry", "3",
        "-o", filename,
        video_url,
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
339
def natural_sort(l, key=None):
    """Sort *l* so embedded numbers order numerically ("ep2" before "ep10").

    Comparison is case-insensitive, leading articles ("a", "the") are
    ignored, and numbers are zero-padded to five digits for comparison.
    *key* optionally extracts the string to sort by from each element.
    """
    ignore_list = ["a", "the"]

    def key_func(item):
        text = key(item) if key is not None else item
        tokens = []
        for part in re.split("([0-9]+)", text.lower()):
            part = part.strip()
            if part.isdigit():
                tokens.append(part.zfill(5))
                continue
            tokens.extend(word for word in part.split() if word not in ignore_list)
        return tokens

    return sorted(l, key=key_func)
358
def append_to_qs(url, params):
    """Return *url* with its query string updated from the *params* dict.

    A value of None removes the parameter; any other value sets it.
    Parameters are re-emitted in sorted order.
    """
    parts = list(urllib.parse.urlsplit(url))
    query = urllib.parse.parse_qs(parts[3])
    for name, value in params.items():
        if value is None:
            query.pop(name, None)
        else:
            query[name] = value
    parts[3] = urllib.parse.urlencode(sorted(query.items()), True)
    return urllib.parse.urlunsplit(parts)
370