# webdl common.py
# Switched to requests + requests_cache to remove custom caching code.
1 import hashlib
2 import json
3 import logging
4 import lxml.etree
5 import lxml.html
6 import os
7 import re
8 import requests
9 import requests_cache
10 import shutil
11 import signal
12 import subprocess
13 import time
14 import urllib.parse
15
16
# Optionally enable SOCKS proxy support: try_autosocks() presumably
# configures proxying globally (behavior defined by the optional
# autosocks module -- confirm there). When autosocks is not installed,
# fall back silently to direct connections.
try:
    import autosocks
    autosocks.try_autosocks()
except ImportError:
    pass
22
23
# Plain "LEVEL message" log lines; setting the DEBUG environment
# variable (to any value) raises verbosity from INFO to DEBUG.
logging.basicConfig(
    format = "%(levelname)s %(message)s",
    level = logging.INFO if os.environ.get("DEBUG", None) is None else logging.DEBUG,
)

# Transparent on-disk HTTP cache, stored under the XDG cache directory
# (default ~/.cache/webdl/requests_cache). Cached responses expire
# after 3600 seconds; install_cache() patches requests globally.
CACHE_FILE = os.path.join(
    os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache")),
    "webdl",
    "requests_cache"
)
requests_cache.install_cache(CACHE_FILE, backend='sqlite', expire_after=3600)
35
36
class Node(object):
    """A node in the tree of browsable/downloadable items.

    Each node has a title, an optional parent (to which it registers
    itself as a child), a lazily-filled list of children, and a
    can_download flag that download sites set on leaf episodes.
    """

    def __init__(self, title, parent=None):
        self.title = title
        self.parent = parent
        self.children = []
        self.can_download = False
        # Register with the parent so trees build themselves as nodes
        # are constructed.
        if parent:
            parent.children.append(self)

    def get_children(self):
        """Return this node's children, populating them on first access."""
        if not self.children:
            self.fill_children()
        return self.children

    def fill_children(self):
        """Hook for subclasses to lazily populate self.children."""
        pass

    def download(self):
        """Download this node's content; subclasses must override.

        Bug fix: the original ``raise NotImplemented`` raised TypeError
        in Python 3 because NotImplemented is not an exception class.
        """
        raise NotImplementedError
56
57
def load_root_node():
    """Build and return the root of the item tree, populated by each
    site-specific downloader module (iview, sbs, brightcove)."""
    root_node = Node("Root")

    import iview
    import sbs
    import brightcove

    for downloader in (iview, sbs, brightcove):
        downloader.fill_nodes(root_node)

    return root_node
71
# Characters permitted in output filenames; everything else is stripped.
valid_chars = frozenset("-_.()!@#%^ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
def sanify_filename(filename):
    """Return *filename* with all characters outside valid_chars removed.

    Raises ValueError if nothing remains after filtering. (Bug fix: the
    original used ``assert``, which is silently stripped under
    ``python -O`` and would let an empty filename through.)
    """
    filename = "".join(c for c in filename if c in valid_chars)
    if not filename:
        raise ValueError("filename is empty after removing unsafe characters")
    return filename
77
def ensure_scheme(url):
    """Return *url* unchanged if it already has a scheme, otherwise
    prepend "http" as the scheme."""
    parsed = urllib.parse.urlparse(url)
    if parsed.scheme:
        return url
    return urllib.parse.urlunparse(parsed._replace(scheme="http"))
85
# Shared session for all grab_* helpers. A desktop browser User-Agent
# is set because some sites block the default python-requests client.
http_session = requests.Session()
http_session.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:21.0) Gecko/20100101 Firefox/21.0"
88
def grab_text(url):
    """GET *url* through the shared session and return the body as text."""
    logging.debug("grab_text(%r)", url)
    req = http_session.prepare_request(requests.Request("GET", url))
    return http_session.send(req).text
94
def grab_html(url):
    """GET *url* and parse the streamed body into an lxml HTML tree."""
    logging.debug("grab_html(%r)", url)
    req = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(req, stream=True)
    try:
        parser = lxml.html.HTMLParser(encoding="utf-8", recover=True)
        return lxml.html.parse(response.raw, parser)
    finally:
        response.close()
102
def grab_xml(url):
    """GET *url* and parse the streamed body into an lxml XML tree."""
    logging.debug("grab_xml(%r)", url)
    req = http_session.prepare_request(requests.Request("GET", url))
    response = http_session.send(req, stream=True)
    try:
        parser = lxml.etree.XMLParser(encoding="utf-8", recover=True)
        return lxml.etree.parse(response.raw, parser)
    finally:
        response.close()
110
def grab_json(url):
    """GET *url* through the shared session and return the decoded JSON."""
    logging.debug("grab_json(%r)", url)
    req = http_session.prepare_request(requests.Request("GET", url))
    return http_session.send(req).json()
116
def exec_subprocess(cmd):
    """Run *cmd* (an argv list) to completion.

    Returns True on exit code 0, False on a non-zero exit, a failure to
    launch, or user cancellation (Ctrl-C). A first KeyboardInterrupt
    terminates the child gracefully; a second sends SIGKILL.

    Bug fix: if KeyboardInterrupt arrived while Popen() itself was
    running, ``p`` was unbound and the handler raised UnboundLocalError.
    ``p`` is now pre-initialised and checked.
    """
    logging.debug("Executing: %s", cmd)
    p = None
    try:
        p = subprocess.Popen(cmd)
        ret = p.wait()
        if ret == 0:
            return True
        logging.error("%s exited with error code: %s", cmd[0], ret)
        return False
    except OSError as e:
        logging.error("Failed to run: %s -- %s", cmd[0], e)
    except KeyboardInterrupt:
        logging.info("Cancelled: %s", cmd)
        if p is not None:
            try:
                p.terminate()
                p.wait()
            except KeyboardInterrupt:
                # Second Ctrl-C: stop waiting politely, kill outright.
                p.send_signal(signal.SIGKILL)
                p.wait()
    return False
138
139
def check_command_exists(cmd):
    """Return True when *cmd* (an argv list) runs successfully, else False."""
    try:
        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    except Exception:
        return False
    return True
146
def find_ffmpeg():
    """Return the name of the first working ffmpeg-compatible binary.

    Prefers avconv, falls back to ffmpeg; raises if neither runs.
    """
    for candidate in ("avconv", "ffmpeg"):
        if check_command_exists([candidate, "--help"]):
            return candidate
    raise Exception("You must install ffmpeg or libav-tools")
153
def find_ffprobe():
    """Return the name of the first working ffprobe-compatible binary.

    Prefers avprobe, falls back to ffprobe; raises if neither runs.
    """
    for candidate in ("avprobe", "ffprobe"):
        if check_command_exists([candidate, "--help"]):
            return candidate
    raise Exception("You must install ffmpeg or libav-tools")
160
def get_duration(filename):
    """Return the duration of *filename* in seconds via ffprobe/avprobe.

    Handles both output styles: ffprobe prints "duration=123.45",
    avprobe prints a bare number. Raises Exception when no duration
    can be parsed from the probe output.
    """
    ffprobe = find_ffprobe()

    cmd = [
        ffprobe,
        filename,
        "-show_format_entry", "duration",
        "-v", "quiet",
    ]
    output = subprocess.check_output(cmd).decode("utf-8")
    for line in output.split("\n"):
        if line.startswith("duration="):
            return float(line.split("=")[1])  # ffprobe
        # Bug fix: the old pattern R'^[0-9.]*$' also matched empty
        # lines (and a lone "."), so float("") raised ValueError before
        # the fallback Exception below could fire. Require a real number.
        if re.match(r'^[0-9]+(\.[0-9]*)?$', line):
            return float(line)  # avprobe
178
def check_video_durations(flv_filename, mp4_filename):
    """Return True when both files' durations agree to within 1 second.

    Logs an error and returns False otherwise -- a large difference
    usually means the remux silently failed.
    """
    source_duration = get_duration(flv_filename)
    result_duration = get_duration(mp4_filename)

    if abs(source_duration - result_duration) <= 1:
        return True

    logging.error(
        "The duration of %s is suspicious, did the remux fail? Expected %s == %s",
        mp4_filename, source_duration, result_duration
    )
    return False
191
def remux(infile, outfile):
    """Losslessly repackage *infile* into mp4 *outfile*, then delete infile.

    Audio and video streams are stream-copied (no re-encode); the AAC
    ADTS-to-ASC bitstream filter fixes audio packaging for mp4. Returns
    True on success; returns False (keeping infile) when the conversion
    or the duration sanity check fails.
    """
    logging.info("Converting %s to mp4", infile)

    cmd = [
        find_ffmpeg(),
        "-i", infile,
        "-bsf:a", "aac_adtstoasc",
        "-acodec", "copy",
        "-vcodec", "copy",
        outfile,
    ]
    # Only unlink the source after both the remux and the sanity check pass.
    converted = exec_subprocess(cmd) and check_video_durations(infile, outfile)
    if converted:
        os.unlink(infile)
    return converted
212
def convert_to_mp4(filename):
    """Ensure *filename* ends up as an mp4; return True on success.

    Files already in mp4 succeed immediately; .flv/.ts files are
    remuxed; anything else fails. FLV data mislabelled with an .mp4
    extension (detected via the FLV magic bytes) is renamed first.
    """
    with open(filename, "rb") as f:
        fourcc = f.read(4)
    basename, ext = os.path.splitext(filename)

    if ext == ".mp4" and fourcc == b"FLV\x01":
        ext = ".flv"
        renamed = basename + ext
        os.rename(filename, renamed)
        filename = renamed

    if ext not in (".flv", ".ts"):
        return ext == ".mp4"
    return remux(filename, basename + ".mp4")
228
229
def download_hds(filename, video_url, pvswf=None):
    """Download an HDS stream with livestreamer and convert it to mp4.

    *pvswf* is an optional player-verification SWF URL required by some
    sites. Returns True on success.
    """
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    video_url = "hds://" + video_url
    param = "%s pvswf=%s" % (video_url, pvswf) if pvswf else video_url

    cmd = [
        "livestreamer",
        "-o", filename,
        param,
        "best",
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
250
def download_hls(filename, video_url):
    """Download an HLS stream with livestreamer and convert it to mp4.

    Returns True on success.
    """
    filename = sanify_filename(filename)
    stream_url = "hlsvariant://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        "livestreamer",
        "-o", filename,
        stream_url,
        "best",
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
266
def download_http(filename, video_url):
    """Download a plain HTTP video with curl and convert it to mp4.

    Returns True on success; curl retries up to 3 times on failure.
    """
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    cmd = [
        "curl",
        "--fail", "--retry", "3",
        "-o", filename,
        video_url,
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
281
def natural_sort(l, key=None):
    """Sort *l* in human-friendly order.

    Numeric runs compare by value (via zero-padding to 5 digits),
    comparison is case-insensitive, and the leading articles "a"/"the"
    are ignored. *key* optionally extracts the string to sort by.
    """
    ignore_list = ["a", "the"]

    def sort_key(item):
        text = key(item) if key is not None else item
        tokens = []
        for part in re.split("([0-9]+)", text.lower()):
            part = part.strip()
            if part.isdigit():
                tokens.append(part.zfill(5))
                continue
            tokens.extend(word for word in part.split() if word not in ignore_list)
        return tokens

    return sorted(l, key=sort_key)
300
def append_to_qs(url, params):
    """Return *url* with its query string updated from *params*.

    A value of None removes that key; any other value sets/replaces it.
    Resulting query parameters are emitted in sorted key order.
    """
    scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
    qs = urllib.parse.parse_qs(query)
    for name, value in params.items():
        if value is None:
            qs.pop(name, None)
        else:
            qs[name] = value
    new_query = urllib.parse.urlencode(sorted(qs.items()), True)
    return urllib.parse.urlunsplit((scheme, netloc, path, new_query, fragment))
312