# webdl - common.py
1 import hashlib
2 import json
3 import logging
4 import lxml.etree
5 import lxml.html
6 import os
7 import re
8 import requests
9 import requests_cache
10 import shutil
11 import signal
12 import subprocess
13 import time
14 import urllib.parse
15
16
# Optionally route all network traffic through a SOCKS proxy: if the
# third-party "autosocks" module is installed, let it patch the socket
# layer. It is a best-effort dependency, so absence is silently ignored.
try:
    import autosocks
    autosocks.try_autosocks()
except ImportError:
    pass
22
23
# Log at INFO by default; setting the DEBUG environment variable (to any
# value) switches on debug-level logging.
logging.basicConfig(
    format = "%(levelname)s %(message)s",
    level = logging.INFO if os.environ.get("DEBUG", None) is None else logging.DEBUG,
)

# On-disk HTTP cache location, under $XDG_CACHE_HOME (default ~/.cache).
CACHE_FILE = os.path.join(
    os.environ.get("XDG_CACHE_HOME", os.path.expanduser("~/.cache")),
    "webdl",
    "requests_cache"
)
# Ensure the cache directory exists before requests_cache opens its db.
if not os.path.isdir(os.path.dirname(CACHE_FILE)):
    os.makedirs(os.path.dirname(CACHE_FILE))

# Transparently cache every response made through the requests library
# for one hour (3600 s) in a sqlite database.
requests_cache.install_cache(CACHE_FILE, backend='sqlite', expire_after=3600)
38
39
class Node(object):
    """A node in the lazily-built tree of downloadable programmes.

    Subclasses override fill_children() to populate self.children on
    demand, and download() on leaf nodes (which also set can_download).
    """

    def __init__(self, title, parent=None):
        self.title = title
        # Registering with the parent happens here so callers only need
        # to construct the node to attach it to the tree.
        if parent:
            parent.children.append(self)
        self.parent = parent
        self.children = []
        self.can_download = False

    def get_children(self):
        """Return child nodes, populating them on first access."""
        if not self.children:
            self.fill_children()
        return self.children

    def fill_children(self):
        """Populate self.children; overridden by subclasses."""
        pass

    def download(self):
        """Download this node's media; overridden by subclasses.

        BUG fix: the original `raise NotImplemented` raised a TypeError
        (NotImplemented is a sentinel value, not an exception class);
        NotImplementedError is the correct exception.
        """
        raise NotImplementedError
59
60
def load_root_node():
    """Build and return the root of the programme tree.

    Each site-specific module attaches its own subtree to the root.
    """
    root_node = Node("Root")

    import iview
    import sbs
    import brightcove

    # Fill in the same order the modules are imported.
    for site_module in (iview, sbs, brightcove):
        site_module.fill_nodes(root_node)

    return root_node
74
valid_chars = frozenset("-_.()!@#%^ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
def sanify_filename(filename):
    """Strip characters unsafe for filenames, keeping only valid_chars.

    Asserts that at least one character survives.
    """
    kept = [ch for ch in filename if ch in valid_chars]
    filename = "".join(kept)
    assert len(filename) > 0
    return filename
80
def ensure_scheme(url):
    """Return url unchanged if it has a scheme, else default it to http."""
    parsed = urllib.parse.urlparse(url)
    if parsed.scheme:
        return url
    fields = list(parsed)
    fields[0] = "http"  # scheme is the first component of the 6-tuple
    return urllib.parse.urlunparse(fields)
88
# Shared session so cookies and the spoofed browser User-Agent header are
# applied to every request (responses are cached via requests_cache above).
http_session = requests.Session()
http_session.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:21.0) Gecko/20100101 Firefox/21.0"
91
def grab_text(url):
    """GET url through the shared session and return the body as text."""
    logging.debug("grab_text(%r)", url)
    req = http_session.prepare_request(requests.Request("GET", url))
    resp = http_session.send(req)
    return resp.text
97
def grab_html(url):
    """GET url and parse the response as HTML, returning an lxml tree."""
    logging.debug("grab_html(%r)", url)
    req = http_session.prepare_request(requests.Request("GET", url))
    resp = http_session.send(req, stream=True)
    # Parse straight off the raw socket; recover=True tolerates bad markup.
    parser = lxml.html.HTMLParser(encoding="utf-8", recover=True)
    doc = lxml.html.parse(resp.raw, parser)
    resp.close()
    return doc
105
def grab_xml(url):
    """GET url and parse the response as XML, returning an lxml tree."""
    logging.debug("grab_xml(%r)", url)
    req = http_session.prepare_request(requests.Request("GET", url))
    resp = http_session.send(req, stream=True)
    # Parse straight off the raw socket; recover=True tolerates bad XML.
    parser = lxml.etree.XMLParser(encoding="utf-8", recover=True)
    doc = lxml.etree.parse(resp.raw, parser)
    resp.close()
    return doc
113
def grab_json(url):
    """GET url and return the response body decoded from JSON."""
    logging.debug("grab_json(%r)", url)
    req = http_session.prepare_request(requests.Request("GET", url))
    return http_session.send(req).json()
119
def exec_subprocess(cmd):
    """Run cmd (a list of argv strings) and wait for it to finish.

    Returns True when the process exits with status 0, False otherwise
    (including when the binary cannot be executed or the user cancels
    with Ctrl-C).

    BUG fixes vs the original:
    - the OSError branch fell through and implicitly returned None
      instead of False;
    - if KeyboardInterrupt fired before Popen returned, `p` was unbound
      and p.terminate() raised NameError.
    """
    logging.debug("Executing: %s", cmd)
    p = None
    try:
        p = subprocess.Popen(cmd)
        ret = p.wait()
        if ret != 0:
            logging.error("%s exited with error code: %s", cmd[0], ret)
            return False
        return True
    except OSError as e:
        logging.error("Failed to run: %s -- %s", cmd[0], e)
        return False
    except KeyboardInterrupt:
        logging.info("Cancelled: %s", cmd)
        if p is not None:
            try:
                # Ask nicely first; a second Ctrl-C escalates to SIGKILL.
                p.terminate()
                p.wait()
            except KeyboardInterrupt:
                p.send_signal(signal.SIGKILL)
                p.wait()
        return False
141
142
def check_command_exists(cmd):
    """Return True iff cmd (an argv list) runs and exits successfully."""
    try:
        subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    except Exception:
        return False
    return True
149
def find_ffmpeg():
    """Return the name of an available encoder binary (avconv preferred).

    Raises Exception when neither avconv nor ffmpeg is installed.
    """
    for candidate in ("avconv", "ffmpeg"):
        if check_command_exists([candidate, "--help"]):
            return candidate
    raise Exception("You must install ffmpeg or libav-tools")
156
def find_ffprobe():
    """Return the name of an available probe binary (avprobe preferred).

    Raises Exception when neither avprobe nor ffprobe is installed.
    """
    for candidate in ("avprobe", "ffprobe"):
        if check_command_exists([candidate, "--help"]):
            return candidate
    raise Exception("You must install ffmpeg or libav-tools")
163
def get_duration(filename):
    """Return the duration of the video file in seconds, as a float.

    Runs ffprobe/avprobe and parses its output; raises Exception when
    the duration cannot be determined.
    """
    ffprobe = find_ffprobe()

    cmd = [
        ffprobe,
        filename,
        "-show_format_entry", "duration",
        "-v", "quiet",
    ]
    output = subprocess.check_output(cmd).decode("utf-8")
    for line in output.split("\n"):
        if line.startswith("duration="):
            return float(line.split("=")[1]) # ffprobe
        # BUG fix: the old pattern r'^[0-9.]*$' also matched empty lines
        # (split always yields a trailing "") and lone dots, so float()
        # raised ValueError instead of reaching the explicit error below.
        # Require at least one digit, optionally followed by a fraction.
        if re.match(r'^[0-9]+(\.[0-9]*)?$', line):
            return float(line) # avprobe

    raise Exception("Unable to determine video duration of " + filename)
181
def check_video_durations(flv_filename, mp4_filename):
    """Return True when both files are within one second of each other.

    Used as a sanity check that a remux did not truncate the video.
    """
    source_duration = get_duration(flv_filename)
    result_duration = get_duration(mp4_filename)

    if abs(source_duration - result_duration) <= 1:
        return True

    logging.error(
        "The duration of %s is suspicious, did the remux fail? Expected %s == %s",
        mp4_filename, source_duration, result_duration
    )
    return False
194
def remux(infile, outfile):
    """Losslessly repackage infile into an mp4 container at outfile.

    On success the source file is deleted. Returns True on success,
    False when the conversion or the duration sanity-check fails.
    """
    logging.info("Converting %s to mp4", infile)

    cmd = [
        find_ffmpeg(),
        "-i", infile,
        "-bsf:a", "aac_adtstoasc",
        "-acodec", "copy",
        "-vcodec", "copy",
        outfile,
    ]
    if not exec_subprocess(cmd):
        return False

    if not check_video_durations(infile, outfile):
        return False

    os.unlink(infile)
    return True
215
def convert_to_mp4(filename):
    """Ensure the downloaded file ends up as a genuine mp4.

    Files whose contents are really FLV get renamed, then .flv/.ts files
    are remuxed. Returns True when the final file is an mp4.
    """
    with open(filename, "rb") as f:
        fourcc = f.read(4)
    basename, ext = os.path.splitext(filename)

    # Some servers hand back FLV data with an .mp4 name; fix the name so
    # the remux below picks it up.
    if ext == ".mp4" and fourcc == b"FLV\x01":
        ext = ".flv"
        os.rename(filename, basename + ext)
        filename = basename + ext

    if ext not in (".flv", ".ts"):
        return ext == ".mp4"

    return remux(filename, basename + ".mp4")
231
232
def download_hds(filename, video_url, pvswf=None):
    """Download an Adobe HDS stream with livestreamer, then remux to mp4.

    pvswf, when given, is the player-verification SWF URL appended to
    the stream parameter. Returns True on success.
    """
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    video_url = "hds://" + video_url
    param = "%s pvswf=%s" % (video_url, pvswf) if pvswf else video_url

    cmd = [
        "livestreamer",
        "-o", filename,
        param,
        "best",
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
253
def download_hls(filename, video_url):
    """Download an HLS stream with livestreamer, then remux to mp4."""
    filename = sanify_filename(filename)
    video_url = "hlsvariant://" + video_url
    logging.info("Downloading: %s", filename)

    cmd = [
        "livestreamer",
        "-o", filename,
        video_url,
        "best",
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
269
def download_http(filename, video_url):
    """Download a plain HTTP file with curl, then convert to mp4."""
    filename = sanify_filename(filename)
    logging.info("Downloading: %s", filename)

    cmd = [
        "curl",
        "--fail", "--retry", "3",
        "-o", filename,
        video_url,
    ]
    if not exec_subprocess(cmd):
        return False
    return convert_to_mp4(filename)
284
def natural_sort(l, key=None):
    """Sort l "naturally": embedded numbers compare numerically and the
    articles "a"/"the" are ignored, case-insensitively.

    key, when given, extracts the string to sort by from each element.
    """
    ignore_list = ["a", "the"]

    def key_func(item):
        text = key(item) if key is not None else item
        text = text.lower()
        parts = []
        # Alternating runs of digits and non-digits; zero-pad numbers so
        # lexicographic comparison matches numeric order.
        for chunk in re.split("([0-9]+)", text):
            chunk = chunk.strip()
            if chunk.isdigit():
                parts.append(chunk.zfill(5))
                continue
            parts.extend(word for word in chunk.split() if word not in ignore_list)
        return parts

    return sorted(l, key=key_func)
303
def append_to_qs(url, params):
    """Return url with its query string updated from params.

    A value of None deletes that key; other values replace it. The
    resulting query string is sorted by key.
    """
    parts = list(urllib.parse.urlsplit(url))
    qs = urllib.parse.parse_qs(parts[3])
    for name, value in params.items():
        if value is None:
            qs.pop(name, None)
        else:
            qs[name] = value
    parts[3] = urllib.parse.urlencode(sorted(qs.items()), True)
    return urllib.parse.urlunsplit(parts)
315