# webdl common.py — recovered from a code.delx.au blob view
# (blob 9a873f698b709cf56a1fbbb70779f179ebeac391)
20 autosocks
.try_autosocks()
26 format
= "%(levelname)s %(message)s",
27 level
= logging
.INFO
if os
.environ
.get("DEBUG", None) is None else logging
.DEBUG
,
30 CACHE_FILE
= os
.path
.join(
31 os
.environ
.get("XDG_CACHE_HOME", os
.path
.expanduser("~/.cache")),
35 if not os
.path
.isdir(os
.path
.dirname(CACHE_FILE
)):
36 os
.makedirs(os
.path
.dirname(CACHE_FILE
))
38 requests_cache
.install_cache(CACHE_FILE
, backend
='sqlite', expire_after
=3600)
42 def __init__(self
, title
, parent
=None):
45 parent
.children
.append(self
)
48 self
.can_download
= False
50 def get_children(self
):
53 self
.children
= natural_sort(self
.children
, key
=lambda node
: node
.title
)
56 def fill_children(self
):
64 root_node
= Node("Root")
67 iview
.fill_nodes(root_node
)
70 sbs
.fill_nodes(root_node
)
73 ten
.fill_nodes(root_node
)
77 valid_chars
= frozenset("-_.()!@#%^ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
78 def sanify_filename(filename
):
79 filename
= "".join(c
for c
in filename
if c
in valid_chars
)
80 assert len(filename
) > 0
83 def ensure_scheme(url
):
84 parts
= urllib
.parse
.urlparse(url
)
89 return urllib
.parse
.urlunparse(parts
)
91 http_session
= requests
.Session()
92 http_session
.headers
["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:21.0) Gecko/20100101 Firefox/21.0"
95 logging
.debug("grab_text(%r)", url
)
96 request
= http_session
.prepare_request(requests
.Request("GET", url
))
97 response
= http_session
.send(request
)
101 logging
.debug("grab_html(%r)", url
)
102 request
= http_session
.prepare_request(requests
.Request("GET", url
))
103 response
= http_session
.send(request
, stream
=True)
104 doc
= lxml
.html
.parse(io
.BytesIO(response
.content
), lxml
.html
.HTMLParser(encoding
="utf-8", recover
=True))
109 logging
.debug("grab_xml(%r)", url
)
110 request
= http_session
.prepare_request(requests
.Request("GET", url
))
111 response
= http_session
.send(request
, stream
=True)
112 doc
= lxml
.etree
.parse(io
.BytesIO(response
.content
), lxml
.etree
.XMLParser(encoding
="utf-8", recover
=True))
117 logging
.debug("grab_json(%r)", url
)
118 request
= http_session
.prepare_request(requests
.Request("GET", url
))
119 response
= http_session
.send(request
)
120 return response
.json()
122 def exec_subprocess(cmd
):
123 logging
.debug("Executing: %s", cmd
)
125 p
= subprocess
.Popen(cmd
)
128 logging
.error("%s exited with error code: %s", cmd
[0], ret
)
133 logging
.error("Failed to run: %s -- %s", cmd
[0], e
)
134 except KeyboardInterrupt:
135 logging
.info("Cancelled: %s", cmd
)
139 except KeyboardInterrupt:
140 p
.send_signal(signal
.SIGKILL
)
145 def check_command_exists(cmd
):
147 subprocess
.check_output(cmd
, stderr
=subprocess
.STDOUT
)
153 if check_command_exists(["ffmpeg", "--help"]):
156 if check_command_exists(["avconv", "--help"]):
157 logging
.warn("Detected libav-tools! ffmpeg is recommended")
160 raise Exception("You must install ffmpeg or libav-tools")
163 if check_command_exists(["ffprobe", "--help"]):
166 if check_command_exists(["avprobe", "--help"]):
167 logging
.warn("Detected libav-tools! ffmpeg is recommended")
170 raise Exception("You must install ffmpeg or libav-tools")
172 def find_streamlink():
173 if check_command_exists(["streamlink", "--help"]):
176 if check_command_exists(["livestreamer", "--help"]):
177 logging
.warn("Detected livestreamer! streamlink is recommended")
178 return "livestreamer"
180 raise Exception("You must install streamlink or livestreamer")
182 def get_duration(filename
):
183 ffprobe
= find_ffprobe()
188 "-show_format_entry", "duration",
191 output
= subprocess
.check_output(cmd
).decode("utf-8")
192 for line
in output
.split("\n"):
193 m
= re
.search(R
"([0-9]+)", line
)
196 duration
= m
.group(1)
197 if duration
.isdigit():
201 logging
.debug("Falling back to full decode to find duration: %s % filename")
203 ffmpeg
= find_ffmpeg()
210 output
= subprocess
.check_output(cmd
, stderr
=subprocess
.STDOUT
).decode("utf-8")
212 for line
in re
.split(R
"[\r\n]", output
):
213 m
= re
.search(R
"time=([0-9:]*)\.", line
)
216 [h
, m
, s
] = m
.group(1).split(":")
217 # ffmpeg prints the duration as it reads the file, we want the last one
218 duration
= int(h
) * 3600 + int(m
) * 60 + int(s
)
223 raise Exception("Unable to determine video duration of " + filename
)
225 def check_video_durations(flv_filename
, mp4_filename
):
226 flv_duration
= get_duration(flv_filename
)
227 mp4_duration
= get_duration(mp4_filename
)
229 if abs(flv_duration
- mp4_duration
) > 1:
231 "The duration of %s is suspicious, did the remux fail? Expected %s == %s",
232 mp4_filename
, flv_duration
, mp4_duration
238 def remux(infile
, outfile
):
239 logging
.info("Converting %s to mp4", infile
)
241 ffmpeg
= find_ffmpeg()
245 "-bsf:a", "aac_adtstoasc",
251 if not exec_subprocess(cmd
):
254 if not check_video_durations(infile
, outfile
):
260 def convert_to_mp4(filename
):
261 with
open(filename
, "rb") as f
:
263 basename
, ext
= os
.path
.splitext(filename
)
265 if ext
== ".mp4" and fourcc
== b
"FLV\x01":
266 os
.rename(filename
, basename
+ ".flv")
268 filename
= basename
+ ext
270 if ext
in (".flv", ".ts"):
271 filename_mp4
= basename
+ ".mp4"
272 return remux(filename
, filename_mp4
)
277 def download_hds(filename
, video_url
, pvswf
=None):
278 streamlink
= find_streamlink()
280 filename
= sanify_filename(filename
)
281 logging
.info("Downloading: %s", filename
)
283 video_url
= "hds://" + video_url
285 param
= "%s pvswf=%s" % (video_url
, pvswf
)
296 if exec_subprocess(cmd
):
297 return convert_to_mp4(filename
)
301 def download_hls(filename
, video_url
):
302 streamlink
= find_streamlink()
304 filename
= sanify_filename(filename
)
305 video_url
= "hlsvariant://" + video_url
306 logging
.info("Downloading: %s", filename
)
315 if exec_subprocess(cmd
):
316 return convert_to_mp4(filename
)
320 def download_mpd(filename
, video_url
):
321 streamlink
= find_streamlink()
323 filename
= sanify_filename(filename
)
324 video_url
= "dash://" + video_url
325 logging
.info("Downloading: %s", filename
)
334 if exec_subprocess(cmd
):
335 return convert_to_mp4(filename
)
339 def download_http(filename
, video_url
):
340 filename
= sanify_filename(filename
)
341 logging
.info("Downloading: %s", filename
)
345 "--fail", "--retry", "3",
349 if exec_subprocess(cmd
):
350 return convert_to_mp4(filename
)
354 def natural_sort(l
, key
=None):
355 ignore_list
= ["a", "the"]
361 for c
in re
.split("([0-9]+)", k
):
364 newk
.append(c
.zfill(5))
366 for subc
in c
.split():
367 if subc
not in ignore_list
:
371 return sorted(l
, key
=key_func
)
373 def append_to_qs(url
, params
):
374 r
= list(urllib
.parse
.urlsplit(url
))
375 qs
= urllib
.parse
.parse_qs(r
[3])
376 for k
, v
in params
.items():
381 r
[3] = urllib
.parse
.urlencode(sorted(qs
.items()), True)
382 url
= urllib
.parse
.urlunsplit(r
)