X-Git-Url: https://code.delx.au/webdl/blobdiff_plain/12275e973a904261ac0217ce6de27cad5f2ab85e..9761bbe56493e1eb57083f624181fffc65a947e1:/sbs.py

diff --git a/sbs.py b/sbs.py
index 4f39ff5..deac6eb 100644
--- a/sbs.py
+++ b/sbs.py
@@ -1,115 +1,155 @@
-#!/usr/bin/env python
-# vim:ts=4:sts=4:sw=4:noet
+import requests_cache
+from common import grab_html, grab_json, grab_xml, download_hls, download_mpd, Node, append_to_qs
 
-from common import grab_html, grab_json, grab_xml, download_rtmp, download_urllib, Node, append_to_qs
+import json
+import logging
+import os
+import sys
 
-import collections
-import urlparse
-
-BASE = "http://www.sbs.com.au"
-VIDEO_MENU = BASE + "/ondemand/js/video-menu"
+BASE = "https://www.sbs.com.au"
+FULL_VIDEO_LIST = BASE + "/api/video_feed/f/Bgtm9B/sbs-section-programs/"
 VIDEO_URL = BASE + "/ondemand/video/single/%s"
-VIDEO_MAGIC = {
-	"v": "2.5.14",
-	"fp": "MAC 11,1,102,55",
-	"r": "FLQDD",
-	"g": "YNANAXRIYFYO",
-}
-SWF_URL = "http://resources.sbs.com.au/vod/theplatform/core/current/swf/flvPlayer.swf"
+PARAMS_URL = BASE + "/api/video_pdkvars/id/%s?form=json"
 
 NS = {
-	"smil": "http://www.w3.org/2005/SMIL21/Language",
+    "smil": "http://www.w3.org/2005/SMIL21/Language",
 }
 
-class SbsNode(Node):
-	def __init__(self, title, parent, video_id):
-		Node.__init__(self, title, parent)
-		self.title = title
-		self.video_id = video_id.split("/")[-1]
-		self.can_download = True
-
-	def download(self):
-		doc = grab_html(VIDEO_URL % self.video_id, 0)
-		desc_url = None
-		for script in doc.xpath("//script", namespaces=NS):
-			if not script.text:
-				continue
-			for line in script.text.split("\n"):
-				if line.find("player.releaseUrl") < 0:
-					continue
-				desc_url = line[line.find("\"")+1 : line.rfind("\"")]
-				break
-			if desc_url is not None:
-				break
-		if desc_url is None:
-			raise Exception("Failed to get JSON URL for " + self.title)
-
-		desc_url = append_to_qs(desc_url, {"manifest": None})
-		doc = grab_xml(desc_url, 0)
-		video = doc.xpath("//smil:video", namespaces=NS)[0]
-		video_url = video.attrib["src"]
-		ext = urlparse.urlsplit(video_url).path.rsplit(".", 1)[1]
-		filename = self.title + "." + ext
-		video_url = append_to_qs(video_url, VIDEO_MAGIC)
-		return download_urllib(filename, video_url, referrer=SWF_URL)
+class SbsVideoNode(Node):
+    def __init__(self, title, parent, url):
+        Node.__init__(self, title, parent)
+        self.video_id = url.split("/")[-1]
+        self.can_download = True
+
+    def download(self):
+        with requests_cache.disabled():
+            doc = grab_html(VIDEO_URL % self.video_id)
+            player_params = grab_json(PARAMS_URL % self.video_id)
+
+        error = player_params.get("error", None)
+        if error:
+            print("Cannot download:", error)
+            return False
+
+        release_url = player_params["releaseUrls"]["html"]
+        filename = self.title + ".ts"
+
+        hls_url = self.get_hls_url(release_url)
+        if hls_url:
+            return download_hls(filename, hls_url)
+        else:
+            return download_mpd(filename, release_url)
+
+    def get_hls_url(self, release_url):
+        with requests_cache.disabled():
+            doc = grab_xml("https:" + release_url.replace("http:", "").replace("https:", ""))
+            video = doc.xpath("//smil:video", namespaces=NS)
+            if not video:
+                return
+            video_url = video[0].attrib["src"]
+            return video_url
 
 class SbsNavNode(Node):
-	def __init__(self, title, parent, url):
-		Node.__init__(self, title, parent)
-		self.url = url
-		self.sort_children = True
-
-	def fill_children(self):
-		try:
-			doc = grab_json(BASE + self.url, 3600)
-		except ValueError:
-			# SBS sends XML as an error message :\
-			return
-		if len(doc.get("entries", [])) == 0:
-			return
-		for entry in doc["entries"]:
-			self.fill_entry(entry)
-
-	def fill_entry(self, entry):
-		title = entry["title"]
-		video_id = entry["id"]
-		SbsNode(title, self, video_id)
-
-class SbsRootNode(Node):
-	def __init__(self, parent=None):
-		Node.__init__(self, "SBS", parent)
-
-	def fill_children(self):
-		menu = grab_json(VIDEO_MENU, 3600, skip_assignment=True)
-		for name in menu.keys():
-			self.fill_category(self, menu[name])
-
-	def create_nav_node(self, name, parent, cat_data, url_key):
-		try:
-			url = cat_data[url_key]
-		except KeyError:
-			return
-		if url.strip():
-			SbsNavNode(name, parent, url)
-
-	def fill_category(self, parent, cat_data):
-		if not cat_data.has_key("children"):
-			name = cat_data["name"]
-			self.create_nav_node(name, parent, cat_data, "url")
-			return
-
-		node = Node(cat_data["name"], parent)
-		self.create_nav_node("Featured", node, cat_data, "furl")
-		self.create_nav_node("Latest", node, cat_data, "url")
-		self.create_nav_node("Most Popular", node, cat_data, "purl")
-
-		children = cat_data.get("children", [])
-		if isinstance(children, dict):
-			children = [children[k] for k in sorted(children.keys())]
-		for child_cat in children:
-			self.fill_category(node, child_cat)
+    def create_video_node(self, entry_data):
+        SbsVideoNode(entry_data["title"], self, entry_data["id"])
+
+    def find_existing_child(self, path):
+        for child in self.children:
+            if child.title == path:
+                return child
+
+class SbsRootNode(SbsNavNode):
+    def __init__(self, parent):
+        Node.__init__(self, "SBS", parent)
+
+    def fill_children(self):
+        all_video_entries = self.load_all_video_entries()
+        category_and_entry_data = self.explode_videos_to_unique_categories(all_video_entries)
+        for category_path, entry_data in category_and_entry_data:
+            nav_node = self.create_nav_node(self, category_path)
+            nav_node.create_video_node(entry_data)
+
+    def load_all_video_entries(self):
+        channels = [
+            "Channel/SBS1",
+            "Channel/SBS Food",
+            "Channel/SBS VICELAND",
+            "Channel/SBS World Movies",
+            "Channel/Web Exclusive",
+        ]
+
+        all_entries = {}
+        for channel in channels:
+            self.load_all_video_entries_for_channel(all_entries, channel)
+
+        all_entries = list(all_entries.values())
+        print(" SBS fetched", len(all_entries))
+        return all_entries
+
+    def load_all_video_entries_for_channel(self, all_entries, channel):
+        offset = 1
+        page_size = 500
+        duplicate_warning = False
+
+        while True:
+            entries = self.fetch_entries_page(channel, offset, page_size)
+            if len(entries) == 0:
+                break
+
+            for entry in entries:
+                guid = entry["guid"]
+                if guid in all_entries and not duplicate_warning:
+                    # https://bitbucket.org/delx/webdl/issues/102/recent-sbs-series-missing
+                    logging.warn("SBS returned a duplicate response, data is probably missing. Try decreasing page_size.")
+                    duplicate_warning = True
+
+                all_entries[guid] = entry
+
+            offset += page_size
+            if os.isatty(sys.stdout.fileno()):
+                sys.stdout.write(".")
+                sys.stdout.flush()
+
+    def fetch_entries_page(self, channel, offset, page_size):
+        url = append_to_qs(FULL_VIDEO_LIST, {
+            "range": "%s-%s" % (offset, offset+page_size-1),
+            "byCategories": channel,
+        })
+        data = grab_json(url)
+        if "entries" not in data:
+            raise Exception("Missing data in SBS response", data)
+        return data["entries"]
+
+    def explode_videos_to_unique_categories(self, all_video_entries):
+        for entry_data in all_video_entries:
+            for category_data in entry_data["media$categories"]:
+                category_path = self.calculate_category_path(
+                    category_data["media$scheme"],
+                    category_data["media$name"],
+                )
+                if category_path:
+                    yield category_path, entry_data
+
+    def calculate_category_path(self, scheme, name):
+        if not scheme:
+            return
+        if scheme == name:
+            return
+        name = name.split("/")
+        if name[0] != scheme:
+            name.insert(0, scheme)
+        return name
+
+    def create_nav_node(self, parent, category_path):
+        if not category_path:
+            return parent
+
+        current_path = category_path[0]
+        current_node = parent.find_existing_child(current_path)
+        if not current_node:
+            current_node = SbsNavNode(current_path, parent)
+        return self.create_nav_node(current_node, category_path[1:])
 
 def fill_nodes(root_node):
-	SbsRootNode(root_node)
-
+    SbsRootNode(root_node)