X-Git-Url: https://code.delx.au/webdl/blobdiff_plain/8c49f0ff2a004225bbb02dd9f8772440dd3d7db0..992e749d40b94d33d1703f90afb28d8c8b548dd5:/sbs.py

diff --git a/sbs.py b/sbs.py
index dc353ca..c3cbe52 100644
--- a/sbs.py
+++ b/sbs.py
@@ -2,10 +2,12 @@ import requests_cache
 from common import grab_html, grab_json, grab_xml, download_hls, download_mpd, Node, append_to_qs
 
 import json
+import logging
+import os
 import sys
 
 BASE = "https://www.sbs.com.au"
-FULL_VIDEO_LIST = BASE + "/api/video_search/v2/?m=1&filters={section}{Programs}"
+FULL_VIDEO_LIST = BASE + "/api/video_feed/f/Bgtm9B/sbs-section-programs/"
 VIDEO_URL = BASE + "/ondemand/video/single/%s"
 
 NS = {
@@ -23,8 +25,13 @@ class SbsVideoNode(Node):
         with requests_cache.disabled():
             doc = grab_html(VIDEO_URL % self.video_id)
         player_params = self.get_player_params(doc)
-        release_url = player_params["releaseUrls"]["html"]
 
+        error = player_params.get("error", None)
+        if error:
+            print("Cannot download:", error)
+            return False
+
+        release_url = player_params["releaseUrls"]["html"]
         filename = self.title + ".ts"
         hls_url = self.get_hls_url(release_url)
@@ -76,22 +83,55 @@ class SbsRootNode(SbsNavNode):
             nav_node.create_video_node(entry_data)
 
     def load_all_video_entries(self):
+        channels = [
+            "Channel/SBS1",
+            "Channel/SBS Food",
+            "Channel/SBS VICELAND",
+            "Channel/SBS World Movies",
+            "Channel/Web Exclusive",
+        ]
+
+        all_entries = {}
+        for channel in channels:
+            self.load_all_video_entries_for_channel(all_entries, channel)
+
+        all_entries = list(all_entries.values())
+        print(" SBS fetched", len(all_entries))
+        return all_entries
+
+    def load_all_video_entries_for_channel(self, all_entries, channel):
         offset = 1
-        amount = 49
+        page_size = 500
+        duplicate_warning = False
+
         while True:
-            url = append_to_qs(FULL_VIDEO_LIST, {"range": "%s-%s" % (offset, offset+amount)})
-            data = grab_json(url)
-            if "entries" not in data:
-                raise Exception("Missing data in SBS response", data)
-            entries = data["entries"]
+            entries = self.fetch_entries_page(channel, offset, page_size)
             if len(entries) == 0:
                 break
+
             for entry in entries:
-                yield entry
-            offset += amount
-            sys.stdout.write(".")
-            sys.stdout.flush()
-        print()
+                guid = entry["guid"]
+                if guid in all_entries and not duplicate_warning:
+                    # https://bitbucket.org/delx/webdl/issues/102/recent-sbs-series-missing
+                    logging.warning("SBS returned a duplicate response, data is probably missing. Try decreasing page_size.")
+                    duplicate_warning = True
+
+                all_entries[guid] = entry
+
+            offset += page_size
+            if os.isatty(sys.stdout.fileno()):
+                sys.stdout.write(".")
+                sys.stdout.flush()
+
+    def fetch_entries_page(self, channel, offset, page_size):
+        url = append_to_qs(FULL_VIDEO_LIST, {
+            "range": "%s-%s" % (offset, offset+page_size-1),
+            "byCategories": channel,
+        })
+        data = grab_json(url)
+        if "entries" not in data:
+            raise Exception("Missing data in SBS response", data)
+        return data["entries"]
 
     def explode_videos_to_unique_categories(self, all_video_entries):
         for entry_data in all_video_entries:
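
A minimal standalone sketch of the paging and deduplication scheme the new load_all_video_entries_for_channel / fetch_entries_page code implements, for readers who want to exercise the feed outside webdl. It is illustrative only: it assumes (as the diff implies) that the feed accepts a 1-based inclusive "range" such as "1-500" and a "byCategories" channel filter, and it uses the requests library in place of webdl's grab_json helper.

    # Illustrative sketch, not part of sbs.py. Assumes the "range" parameter
    # is 1-based and inclusive and that "byCategories" filters by channel,
    # as implied by the diff above; requests stands in for grab_json.
    import requests

    FULL_VIDEO_LIST = "https://www.sbs.com.au/api/video_feed/f/Bgtm9B/sbs-section-programs/"

    def fetch_all_entries(channel, page_size=500):
        entries_by_guid = {}
        offset = 1
        while True:
            params = {
                "range": "%s-%s" % (offset, offset + page_size - 1),
                "byCategories": channel,  # e.g. "Channel/SBS1"
            }
            response = requests.get(FULL_VIDEO_LIST, params=params)
            response.raise_for_status()
            entries = response.json().get("entries", [])
            if not entries:
                # An empty page means the feed is exhausted.
                break
            for entry in entries:
                # Key by guid so overlapping or repeated pages do not
                # produce duplicate entries.
                entries_by_guid[entry["guid"]] = entry
            offset += page_size
        return list(entries_by_guid.values())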