- menu = grab_json(VIDEO_MENU, 3600, skip_assignment=True)
- for name in menu.keys():
- self.fill_category(self, menu[name])
-
- def create_nav_node(self, name, parent, cat_data, url_key):
- try:
- url = cat_data[url_key]
- except KeyError:
+ all_video_entries = self.load_all_video_entries()
+ category_and_entry_data = self.explode_videos_to_unique_categories(all_video_entries)
+ for category_path, entry_data in category_and_entry_data:
+ nav_node = self.create_nav_node(self, category_path)
+ nav_node.create_video_node(entry_data)
+
+ def load_all_video_entries(self):  # fetch every video entry across all SBS channels, deduplicated by guid
+ channels = [
+ "Channel/SBS1",
+ "Channel/SBS Food",
+ "Channel/SBS VICELAND",
+ "Channel/SBS World Movies",
+ "Channel/Web Exclusive",
+ ]
+
+ all_entries = {}  # keyed by guid so the same entry seen twice collapses to one
+ for channel in channels:
+ self.load_all_video_entries_for_channel(all_entries, channel)
+
+ all_entries = list(all_entries.values())  # rebind: drop the guid keys, keep the entry dicts
+ print(" SBS fetched", len(all_entries))  # leading space presumably separates this from the "." progress dots
+ return all_entries
+
+ def load_all_video_entries_for_channel(self, all_entries, channel):  # page through one channel, merging into all_entries (guid -> entry)
+ offset = 1
+ page_size = 500
+ duplicate_warning = False  # warn at most once per channel
+
+ while True:
+ entries = self.fetch_entries_page(channel, offset, page_size)
+ if not entries:
+ break
+
+ for entry in entries:
+ guid = entry["guid"]
+ if guid in all_entries and not duplicate_warning:
+ # https://bitbucket.org/delx/webdl/issues/102/recent-sbs-series-missing
+ logging.warning("SBS returned a duplicate response, data is probably missing. Try decreasing page_size.")
+ duplicate_warning = True
+
+ all_entries[guid] = entry
+
+ offset += page_size
+ if os.isatty(sys.stdout.fileno()):  # progress dots only on an interactive terminal
+ sys.stdout.write(".")
+ sys.stdout.flush()
+
+ def fetch_entries_page(self, channel, offset, page_size):  # fetch one page of entries for a channel; offset is 1-based
+ url = append_to_qs(FULL_VIDEO_LIST, {
+ "range": "%s-%s" % (offset, offset+page_size-1),  # inclusive 1-based range, e.g. "1-500"
+ "byCategories": channel,
+ })
+ data = grab_json(url)
+ if "entries" not in data:  # fail loudly on an unexpected API response rather than returning partial data
+ raise Exception("Missing data in SBS response", data)
+ return data["entries"]
+
+ def explode_videos_to_unique_categories(self, all_video_entries):  # generator yielding (category_path, entry_data) pairs
+ for entry_data in all_video_entries:
+ for category_data in entry_data["media$categories"]:  # one entry can appear under several categories
+ category_path = self.calculate_category_path(
+ category_data["media$scheme"],
+ category_data["media$name"],
+ )
+ if category_path:  # falsy path means the category was rejected by calculate_category_path
+ yield category_path, entry_data
+
+ def calculate_category_path(self, scheme, name):
+ if not scheme: