- for i, entry in enumerate(entries):
- if entry["guid"] not in uniq:
- uniq.add(entry["guid"])
- yield entry
- offset += amount
- sys.stdout.write(".")
- sys.stdout.flush()
- print()
+
+ for entry in entries:
+ guid = entry["guid"]
+ if guid in all_entries and not duplicate_warning:
+ # https://bitbucket.org/delx/webdl/issues/102/recent-sbs-series-missing
+ logging.warning("SBS returned a duplicate response, data is probably missing. Try decreasing page_size.")
+ duplicate_warning = True
+
+ all_entries[guid] = entry
+
+ offset += page_size
+ if os.isatty(sys.stdout.fileno()):
+ sys.stdout.write(".")
+ sys.stdout.flush()
+
+ def fetch_entries_page(self, channel, offset, page_size):
+ url = append_to_qs(FULL_VIDEO_LIST, {
+ "range": "%s-%s" % (offset, offset+page_size-1),
+ "byCategories": channel,
+ })
+ data = grab_json(url)
+ if "entries" not in data:
+ raise Exception("Missing data in SBS response", data)
+ return data["entries"]