@@ -2323,10 +2323,10 @@ def _sort_by_date_time(item, limits):
23232323 return timestamp
23242324
23252325 threaded_output = {
2326- 'channel_ids' : [] ,
2327- 'playlist_ids' : [] ,
2326+ 'channel_ids' : set () ,
2327+ 'playlist_ids' : set () ,
23282328 'feeds' : {},
2329- 'to_refresh' : [] ,
2329+ 'to_refresh' : set () ,
23302330 }
23312331
23322332 (use_subscriptions ,
@@ -2337,21 +2337,25 @@ def _sort_by_date_time(item, limits):
23372337 use_subscriptions = False
23382338 use_saved_playlists = False
23392339
2340- bookmarks = context .get_bookmarks_list ().get_items ()
2341- if bookmarks :
2342- channel_ids = threaded_output ['channel_ids' ]
2343- playlist_ids = threaded_output ['playlist_ids' ]
2344- for item_id , item in bookmarks .items ():
2345- if isinstance (item , DirectoryItem ):
2346- item_id = getattr (item , PLAYLIST_ID , None )
2347- if item_id and use_bookmarked_playlists :
2348- playlist_ids .append (item_id )
2349- continue
2350- item_id = getattr (item , CHANNEL_ID , None )
2351- elif not isinstance (item , float ):
2352- continue
2353- if item_id and use_bookmarked_channels :
2354- channel_ids .append (item_id )
2340+ if use_bookmarked_channels or use_bookmarked_playlists :
2341+ bookmarks = context .get_bookmarks_list ().get_items ()
2342+ if bookmarks :
2343+ channel_ids = threaded_output ['channel_ids' ]
2344+ playlist_ids = threaded_output ['playlist_ids' ]
2345+ for item_id , item in bookmarks .items ():
2346+ if isinstance (item , DirectoryItem ):
2347+ if use_bookmarked_playlists :
2348+ item_id = getattr (item , PLAYLIST_ID , None )
2349+ if item_id :
2350+ playlist_ids .add (item_id )
2351+ continue
2352+ if use_bookmarked_channels :
2353+ item_id = getattr (item , CHANNEL_ID , None )
2354+ if item_id :
2355+ channel_ids .add (item_id )
2356+ continue
2357+ elif use_bookmarked_channels and isinstance (item , float ):
2358+ channel_ids .add (item_id )
23552359
23562360 headers = {
23572361 'Host' : 'www.youtube.com' ,
@@ -2378,6 +2382,7 @@ def _get_cached_feed(output,
23782382 ttl = feed_history .ONE_HOUR ):
23792383 feeds = output ['feeds' ]
23802384 to_refresh = output ['to_refresh' ]
2385+
23812386 if item_type == 'channel_id' :
23822387 channel_prefix = (
23832388 'UUSH' if feed_type == 'shorts' else
@@ -2386,7 +2391,9 @@ def _get_cached_feed(output,
23862391 )
23872392 else :
23882393 channel_prefix = False
2389- for item_id in inputs :
2394+
2395+ batch = inputs .copy ()
2396+ for item_id in batch :
23902397 if channel_prefix :
23912398 channel_id = item_id
23922399 item_id = item_id .replace ('UC' , channel_prefix , 1 )
@@ -2396,44 +2403,46 @@ def _get_cached_feed(output,
23962403 cached = feed_history .get_item (item_id )
23972404 if cached :
23982405 feed_details = cached ['value' ]
2399- _refresh = refresh or cached ['age' ] > ttl
2400- feed_details ['refresh' ] = _refresh
2406+
24012407 if channel_id :
24022408 feed_details .setdefault ('channel_id' , channel_id )
2403- if _refresh :
2404- to_refresh .append ({item_type : channel_id })
2405- elif _refresh :
2406- to_refresh .append ({item_type : item_id })
2409+
2410+ _refresh = refresh or cached ['age' ] > ttl
2411+ feed_details ['refresh' ] = _refresh
2412+ if _refresh :
2413+ to_refresh .add (channel_id )
2414+
24072415 if item_id in feeds :
24082416 feeds [item_id ].update (feed_details )
24092417 else :
24102418 feeds [item_id ] = feed_details
2411- elif channel_id :
2412- to_refresh .append ({item_type : channel_id })
24132419 else :
2414- to_refresh .append ({item_type : item_id })
2415- del inputs [:]
2420+ to_refresh .add (channel_id )
2421+
2422+ inputs -= batch
24162423 return True , False
24172424
24182425 def _get_feed (output ,
2419- channel_id = None ,
2420- playlist_id = None ,
2426+ input = None ,
24212427 feed_type = feed_type ,
24222428 headers = headers ,
24232429 stream = True ,
24242430 cache = False ):
2425- if channel_id :
2426- item_id = channel_id .replace (
2431+ if not input :
2432+ return True , False
2433+
2434+ if input .startswith ('UC' ):
2435+ channel_id = input
2436+ item_id = input .replace (
24272437 'UC' ,
24282438 'UUSH' if feed_type == 'shorts' else
24292439 'UULV' if feed_type == 'live' else
24302440 'UULF' ,
24312441 1 ,
24322442 )
2433- elif playlist_id :
2434- item_id = playlist_id
24352443 else :
2436- return True , False
2444+ channel_id = None
2445+ item_id = input
24372446
24382447 response = self .request (
24392448 '' .join ((self .BASE_URL ,
@@ -2615,6 +2624,7 @@ def _parse_feeds(feeds,
26152624
26162625 def _threaded_fetch (kwargs ,
26172626 do_batch ,
2627+ unpack ,
26182628 output ,
26192629 worker ,
26202630 threads ,
@@ -2631,7 +2641,12 @@ def _threaded_fetch(kwargs,
26312641 if kwargs is True :
26322642 _kwargs = {}
26332643 elif kwargs :
2634- _kwargs = {'inputs' : kwargs } if do_batch else kwargs .pop ()
2644+ if do_batch :
2645+ _kwargs = {'inputs' : kwargs }
2646+ elif unpack :
2647+ _kwargs = {'input' : kwargs .pop ()}
2648+ else :
2649+ _kwargs = kwargs .pop ()
26352650 elif check_inputs :
26362651 if check_inputs .wait (0.1 ) and kwargs :
26372652 continue
@@ -2686,51 +2701,44 @@ def _threaded_fetch(kwargs,
26862701 }
26872702
26882703 def _get_updated_subscriptions (new_data , old_data ):
2689- items = new_data and new_data .get ('items' )
2690- if not items :
2704+ new_items = new_data and new_data .get ('items' )
2705+ if not new_items :
26912706 new_data ['_abort' ] = True
26922707 return new_data
26932708
2694- _items = old_data and old_data .get ('items' )
2695- if _items :
2696- _items = {
2709+ old_items = old_data and old_data .get ('items' )
2710+ if old_items :
2711+ old_items = {
26972712 item ['snippet' ]['resourceId' ]['channelId' ]:
26982713 item ['contentDetails' ]
2699- for item in _items
2714+ for item in old_items
27002715 }
27012716
2702- updated_subscriptions = []
2703- old_subscriptions = []
2704-
2705- for item in items :
2717+ old_subscriptions = False
2718+ updated_subscriptions = set ()
2719+ for item in new_items :
27062720 channel_id = item ['snippet' ]['resourceId' ]['channelId' ]
2707- counts = item ['contentDetails' ]
2708-
2709- if (counts ['newItemCount' ]
2710- or counts ['totalItemCount' ]
2711- > _items .get (channel_id , {})['totalItemCount' ]):
2712- updated_subscriptions .append (
2713- {
2714- 'channel_id' : channel_id ,
2715- }
2716- )
2721+ if channel_id in updated_subscriptions :
2722+ continue
2723+
2724+ item_counts = item ['contentDetails' ]
2725+ if (item_counts ['newItemCount' ]
2726+ or (channel_id in old_items
2727+ and item_counts ['totalItemCount' ]
2728+ > old_items [channel_id ]['totalItemCount' ])):
2729+ updated_subscriptions .add (channel_id )
27172730 else :
2718- old_subscriptions . append ( channel_id )
2731+ old_subscriptions = True
27192732
27202733 if old_subscriptions :
27212734 new_data ['nextPageToken' ] = None
27222735 else :
2723- updated_subscriptions = [
2724- {
2725- 'channel_id' :
2726- item ['snippet' ]['resourceId' ]['channelId' ],
2727- }
2728- for item in items
2729- ]
2730- old_subscriptions = []
2736+ updated_subscriptions = {
2737+ item ['snippet' ]['resourceId' ]['channelId' ]
2738+ for item in new_items
2739+ }
27312740
27322741 new_data ['_updated_subscriptions' ] = updated_subscriptions
2733- new_data ['_old_subscriptions' ] = old_subscriptions
27342742 return new_data
27352743
27362744 def _get_channels (output ,
@@ -2753,11 +2761,14 @@ def _get_channels(output,
27532761
27542762 updated_subscriptions = json_data .get ('_updated_subscriptions' )
27552763 if updated_subscriptions :
2756- output ['to_refresh' ]. extend ( updated_subscriptions )
2764+ output ['to_refresh' ] |= updated_subscriptions
27572765
2758- old_subscriptions = json_data .get ('_old_subscriptions' )
2759- if old_subscriptions :
2760- output ['channel_ids' ].extend (old_subscriptions )
2766+ all_subscriptions = json_data .get ('items' )
2767+ if all_subscriptions :
2768+ output ['channel_ids' ].update ([
2769+ item ['snippet' ]['resourceId' ]['channelId' ]
2770+ for item in all_subscriptions
2771+ ])
27612772
27622773 page_token = json_data .get ('nextPageToken' )
27632774 if page_token :
@@ -2771,6 +2782,7 @@ def _get_channels(output,
27712782 'worker' : _get_channels ,
27722783 'kwargs' : True ,
27732784 'do_batch' : False ,
2785+ 'unpack' : False ,
27742786 'output' : threaded_output ,
27752787 'threads' : threads ,
27762788 'limit' : 1 ,
@@ -2812,10 +2824,12 @@ def _get_playlists(output,
28122824 if not json_data :
28132825 return False , True
28142826
2815- output ['playlist_ids' ].extend ([
2816- item ['id' ]
2817- for item in json_data .get ('items' , [])
2818- ])
2827+ saved_playlists = json_data .get ('items' )
2828+ if saved_playlists :
2829+ output ['playlist_ids' ].update ([
2830+ item ['id' ]
2831+ for item in saved_playlists
2832+ ])
28192833
28202834 subs_page_token = json_data .get ('nextPageToken' )
28212835 if subs_page_token :
@@ -2827,6 +2841,7 @@ def _get_playlists(output,
28272841 'worker' : _get_playlists ,
28282842 'kwargs' : True ,
28292843 'do_batch' : False ,
2844+ 'unpack' : False ,
28302845 'output' : threaded_output ,
28312846 'threads' : threads ,
28322847 'limit' : 1 ,
@@ -2838,6 +2853,7 @@ def _get_playlists(output,
28382853 'worker' : partial (_get_cached_feed , item_type = 'channel_id' ),
28392854 'kwargs' : threaded_output ['channel_ids' ],
28402855 'do_batch' : True ,
2856+ 'unpack' : False ,
28412857 'output' : threaded_output ,
28422858 'threads' : threads ,
28432859 'limit' : None ,
@@ -2849,6 +2865,7 @@ def _get_playlists(output,
28492865 'worker' : partial (_get_cached_feed , item_type = 'playlist_id' ),
28502866 'kwargs' : threaded_output ['playlist_ids' ],
28512867 'do_batch' : True ,
2868+ 'unpack' : False ,
28522869 'output' : threaded_output ,
28532870 'threads' : threads ,
28542871 'limit' : None ,
@@ -2860,6 +2877,7 @@ def _get_playlists(output,
28602877 'worker' : _get_feed ,
28612878 'kwargs' : threaded_output ['to_refresh' ],
28622879 'do_batch' : False ,
2880+ 'unpack' : True ,
28632881 'output' : threaded_output ,
28642882 'threads' : threads ,
28652883 'limit' : None ,