From e6739c3087268c228805569b6d3b067a3d21e985 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Sun, 12 Nov 2023 23:25:46 -0600 Subject: [PATCH 01/28] Wrapped async in daemon thread for icon loading --- src/solarfm/app.py | 5 +- src/solarfm/core/mixins/ui/grid_mixin.py | 53 ++++++------------- .../core/widgets/files_view/grid_mixin.py | 52 ++++++------------ 3 files changed, 33 insertions(+), 77 deletions(-) diff --git a/src/solarfm/app.py b/src/solarfm/app.py index 209e350..471b8d9 100644 --- a/src/solarfm/app.py +++ b/src/solarfm/app.py @@ -15,6 +15,7 @@ class AppLaunchException(Exception): ... + class Application(IPCServer): """ docstring for Application. """ @@ -30,7 +31,7 @@ class Application(IPCServer): message = f"FILE|{arg}" self.send_ipc_message(message) - raise AppLaunchException(f"{app_name} IPC Server Exists: Will send path(s) to it and close...") + raise AppLaunchException(f"{app_name} IPC Server Exists: Have sent path(s) to it and closing...") self.setup_debug_hook() Window(args, unknownargs) @@ -56,4 +57,4 @@ class Application(IPCServer): ) except ValueError: # Typically: ValueError: signal only works in main thread - ... + ... \ No newline at end of file diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index 31881ef..937802c 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -26,59 +26,36 @@ class GridMixin: store.append([None, file[0]]) Gtk.main_iteration() - # for i, file in enumerate(files): - # self.create_icon(i, tab, store, dir, file[0]) - - if use_generator: - # NOTE: tab > icon > _get_system_thumbnail_gtk_thread must not be used - # as the attempted promotion back to gtk threading stalls the generator. 
(We're already in main gtk thread) - for i, icon in enumerate( self.create_icons_generator(tab, dir, files) ): - self.load_icon(i, store, icon) - else: - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = None - - if loop and loop.is_running(): - loop.create_task( self.create_icons(tab, store, dir, files) ) - else: - asyncio.run( self.create_icons(tab, store, dir, files) ) + self.generate_icons(tab, store, dir, files) # NOTE: Not likely called often from here but it could be useful if save_state and not trace_debug: self.fm_controller.save_state() + @daemon_threaded + def generate_icons(self, tab, store, dir, files): + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + + if loop and loop.is_running(): + loop.create_task( self.create_icons(tab, store, dir, files) ) + else: + asyncio.run( self.create_icons(tab, store, dir, files) ) + async def create_icons(self, tab, store, dir, files): tasks = [self.update_store(i, store, dir, tab, file[0]) for i, file in enumerate(files)] await asyncio.gather(*tasks) async def load_icon(self, i, store, icon): - self.update_store(i, store, icon) + GLib.idle_add(self.update_store, i, store, icon ) async def update_store(self, i, store, dir, tab, file): icon = tab.create_icon(dir, file) itr = store.get_iter(i) store.set_value(itr, 0, icon) - def create_icons_generator(self, tab, dir, files): - for file in files: - icon = tab.create_icon(dir, file[0]) - yield icon - - # @daemon_threaded - # def create_icon(self, i, tab, store, dir, file): - # icon = tab.create_icon(dir, file) - # GLib.idle_add(self.update_store, *(i, store, icon,)) - # - # @daemon_threaded - # def load_icon(self, i, store, icon): - # GLib.idle_add(self.update_store, *(i, store, icon,)) - - # def update_store(self, i, store, icon): - # itr = store.get_iter(i) - # store.set_value(itr, 0, icon) - def create_tab_widget(self, tab): return TabHeaderWidget(tab, self.close_tab) @@ -144,4 +121,4 @@ class GridMixin: icon_grid = obj.get_children()[0] name = icon_grid.get_name() if name == _name: - return icon_grid + return icon_grid \ No newline at end of file diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index 0b02715..11ca4f6 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -26,58 +26,36 @@ class GridMixin: store.append([None, file[0]]) Gtk.main_iteration() - if use_generator: - # NOTE: tab > icon > _get_system_thumbnail_gtk_thread must not be used - # as the attempted promotion back to gtk threading stalls the generator. 
(We're already in main gtk thread) - for i, icon in enumerate( self.create_icons_generator(tab, dir, files) ): - self.load_icon(i, store, icon) - else: - # for i, file in enumerate(files): - # self.create_icon(i, tab, store, dir, file[0]) - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = None - - if loop and loop.is_running(): - loop.create_task( self.create_icons(tab, store, dir, files) ) - else: - asyncio.run( self.create_icons(tab, store, dir, files) ) + self.generate_icons(tab, store, dir, files) # NOTE: Not likely called often from here but it could be useful if save_state and not trace_debug: self.fm_controller.save_state() + @daemon_threaded + def generate_icons(self, tab, store, dir, files): + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + + if loop and loop.is_running(): + loop.create_task( self.create_icons(tab, store, dir, files) ) + else: + asyncio.run( self.create_icons(tab, store, dir, files) ) + async def create_icons(self, tab, store, dir, files): tasks = [self.update_store(i, store, dir, tab, file[0]) for i, file in enumerate(files)] await asyncio.gather(*tasks) async def load_icon(self, i, store, icon): - self.update_store(i, store, icon) + GLib.idle_add(self.update_store, i, store, icon ) async def update_store(self, i, store, dir, tab, file): icon = tab.create_icon(dir, file) itr = store.get_iter(i) store.set_value(itr, 0, icon) - def create_icons_generator(self, tab, dir, files): - for file in files: - icon = tab.create_icon(dir, file[0]) - yield icon - - # @daemon_threaded - # def create_icon(self, i, tab, store, dir, file): - # icon = tab.create_icon(dir, file) - # GLib.idle_add(self.update_store, *(i, store, icon,)) - # - # @daemon_threaded - # def load_icon(self, i, store, icon): - # GLib.idle_add(self.update_store, *(i, store, icon,)) - # - # def update_store(self, i, store, icon): - # itr = store.get_iter(i) - # store.set_value(itr, 0, icon) - def create_tab_widget(self, tab): return TabHeaderWidget(tab, self.close_tab) @@ -136,4 +114,4 @@ class GridMixin: store = icon_grid.get_model() tab_label = notebook.get_tab_label(obj).get_children()[0] - return store, tab_label + return store, tab_label \ No newline at end of file -- 2.39.5 From d936b1742945a204196abace53975495a84c183a Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Sat, 25 Nov 2023 15:52:43 -0600 Subject: [PATCH 02/28] Improved keybinding clarity; trying to fix thread and async issues --- src/solarfm/__builtins__.py | 10 ++-- src/solarfm/core/controller.py | 2 + src/solarfm/core/controller_data.py | 11 +---- .../mixins/signals/keyboard_signals_mixin.py | 47 +++++++++++-------- src/solarfm/core/mixins/ui/grid_mixin.py | 7 +-- src/solarfm/core/ui_mixin.py | 4 +- .../core/widgets/files_view/files_widget.py | 19 ++++---- .../core/widgets/files_view/grid_mixin.py | 7 +-- 8 files changed, 56 insertions(+), 51 deletions(-) diff --git a/src/solarfm/__builtins__.py b/src/solarfm/__builtins__.py index 6ead649..9c7cce9 100644 --- a/src/solarfm/__builtins__.py +++ b/src/solarfm/__builtins__.py @@ -16,13 +16,17 @@ from utils.settings_manager.manager import SettingsManager # NOTE: Threads WILL NOT die with parent's destruction. 
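# (Illustrative sketch, not part of the patch: the daemon flag used below is what decides
#  whether the interpreter waits on the thread at shutdown, and the reworked wrappers now
#  return the Thread handle so callers can keep or join it.)
#
#     import threading, time
#     threading.Thread(target = time.sleep, args = (5,), daemon = False).start()  # blocks interpreter exit ~5s
#     threading.Thread(target = time.sleep, args = (5,), daemon = True).start()   # dropped when the main thread exits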
def threaded_wrapper(fn): def wrapper(*args, **kwargs): - threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start() + thread = threading.Thread(target = fn, args = args, kwargs = kwargs, daemon = False) + thread.start() + return thread return wrapper # NOTE: Threads WILL die with parent's destruction. def daemon_threaded_wrapper(fn): def wrapper(*args, **kwargs): - threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start() + thread = threading.Thread(target = fn, args = args, kwargs = kwargs, daemon = True) + thread.start() + return thread return wrapper def sizeof_fmt_def(num, suffix="B"): @@ -61,4 +65,4 @@ def custom_except_hook(exc_type, exc_value, exc_traceback): logger.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)) -sys.excepthook = custom_except_hook +sys.excepthook = custom_except_hook \ No newline at end of file diff --git a/src/solarfm/core/controller.py b/src/solarfm/core/controller.py index ce10937..e818db6 100644 --- a/src/solarfm/core/controller.py +++ b/src/solarfm/core/controller.py @@ -78,6 +78,7 @@ class Controller(UIMixin, SignalsMixins, Controller_Data): event_system.subscribe("do_action_from_menu_controls", self.do_action_from_menu_controls) event_system.subscribe("set_clipboard_data", self.set_clipboard_data) + def _load_glade_file(self): self.builder.add_from_file( settings_manager.get_glade_file() ) self.builder.expose_object("main_window", self.window) @@ -113,6 +114,7 @@ class Controller(UIMixin, SignalsMixins, Controller_Data): if not settings_manager.is_trace_debug(): self.fm_controller.save_state() + def reload_plugins(self, widget=None, eve=None): self.plugins.reload_plugins() diff --git a/src/solarfm/core/controller_data.py b/src/solarfm/core/controller_data.py index 89961ac..7a1514d 100644 --- a/src/solarfm/core/controller_data.py +++ b/src/solarfm/core/controller_data.py @@ -70,23 +70,16 @@ class Controller_Data: Returns: state (obj): State ''' - # state = State() + state = self._state state.fm_controller = self.fm_controller state.notebooks = self.notebooks state.wid, state.tid = self.fm_controller.get_active_wid_and_tid() state.tab = self.get_fm_window(state.wid).get_tab_by_id(state.tid) state.icon_grid = self.builder.get_object(f"{state.wid}|{state.tid}|icon_grid", use_gtk = False) - # state.icon_grid = event_system.emit_and_await("get_files_view_icon_grid", (state.wid, state.tid)) state.store = state.icon_grid.get_model() - - # NOTE: Need to watch this as I thought we had issues with just using single reference upon closing it. - # But, I found that not doing it this way caused objects to generate upon every click... (Because we're getting state info, duh) - # Yet interactive debug view shows them just pilling on and never clearing... 
state.message_dialog = self.message_dialog state.user_pass_dialog = self.user_pass_dialog - # state.message_dialog = MessageWidget() - # state.user_pass_dialog = UserPassWidget() selected_files = state.icon_grid.get_selected_items() if selected_files: @@ -186,4 +179,4 @@ class Controller_Data: proc = subprocess.Popen(['xclip','-selection','clipboard'], stdin=subprocess.PIPE) proc.stdin.write(data.encode("utf-8")) proc.stdin.close() - retcode = proc.wait() + retcode = proc.wait() \ No newline at end of file diff --git a/src/solarfm/core/mixins/signals/keyboard_signals_mixin.py b/src/solarfm/core/mixins/signals/keyboard_signals_mixin.py index 9e02fe1..ac1dbf2 100644 --- a/src/solarfm/core/mixins/signals/keyboard_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/keyboard_signals_mixin.py @@ -25,6 +25,14 @@ class KeyboardSignalsMixin: self.shift_down = False self.alt_down = False + def unmap_special_key(self, keyname): + if "control" in keyname: + self.ctrl_down = False + if "shift" in keyname: + self.shift_down = False + if "alt" in keyname: + self.alt_down = False + def on_global_key_press_controller(self, eve, user_data): keyname = Gdk.keyval_name(user_data.keyval).lower() if keyname.replace("_l", "").replace("_r", "") in ["control", "alt", "shift"]: @@ -35,32 +43,19 @@ class KeyboardSignalsMixin: if "alt" in keyname: self.alt_down = True + def on_global_key_release_controller(self, widget, event): """Handler for keyboard events""" keyname = Gdk.keyval_name(event.keyval).lower() if keyname.replace("_l", "").replace("_r", "") in ["control", "alt", "shift"]: - if "control" in keyname: - self.ctrl_down = False - if "shift" in keyname: - self.shift_down = False - if "alt" in keyname: - self.alt_down = False + self.unmap_special_key(keyname) mapping = keybindings.lookup(event) if mapping: - # See if in filemanager scope try: - getattr(self, mapping)() - return True + self.handle_as_controller_scope(mapping) except Exception: - # Must be plugins scope, event call, OR we forgot to add method to file manager scope - if "||" in mapping: - sender, eve_type = mapping.split("||") - else: - sender = "" - eve_type = mapping - - self.handle_plugin_key_event(sender, eve_type) + self.handle_as_plugin_scope(mapping) else: logger.debug(f"on_global_key_release_controller > key > {keyname}") @@ -68,7 +63,19 @@ class KeyboardSignalsMixin: if keyname in ["1", "kp_1", "2", "kp_2", "3", "kp_3", "4", "kp_4"]: self.builder.get_object(f"tggl_notebook_{keyname.strip('kp_')}").released() - def handle_plugin_key_event(self, sender, eve_type): + def handle_as_controller_scope(self, mapping): + getattr(self, mapping)() + + def handle_as_plugin_scope(self, mapping): + if "||" in mapping: + sender, eve_type = mapping.split("||") + else: + sender = "" + eve_type = mapping + + self.handle_as_key_event_system(sender, eve_type) + + def handle_as_key_event_system(self, sender, eve_type): event_system.emit(eve_type) def keyboard_close_tab(self): @@ -82,6 +89,6 @@ class KeyboardSignalsMixin: self.get_fm_window(wid).delete_tab_by_id(tid) notebook.remove_page(page) - if not trace_debug: + if not settings_manager.is_trace_debug(): self.fm_controller.save_state() - self.set_window_title() + self.set_window_title() \ No newline at end of file diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index 937802c..a769103 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -26,7 +26,7 @@ class GridMixin: store.append([None, file[0]]) 
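            # Each row is appended with a None pixbuf as a placeholder; generate_icons() fills
            # in the real icon afterwards. The Gtk.main_iteration() call that follows appears
            # intended to service pending GTK events so the view stays responsive while a
            # large directory is being appended to the store.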
Gtk.main_iteration() - self.generate_icons(tab, store, dir, files) + thread = self.generate_icons(tab, store, dir, files) # NOTE: Not likely called often from here but it could be useful if save_state and not trace_debug: @@ -48,13 +48,10 @@ class GridMixin: tasks = [self.update_store(i, store, dir, tab, file[0]) for i, file in enumerate(files)] await asyncio.gather(*tasks) - async def load_icon(self, i, store, icon): - GLib.idle_add(self.update_store, i, store, icon ) - async def update_store(self, i, store, dir, tab, file): icon = tab.create_icon(dir, file) itr = store.get_iter(i) - store.set_value(itr, 0, icon) + GLib.idle_add(store.set_value, itr, 0, icon) def create_tab_widget(self, tab): return TabHeaderWidget(tab, self.close_tab) diff --git a/src/solarfm/core/ui_mixin.py b/src/solarfm/core/ui_mixin.py index 406f1fa..b596497 100644 --- a/src/solarfm/core/ui_mixin.py +++ b/src/solarfm/core/ui_mixin.py @@ -34,7 +34,9 @@ class UIMixin(PaneMixin, WindowMixin): nickname = session["window"]["Nickname"] tabs = session["window"]["tabs"] isHidden = True if session["window"]["isHidden"] == "True" else False - event_system.emit("load_files_view_state", (nickname, tabs)) + event_system.emit("load_files_view_state", (nickname, tabs, isHidden)) + + @daemon_threaded def _focus_last_visible_notebook(self, icon_grid): diff --git a/src/solarfm/core/widgets/files_view/files_widget.py b/src/solarfm/core/widgets/files_view/files_widget.py index cca26d6..de10fa7 100644 --- a/src/solarfm/core/widgets/files_view/files_widget.py +++ b/src/solarfm/core/widgets/files_view/files_widget.py @@ -1,11 +1,9 @@ # Python imports # Lib imports -import gi -gi.require_version('Gtk', '3.0') -from gi.repository import Gtk # Application imports +from ...sfm_builder import SFMBuilder from ...mixins.signals.file_action_signals_mixin import FileActionSignalsMixin from .window_mixin import WindowMixin @@ -27,7 +25,8 @@ class FilesWidget(FileActionSignalsMixin, WindowMixin): self.INDEX = self.ccount self.NAME = f"window_{self.INDEX}" - self.builder = Gtk.Builder() + self.builder = SFMBuilder() + self.files_view = None self.fm_controller = None @@ -41,20 +40,21 @@ class FilesWidget(FileActionSignalsMixin, WindowMixin): ... 
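    # Rough sketch of the pub/sub flow assumed around this widget (names taken from the
    # surrounding diffs, simplified): UIMixin publishes the saved session per window and
    # each FilesWidget filters on its own NAME.
    #
    #     event_system.subscribe("load_files_view_state", self._load_files_view_state)
    #     ...
    #     event_system.emit("load_files_view_state", (nickname, tabs, isHidden))
    #     # _load_files_view_state() then acts only when win_name == self.NAME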
def _setup_signals(self): - settings_manager.register_signals_to_builder([self,], self.builder) + settings_manager.register_signals_to_builder([self], self.builder) def _subscribe_to_events(self): event_system.subscribe("load_files_view_state", self._load_files_view_state) event_system.subscribe("get_files_view_icon_grid", self._get_files_view_icon_grid) def _load_widgets(self): - _builder = settings_manager.get_builder() + _builder = settings_manager.get_builder() self.files_view = _builder.get_object(f"{self.NAME}") self.files_view.set_group_name("files_widget") + self.builder.expose_object(f"{self.NAME}", self.files_view) - def _load_files_view_state(self, win_name = None, tabs = None): + def _load_files_view_state(self, win_name = None, tabs = None, isHidden = False): if win_name == self.NAME: if tabs: for tab in tabs: @@ -62,10 +62,13 @@ class FilesWidget(FileActionSignalsMixin, WindowMixin): else: self.create_new_tab_notebook(None, self.INDEX, None) + if isHidden: + self.files_view.hide() + def _get_files_view_icon_grid(self, win_index = None, tid = None): if win_index == str(self.INDEX): return self.builder.get_object(f"{self.INDEX}|{tid}|icon_grid", use_gtk = False) def set_fm_controller(self, _fm_controller): - self.fm_controller = _fm_controller + self.fm_controller = _fm_controller \ No newline at end of file diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index 11ca4f6..258b320 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -26,7 +26,7 @@ class GridMixin: store.append([None, file[0]]) Gtk.main_iteration() - self.generate_icons(tab, store, dir, files) + thread = self.generate_icons(tab, store, dir, files) # NOTE: Not likely called often from here but it could be useful if save_state and not trace_debug: @@ -48,13 +48,10 @@ class GridMixin: tasks = [self.update_store(i, store, dir, tab, file[0]) for i, file in enumerate(files)] await asyncio.gather(*tasks) - async def load_icon(self, i, store, icon): - GLib.idle_add(self.update_store, i, store, icon ) - async def update_store(self, i, store, dir, tab, file): icon = tab.create_icon(dir, file) itr = store.get_iter(i) - store.set_value(itr, 0, icon) + GLib.idle_add(store.set_value, itr, 0, icon) def create_tab_widget(self, tab): return TabHeaderWidget(tab, self.close_tab) -- 2.39.5 From 9336df2afa945d61f8a8f8631020ea11138a56f2 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Sun, 24 Dec 2023 13:23:24 -0600 Subject: [PATCH 03/28] Changing out threading for some sections; added 2 new tab option --- src/solarfm/core/controller.py | 2 ++ src/solarfm/core/fs_actions/file_system_actions.py | 9 ++++++++- .../core/mixins/signals/file_action_signals_mixin.py | 4 ++-- src/solarfm/core/mixins/ui/grid_mixin.py | 5 +++-- src/solarfm/core/ui_mixin.py | 7 +++---- src/solarfm/core/widgets/files_view/grid_mixin.py | 3 +-- src/solarfm/shellfm/windows/tabs/icons/icon.py | 6 +++--- 7 files changed, 22 insertions(+), 14 deletions(-) diff --git a/src/solarfm/core/controller.py b/src/solarfm/core/controller.py index e818db6..31f1bee 100644 --- a/src/solarfm/core/controller.py +++ b/src/solarfm/core/controller.py @@ -135,6 +135,8 @@ class Controller(UIMixin, SignalsMixins, Controller_Data): event_system.emit("open_files") if action == "open_with": event_system.emit("show_appchooser_menu") + if action == "open_2_new_tab": + event_system.emit("open_2_new_tab") if action == "execute": 
event_system.emit("execute_files") if action == "execute_in_terminal": diff --git a/src/solarfm/core/fs_actions/file_system_actions.py b/src/solarfm/core/fs_actions/file_system_actions.py index 561de7b..be6573c 100644 --- a/src/solarfm/core/fs_actions/file_system_actions.py +++ b/src/solarfm/core/fs_actions/file_system_actions.py @@ -40,6 +40,7 @@ class FileSystemActions(HandlerMixin, CRUDMixin): event_system.subscribe("open_files", self.open_files) event_system.subscribe("open_with_files", self.open_with_files) + event_system.subscribe("open_2_new_tab", self.open_2_new_tab) event_system.subscribe("execute_files", self.execute_files) event_system.subscribe("cut_files", self.cut_files) @@ -104,6 +105,12 @@ class FileSystemActions(HandlerMixin, CRUDMixin): state.tab.app_chooser_exec(app_info, uris) + def open_2_new_tab(self): + state = event_system.emit_and_await("get_current_state") + uri = state.uris[0] + message = f"FILE|{uri}" + logger.info(message) + event_system.emit("post_file_to_ipc", message) def execute_files(self, in_terminal=False): state = event_system.emit_and_await("get_current_state") @@ -111,4 +118,4 @@ class FileSystemActions(HandlerMixin, CRUDMixin): command = None for path in state.uris: command = f"{shlex.quote(path)}" if not in_terminal else f"{state.tab.terminal_app} -e {shlex.quote(path)}" - state.tab.execute(shlex.split(command), start_dir=state.tab.get_current_directory()) + state.tab.execute(shlex.split(command), start_dir=state.tab.get_current_directory()) \ No newline at end of file diff --git a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py index aba1eac..6767b36 100644 --- a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py @@ -48,7 +48,7 @@ class FileActionSignalsMixin: else: self.soft_lock_countdown(data[0]) - @threaded + @daemon_threaded def soft_lock_countdown(self, tab_widget): self.soft_update_lock[tab_widget] = { "last_update_time": time.time()} @@ -102,4 +102,4 @@ class FileActionSignalsMixin: items = icon_grid.get_selected_items() if len(items) > 0: - icon_grid.scroll_to_path(items[0], False, 0.5, 0.5) + icon_grid.scroll_to_path(items[0], False, 0.5, 0.5) \ No newline at end of file diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index a769103..58c476d 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -26,13 +26,13 @@ class GridMixin: store.append([None, file[0]]) Gtk.main_iteration() - thread = self.generate_icons(tab, store, dir, files) + GLib.Thread("", self.generate_icons, tab, store, dir, files) # NOTE: Not likely called often from here but it could be useful if save_state and not trace_debug: self.fm_controller.save_state() - @daemon_threaded + def generate_icons(self, tab, store, dir, files): try: loop = asyncio.get_running_loop() @@ -47,6 +47,7 @@ class GridMixin: async def create_icons(self, tab, store, dir, files): tasks = [self.update_store(i, store, dir, tab, file[0]) for i, file in enumerate(files)] await asyncio.gather(*tasks) + GLib.idle_add(Gtk.main_iteration) async def update_store(self, i, store, dir, tab, file): icon = tab.create_icon(dir, file) diff --git a/src/solarfm/core/ui_mixin.py b/src/solarfm/core/ui_mixin.py index b596497..cbca606 100644 --- a/src/solarfm/core/ui_mixin.py +++ b/src/solarfm/core/ui_mixin.py @@ -6,6 +6,7 @@ gi.require_version('Gtk', '3.0') 
gi.require_version('Gdk', '3.0') from gi.repository import Gtk from gi.repository import Gdk +from gi.repository import GLib # Application imports from .mixins.ui.pane_mixin import PaneMixin @@ -37,14 +38,12 @@ class UIMixin(PaneMixin, WindowMixin): event_system.emit("load_files_view_state", (nickname, tabs, isHidden)) - - @daemon_threaded def _focus_last_visible_notebook(self, icon_grid): import time window = settings_manager.get_main_window() while not window.is_visible() and not window.get_realized(): - time.sleep(0.1) + time.sleep(0.2) icon_grid.event(Gdk.Event().new(type = Gdk.EventType.BUTTON_RELEASE)) @@ -79,7 +78,7 @@ class UIMixin(PaneMixin, WindowMixin): scroll_win = notebook.get_children()[-1] icon_grid = scroll_win.get_children()[0] - self._focus_last_visible_notebook(icon_grid) + GLib.Thread("", self._focus_last_visible_notebook, icon_grid) except UIMixinException as e: logger.info("\n: The saved session might be missing window data! :\nLocation: ~/.config/solarfm/session.json\nFix: Back it up and delete it to reset.\n") logger.debug(repr(e)) diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index 258b320..b6ab2f8 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -26,13 +26,12 @@ class GridMixin: store.append([None, file[0]]) Gtk.main_iteration() - thread = self.generate_icons(tab, store, dir, files) + GLib.Thread("", self.generate_icons, tab, store, dir, files) # NOTE: Not likely called often from here but it could be useful if save_state and not trace_debug: self.fm_controller.save_state() - @daemon_threaded def generate_icons(self, tab, store, dir, files): try: loop = asyncio.get_running_loop() diff --git a/src/solarfm/shellfm/windows/tabs/icons/icon.py b/src/solarfm/shellfm/windows/tabs/icons/icon.py index 72285b0..19e3faa 100644 --- a/src/solarfm/shellfm/windows/tabs/icons/icon.py +++ b/src/solarfm/shellfm/windows/tabs/icons/icon.py @@ -50,8 +50,8 @@ class Icon(DesktopIconMixin, VideoIconMixin, MeshsIconMixin): if not thumbnl: # TODO: Detect if not in a thread and use directly for speed get_system_thumbnail - thumbnl = self.get_system_thumbnail(full_path, self.sys_icon_wh[0]) - # thumbnl = self._get_system_thumbnail_gtk_thread(full_path, self.sys_icon_wh[0]) + # thumbnl = self.get_system_thumbnail(full_path, self.sys_icon_wh[0]) + thumbnl = self._get_system_thumbnail_gtk_thread(full_path, self.sys_icon_wh[0]) if not thumbnl: raise IconException("No known icons found.") @@ -201,4 +201,4 @@ class Icon(DesktopIconMixin, VideoIconMixin, MeshsIconMixin): pixbuf = GdkPixbuf.Pixbuf.new_from_bytes(data, GdkPixbuf.Colorspace.RGB, False, 8, w, h, w * 3) - return pixbuf.scale_simple(wxh[0], wxh[1], 2) # BILINEAR = 2 + return pixbuf.scale_simple(wxh[0], wxh[1], 2) # BILINEAR = 2 \ No newline at end of file -- 2.39.5 From 4cafb7ff9f4515b86f3d63413e148a1900d91e37 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Sun, 31 Dec 2023 22:20:04 -0600 Subject: [PATCH 04/28] GLib idle add return effort --- .../signals/file_action_signals_mixin.py | 1 - src/solarfm/core/mixins/ui/grid_mixin.py | 31 ++++++++++++++--- src/solarfm/core/mixins/ui/tab_mixin.py | 2 +- .../core/widgets/files_view/grid_mixin.py | 33 ++++++++++++++++--- src/solarfm/core/widgets/icon_grid_widget.py | 3 +- .../shellfm/windows/tabs/icons/icon.py | 1 + 6 files changed, 60 insertions(+), 11 deletions(-) diff --git 
a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py index 6767b36..775a54f 100644 --- a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py @@ -39,7 +39,6 @@ class FileActionSignalsMixin: if eve_type in [Gio.FileMonitorEvent.CREATED, Gio.FileMonitorEvent.DELETED, Gio.FileMonitorEvent.RENAMED, Gio.FileMonitorEvent.MOVED_IN, Gio.FileMonitorEvent.MOVED_OUT]: - logger.debug(eve_type) if eve_type in [Gio.FileMonitorEvent.MOVED_IN, Gio.FileMonitorEvent.MOVED_OUT]: self.update_on_soft_lock_end(data[0]) diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index 58c476d..be7d7a3 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -7,6 +7,7 @@ import gi gi.require_version("Gtk", "3.0") from gi.repository import Gtk from gi.repository import GLib +from gi.repository import Gio # Application imports from ...widgets.tab_header_widget import TabHeaderWidget @@ -19,6 +20,18 @@ class GridMixin: """docstring for GridMixin""" def load_store(self, tab, store, save_state = False, use_generator = False): + # dir = tab.get_current_directory() + # file = Gio.File.new_for_path(dir) + # dir_list = Gtk.DirectoryList.new("standard::*", file) + # store.set(dir_list) + + # file = Gio.File.new_for_path(dir) + # for file in file.enumerate_children("standard::*", Gio.FILE_ATTRIBUTE_STANDARD_NAME, None): + # store.append(file) + + # return + + dir = tab.get_current_directory() files = tab.get_files() @@ -45,14 +58,24 @@ class GridMixin: asyncio.run( self.create_icons(tab, store, dir, files) ) async def create_icons(self, tab, store, dir, files): - tasks = [self.update_store(i, store, dir, tab, file[0]) for i, file in enumerate(files)] + icons = [self.get_icon(tab, dir, file[0]) for file in files] + data = await asyncio.gather(*icons) + tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] await asyncio.gather(*tasks) + GLib.idle_add(Gtk.main_iteration) - async def update_store(self, i, store, dir, tab, file): - icon = tab.create_icon(dir, file) + async def update_store(self, i, store, icon): itr = store.get_iter(i) - GLib.idle_add(store.set_value, itr, 0, icon) + GLib.idle_add(self.insert_store, store, itr, icon) + + def insert_store(self, store, itr, icon): + store.set_value(itr, 0, icon) + # Note: If the function returns GLib.SOURCE_REMOVE or False it is automatically removed from the list of event sources and will not be called again. 
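        # (Sketch of the idle_add pattern for clarity, relying on GLib's documented behavior:
        #  a callback queued with GLib.idle_add() keeps firing on every idle cycle until it
        #  returns False / GLib.SOURCE_REMOVE, so a one-shot UI update like this one ends with
        #  `return False`. The producer side queues it as
        #      GLib.idle_add(self.insert_store, store, itr, icon)
        #  which is what update_store() above does.)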
+ return False + + async def get_icon(self, tab, dir, file): + return tab.create_icon(dir, file) def create_tab_widget(self, tab): return TabHeaderWidget(tab, self.close_tab) diff --git a/src/solarfm/core/mixins/ui/tab_mixin.py b/src/solarfm/core/mixins/ui/tab_mixin.py index aab38bd..ffd7f2b 100644 --- a/src/solarfm/core/mixins/ui/tab_mixin.py +++ b/src/solarfm/core/mixins/ui/tab_mixin.py @@ -236,4 +236,4 @@ class TabMixin(GridMixin): tab = self.get_fm_window(wid).get_tab_by_id(tid) tab.set_hiding_hidden(not tab.is_hiding_hidden()) tab.load_directory() - self.builder.get_object("refresh_tab").released() + self.builder.get_object("refresh_tab").released() \ No newline at end of file diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index b6ab2f8..dd11204 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -7,6 +7,7 @@ import gi gi.require_version("Gtk", "3.0") from gi.repository import Gtk from gi.repository import GLib +from gi.repository import Gio # Application imports from ...widgets.tab_header_widget import TabHeaderWidget @@ -19,6 +20,18 @@ class GridMixin: """docstring for GridMixin""" def load_store(self, tab, store, save_state = False, use_generator = False): + # dir = tab.get_current_directory() + # file = Gio.File.new_for_path(dir) + # dir_list = Gtk.DirectoryList.new("standard::*", file) + # store.set(dir_list) + + # file = Gio.File.new_for_path(dir) + # for file in file.enumerate_children("standard::*", Gio.FILE_ATTRIBUTE_STANDARD_NAME, None): + # store.append(file) + + # return + + dir = tab.get_current_directory() files = tab.get_files() @@ -32,6 +45,7 @@ class GridMixin: if save_state and not trace_debug: self.fm_controller.save_state() + def generate_icons(self, tab, store, dir, files): try: loop = asyncio.get_running_loop() @@ -44,13 +58,24 @@ class GridMixin: asyncio.run( self.create_icons(tab, store, dir, files) ) async def create_icons(self, tab, store, dir, files): - tasks = [self.update_store(i, store, dir, tab, file[0]) for i, file in enumerate(files)] + icons = [self.get_icon(tab, dir, file[0]) for file in files] + data = await asyncio.gather(*icons) + tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] await asyncio.gather(*tasks) - async def update_store(self, i, store, dir, tab, file): - icon = tab.create_icon(dir, file) + GLib.idle_add(Gtk.main_iteration) + + async def update_store(self, i, store, icon): itr = store.get_iter(i) - GLib.idle_add(store.set_value, itr, 0, icon) + GLib.idle_add(self.insert_store, store, itr, icon) + + def insert_store(self, store, itr, icon): + store.set_value(itr, 0, icon) + # Note: If the function returns GLib.SOURCE_REMOVE or False it is automatically removed from the list of event sources and will not be called again. 
+ return False + + async def get_icon(self, tab, dir, file): + return tab.create_icon(dir, file) def create_tab_widget(self, tab): return TabHeaderWidget(tab, self.close_tab) diff --git a/src/solarfm/core/widgets/icon_grid_widget.py b/src/solarfm/core/widgets/icon_grid_widget.py index 51088b3..2d25921 100644 --- a/src/solarfm/core/widgets/icon_grid_widget.py +++ b/src/solarfm/core/widgets/icon_grid_widget.py @@ -77,4 +77,5 @@ class IconGridWidget(Gtk.IconView): def clear_and_set_new_store(self): self.set_model(None) store = Gtk.ListStore(GdkPixbuf.Pixbuf or GdkPixbuf.PixbufAnimation or None, str or None) - self.set_model(store) + # store = Gtk.ListStore(Gtk.DirectoryList) + self.set_model(store) \ No newline at end of file diff --git a/src/solarfm/shellfm/windows/tabs/icons/icon.py b/src/solarfm/shellfm/windows/tabs/icons/icon.py index 19e3faa..1693293 100644 --- a/src/solarfm/shellfm/windows/tabs/icons/icon.py +++ b/src/solarfm/shellfm/windows/tabs/icons/icon.py @@ -138,6 +138,7 @@ class Icon(DesktopIconMixin, VideoIconMixin, MeshsIconMixin): def _call_gtk_thread(event, result): result.append( self.get_system_thumbnail(full_path, size) ) event.set() + return False result = [] event = threading.Event() -- 2.39.5 From 37e3265be54ba6ccb287614954cbb86921b0c639 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Sun, 31 Dec 2023 22:35:33 -0600 Subject: [PATCH 05/28] GLib idle add return effort 2 --- src/solarfm/core/mixins/signals/file_action_signals_mixin.py | 2 ++ src/solarfm/core/mixins/ui/tab_mixin.py | 1 + src/solarfm/core/widgets/files_view/tab_mixin.py | 4 +++- src/solarfm/plugins/plugins_controller.py | 4 +++- src/solarfm/utils/ipc_server.py | 4 +++- 5 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py index 775a54f..6501975 100644 --- a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py @@ -80,6 +80,8 @@ class FileActionSignalsMixin: if [wid, tid] in [state.wid, state.tid]: self.set_bottom_labels(tab) + return False + def do_file_search(self, widget, eve = None): if not self.ctrl_down and not self.shift_down and not self.alt_down: diff --git a/src/solarfm/core/mixins/ui/tab_mixin.py b/src/solarfm/core/mixins/ui/tab_mixin.py index ffd7f2b..36bea62 100644 --- a/src/solarfm/core/mixins/ui/tab_mixin.py +++ b/src/solarfm/core/mixins/ui/tab_mixin.py @@ -218,6 +218,7 @@ class TabMixin(GridMixin): def do_focused_click(self, button): button.grab_focus() button.clicked() + return False def set_path_entry(self, button = None, eve = None): self.path_auto_filled = True diff --git a/src/solarfm/core/widgets/files_view/tab_mixin.py b/src/solarfm/core/widgets/files_view/tab_mixin.py index 9ea81ba..13bab35 100644 --- a/src/solarfm/core/widgets/files_view/tab_mixin.py +++ b/src/solarfm/core/widgets/files_view/tab_mixin.py @@ -225,6 +225,8 @@ class TabMixin(GridMixin): button.grab_focus() button.clicked() + return False + def set_path_entry(self, button = None, eve = None): self.path_auto_filled = True state = self.get_current_state() @@ -241,4 +243,4 @@ class TabMixin(GridMixin): tab = self.get_fm_window(wid).get_tab_by_id(tid) tab.set_hiding_hidden(not tab.is_hiding_hidden()) tab.load_directory() - self.builder.get_object("refresh_tab").released() + self.builder.get_object("refresh_tab").released() \ No newline at end of file diff --git 
a/src/solarfm/plugins/plugins_controller.py b/src/solarfm/plugins/plugins_controller.py index 3ee9bd0..43a108e 100644 --- a/src/solarfm/plugins/plugins_controller.py +++ b/src/solarfm/plugins/plugins_controller.py @@ -119,6 +119,8 @@ class PluginsController: plugin.reference.run() self._plugin_collection.append(plugin) + return False + def reload_plugins(self, file: str = None) -> None: logger.info(f"Reloading plugins...") parent_path = os.getcwd() @@ -127,4 +129,4 @@ class PluginsController: os.chdir(plugin.path) plugin.reference.reload_package(f"{plugin.path}/plugin.py") - os.chdir(parent_path) + os.chdir(parent_path) \ No newline at end of file diff --git a/src/solarfm/utils/ipc_server.py b/src/solarfm/utils/ipc_server.py index 7ce42e1..bc5178b 100644 --- a/src/solarfm/utils/ipc_server.py +++ b/src/solarfm/utils/ipc_server.py @@ -89,6 +89,8 @@ class IPCServer(Singleton): conn.close() break + return False + def send_ipc_message(self, message: str = "Empty Data...") -> None: try: @@ -123,4 +125,4 @@ class IPCServer(Singleton): logger.error("IPC Socket no longer valid.... Removing.") os.unlink(self._ipc_address) except Exception as e: - logger.error( repr(e) ) + logger.error( repr(e) ) \ No newline at end of file -- 2.39.5 From 8e5ae4824c762e6e8f34a0369d1ba6e44d3d0973 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Wed, 3 Jan 2024 20:36:17 -0600 Subject: [PATCH 06/28] idle_add refactor for event source clearing; Gtk main call moved --- src/solarfm/__main__.py | 6 +----- src/solarfm/app.py | 2 +- .../mixins/signals/file_action_signals_mixin.py | 10 +++++++--- src/solarfm/core/mixins/ui/grid_mixin.py | 13 ++++++++++--- src/solarfm/core/widgets/files_view/grid_mixin.py | 13 ++++++++++--- src/solarfm/core/window.py | 8 +++++--- src/solarfm/utils/ipc_server.py | 5 +++-- 7 files changed, 37 insertions(+), 20 deletions(-) diff --git a/src/solarfm/__main__.py b/src/solarfm/__main__.py index 1da80a8..b798304 100644 --- a/src/solarfm/__main__.py +++ b/src/solarfm/__main__.py @@ -8,9 +8,6 @@ import traceback from setproctitle import setproctitle # Lib imports -import gi -gi.require_version('Gtk', '3.0') -from gi.repository import Gtk # Application imports from __builtins__ import * @@ -45,7 +42,6 @@ def run(): settings_manager.do_dirty_start_check() Application(args, unknownargs) - Gtk.main() except Exception as e: traceback.print_exc() quit() @@ -53,4 +49,4 @@ def run(): if __name__ == "__main__": """ Set process title, get arguments, and create GTK main thread. 
""" - run() + run() \ No newline at end of file diff --git a/src/solarfm/app.py b/src/solarfm/app.py index 471b8d9..b910ad6 100644 --- a/src/solarfm/app.py +++ b/src/solarfm/app.py @@ -34,7 +34,7 @@ class Application(IPCServer): raise AppLaunchException(f"{app_name} IPC Server Exists: Have sent path(s) to it and closing...") self.setup_debug_hook() - Window(args, unknownargs) + Window(args, unknownargs).main() def socket_realization_check(self): diff --git a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py index 6501975..0f61246 100644 --- a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py @@ -41,13 +41,15 @@ class FileActionSignalsMixin: Gio.FileMonitorEvent.MOVED_OUT]: if eve_type in [Gio.FileMonitorEvent.MOVED_IN, Gio.FileMonitorEvent.MOVED_OUT]: - self.update_on_soft_lock_end(data[0]) + # self.update_on_soft_lock_end(data[0]) + GLib.Thread("", self.soft_lock_countdown, data[0]) elif data[0] in self.soft_update_lock.keys(): self.soft_update_lock[data[0]]["last_update_time"] = time.time() else: - self.soft_lock_countdown(data[0]) + # self.soft_lock_countdown(data[0]) + GLib.Thread("", self.soft_lock_countdown, data[0]) - @daemon_threaded + # @daemon_threaded def soft_lock_countdown(self, tab_widget): self.soft_update_lock[tab_widget] = { "last_update_time": time.time()} @@ -61,6 +63,8 @@ class FileActionSignalsMixin: self.soft_update_lock.pop(tab_widget, None) GLib.idle_add(self.update_on_soft_lock_end, *(tab_widget,)) + thread = GLib.Thread.self() + thread.unref() def update_on_soft_lock_end(self, tab_widget): diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index be7d7a3..2c77129 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -57,25 +57,32 @@ class GridMixin: else: asyncio.run( self.create_icons(tab, store, dir, files) ) + thread = GLib.Thread.self() + thread.unref() + async def create_icons(self, tab, store, dir, files): icons = [self.get_icon(tab, dir, file[0]) for file in files] data = await asyncio.gather(*icons) tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] await asyncio.gather(*tasks) - GLib.idle_add(Gtk.main_iteration) + GLib.idle_add(self.do_ui_update) async def update_store(self, i, store, icon): itr = store.get_iter(i) GLib.idle_add(self.insert_store, store, itr, icon) + async def get_icon(self, tab, dir, file): + return tab.create_icon(dir, file) + def insert_store(self, store, itr, icon): store.set_value(itr, 0, icon) # Note: If the function returns GLib.SOURCE_REMOVE or False it is automatically removed from the list of event sources and will not be called again. 
return False - async def get_icon(self, tab, dir, file): - return tab.create_icon(dir, file) + def do_ui_update(self): + Gtk.main_iteration() + return False def create_tab_widget(self, tab): return TabHeaderWidget(tab, self.close_tab) diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index dd11204..88b663b 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -57,25 +57,32 @@ class GridMixin: else: asyncio.run( self.create_icons(tab, store, dir, files) ) + thread = GLib.Thread.self() + thread.unref() + async def create_icons(self, tab, store, dir, files): icons = [self.get_icon(tab, dir, file[0]) for file in files] data = await asyncio.gather(*icons) tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] await asyncio.gather(*tasks) - GLib.idle_add(Gtk.main_iteration) + GLib.idle_add(self.do_ui_update) async def update_store(self, i, store, icon): itr = store.get_iter(i) GLib.idle_add(self.insert_store, store, itr, icon) + async def get_icon(self, tab, dir, file): + return tab.create_icon(dir, file) + def insert_store(self, store, itr, icon): store.set_value(itr, 0, icon) # Note: If the function returns GLib.SOURCE_REMOVE or False it is automatically removed from the list of event sources and will not be called again. return False - async def get_icon(self, tab, dir, file): - return tab.create_icon(dir, file) + def do_ui_update(self): + Gtk.main_iteration() + return False def create_tab_widget(self, tab): return TabHeaderWidget(tab, self.close_tab) diff --git a/src/solarfm/core/window.py b/src/solarfm/core/window.py index 4bce515..ced3919 100644 --- a/src/solarfm/core/window.py +++ b/src/solarfm/core/window.py @@ -87,12 +87,14 @@ class Window(Gtk.ApplicationWindow): cr.set_operator(cairo.OPERATOR_SOURCE) cr.paint() cr.set_operator(cairo.OPERATOR_OVER) - + def _load_interactive_debug(self): self.set_interactive_debugging(True) - def _tear_down(self, widget = None, eve = None): event_system.emit("shutting_down") settings_manager.clear_pid() - Gtk.main_quit() \ No newline at end of file + Gtk.main_quit() + + def main(self): + Gtk.main() \ No newline at end of file diff --git a/src/solarfm/utils/ipc_server.py b/src/solarfm/utils/ipc_server.py index bc5178b..eda6dab 100644 --- a/src/solarfm/utils/ipc_server.py +++ b/src/solarfm/utils/ipc_server.py @@ -51,9 +51,10 @@ class IPCServer(Singleton): listener = Listener((self._ipc_address, self._ipc_port)) self.is_ipc_alive = True - self._run_ipc_loop(listener) + # self._run_ipc_loop(listener) + GLib.Thread("", self._run_ipc_loop, listener) - @daemon_threaded + # @daemon_threaded def _run_ipc_loop(self, listener) -> None: while True: try: -- 2.39.5 From be7be00f787285e3a7751a94589cfd5fd461b319 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Mon, 8 Jan 2024 21:11:10 -0600 Subject: [PATCH 07/28] refactoring pid logic; addedd window state preservation; slight thread rework --- src/solarfm/app.py | 32 ++++++++------ .../signals/file_action_signals_mixin.py | 11 ++--- src/solarfm/core/mixins/ui/grid_mixin.py | 8 ++-- src/solarfm/core/mixins/ui/window_mixin.py | 6 ++- .../core/widgets/files_view/grid_mixin.py | 8 ++-- .../core/widgets/files_view/window_mixin.py | 6 ++- src/solarfm/core/window.py | 31 +++++++++++-- src/solarfm/utils/settings_manager/manager.py | 7 +++ .../utils/settings_manager/options/config.py | 9 +++- .../settings_manager/start_check_mixin.py | 44 
++++++++++++------- .../usr/share/solarfm/contexct_menu.json | 9 ++-- user_config/usr/share/solarfm/settings.json | 9 +++- 12 files changed, 122 insertions(+), 58 deletions(-) diff --git a/src/solarfm/app.py b/src/solarfm/app.py index b910ad6..41330d8 100644 --- a/src/solarfm/app.py +++ b/src/solarfm/app.py @@ -16,35 +16,39 @@ class AppLaunchException(Exception): -class Application(IPCServer): +class Application: """ docstring for Application. """ def __init__(self, args, unknownargs): super(Application, self).__init__() if not settings_manager.is_trace_debug(): - self.socket_realization_check() - - if not self.is_ipc_alive: - for arg in unknownargs + [args.new_tab,]: - if os.path.isdir(arg): - message = f"FILE|{arg}" - self.send_ipc_message(message) - - raise AppLaunchException(f"{app_name} IPC Server Exists: Have sent path(s) to it and closing...") + self.load_ipc(args, unknownargs) self.setup_debug_hook() Window(args, unknownargs).main() - def socket_realization_check(self): + def load_ipc(self, args, unknownargs): + ipc_server = IPCServer() + self.ipc_realization_check(ipc_server) + + if not ipc_server.is_ipc_alive: + for arg in unknownargs + [args.new_tab,]: + if os.path.isfile(arg): + message = f"FILE|{arg}" + ipc_server.send_ipc_message(message) + + raise AppLaunchException(f"{app_name} IPC Server Exists: Have sent path(s) to it and closing...") + + def ipc_realization_check(self, ipc_server): try: - self.create_ipc_listener() + ipc_server.create_ipc_listener() except Exception: - self.send_test_ipc_message() + ipc_server.send_test_ipc_message() try: - self.create_ipc_listener() + ipc_server.create_ipc_listener() except Exception as e: ... diff --git a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py index 0f61246..99346a2 100644 --- a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py @@ -41,15 +41,13 @@ class FileActionSignalsMixin: Gio.FileMonitorEvent.MOVED_OUT]: if eve_type in [Gio.FileMonitorEvent.MOVED_IN, Gio.FileMonitorEvent.MOVED_OUT]: - # self.update_on_soft_lock_end(data[0]) - GLib.Thread("", self.soft_lock_countdown, data[0]) + self.update_on_soft_lock_end(data[0]) elif data[0] in self.soft_update_lock.keys(): self.soft_update_lock[data[0]]["last_update_time"] = time.time() else: - # self.soft_lock_countdown(data[0]) - GLib.Thread("", self.soft_lock_countdown, data[0]) + self.soft_lock_countdown(data[0]) - # @daemon_threaded + @daemon_threaded def soft_lock_countdown(self, tab_widget): self.soft_update_lock[tab_widget] = { "last_update_time": time.time()} @@ -63,9 +61,6 @@ class FileActionSignalsMixin: self.soft_update_lock.pop(tab_widget, None) GLib.idle_add(self.update_on_soft_lock_end, *(tab_widget,)) - thread = GLib.Thread.self() - thread.unref() - def update_on_soft_lock_end(self, tab_widget): wid, tid = tab_widget.split("|") diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index 2c77129..9808843 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -39,13 +39,15 @@ class GridMixin: store.append([None, file[0]]) Gtk.main_iteration() - GLib.Thread("", self.generate_icons, tab, store, dir, files) + self.generate_icons(tab, store, dir, files) + # GLib.Thread("", self.generate_icons, tab, store, dir, files) # NOTE: Not likely called often from here but it could be useful if save_state and not trace_debug: 
self.fm_controller.save_state() + @daemon_threaded def generate_icons(self, tab, store, dir, files): try: loop = asyncio.get_running_loop() @@ -57,9 +59,6 @@ class GridMixin: else: asyncio.run( self.create_icons(tab, store, dir, files) ) - thread = GLib.Thread.self() - thread.unref() - async def create_icons(self, tab, store, dir, files): icons = [self.get_icon(tab, dir, file[0]) for file in files] data = await asyncio.gather(*icons) @@ -77,6 +76,7 @@ class GridMixin: def insert_store(self, store, itr, icon): store.set_value(itr, 0, icon) + # Note: If the function returns GLib.SOURCE_REMOVE or False it is automatically removed from the list of event sources and will not be called again. return False diff --git a/src/solarfm/core/mixins/ui/window_mixin.py b/src/solarfm/core/mixins/ui/window_mixin.py index 3d1d577..2fc76db 100644 --- a/src/solarfm/core/mixins/ui/window_mixin.py +++ b/src/solarfm/core/mixins/ui/window_mixin.py @@ -177,6 +177,10 @@ class WindowMixin(TabMixin): if from_uri != dest: event_system.emit("move_files", (uris, dest)) + Gtk.drag_finish(drag_context, True, False, time) + return + + Gtk.drag_finish(drag_context, False, False, time) def create_new_tab_notebook(self, widget=None, wid=None, path=None): - self.create_tab(wid, None, path) + self.create_tab(wid, None, path) \ No newline at end of file diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index 88b663b..f45406a 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -39,13 +39,15 @@ class GridMixin: store.append([None, file[0]]) Gtk.main_iteration() - GLib.Thread("", self.generate_icons, tab, store, dir, files) + self.generate_icons(tab, store, dir, files) + # GLib.Thread("", self.generate_icons, tab, store, dir, files) # NOTE: Not likely called often from here but it could be useful if save_state and not trace_debug: self.fm_controller.save_state() + @daemon_threaded def generate_icons(self, tab, store, dir, files): try: loop = asyncio.get_running_loop() @@ -57,9 +59,6 @@ class GridMixin: else: asyncio.run( self.create_icons(tab, store, dir, files) ) - thread = GLib.Thread.self() - thread.unref() - async def create_icons(self, tab, store, dir, files): icons = [self.get_icon(tab, dir, file[0]) for file in files] data = await asyncio.gather(*icons) @@ -77,6 +76,7 @@ class GridMixin: def insert_store(self, store, itr, icon): store.set_value(itr, 0, icon) + # Note: If the function returns GLib.SOURCE_REMOVE or False it is automatically removed from the list of event sources and will not be called again. 
return False diff --git a/src/solarfm/core/widgets/files_view/window_mixin.py b/src/solarfm/core/widgets/files_view/window_mixin.py index 59a33dc..a8eb8f5 100644 --- a/src/solarfm/core/widgets/files_view/window_mixin.py +++ b/src/solarfm/core/widgets/files_view/window_mixin.py @@ -173,6 +173,10 @@ class WindowMixin(TabMixin): if from_uri != dest: event_system.emit("move_files", (uris, dest)) + Gtk.drag_finish(drag_context, True, False, time) + return + + Gtk.drag_finish(drag_context, False, False, time) def create_new_tab_notebook(self, widget=None, wid=None, path=None): - self.create_tab(wid, None, path) + self.create_tab(wid, None, path) \ No newline at end of file diff --git a/src/solarfm/core/window.py b/src/solarfm/core/window.py index ced3919..75fe20f 100644 --- a/src/solarfm/core/window.py +++ b/src/solarfm/core/window.py @@ -25,17 +25,17 @@ class Window(Gtk.ApplicationWindow): def __init__(self, args, unknownargs): super(Window, self).__init__() - - self._controller = None settings_manager.set_main_window(self) - self._set_window_data() + self._controller = None + self._setup_styling() self._setup_signals() self._subscribe_to_events() - self._load_widgets(args, unknownargs) + self._set_window_data() + self._set_size_constraints() self.show() @@ -66,6 +66,18 @@ class Window(Gtk.ApplicationWindow): self.add( self._controller.get_core_widget() ) + def _set_size_constraints(self): + _window_x = settings.config.main_window_x + _window_y = settings.config.main_window_y + _min_width = settings.config.main_window_min_width + _min_height = settings.config.main_window_min_height + _width = settings.config.main_window_width + _height = settings.config.main_window_height + + self.move(_window_x, _window_y - 28) + self.set_size_request(_min_width, _min_height) + self.set_default_size(_width, _height) + def _set_window_data(self) -> None: screen = self.get_screen() visual = screen.get_rgba_visual() @@ -91,8 +103,19 @@ class Window(Gtk.ApplicationWindow): def _load_interactive_debug(self): self.set_interactive_debugging(True) + def _tear_down(self, widget = None, eve = None): event_system.emit("shutting_down") + + size = self.get_size() + pos = self.get_position() + + settings_manager.set_main_window_width(size.width) + settings_manager.set_main_window_height(size.height) + settings_manager.set_main_window_x(pos.root_x) + settings_manager.set_main_window_y(pos.root_y) + settings_manager.save_settings() + settings_manager.clear_pid() Gtk.main_quit() diff --git a/src/solarfm/utils/settings_manager/manager.py b/src/solarfm/utils/settings_manager/manager.py index 4730741..0c431bb 100644 --- a/src/solarfm/utils/settings_manager/manager.py +++ b/src/solarfm/utils/settings_manager/manager.py @@ -146,6 +146,13 @@ class SettingsManager(StartCheckMixin, Singleton): def is_trace_debug(self) -> bool: return self._trace_debug def is_debug(self) -> bool: return self._debug + def set_main_window_x(self, x = 0): self.settings.config.main_window_x = x + def set_main_window_y(self, y = 0): self.settings.config.main_window_y = y + def set_main_window_width(self, width = 800): self.settings.config.main_window_width = width + def set_main_window_height(self, height = 600): self.settings.config.main_window_height = height + def set_main_window_min_width(self, width = 720): self.settings.config.main_window_min_width = width + def set_main_window_min_height(self, height = 480): self.settings.config.main_window_min_height = height + def set_trace_debug(self, trace_debug: bool): self._trace_debug = trace_debug diff --git 
a/src/solarfm/utils/settings_manager/options/config.py b/src/solarfm/utils/settings_manager/options/config.py
index 05c366f..f8719d6 100644
--- a/src/solarfm/utils/settings_manager/options/config.py
+++ b/src/solarfm/utils/settings_manager/options/config.py
@@ -30,7 +30,14 @@ class Config:
     sys_icon_wh: list = field(default_factory=lambda: [56, 56])
     steam_cdn_url: str = "https://steamcdn-a.akamaihd.net/steam/apps/"
     remux_folder_max_disk_usage: str = "8589934592"
+    make_transparent: int = 0
+    main_window_x: int = 721
+    main_window_y: int = 465
+    main_window_min_width: int = 720
+    main_window_min_height: int = 480
+    main_window_width: int = 800
+    main_window_height: int = 600
     application_dirs: list = field(default_factory=lambda: [
         "/usr/share/applications",
         f"{settings_manager.get_home_path()}/.local/share/applications"
-    ])
+    ])
\ No newline at end of file
diff --git a/src/solarfm/utils/settings_manager/start_check_mixin.py b/src/solarfm/utils/settings_manager/start_check_mixin.py
index 688da36..6fc8208 100644
--- a/src/solarfm/utils/settings_manager/start_check_mixin.py
+++ b/src/solarfm/utils/settings_manager/start_check_mixin.py
@@ -11,36 +11,48 @@ import inspect
 
 
 class StartCheckMixin:
-    def is_dirty_start(self) -> bool: return self._dirty_start
-    def clear_pid(self): self._clean_pid()
+    def is_dirty_start(self) -> bool:
+        return self._dirty_start
+
+    def clear_pid(self):
+        if not self.is_trace_debug():
+            self._clean_pid()
 
     def do_dirty_start_check(self):
-        if not os.path.exists(self._PID_FILE):
-            self._write_new_pid()
-        else:
-            with open(self._PID_FILE, "r") as _pid:
-                pid = _pid.readline().strip()
+        if self.is_trace_debug():
+            pid = os.getpid()
+            self._print_pid(pid)
+            return
+
+        if os.path.exists(self._PID_FILE):
+            with open(self._PID_FILE, "r") as f:
+                pid = f.readline().strip()
                 if pid not in ("", None):
-                    self._check_alive_status(int(pid))
-                else:
-                    self._write_new_pid()
+                    if self.is_pid_alive( int(pid) ):
+                        print("PID file exists and PID is alive... Letting downstream errors (sans debug args) handle app closure propagation.")
+                        return
+
+        self._write_new_pid()
 
     """ Check For the existence of a unix pid. """
-    def _check_alive_status(self, pid):
+    def is_pid_alive(self, pid):
         print(f"PID Found: {pid}")
+
         try:
             os.kill(pid, 0)
         except OSError:
-            print(f"{app_name} is starting dirty...")
+            print(f"{app_name} PID file exists but PID is irrelevant; starting dirty...")
             self._dirty_start = True
-            self._write_new_pid()
-            return
+            return False
 
-        print("PID is alive... 
Let downstream errors (sans debug args) handle app closure propigation.") + return True def _write_new_pid(self): pid = os.getpid() self._write_pid(pid) + self._print_pid(pid) + + def _print_pid(self, pid): print(f"{app_name} PID: {pid}") def _clean_pid(self): @@ -48,4 +60,4 @@ class StartCheckMixin: def _write_pid(self, pid): with open(self._PID_FILE, "w") as _pid: - _pid.write(f"{pid}") + _pid.write(f"{pid}") \ No newline at end of file diff --git a/user_config/usr/share/solarfm/contexct_menu.json b/user_config/usr/share/solarfm/contexct_menu.json index c17be4c..786c9dd 100644 --- a/user_config/usr/share/solarfm/contexct_menu.json +++ b/user_config/usr/share/solarfm/contexct_menu.json @@ -1,8 +1,9 @@ { "Open Actions": { - "Open": ["STOCK_OPEN", "open"], - "Open With": ["STOCK_OPEN", "open_with"], - "Execute": ["STOCK_EXECUTE", "execute"], + "Open": ["STOCK_OPEN", "open"], + "Open With": ["STOCK_OPEN", "open_with"], + "Open 2 Tab": ["STOCK_OPEN", "open_2_new_tab"], + "Execute": ["STOCK_EXECUTE", "execute"], "Execute in Terminal": ["STOCK_EXECUTE", "execute_in_terminal"] }, "File Actions": { @@ -16,4 +17,4 @@ "Paste": ["STOCK_PASTE", "paste"] }, "Plugins": {} -} +} \ No newline at end of file diff --git a/user_config/usr/share/solarfm/settings.json b/user_config/usr/share/solarfm/settings.json index a67c142..8627762 100644 --- a/user_config/usr/share/solarfm/settings.json +++ b/user_config/usr/share/solarfm/settings.json @@ -21,7 +21,14 @@ "sys_icon_wh": [56, 56], "file_manager_app": "solarfm", "steam_cdn_url": "https://steamcdn-a.akamaihd.net/steam/apps/", - "remux_folder_max_disk_usage": "8589934592" + "remux_folder_max_disk_usage": "8589934592", + "make_transparent":0, + "main_window_x":721, + "main_window_y":465, + "main_window_min_width":720, + "main_window_min_height":480, + "main_window_width":800, + "main_window_height":600, }, "filters": { "meshs": [".dae", ".fbx", ".gltf", ".obj", ".stl"], -- 2.39.5 From 44ef6ea2bb39f085d86fa459fae3c15ba6323e06 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Mon, 29 Jan 2024 22:53:51 -0600 Subject: [PATCH 08/28] Reworking some tab logic to omit adding a label widget --- src/solarfm/__main__.py | 2 +- src/solarfm/core/mixins/ui/grid_mixin.py | 5 ++-- src/solarfm/core/mixins/ui/tab_mixin.py | 16 ++++++++++--- .../core/widgets/files_view/grid_mixin.py | 4 ++-- .../core/widgets/files_view/tab_mixin.py | 17 +++++++++---- .../widgets/popups/message_popup_widget.py | 4 ++-- src/solarfm/core/widgets/tab_header_widget.py | 24 +++++++------------ 7 files changed, 42 insertions(+), 30 deletions(-) diff --git a/src/solarfm/__main__.py b/src/solarfm/__main__.py index b798304..3cb7b48 100644 --- a/src/solarfm/__main__.py +++ b/src/solarfm/__main__.py @@ -20,7 +20,7 @@ def run(): locale.setlocale(locale.LC_NUMERIC, 'C') setproctitle(f"{app_name}") - faulthandler.enable() # For better debug info + # faulthandler.enable() # For better debug info parser = argparse.ArgumentParser() # Add long and short arguments diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index 9808843..2cbcbdc 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -34,6 +34,7 @@ class GridMixin: dir = tab.get_current_directory() files = tab.get_files() + store.clear() for file in files: store.append([None, file[0]]) @@ -84,8 +85,8 @@ class GridMixin: Gtk.main_iteration() return False - def create_tab_widget(self, tab): - return TabHeaderWidget(tab, self.close_tab) + def 
create_tab_widget(self): + return TabHeaderWidget(self.close_tab) def create_scroll_and_store(self, tab, wid, use_tree_view = False): scroll = Gtk.ScrolledWindow() diff --git a/src/solarfm/core/mixins/ui/tab_mixin.py b/src/solarfm/core/mixins/ui/tab_mixin.py index 36bea62..99827f5 100644 --- a/src/solarfm/core/mixins/ui/tab_mixin.py +++ b/src/solarfm/core/mixins/ui/tab_mixin.py @@ -34,7 +34,7 @@ class TabMixin(GridMixin): else: tab.set_path(path) - tab_widget = self.create_tab_widget(tab) + tab_widget = self.get_tab_widget(tab) scroll, store = self.create_scroll_and_store(tab, wid) index = notebook.append_page(scroll, tab_widget) notebook.set_tab_detachable(scroll, True) @@ -51,6 +51,14 @@ class TabMixin(GridMixin): self.set_window_title() self.set_file_watcher(tab) + def get_tab_widget(self, tab): + tab_widget = self.create_tab_widget() + tab_widget.tab = tab + + tab_widget.label.set_label(f"{tab.get_end_of_path()}") + tab_widget.label.set_width_chars(len(tab.get_end_of_path())) + + return tab_widget def close_tab(self, button, eve = None): notebook = button.get_parent().get_parent() @@ -84,12 +92,13 @@ class TabMixin(GridMixin): del watcher del tab - gc.collect() if not settings_manager.is_trace_debug(): self.fm_controller.save_state() self.set_window_title() + gc.collect() + # NOTE: Not actually getting called even tho set in the glade file... def on_tab_dnded(self, notebook, page, x, y): ... @@ -114,12 +123,13 @@ class TabMixin(GridMixin): def on_tab_switch_update(self, notebook, content = None, index = None): self.selected_files.clear() wid, tid = content.get_children()[0].get_name().split("|") + self.fm_controller.set_wid_and_tid(wid, tid) self.set_path_text(wid, tid) self.set_window_title() def get_id_from_tab_box(self, tab_box): - return tab_box.get_children()[2].get_text() + return tab_box.tab.get_id() def get_tab_label(self, notebook, icon_grid): return notebook.get_tab_label(icon_grid.get_parent()).get_children()[0] diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index f45406a..2071fac 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -84,8 +84,8 @@ class GridMixin: Gtk.main_iteration() return False - def create_tab_widget(self, tab): - return TabHeaderWidget(tab, self.close_tab) + def create_tab_widget(self): + return TabHeaderWidget(self.close_tab) def create_scroll_and_store(self, tab, wid, use_tree_view = False): scroll = Gtk.ScrolledWindow() diff --git a/src/solarfm/core/widgets/files_view/tab_mixin.py b/src/solarfm/core/widgets/files_view/tab_mixin.py index 13bab35..f87eeb5 100644 --- a/src/solarfm/core/widgets/files_view/tab_mixin.py +++ b/src/solarfm/core/widgets/files_view/tab_mixin.py @@ -34,7 +34,7 @@ class TabMixin(GridMixin): else: tab.set_path(path) - tab_widget = self.create_tab_widget(tab) + tab_widget = self.get_tab_widget(tab) scroll, store = self.create_scroll_and_store(tab, wid) index = notebook.append_page(scroll, tab_widget) notebook.set_tab_detachable(scroll, True) @@ -53,6 +53,14 @@ class TabMixin(GridMixin): event_system.emit("set_window_title", (tab.get_current_directory(),)) self.set_file_watcher(tab) + def get_tab_widget(self, tab): + tab_widget = self.create_tab_widget() + tab_widget.tab_id = tab.get_id() + + tab_widget.label.set_label(f"{tab.get_end_of_path()}") + tab_widget.label.set_width_chars(len(tab.get_end_of_path())) + + return tab_widget def close_tab(self, button, eve = None): notebook = 
button.get_parent().get_parent() @@ -90,12 +98,13 @@ class TabMixin(GridMixin): del watcher del tab - gc.collect() if not settings_manager.is_trace_debug(): self.fm_controller.save_state() self.set_window_title() + gc.collect() + # NOTE: Not actually getting called even tho set in the glade file... def on_tab_dnded(self, notebook, page, x, y): ... @@ -119,13 +128,13 @@ class TabMixin(GridMixin): def on_tab_switch_update(self, notebook, content = None, index = None): self.selected_files.clear() - wid, tid = content.get_children()[0].get_name().split("|") + wid, tid = content.get_children()[0].tab.get_name().split("|") self.fm_controller.set_wid_and_tid(wid, tid) self.set_path_text(wid, tid) self.set_window_title() def get_id_from_tab_box(self, tab_box): - return tab_box.get_children()[2].get_text() + return tab_box.tab.get_id() def get_tab_label(self, notebook, icon_grid): return notebook.get_tab_label(icon_grid.get_parent()).get_children()[0] diff --git a/src/solarfm/core/widgets/popups/message_popup_widget.py b/src/solarfm/core/widgets/popups/message_popup_widget.py index 75b0b5c..f3517a4 100644 --- a/src/solarfm/core/widgets/popups/message_popup_widget.py +++ b/src/solarfm/core/widgets/popups/message_popup_widget.py @@ -103,7 +103,7 @@ class MessagePopupWidget(Gtk.Popover): self.popup() self.hide_message_timeout(seconds) - @threaded + @daemon_threaded def hide_message_timeout(self, seconds=3): time.sleep(seconds) GLib.idle_add(event_system.emit, ("hide_messages_popup")) @@ -126,4 +126,4 @@ class MessagePopupWidget(Gtk.Popover): with open(target, "w") as f: f.write(text) - save_location_prompt.destroy() + save_location_prompt.destroy() \ No newline at end of file diff --git a/src/solarfm/core/widgets/tab_header_widget.py b/src/solarfm/core/widgets/tab_header_widget.py index df07706..729db6f 100644 --- a/src/solarfm/core/widgets/tab_header_widget.py +++ b/src/solarfm/core/widgets/tab_header_widget.py @@ -9,14 +9,12 @@ from gi.repository import Gtk - class TabHeaderWidget(Gtk.Box): """docstring for TabHeaderWidget""" - def __init__(self, tab, close_tab): + def __init__(self, close_tab): super(TabHeaderWidget, self).__init__() - self._tab = tab self._close_tab = close_tab # NOTE: Close method in tab_mixin self._setup_styling() @@ -32,25 +30,19 @@ class TabHeaderWidget(Gtk.Box): ... 
def _load_widgets(self): - label = Gtk.Label() - tid = Gtk.Label() + self.label = Gtk.Label() close = Gtk.Button() icon = Gtk.Image(stock=Gtk.STOCK_CLOSE) - label.set_label(f"{self._tab.get_end_of_path()}") - label.set_width_chars(len(self._tab.get_end_of_path())) - label.set_xalign(0.0) - label.set_margin_left(25) - label.set_margin_right(25) - label.set_hexpand(True) - tid.set_label(f"{self._tab.get_id()}") + self.label.set_xalign(0.0) + self.label.set_margin_left(25) + self.label.set_margin_right(25) + self.label.set_hexpand(True) close.connect("released", self._close_tab) close.add(icon) - self.add(label) + self.add(self.label) self.add(close) - self.add(tid) - self.show_all() - tid.hide() + self.show_all() \ No newline at end of file -- 2.39.5 From a47bd23e78aebb97ec3e5a2f078bd1147c0a878e Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Thu, 8 Feb 2024 21:24:01 -0600 Subject: [PATCH 09/28] made main method --- src/solarfm/__main__.py | 60 ++++++++++--------- .../shellfm/windows/tabs/utils/launcher.py | 2 +- 2 files changed, 32 insertions(+), 30 deletions(-) diff --git a/src/solarfm/__main__.py b/src/solarfm/__main__.py index 3cb7b48..f5121f8 100644 --- a/src/solarfm/__main__.py +++ b/src/solarfm/__main__.py @@ -3,10 +3,12 @@ # Python imports import argparse import faulthandler -import locale import traceback from setproctitle import setproctitle +import tracemalloc +tracemalloc.start() + # Lib imports # Application imports @@ -15,38 +17,38 @@ from app import Application -def run(): - try: - locale.setlocale(locale.LC_NUMERIC, 'C') +def main(args, unknownargs): + setproctitle(f'{app_name}') - setproctitle(f"{app_name}") - # faulthandler.enable() # For better debug info + if args.debug == "true": + settings_manager.set_debug(True) - parser = argparse.ArgumentParser() - # Add long and short arguments - parser.add_argument("--debug", "-d", default="false", help="Do extra console messaging.") - parser.add_argument("--trace-debug", "-td", default="false", help="Disable saves, ignore IPC lock, do extra console messaging.") - parser.add_argument("--no-plugins", "-np", default="false", help="Do not load plugins.") + if args.trace_debug == "true": + settings_manager.set_trace_debug(True) - parser.add_argument("--new-tab", "-t", default="", help="Open a file into new tab.") - parser.add_argument("--new-window", "-w", default="", help="Open a file into a new window.") + settings_manager.do_dirty_start_check() + Application(args, unknownargs) - # Read arguments (If any...) - args, unknownargs = parser.parse_known_args() - - if args.debug == "true": - settings_manager.set_debug(True) - - if args.trace_debug == "true": - settings_manager.set_trace_debug(True) - - settings_manager.do_dirty_start_check() - Application(args, unknownargs) - except Exception as e: - traceback.print_exc() - quit() if __name__ == "__main__": - """ Set process title, get arguments, and create GTK main thread. """ - run() \ No newline at end of file + ''' Set process title, get arguments, and create GTK main thread. 
''' + + parser = argparse.ArgumentParser() + # Add long and short arguments + parser.add_argument("--debug", "-d", default="false", help="Do extra console messaging.") + parser.add_argument("--trace-debug", "-td", default="false", help="Disable saves, ignore IPC lock, do extra console messaging.") + parser.add_argument("--no-plugins", "-np", default="false", help="Do not load plugins.") + + parser.add_argument("--new-tab", "-nt", default="false", help="Opens a 'New Tab' if a handler is set for it.") + parser.add_argument("--file", "-f", default="default", help="JUST SOME FILE ARG.") + + # Read arguments (If any...) + args, unknownargs = parser.parse_known_args() + + try: + faulthandler.enable() # For better debug info + main(args, unknownargs) + except Exception as e: + traceback.print_exc() + quit() \ No newline at end of file diff --git a/src/solarfm/shellfm/windows/tabs/utils/launcher.py b/src/solarfm/shellfm/windows/tabs/utils/launcher.py index 8b6fd86..a7febc3 100644 --- a/src/solarfm/shellfm/windows/tabs/utils/launcher.py +++ b/src/solarfm/shellfm/windows/tabs/utils/launcher.py @@ -113,4 +113,4 @@ class Launcher: if not os.path.islink(fp): # Skip if it is symbolic link total_size += os.path.getsize(fp) - return total_size + return total_size \ No newline at end of file -- 2.39.5 From fec0d26ab7b12d8af4f53ab5118a8bee10ec8d3e Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Mon, 12 Feb 2024 19:58:23 -0600 Subject: [PATCH 10/28] Improved selection bounds on rename --- src/solarfm/core/widgets/dialogs/rename_widget.py | 5 ++++- src/solarfm/core/widgets/files_view/tab_mixin.py | 4 ---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/solarfm/core/widgets/dialogs/rename_widget.py b/src/solarfm/core/widgets/dialogs/rename_widget.py index 43b3d50..2e3ae1a 100644 --- a/src/solarfm/core/widgets/dialogs/rename_widget.py +++ b/src/solarfm/core/widgets/dialogs/rename_widget.py @@ -50,6 +50,9 @@ class RenameWidget: def show_rename_file_menu(self, widget=None, eve=None): if widget: widget.grab_focus() + end_i = widget.get_text().rfind(".") + if end_i > 0: + widget.select_region(0, end_i) response = self._rename_file_menu.run() if response == Gtk.ResponseType.CLOSE: @@ -78,4 +81,4 @@ class RenameWidget: def hide_rename_file_menu_enter_key(self, widget=None, eve=None): keyname = Gdk.keyval_name(eve.keyval).lower() if keyname in ["return", "enter"]: - self._rename_file_menu.hide() + self._rename_file_menu.hide() \ No newline at end of file diff --git a/src/solarfm/core/widgets/files_view/tab_mixin.py b/src/solarfm/core/widgets/files_view/tab_mixin.py index f87eeb5..acdb7e2 100644 --- a/src/solarfm/core/widgets/files_view/tab_mixin.py +++ b/src/solarfm/core/widgets/files_view/tab_mixin.py @@ -83,13 +83,9 @@ class TabMixin(GridMixin): self.builder.dereference_object(f"{wid}|{tid}") store.clear() - # store.run_dispose() icon_grid.destroy() - # icon_grid.run_dispose() scroll.destroy() - #scroll.run_dispose() tab_box.destroy() - #tab_box.run_dispose() del store del icon_grid -- 2.39.5 From d65ea8dec886dc1f6fae7525d9a070bf884c697a Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Mon, 11 Mar 2024 20:03:13 -0500 Subject: [PATCH 11/28] Scaling system icons as some do not match expected scale. 
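
The system icon lookup can hand back a pixbuf at whatever size the theme ships, so grid icons end up inconsistent; loading the resolved file at the requested size keeps them uniform. A minimal sketch of the idea, assuming the themed icon name has already been resolved (the helper below is illustrative, not part of the codebase):

    import gi
    gi.require_version('Gtk', '3.0')
    from gi.repository import Gtk, GdkPixbuf

    def load_scaled_icon(icon_name: str, size: int) -> GdkPixbuf.Pixbuf:
        # Resolve the icon through the active theme, then force-load the
        # backing file at the requested size; some themes ship files far
        # larger than the nominal lookup size.
        info = Gtk.IconTheme.get_default().lookup_icon(icon_name, size, 0)
        if info is None:
            raise FileNotFoundError(f"No themed icon found for '{icon_name}'")
        return GdkPixbuf.Pixbuf.new_from_file_at_size(
            info.get_filename(), width = size, height = size
        )
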
--- .../mixins/signals/keyboard_signals_mixin.py | 70 ++++++++++--------- src/solarfm/core/mixins/ui/tab_mixin.py | 11 +-- .../core/widgets/files_view/tab_mixin.py | 11 +-- src/solarfm/core/widgets/icon_tree_widget.py | 2 +- .../shellfm/windows/tabs/icons/icon.py | 8 +-- 5 files changed, 53 insertions(+), 49 deletions(-) diff --git a/src/solarfm/core/mixins/signals/keyboard_signals_mixin.py b/src/solarfm/core/mixins/signals/keyboard_signals_mixin.py index ac1dbf2..03446d0 100644 --- a/src/solarfm/core/mixins/signals/keyboard_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/keyboard_signals_mixin.py @@ -11,8 +11,8 @@ from gi.repository import Gdk # Application imports -valid_keyvalue_pat = re.compile(r"[a-z0-9A-Z-_\[\]\(\)\| ]") +valid_keyvalue_pat = re.compile(r"[a-z0-9A-Z-_\[\]\(\)\| ]") @@ -20,12 +20,12 @@ class KeyboardSignalsMixin: """ KeyboardSignalsMixin keyboard hooks controller. """ # TODO: Need to set methods that use this to somehow check the keybindings state instead. - def unset_keys_and_data(self, widget=None, eve=None): + def unset_keys_and_data(self, widget = None, eve = None): self.ctrl_down = False self.shift_down = False self.alt_down = False - def unmap_special_key(self, keyname): + def unmap_special_keys(self, keyname): if "control" in keyname: self.ctrl_down = False if "shift" in keyname: @@ -35,6 +35,10 @@ class KeyboardSignalsMixin: def on_global_key_press_controller(self, eve, user_data): keyname = Gdk.keyval_name(user_data.keyval).lower() + modifiers = Gdk.ModifierType(user_data.get_state() & ~Gdk.ModifierType.LOCK_MASK) + + self.was_midified_key = True if modifiers != 0 else False + if keyname.replace("_l", "").replace("_r", "") in ["control", "alt", "shift"]: if "control" in keyname: self.ctrl_down = True @@ -43,25 +47,34 @@ class KeyboardSignalsMixin: if "alt" in keyname: self.alt_down = True - def on_global_key_release_controller(self, widget, event): - """Handler for keyboard events""" - keyname = Gdk.keyval_name(event.keyval).lower() + """ Handler for keyboard events """ + keyname = Gdk.keyval_name(event.keyval).lower() + modifiers = Gdk.ModifierType(event.get_state() & ~Gdk.ModifierType.LOCK_MASK) + if keyname.replace("_l", "").replace("_r", "") in ["control", "alt", "shift"]: - self.unmap_special_key(keyname) + should_return = self.was_midified_key and (self.ctrl_down or self.shift_down or self.alt_down) + self.unmap_special_keys(keyname) + + if should_return: + self.was_midified_key = False + return mapping = keybindings.lookup(event) - if mapping: - try: - self.handle_as_controller_scope(mapping) - except Exception: - self.handle_as_plugin_scope(mapping) - else: - logger.debug(f"on_global_key_release_controller > key > {keyname}") + logger.debug(f"on_global_key_release_controller > key > {keyname}") + logger.debug(f"on_global_key_release_controller > keyval > {event.keyval}") + logger.debug(f"on_global_key_release_controller > mapping > {mapping}") - if self.ctrl_down: - if keyname in ["1", "kp_1", "2", "kp_2", "3", "kp_3", "4", "kp_4"]: - self.builder.get_object(f"tggl_notebook_{keyname.strip('kp_')}").released() + if mapping: + self.handle_mapped_key_event(mapping) + else: + self.handle_as_key_event_scope(keyname) + + def handle_mapped_key_event(self, mapping): + try: + self.handle_as_controller_scope(mapping) + except Exception: + self.handle_as_plugin_scope(mapping) def handle_as_controller_scope(self, mapping): getattr(self, mapping)() @@ -73,22 +86,11 @@ class KeyboardSignalsMixin: sender = "" eve_type = mapping - 
self.handle_as_key_event_system(sender, eve_type) + self.handle_key_event_system(sender, eve_type) - def handle_as_key_event_system(self, sender, eve_type): - event_system.emit(eve_type) + def handle_as_key_event_scope(self, keyname): + if self.ctrl_down and not keyname in ["1", "kp_1", "2", "kp_2", "3", "kp_3", "4", "kp_4"]: + self.handle_key_event_system(None, keyname) - def keyboard_close_tab(self): - wid, tid = self.fm_controller.get_active_wid_and_tid() - notebook = self.builder.get_object(f"window_{wid}") - scroll = self.builder.get_object(f"{wid}|{tid}", use_gtk = False) - page = notebook.page_num(scroll) - tab = self.get_fm_window(wid).get_tab_by_id(tid) - watcher = tab.get_dir_watcher() - watcher.cancel() - - self.get_fm_window(wid).delete_tab_by_id(tid) - notebook.remove_page(page) - if not settings_manager.is_trace_debug(): - self.fm_controller.save_state() - self.set_window_title() \ No newline at end of file + def handle_key_event_system(self, sender, eve_type): + event_system.emit(eve_type) \ No newline at end of file diff --git a/src/solarfm/core/mixins/ui/tab_mixin.py b/src/solarfm/core/mixins/ui/tab_mixin.py index 99827f5..656aafe 100644 --- a/src/solarfm/core/mixins/ui/tab_mixin.py +++ b/src/solarfm/core/mixins/ui/tab_mixin.py @@ -38,6 +38,7 @@ class TabMixin(GridMixin): scroll, store = self.create_scroll_and_store(tab, wid) index = notebook.append_page(scroll, tab_widget) notebook.set_tab_detachable(scroll, True) + notebook.set_tab_reorderable(scroll, True) self.fm_controller.set_wid_and_tid(wid, tab.get_id()) path_entry.set_text(tab.get_current_directory()) @@ -46,7 +47,6 @@ class TabMixin(GridMixin): ctx = notebook.get_style_context() ctx.add_class("notebook-unselected-focus") - notebook.set_tab_reorderable(scroll, True) self.load_store(tab, store) self.set_window_title() self.set_file_watcher(tab) @@ -79,11 +79,12 @@ class TabMixin(GridMixin): self.builder.dereference_object(f"{wid}|{tid}|icon_grid") self.builder.dereference_object(f"{wid}|{tid}") + icon_grid.set_model(None) - store.clear() - icon_grid.destroy() - scroll.destroy() - tab_box.destroy() + store.run_dispose() + icon_grid.run_dispose() + scroll.run_dispose() + tab_box.run_dispose() del store del icon_grid diff --git a/src/solarfm/core/widgets/files_view/tab_mixin.py b/src/solarfm/core/widgets/files_view/tab_mixin.py index acdb7e2..4bb474c 100644 --- a/src/solarfm/core/widgets/files_view/tab_mixin.py +++ b/src/solarfm/core/widgets/files_view/tab_mixin.py @@ -38,6 +38,7 @@ class TabMixin(GridMixin): scroll, store = self.create_scroll_and_store(tab, wid) index = notebook.append_page(scroll, tab_widget) notebook.set_tab_detachable(scroll, True) + notebook.set_tab_reorderable(scroll, True) self.fm_controller.set_wid_and_tid(wid, tab.get_id()) event_system.emit("go_to_path", (tab.get_current_directory(),)) # NOTE: Not efficent if I understand how @@ -47,7 +48,6 @@ class TabMixin(GridMixin): ctx = notebook.get_style_context() ctx.add_class("notebook-unselected-focus") - notebook.set_tab_reorderable(scroll, True) self.load_store(tab, store) # self.set_window_title() event_system.emit("set_window_title", (tab.get_current_directory(),)) @@ -81,11 +81,12 @@ class TabMixin(GridMixin): self.builder.dereference_object(f"{wid}|{tid}|icon_grid") self.builder.dereference_object(f"{wid}|{tid}") + icon_grid.set_model(None) - store.clear() - icon_grid.destroy() - scroll.destroy() - tab_box.destroy() + store.run_dispose() + icon_grid.run_dispose() + scroll.run_dispose() + tab_box.run_dispose() del store del icon_grid diff 
--git a/src/solarfm/core/widgets/icon_tree_widget.py b/src/solarfm/core/widgets/icon_tree_widget.py index 6f935e6..8bad446 100644 --- a/src/solarfm/core/widgets/icon_tree_widget.py +++ b/src/solarfm/core/widgets/icon_tree_widget.py @@ -59,7 +59,7 @@ class IconTreeWidget(Gtk.TreeView): name = Gtk.CellRendererText() selec = self.get_selection() - self.set_model(store) + self.set_model(self._store) selec.set_mode(3) column.pack_start(icon, False) diff --git a/src/solarfm/shellfm/windows/tabs/icons/icon.py b/src/solarfm/shellfm/windows/tabs/icons/icon.py index 1693293..4cc0781 100644 --- a/src/solarfm/shellfm/windows/tabs/icons/icon.py +++ b/src/solarfm/shellfm/windows/tabs/icons/icon.py @@ -50,8 +50,8 @@ class Icon(DesktopIconMixin, VideoIconMixin, MeshsIconMixin): if not thumbnl: # TODO: Detect if not in a thread and use directly for speed get_system_thumbnail - # thumbnl = self.get_system_thumbnail(full_path, self.sys_icon_wh[0]) - thumbnl = self._get_system_thumbnail_gtk_thread(full_path, self.sys_icon_wh[0]) + thumbnl = self.get_system_thumbnail(full_path, self.sys_icon_wh[0]) + # thumbnl = self._get_system_thumbnail_gtk_thread(full_path, self.sys_icon_wh[0]) if not thumbnl: raise IconException("No known icons found.") @@ -152,11 +152,11 @@ class Icon(DesktopIconMixin, VideoIconMixin, MeshsIconMixin): gio_file = Gio.File.new_for_path(full_path) info = gio_file.query_info('standard::icon' , 0, None) icon = info.get_icon().get_names()[0] - data = settings_manager.get_icon_theme().lookup_icon(icon , size , 0) + data = settings_manager.get_icon_theme().lookup_icon(icon , size, 0) if data: icon_path = data.get_filename() - return GdkPixbuf.Pixbuf.new_from_file(icon_path) + return GdkPixbuf.Pixbuf.new_from_file_at_size(icon_path, width = size, height = size) raise IconException("No system icon found...") except IconException: -- 2.39.5 From 02c31719d1fa3877164a09ca6ea3327de748c55d Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Mon, 11 Mar 2024 22:28:42 -0500 Subject: [PATCH 12/28] Fixing translate plugin; attempted dispose call --- plugins/translate/plugin.py | 6 +++--- src/solarfm/core/widgets/icon_grid_widget.py | 6 +++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/plugins/translate/plugin.py b/plugins/translate/plugin.py index aca4062..d55d0d5 100644 --- a/plugins/translate/plugin.py +++ b/plugins/translate/plugin.py @@ -184,11 +184,11 @@ class Plugin(PluginBase): response = requests.post(self.vqd_link, headers=self.vqd_headers, data=self.vqd_data, timeout=2) if response.status_code == 200: data = response.content - vqd_start_index = data.index(b"vqd='") + 5 - vqd_end_index = data.index(b"'", vqd_start_index) + vqd_start_index = data.index(b"vqd=\"") + 5 + vqd_end_index = data.index(b"\"", vqd_start_index) self._vqd_attrib = data[vqd_start_index:vqd_end_index].decode("utf-8") print(f"Translation VQD: {self._vqd_attrib}") else: msg = f"Could not get VQS attribute... 
Response Code: {response.status_code}" - self._translate_to_buffer.set_text(msg) + self._translate_to_buffer.set_text(msg) \ No newline at end of file diff --git a/src/solarfm/core/widgets/icon_grid_widget.py b/src/solarfm/core/widgets/icon_grid_widget.py index 2d25921..2326912 100644 --- a/src/solarfm/core/widgets/icon_grid_widget.py +++ b/src/solarfm/core/widgets/icon_grid_widget.py @@ -75,7 +75,11 @@ class IconGridWidget(Gtk.IconView): return self.get_model() def clear_and_set_new_store(self): + store = self.get_model() + if store: + store.run_dispose() + self.set_model(None) store = Gtk.ListStore(GdkPixbuf.Pixbuf or GdkPixbuf.PixbufAnimation or None, str or None) # store = Gtk.ListStore(Gtk.DirectoryList) - self.set_model(store) \ No newline at end of file + self.set_model(store) -- 2.39.5 From a362039e7320f057d8288486c1e817a4f0195030 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Mon, 25 Mar 2024 22:49:31 -0500 Subject: [PATCH 13/28] Fixed depricated exception class usage; fixed usertype interupt --- src/solarfm/app.py | 2 +- src/solarfm/utils/debugging.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/solarfm/app.py b/src/solarfm/app.py index 41330d8..417b2cc 100644 --- a/src/solarfm/app.py +++ b/src/solarfm/app.py @@ -56,7 +56,7 @@ class Application: try: # kill -SIGUSR2 from Linux/Unix or SIGBREAK signal from Windows signal.signal( - vars(signal).get("SIGBREAK") or vars(signal).get("SIGUSR1"), + vars(signal).get("SIGBREAK") or vars(signal).get("SIGUSR2"), debug_signal_handler ) except ValueError: diff --git a/src/solarfm/utils/debugging.py b/src/solarfm/utils/debugging.py index b84193a..70cdf82 100644 --- a/src/solarfm/utils/debugging.py +++ b/src/solarfm/utils/debugging.py @@ -18,7 +18,7 @@ def debug_signal_handler(signal, frame): rpdb2.start_embedded_debugger("foobar", True, True) rpdb2.setbreak(depth=1) return - except StandardError: + except Exception: ... try: @@ -26,7 +26,7 @@ def debug_signal_handler(signal, frame): logger.debug("\n\nStarting embedded rconsole debugger...\n\n") rconsole.spawn_server() return - except StandardError as ex: + except Exception as ex: ... try: @@ -34,7 +34,7 @@ def debug_signal_handler(signal, frame): logger.debug("\n\nStarting PuDB debugger...\n\n") set_trace(paused = True) return - except StandardError as ex: + except Exception as ex: ... try: @@ -42,11 +42,11 @@ def debug_signal_handler(signal, frame): logger.debug("\n\nStarting embedded PDB debugger...\n\n") pdb.Pdb(skip=['gi.*']).set_trace() return - except StandardError as ex: + except Exception as ex: ... try: import code code.interact() - except StandardError as ex: - logger.debug(f"{ex}, returning to normal program flow...") + except Exception as ex: + logger.debug(f"{ex}, returning to normal program flow...") \ No newline at end of file -- 2.39.5 From 2f954f4c79381b0949bf6ad389ac37abb75fc623 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Wed, 12 Jun 2024 00:32:14 -0500 Subject: [PATCH 14/28] updated dir watch; removed keys call where senseable; added additional. 
debug hook; added threading and async code for testing --- .../signals/file_action_signals_mixin.py | 42 +++++-------- src/solarfm/core/mixins/ui/grid_mixin.py | 59 ++++++++++++------- .../core/widgets/context_menu_widget.py | 25 ++++---- .../core/widgets/files_view/grid_mixin.py | 5 +- src/solarfm/core/window.py | 5 +- src/solarfm/plugins/manifest.py | 11 ++-- src/solarfm/plugins/plugins_controller.py | 9 ++- src/solarfm/utils/debugging.py | 8 +++ 8 files changed, 88 insertions(+), 76 deletions(-) diff --git a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py index 99346a2..a30c862 100644 --- a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py @@ -30,58 +30,46 @@ class FileActionSignalsMixin: wid = tab.get_wid() tid = tab.get_id() - dir_watcher.connect("changed", self.dir_watch_updates, (f"{wid}|{tid}",)) + dir_watcher.connect("changed", self.dir_watch_updates, *(f"{wid}|{tid}",)) tab.set_dir_watcher(dir_watcher) - # NOTE: Too lazy to impliment a proper update handler and so just regen store and update tab. - # Use a lock system to prevent too many update calls for certain instances but user can manually refresh if they have urgency - def dir_watch_updates(self, file_monitor, file, other_file = None, eve_type = None, data = None): + def dir_watch_updates(self, file_monitor, file, other_file = None, eve_type = None, tab_widget_id = None): if eve_type in [Gio.FileMonitorEvent.CREATED, Gio.FileMonitorEvent.DELETED, Gio.FileMonitorEvent.RENAMED, Gio.FileMonitorEvent.MOVED_IN, Gio.FileMonitorEvent.MOVED_OUT]: - if eve_type in [Gio.FileMonitorEvent.MOVED_IN, Gio.FileMonitorEvent.MOVED_OUT]: - self.update_on_soft_lock_end(data[0]) - elif data[0] in self.soft_update_lock.keys(): - self.soft_update_lock[data[0]]["last_update_time"] = time.time() - else: - self.soft_lock_countdown(data[0]) + self.soft_lock_countdown(tab_widget_id) - @daemon_threaded - def soft_lock_countdown(self, tab_widget): - self.soft_update_lock[tab_widget] = { "last_update_time": time.time()} + def soft_lock_countdown(self, tab_widget_id): + if tab_widget_id in self.soft_update_lock: + timeout_id = self.soft_update_lock[tab_widget_id]["timeout_id"] + GLib.source_remove(timeout_id) - lock = True - while lock: - time.sleep(0.6) - last_update_time = self.soft_update_lock[tab_widget]["last_update_time"] - current_time = time.time() - if (current_time - last_update_time) > 0.6: - lock = False + timeout_id = GLib.timeout_add(0, self.update_on_soft_lock_end, 600, *(tab_widget_id,)) + self.soft_update_lock[tab_widget_id] = { "timeout_id": timeout_id } - self.soft_update_lock.pop(tab_widget, None) - GLib.idle_add(self.update_on_soft_lock_end, *(tab_widget,)) - def update_on_soft_lock_end(self, tab_widget): - wid, tid = tab_widget.split("|") + def update_on_soft_lock_end(self, timout_ms, tab_widget_id): + self.soft_update_lock.pop(tab_widget_id, None) + + wid, tid = tab_widget_id.split("|") notebook = self.builder.get_object(f"window_{wid}") tab = self.get_fm_window(wid).get_tab_by_id(tid) icon_grid = self.builder.get_object(f"{wid}|{tid}|icon_grid", use_gtk = False) store = icon_grid.get_model() - _store, tab_widget_label = self.get_store_and_label_from_notebook(notebook, f"{wid}|{tid}") + _store, tab_widget_id_label = self.get_store_and_label_from_notebook(notebook, f"{wid}|{tid}") tab.load_directory() icon_grid.clear_and_set_new_store() self.load_store(tab, icon_grid.get_store()) - 
tab_widget_label.set_label(tab.get_end_of_path()) + tab_widget_id_label.set_label(tab.get_end_of_path()) state = self.get_current_state() if [wid, tid] in [state.wid, state.tid]: self.set_bottom_labels(tab) return False - def do_file_search(self, widget, eve = None): if not self.ctrl_down and not self.shift_down and not self.alt_down: target = widget.get_name() diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index 2cbcbdc..0a36c09 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -34,7 +34,6 @@ class GridMixin: dir = tab.get_current_directory() files = tab.get_files() - store.clear() for file in files: store.append([None, file[0]]) @@ -48,33 +47,51 @@ class GridMixin: self.fm_controller.save_state() - @daemon_threaded def generate_icons(self, tab, store, dir, files): - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = None + for i, file in enumerate(files): + # GLib.Thread(f"{i}", self.make_and_load_icon, i, store, tab, dir, file[0]) + self.make_and_load_icon( i, store, tab, dir, file[0]) - if loop and loop.is_running(): - loop.create_task( self.create_icons(tab, store, dir, files) ) - else: - asyncio.run( self.create_icons(tab, store, dir, files) ) - - async def create_icons(self, tab, store, dir, files): - icons = [self.get_icon(tab, dir, file[0]) for file in files] - data = await asyncio.gather(*icons) - tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] - await asyncio.gather(*tasks) - - GLib.idle_add(self.do_ui_update) - - async def update_store(self, i, store, icon): + def update_store(self, i, store, icon): itr = store.get_iter(i) GLib.idle_add(self.insert_store, store, itr, icon) - async def get_icon(self, tab, dir, file): + @daemon_threaded + def make_and_load_icon(self, i, store, tab, dir, file): + icon = tab.create_icon(dir, file) + self.update_store(i, store, icon) + + def get_icon(self, tab, dir, file): return tab.create_icon(dir, file) + + # @daemon_threaded + # def generate_icons(self, tab, store, dir, files): + # try: + # loop = asyncio.get_running_loop() + # except RuntimeError: + # loop = None + + # if loop and loop.is_running(): + # loop = asyncio.get_event_loop() + # loop.create_task( self.create_icons(tab, store, dir, files) ) + # else: + # asyncio.run( self.create_icons(tab, store, dir, files) ) + + # async def create_icons(self, tab, store, dir, files): + # icons = [self.get_icon(tab, dir, file[0]) for file in files] + # data = await asyncio.gather(*icons) + # tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] + # asyncio.gather(*tasks) + + # async def update_store(self, i, store, icon): + # itr = store.get_iter(i) + # GLib.idle_add(self.insert_store, store, itr, icon) + + # async def get_icon(self, tab, dir, file): + # return tab.create_icon(dir, file) + + def insert_store(self, store, itr, icon): store.set_value(itr, 0, icon) diff --git a/src/solarfm/core/widgets/context_menu_widget.py b/src/solarfm/core/widgets/context_menu_widget.py index 5774c5c..bbab184 100644 --- a/src/solarfm/core/widgets/context_menu_widget.py +++ b/src/solarfm/core/widgets/context_menu_widget.py @@ -40,16 +40,15 @@ class ContextMenuWidget(Gtk.Menu): def _emit(self, menu_item, type): event_system.emit("do_action_from_menu_controls", type) - - def make_submenu(self, name, data, keys): + def make_submenu(self, name, data): menu = Gtk.Menu() menu_item = Gtk.MenuItem(name) - for key in keys: + for key, value in data.items(): 
if isinstance(data, dict): - entry = self.make_menu_item(key, data[key]) + entry = self.make_menu_item(key, value) elif isinstance(data, list): - entry = self.make_menu_item(key, data) + entry = self.make_menu_item(key, value) else: continue @@ -58,11 +57,11 @@ class ContextMenuWidget(Gtk.Menu): menu_item.set_submenu(menu) return menu_item - def make_menu_item(self, name, data) -> Gtk.MenuItem: + def make_menu_item(self, label, data) -> Gtk.MenuItem: if isinstance(data, dict): - return self.make_submenu(name, data, data.keys()) + return self.make_submenu(label, data) elif isinstance(data, list): - entry = Gtk.ImageMenuItem(name) + entry = Gtk.ImageMenuItem(label) icon = getattr(Gtk, f"{data[0]}") entry.set_image( Gtk.Image(stock=icon) ) entry.set_always_show_image(True) @@ -71,18 +70,18 @@ class ContextMenuWidget(Gtk.Menu): def build_context_menu(self) -> None: data = self._context_menu_data - dkeys = data.keys() plugins_entry = None - for dkey in dkeys: - entry = self.make_menu_item(dkey, data[dkey]) + for key, value in data.items(): + entry = self.make_menu_item(key, value) self.append(entry) - if dkey == "Plugins": + if key == "Plugins": plugins_entry = entry self.attach_to_widget(self._window, None) - self.show_all() self.builder.expose_object("context_menu", self) + self.show_all() + if plugins_entry: self.builder.expose_object("context_menu_plugins", plugins_entry.get_submenu()) diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index 2071fac..a6b9ee6 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -55,6 +55,7 @@ class GridMixin: loop = None if loop and loop.is_running(): + loop = asyncio.get_event_loop() loop.create_task( self.create_icons(tab, store, dir, files) ) else: asyncio.run( self.create_icons(tab, store, dir, files) ) @@ -63,9 +64,7 @@ class GridMixin: icons = [self.get_icon(tab, dir, file[0]) for file in files] data = await asyncio.gather(*icons) tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] - await asyncio.gather(*tasks) - - GLib.idle_add(self.do_ui_update) + asyncio.gather(*tasks) async def update_store(self, i, store, icon): itr = store.get_iter(i) diff --git a/src/solarfm/core/window.py b/src/solarfm/core/window.py index 75fe20f..d9b8807 100644 --- a/src/solarfm/core/window.py +++ b/src/solarfm/core/window.py @@ -10,6 +10,7 @@ gi.require_version('Gdk', '3.0') from gi.repository import Gtk from gi.repository import Gdk from gi.repository import GLib +from gi.repository import GObject # Application imports from core.controller import Controller @@ -24,6 +25,8 @@ class Window(Gtk.ApplicationWindow): """docstring for Window.""" def __init__(self, args, unknownargs): + GObject.threads_init() + super(Window, self).__init__() settings_manager.set_main_window(self) @@ -85,7 +88,7 @@ class Window(Gtk.ApplicationWindow): if visual != None and screen.is_composited(): self.set_visual(visual) self.set_app_paintable(True) - self.connect("draw", self._area_draw) + # self.connect("draw", self._area_draw) # bind css file cssProvider = Gtk.CssProvider() diff --git a/src/solarfm/plugins/manifest.py b/src/solarfm/plugins/manifest.py index 392fc2a..f7dc613 100644 --- a/src/solarfm/plugins/manifest.py +++ b/src/solarfm/plugins/manifest.py @@ -53,9 +53,8 @@ class ManifestProcessor: def get_loading_data(self): loading_data = {} requests = self._plugin.requests - keys = requests.keys() - if "ui_target" in keys: + if "ui_target" 
in requests: if requests["ui_target"] in [ "none", "other", "main_Window", "main_menu_bar", "main_menu_bttn_box_bar", "path_menu_bar", "plugin_control_list", @@ -63,7 +62,7 @@ class ManifestProcessor: "window_2", "window_3", "window_4" ]: if requests["ui_target"] == "other": - if "ui_target_id" in keys: + if "ui_target_id" in requests: loading_data["ui_target"] = self._builder.get_object(requests["ui_target_id"]) if loading_data["ui_target"] == None: raise ManifestProcessorException('Invalid "ui_target_id" given in requests. Must have one if setting "ui_target" to "other"...') @@ -74,11 +73,11 @@ class ManifestProcessor: else: raise ManifestProcessorException('Unknown "ui_target" given in requests.') - if "pass_fm_events" in keys: + if "pass_fm_events" in requests: if requests["pass_fm_events"] in ["true"]: loading_data["pass_fm_events"] = True - if "pass_ui_objects" in keys: + if "pass_ui_objects" in requests: if len(requests["pass_ui_objects"]) > 0: loading_data["pass_ui_objects"] = [] for ui_id in requests["pass_ui_objects"]: @@ -87,7 +86,7 @@ class ManifestProcessor: except ManifestProcessorException as e: logger.error(repr(e)) - if "bind_keys" in keys: + if "bind_keys" in requests: if isinstance(requests["bind_keys"], list): loading_data["bind_keys"] = requests["bind_keys"] diff --git a/src/solarfm/plugins/plugins_controller.py b/src/solarfm/plugins/plugins_controller.py index 43a108e..f37cffc 100644 --- a/src/solarfm/plugins/plugins_controller.py +++ b/src/solarfm/plugins/plugins_controller.py @@ -100,20 +100,19 @@ class PluginsController: def execute_plugin(self, module: type, plugin: PluginInfo, loading_data: []): plugin.reference = module.Plugin() - keys = loading_data.keys() - if "ui_target" in keys: + if "ui_target" in loading_data: loading_data["ui_target"].add( plugin.reference.generate_reference_ui_element() ) loading_data["ui_target"].show_all() - if "pass_ui_objects" in keys: + if "pass_ui_objects" in loading_data: plugin.reference.set_ui_object_collection( loading_data["pass_ui_objects"] ) - if "pass_fm_events" in keys: + if "pass_fm_events" in loading_data: plugin.reference.set_fm_event_system(event_system) plugin.reference.subscribe_to_events() - if "bind_keys" in keys: + if "bind_keys" in loading_data: keybindings.append_bindings( loading_data["bind_keys"] ) plugin.reference.run() diff --git a/src/solarfm/utils/debugging.py b/src/solarfm/utils/debugging.py index 70cdf82..5eaa286 100644 --- a/src/solarfm/utils/debugging.py +++ b/src/solarfm/utils/debugging.py @@ -37,6 +37,14 @@ def debug_signal_handler(signal, frame): except Exception as ex: ... + try: + import ipdb + logger.debug("\n\nStarting IPDB debugger...\n\n") + ipdb.set_trace() + return + except Exception as ex: + ... 
+ try: import pdb logger.debug("\n\nStarting embedded PDB debugger...\n\n") -- 2.39.5 From ce00970171fee2904ec948c17f7e06d48b3d39df Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Sat, 29 Jun 2024 21:37:44 -0500 Subject: [PATCH 15/28] moved thumbnail generation to plugin; extended plugin loading for pre and post window loading --- plugins/README.md | 1 + plugins/template/manifest.json | 3 +- plugins/thumbnailer/__init__.py | 3 + plugins/thumbnailer/__main__.py | 3 + .../thumbnailer}/icons/__init__.py | 0 plugins/thumbnailer/icons/controller.py | 73 +++++++++++++ .../thumbnailer}/icons/icon.py | 0 .../thumbnailer}/icons/mixins/__init__.py | 0 .../icons/mixins/desktopiconmixin.py | 0 .../icons/mixins/meshsiconmixin.py | 2 +- .../icons/mixins/videoiconmixin.py | 5 +- .../icons/mixins/xdg/BaseDirectory.py | 0 .../thumbnailer}/icons/mixins/xdg/Config.py | 0 .../icons/mixins/xdg/DesktopEntry.py | 0 .../icons/mixins/xdg/Exceptions.py | 0 .../icons/mixins/xdg/IconTheme.py | 0 .../thumbnailer}/icons/mixins/xdg/IniFile.py | 0 .../thumbnailer}/icons/mixins/xdg/Locale.py | 0 .../thumbnailer}/icons/mixins/xdg/Menu.py | 0 .../icons/mixins/xdg/MenuEditor.py | 0 .../thumbnailer}/icons/mixins/xdg/Mime.py | 0 .../icons/mixins/xdg/RecentFiles.py | 0 .../thumbnailer}/icons/mixins/xdg/__init__.py | 0 .../thumbnailer}/icons/mixins/xdg/util.py | 0 plugins/thumbnailer/manifest.json | 12 +++ plugins/thumbnailer/plugin.py | 59 ++++++++++ plugins/thumbnailer/settings.json | 101 ++++++++++++++++++ plugins/trasher/plugin.py | 2 + plugins/trasher/trash.py | 2 +- plugins/trasher/xdgtrash.py | 2 +- src/solarfm/core/controller.py | 9 +- src/solarfm/core/controller_data.py | 4 +- src/solarfm/core/mixins/ui/grid_mixin.py | 3 +- src/solarfm/core/ui_mixin.py | 2 +- src/solarfm/plugins/manifest.py | 32 +++--- src/solarfm/plugins/plugins_controller.py | 61 ++++++++--- src/solarfm/shellfm/windows/tabs/tab.py | 37 ++----- .../shellfm/windows/tabs/utils/launcher.py | 20 ++-- .../shellfm/windows/tabs/utils/settings.py | 37 ------- user_config/usr/share/solarfm/settings.json | 11 +- 40 files changed, 359 insertions(+), 125 deletions(-) create mode 100644 plugins/thumbnailer/__init__.py create mode 100644 plugins/thumbnailer/__main__.py rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/__init__.py (100%) create mode 100644 plugins/thumbnailer/icons/controller.py rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/icon.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/__init__.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/desktopiconmixin.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/meshsiconmixin.py (89%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/videoiconmixin.py (95%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/BaseDirectory.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/Config.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/DesktopEntry.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/Exceptions.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/IconTheme.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/IniFile.py (100%) rename {src/solarfm/shellfm/windows/tabs 
=> plugins/thumbnailer}/icons/mixins/xdg/Locale.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/Menu.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/MenuEditor.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/Mime.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/RecentFiles.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/__init__.py (100%) rename {src/solarfm/shellfm/windows/tabs => plugins/thumbnailer}/icons/mixins/xdg/util.py (100%) create mode 100644 plugins/thumbnailer/manifest.json create mode 100644 plugins/thumbnailer/plugin.py create mode 100644 plugins/thumbnailer/settings.json diff --git a/plugins/README.md b/plugins/README.md index 1f18e50..f475c08 100644 --- a/plugins/README.md +++ b/plugins/README.md @@ -14,6 +14,7 @@ class Manifest: 'ui_target': "plugin_control_list", 'pass_fm_events': "true" } + pre_launch: bool = False ``` diff --git a/plugins/template/manifest.json b/plugins/template/manifest.json index 4dcbf47..10483da 100644 --- a/plugins/template/manifest.json +++ b/plugins/template/manifest.json @@ -8,6 +8,7 @@ "ui_target": "plugin_control_list", "pass_fm_events": "true", "bind_keys": ["Example Plugin||send_message:f"] - } + }, + "pre_launch": "false" } } diff --git a/plugins/thumbnailer/__init__.py b/plugins/thumbnailer/__init__.py new file mode 100644 index 0000000..d36fa8c --- /dev/null +++ b/plugins/thumbnailer/__init__.py @@ -0,0 +1,3 @@ +""" + Pligin Module +""" diff --git a/plugins/thumbnailer/__main__.py b/plugins/thumbnailer/__main__.py new file mode 100644 index 0000000..a576329 --- /dev/null +++ b/plugins/thumbnailer/__main__.py @@ -0,0 +1,3 @@ +""" + Pligin Package +""" diff --git a/src/solarfm/shellfm/windows/tabs/icons/__init__.py b/plugins/thumbnailer/icons/__init__.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/__init__.py rename to plugins/thumbnailer/icons/__init__.py diff --git a/plugins/thumbnailer/icons/controller.py b/plugins/thumbnailer/icons/controller.py new file mode 100644 index 0000000..732cdb1 --- /dev/null +++ b/plugins/thumbnailer/icons/controller.py @@ -0,0 +1,73 @@ +# Python imports +import json +import os +from os import path + +# Lib imports +import gi +gi.require_version('Gtk', '3.0') +from gi.repository import Gtk + +# Application imports +from .icon import Icon + + + +class IconController(Icon): + def __init__(self): + CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) + + # NOTE: app_name should be defined using python 'builtins' and so too must be logger used in the various classes + app_name_exists = False + try: + app_name + app_name_exists = True + except Exception as e: + ... 
+ + APP_CONTEXT = f"{app_name.lower()}" if app_name_exists else "shellfm" + USR_APP_CONTEXT = f"/usr/share/{APP_CONTEXT}" + USER_HOME = path.expanduser('~') + CONFIG_PATH = f"{USER_HOME}/.config/{APP_CONTEXT}" + self.DEFAULT_ICONS = f"{CONFIG_PATH}/icons" + self.DEFAULT_ICON = f"{self.DEFAULT_ICONS}/text.png" + self.FFMPG_THUMBNLR = f"{CONFIG_PATH}/ffmpegthumbnailer" # Thumbnail generator binary + self.BLENDER_THUMBNLR = f"{CONFIG_PATH}/blender-thumbnailer" # Blender thumbnail generator binary + + self.ICON_DIRS = ["/usr/share/icons", f"{USER_HOME}/.icons" "/usr/share/pixmaps"] + self.BASE_THUMBS_PTH = f"{USER_HOME}/.thumbnails" + self.ABS_THUMBS_PTH = f"{self.BASE_THUMBS_PTH}/normal" + self.STEAM_ICONS_PTH = f"{self.BASE_THUMBS_PTH}/steam_icons" + + if not path.isdir(self.BASE_THUMBS_PTH): + os.mkdir(self.BASE_THUMBS_PTH) + + if not path.isdir(self.ABS_THUMBS_PTH): + os.mkdir(self.ABS_THUMBS_PTH) + + if not path.isdir(self.STEAM_ICONS_PTH): + os.mkdir(self.STEAM_ICONS_PTH) + + if not os.path.exists(self.DEFAULT_ICONS): + self.DEFAULT_ICONS = f"{USR_APP_CONTEXT}/icons" + self.DEFAULT_ICON = f"{self.DEFAULT_ICONS}/text.png" + + CONFIG_FILE = f"{CURRENT_PATH}/../settings.json" + with open(CONFIG_FILE) as f: + settings = json.load(f) + config = settings["config"] + + self.container_icon_wh = config["container_icon_wh"] + self.video_icon_wh = config["video_icon_wh"] + self.sys_icon_wh = config["sys_icon_wh"] + + # Filters + filters = settings["filters"] + self.fmeshs = tuple(filters["meshs"]) + self.fcode = tuple(filters["code"]) + self.fvideos = tuple(filters["videos"]) + self.foffice = tuple(filters["office"]) + self.fimages = tuple(filters["images"]) + self.ftext = tuple(filters["text"]) + self.fmusic = tuple(filters["music"]) + self.fpdf = tuple(filters["pdf"]) diff --git a/src/solarfm/shellfm/windows/tabs/icons/icon.py b/plugins/thumbnailer/icons/icon.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/icon.py rename to plugins/thumbnailer/icons/icon.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/__init__.py b/plugins/thumbnailer/icons/mixins/__init__.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/__init__.py rename to plugins/thumbnailer/icons/mixins/__init__.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/desktopiconmixin.py b/plugins/thumbnailer/icons/mixins/desktopiconmixin.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/desktopiconmixin.py rename to plugins/thumbnailer/icons/mixins/desktopiconmixin.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/meshsiconmixin.py b/plugins/thumbnailer/icons/mixins/meshsiconmixin.py similarity index 89% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/meshsiconmixin.py rename to plugins/thumbnailer/icons/mixins/meshsiconmixin.py index 0d62636..8f7b057 100644 --- a/src/solarfm/shellfm/windows/tabs/icons/mixins/meshsiconmixin.py +++ b/plugins/thumbnailer/icons/mixins/meshsiconmixin.py @@ -14,4 +14,4 @@ class MeshsIconMixin: proc = subprocess.Popen([self.BLENDER_THUMBNLR, full_path, hash_img_path]) proc.wait() except Exception as e: - self.logger.debug(repr(e)) + logger.debug(repr(e)) diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/videoiconmixin.py b/plugins/thumbnailer/icons/mixins/videoiconmixin.py similarity index 95% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/videoiconmixin.py rename to plugins/thumbnailer/icons/mixins/videoiconmixin.py index 60fd16b..324cbb4 100644 --- 
a/src/solarfm/shellfm/windows/tabs/icons/mixins/videoiconmixin.py +++ b/plugins/thumbnailer/icons/mixins/videoiconmixin.py @@ -14,7 +14,7 @@ class VideoIconMixin: proc = subprocess.Popen([self.FFMPG_THUMBNLR, "-t", scrub_percent, "-s", "300", "-c", "jpg", "-i", full_path, "-o", hash_img_path]) proc.wait() except Exception as e: - self.logger.debug(repr(e)) + logger.info(repr(e)) self.ffprobe_generate_video_thumbnail(full_path, hash_img_path) @@ -51,5 +51,4 @@ class VideoIconMixin: proc.wait() except Exception as e: print("Video thumbnail generation issue in thread:") - print( repr(e) ) - self.logger.debug(repr(e)) + logger.info(repr(e)) diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/BaseDirectory.py b/plugins/thumbnailer/icons/mixins/xdg/BaseDirectory.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/BaseDirectory.py rename to plugins/thumbnailer/icons/mixins/xdg/BaseDirectory.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Config.py b/plugins/thumbnailer/icons/mixins/xdg/Config.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Config.py rename to plugins/thumbnailer/icons/mixins/xdg/Config.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/DesktopEntry.py b/plugins/thumbnailer/icons/mixins/xdg/DesktopEntry.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/DesktopEntry.py rename to plugins/thumbnailer/icons/mixins/xdg/DesktopEntry.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Exceptions.py b/plugins/thumbnailer/icons/mixins/xdg/Exceptions.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Exceptions.py rename to plugins/thumbnailer/icons/mixins/xdg/Exceptions.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/IconTheme.py b/plugins/thumbnailer/icons/mixins/xdg/IconTheme.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/IconTheme.py rename to plugins/thumbnailer/icons/mixins/xdg/IconTheme.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/IniFile.py b/plugins/thumbnailer/icons/mixins/xdg/IniFile.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/IniFile.py rename to plugins/thumbnailer/icons/mixins/xdg/IniFile.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Locale.py b/plugins/thumbnailer/icons/mixins/xdg/Locale.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Locale.py rename to plugins/thumbnailer/icons/mixins/xdg/Locale.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Menu.py b/plugins/thumbnailer/icons/mixins/xdg/Menu.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Menu.py rename to plugins/thumbnailer/icons/mixins/xdg/Menu.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/MenuEditor.py b/plugins/thumbnailer/icons/mixins/xdg/MenuEditor.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/MenuEditor.py rename to plugins/thumbnailer/icons/mixins/xdg/MenuEditor.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Mime.py b/plugins/thumbnailer/icons/mixins/xdg/Mime.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/Mime.py rename to plugins/thumbnailer/icons/mixins/xdg/Mime.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/RecentFiles.py 
b/plugins/thumbnailer/icons/mixins/xdg/RecentFiles.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/RecentFiles.py rename to plugins/thumbnailer/icons/mixins/xdg/RecentFiles.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/__init__.py b/plugins/thumbnailer/icons/mixins/xdg/__init__.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/__init__.py rename to plugins/thumbnailer/icons/mixins/xdg/__init__.py diff --git a/src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/util.py b/plugins/thumbnailer/icons/mixins/xdg/util.py similarity index 100% rename from src/solarfm/shellfm/windows/tabs/icons/mixins/xdg/util.py rename to plugins/thumbnailer/icons/mixins/xdg/util.py diff --git a/plugins/thumbnailer/manifest.json b/plugins/thumbnailer/manifest.json new file mode 100644 index 0000000..576316c --- /dev/null +++ b/plugins/thumbnailer/manifest.json @@ -0,0 +1,12 @@ +{ + "manifest": { + "name": "Thumbnailer", + "author": "ITDominator", + "version": "0.0.1", + "support": "", + "requests": { + "pass_fm_events": "true" + }, + "pre_launch": "true" + } +} diff --git a/plugins/thumbnailer/plugin.py b/plugins/thumbnailer/plugin.py new file mode 100644 index 0000000..7dd778f --- /dev/null +++ b/plugins/thumbnailer/plugin.py @@ -0,0 +1,59 @@ +# Python imports +import os + +# Lib imports + +# Application imports +from plugins.plugin_base import PluginBase +from .icons.controller import IconController + + + +class Plugin(PluginBase): + def __init__(self): + super().__init__() + + self.name = "Thumbnailer" # NOTE: Need to remove after establishing private bidirectional 1-1 message bus + # where self.name should not be needed for message comms + # self.path = os.path.dirname(os.path.realpath(__file__)) + + + def run(self): + self.icon_controller = IconController() + self._event_system.subscribe("create-thumbnail", self.create_thumbnail) + + def generate_reference_ui_element(self): + ... 
+ + def create_thumbnail(self, dir, file) -> str: + return self.icon_controller.create_icon(dir, file) + + def get_video_icons(self, dir) -> list: + data = [] + + def get_video_icons(self) -> list: + data = [] + fvideos = self.icon_controller.fvideos + vids = [ file for file in os.path.list_dir(dir) if file.lower().endswith(fvideos) ] + + for file in vids: + img_hash, hash_img_path = self.create_video_thumbnail(full_path = f"{dir}/{file}", returnHashInstead = True) + data.append([img_hash, hash_img_path]) + + return data + + def get_pixbuf_icon_str_combo(self, dir) -> list: + data = [] + for file in os.path.list_dir(dir): + icon = self.icon_controller.create_icon(dir, file).get_pixbuf() + data.append([icon, file]) + + return data + + def get_gtk_icon_str_combo(self, dir) -> list: + data = [] + for file in os.path.list_dir(dir): + icon = self.icon_controller.create_icon(dir, file) + data.append([icon, file[0]]) + + return data diff --git a/plugins/thumbnailer/settings.json b/plugins/thumbnailer/settings.json new file mode 100644 index 0000000..134ca82 --- /dev/null +++ b/plugins/thumbnailer/settings.json @@ -0,0 +1,101 @@ +{ + "config":{ + "thumbnailer_path":"ffmpegthumbnailer", + "blender_thumbnailer_path":"", + "container_icon_wh":[ + 128, + 128 + ], + "video_icon_wh":[ + 128, + 64 + ], + "sys_icon_wh":[ + 56, + 56 + ], + "steam_cdn_url":"https://steamcdn-a.akamaihd.net/steam/apps/", + "remux_folder_max_disk_usage":"8589934592" + }, + "filters":{ + "meshs":[ + ".dae", + ".fbx", + ".gltf", + ".obj", + ".stl" + ], + "code":[ + ".cpp", + ".css", + ".c", + ".go", + ".html", + ".htm", + ".java", + ".js", + ".json", + ".lua", + ".md", + ".py", + ".rs", + ".toml", + ".xml", + ".pom" + ], + "videos":[ + ".mkv", + ".mp4", + ".webm", + ".avi", + ".mov", + ".m4v", + ".mpg", + ".mpeg", + ".wmv", + ".flv" + ], + "office":[ + ".doc", + ".docx", + ".xls", + ".xlsx", + ".xlt", + ".xltx", + ".xlm", + ".ppt", + ".pptx", + ".pps", + ".ppsx", + ".odt", + ".rtf" + ], + "images":[ + ".png", + ".jpg", + ".jpeg", + ".gif", + ".ico", + ".tga", + ".webp" + ], + "text":[ + ".txt", + ".text", + ".sh", + ".cfg", + ".conf", + ".log" + ], + "music":[ + ".psf", + ".mp3", + ".ogg", + ".flac", + ".m4a" + ], + "pdf":[ + ".pdf" + ] + } +} diff --git a/plugins/trasher/plugin.py b/plugins/trasher/plugin.py index fd71ecf..dd0e81e 100644 --- a/plugins/trasher/plugin.py +++ b/plugins/trasher/plugin.py @@ -111,6 +111,8 @@ class Plugin(PluginBase): for uri in state.uris: self.trashman.trash(uri, verbocity) + self.trashman.regenerate() + def restore_trash_files(self, widget = None, eve = None, verbocity = False): self._event_system.emit("get_current_state") state = self._fm_state diff --git a/plugins/trasher/trash.py b/plugins/trasher/trash.py index 4210f9c..d60d086 100755 --- a/plugins/trasher/trash.py +++ b/plugins/trasher/trash.py @@ -43,4 +43,4 @@ class Trash(object): def restore(self, filename, verbose): """Restore a file from trash.""" - raise NotImplementedError(_('Backend didn’t \ implement this functionality')) + raise NotImplementedError(_('Backend didn’t implement this functionality')) diff --git a/plugins/trasher/xdgtrash.py b/plugins/trasher/xdgtrash.py index 02ff013..0ff2cf4 100755 --- a/plugins/trasher/xdgtrash.py +++ b/plugins/trasher/xdgtrash.py @@ -127,7 +127,7 @@ DeletionDate={} f.write(infofile) f.close() - self.regenerate() + # self.regenerate() if verbose: sys.stderr.write(_('trashed \'{}\'\n').format(filename)) diff --git a/src/solarfm/core/controller.py b/src/solarfm/core/controller.py index 31f1bee..79b0796 
100644 --- a/src/solarfm/core/controller.py +++ b/src/solarfm/core/controller.py @@ -44,10 +44,13 @@ class Controller(UIMixin, SignalsMixins, Controller_Data): self._subscribe_to_events() self._load_widgets() + if args.no_plugins == "false": + self.plugins_controller.pre_launch_plugins() + self._generate_file_views(self.fm_controller_data) if args.no_plugins == "false": - self.plugins.launch_plugins() + self.plugins_controller.post_launch_plugins() for arg in unknownargs + [args.new_tab,]: if os.path.isdir(arg): @@ -116,7 +119,7 @@ class Controller(UIMixin, SignalsMixins, Controller_Data): def reload_plugins(self, widget=None, eve=None): - self.plugins.reload_plugins() + self.plugins_controller.reload_plugins() def do_action_from_menu_controls(self, _action=None, eve=None): @@ -196,4 +199,4 @@ class Controller(UIMixin, SignalsMixins, Controller_Data): tab.execute([f"{tab.terminal_app}"], start_dir=tab.get_current_directory()) def go_to_path(self, path: str): - self.builder.get_object("path_entry").set_text(path) \ No newline at end of file + self.builder.get_object("path_entry").set_text(path) diff --git a/src/solarfm/core/controller_data.py b/src/solarfm/core/controller_data.py index 7a1514d..da7b7b0 100644 --- a/src/solarfm/core/controller_data.py +++ b/src/solarfm/core/controller_data.py @@ -29,7 +29,7 @@ class Controller_Data: self._load_glade_file() self.fm_controller = WindowController() - self.plugins = PluginsController() + self.plugins_controller = PluginsController() self.fm_controller_data = self.fm_controller.get_state_from_file() self.window1 = self.builder.get_object("window_1") @@ -179,4 +179,4 @@ class Controller_Data: proc = subprocess.Popen(['xclip','-selection','clipboard'], stdin=subprocess.PIPE) proc.stdin.write(data.encode("utf-8")) proc.stdin.close() - retcode = proc.wait() \ No newline at end of file + retcode = proc.wait() diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index 0a36c09..c0c9948 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -31,7 +31,6 @@ class GridMixin: # return - dir = tab.get_current_directory() files = tab.get_files() @@ -167,4 +166,4 @@ class GridMixin: icon_grid = obj.get_children()[0] name = icon_grid.get_name() if name == _name: - return icon_grid \ No newline at end of file + return icon_grid diff --git a/src/solarfm/core/ui_mixin.py b/src/solarfm/core/ui_mixin.py index cbca606..0c04c7b 100644 --- a/src/solarfm/core/ui_mixin.py +++ b/src/solarfm/core/ui_mixin.py @@ -86,4 +86,4 @@ class UIMixin(PaneMixin, WindowMixin): for j in range(0, 4): i = j + 1 self.fm_controller.create_window() - self.create_new_tab_notebook(None, i, None) \ No newline at end of file + self.create_new_tab_notebook(None, i, None) diff --git a/src/solarfm/plugins/manifest.py b/src/solarfm/plugins/manifest.py index f7dc613..bfcebc7 100644 --- a/src/solarfm/plugins/manifest.py +++ b/src/solarfm/plugins/manifest.py @@ -15,32 +15,37 @@ class ManifestProcessorException(Exception): ... 
-@dataclass(slots=True) +@dataclass(slots = True) class PluginInfo: - path: str = None - name: str = None - author: str = None - version: str = None - support: str = None - requests:{} = None - reference: type = None + path: str = None + name: str = None + author: str = None + version: str = None + support: str = None + requests:{} = None + reference: type = None + pre_launch: bool = False class ManifestProcessor: def __init__(self, path, builder): - manifest = join(path, "manifest.json") - if not os.path.exists(manifest): + manifest_pth = join(path, "manifest.json") + if not os.path.exists(manifest_pth): raise ManifestProcessorException("Invalid Plugin Structure: Plugin doesn't have 'manifest.json'. Aboarting load...") self._path = path self._builder = builder - with open(manifest) as f: + with open(manifest_pth) as f: data = json.load(f) self._manifest = data["manifest"] self._plugin = self.collect_info() + def is_pre_launch(self) -> bool: + return self._plugin.pre_launch + def collect_info(self) -> PluginInfo: plugin = PluginInfo() + plugin.path = self._path plugin.name = self._manifest["name"] plugin.author = self._manifest["author"] @@ -48,6 +53,9 @@ class ManifestProcessor: plugin.support = self._manifest["support"] plugin.requests = self._manifest["requests"] + if "pre_launch" in self._manifest.keys(): + plugin.pre_launch = True if self._manifest["pre_launch"] == "true" else False + return plugin def get_loading_data(self): @@ -90,4 +98,4 @@ class ManifestProcessor: if isinstance(requests["bind_keys"], list): loading_data["bind_keys"] = requests["bind_keys"] - return self._plugin, loading_data \ No newline at end of file + return self._plugin, loading_data diff --git a/src/solarfm/plugins/plugins_controller.py b/src/solarfm/plugins/plugins_controller.py index f37cffc..07d4b72 100644 --- a/src/solarfm/plugins/plugins_controller.py +++ b/src/solarfm/plugins/plugins_controller.py @@ -36,41 +36,76 @@ class PluginsController: self._plugins_dir_watcher = None self._plugin_collection = [] + self._plugin_manifests = {} + + self._load_manifests() - def launch_plugins(self) -> None: + def _load_manifests(self): + logger.info(f"Loading manifests...") + + for path, folder in [[join(self._plugins_path, item), item] if os.path.isdir(join(self._plugins_path, item)) else None for item in os.listdir(self._plugins_path)]: + manifest = ManifestProcessor(path, self._builder) + self._plugin_manifests[path] = { + "path": path, + "folder": folder, + "manifest": manifest + } + self._set_plugins_watcher() - self.load_plugins() def _set_plugins_watcher(self) -> None: self._plugins_dir_watcher = Gio.File.new_for_path(self._plugins_path) \ .monitor_directory(Gio.FileMonitorFlags.WATCH_MOVES, Gio.Cancellable()) self._plugins_dir_watcher.connect("changed", self._on_plugins_changed, ()) - def _on_plugins_changed(self, file_monitor, file, other_file=None, eve_type=None, data=None): + def _on_plugins_changed(self, file_monitor, file, other_file = None, eve_type = None, data = None): if eve_type in [Gio.FileMonitorEvent.CREATED, Gio.FileMonitorEvent.DELETED, Gio.FileMonitorEvent.RENAMED, Gio.FileMonitorEvent.MOVED_IN, Gio.FileMonitorEvent.MOVED_OUT]: self.reload_plugins(file) - @daemon_threaded - def load_plugins(self, file: str = None) -> None: - logger.info(f"Loading plugins...") + def pre_launch_plugins(self) -> None: + logger.info(f"Loading pre-launch plugins...") + plugin_manifests: {} = {} + + for key in self._plugin_manifests: + target_manifest = self._plugin_manifests[key]["manifest"] + if 
target_manifest.is_pre_launch(): + plugin_manifests[key] = self._plugin_manifests[key] + + self._load_plugins(plugin_manifests, is_pre_launch = True) + + def post_launch_plugins(self) -> None: + logger.info(f"Loading post-launch plugins...") + plugin_manifests: {} = {} + + for key in self._plugin_manifests: + target_manifest = self._plugin_manifests[key]["manifest"] + if not target_manifest.is_pre_launch(): + plugin_manifests[key] = self._plugin_manifests[key] + + self._load_plugins(plugin_manifests) + + def _load_plugins(self, plugin_manifests: {} = {}, is_pre_launch: bool = False) -> None: parent_path = os.getcwd() - for path, folder in [[join(self._plugins_path, item), item] if os.path.isdir(join(self._plugins_path, item)) else None for item in os.listdir(self._plugins_path)]: - try: - target = join(path, "plugin.py") - manifest = ManifestProcessor(path, self._builder) + for key in plugin_manifests: + target_manifest = plugin_manifests[key] + path, folder, manifest = target_manifest["path"], target_manifest["folder"], target_manifest["manifest"] + try: + target = join(path, "plugin.py") if not os.path.exists(target): raise FileNotFoundError("Invalid Plugin Structure: Plugin doesn't have 'plugin.py'. Aboarting load...") plugin, loading_data = manifest.get_loading_data() module = self.load_plugin_module(path, folder, target) - GLib.idle_add(self.execute_plugin, *(module, plugin, loading_data)) - # self.execute_plugin(module, plugin, loading_data) + if is_pre_launch: + self.execute_plugin(module, plugin, loading_data) + else: + GLib.idle_add(self.execute_plugin, *(module, plugin, loading_data)) except InvalidPluginException as e: logger.info(f"Malformed Plugin: Not loading -->: '{folder}' !") logger.debug("Trace: ", traceback.print_exc()) @@ -128,4 +163,4 @@ class PluginsController: os.chdir(plugin.path) plugin.reference.reload_package(f"{plugin.path}/plugin.py") - os.chdir(parent_path) \ No newline at end of file + os.chdir(parent_path) diff --git a/src/solarfm/shellfm/windows/tabs/tab.py b/src/solarfm/shellfm/windows/tabs/tab.py index d450421..f51ea5d 100644 --- a/src/solarfm/shellfm/windows/tabs/tab.py +++ b/src/solarfm/shellfm/windows/tabs/tab.py @@ -14,7 +14,6 @@ from random import randint from .utils.settings import Settings from .utils.launcher import Launcher from .utils.filehandler import FileHandler -from .icons.icon import Icon from .path import Path @@ -40,9 +39,8 @@ except Exception as e: -class Tab(Settings, FileHandler, Launcher, Icon, Path): +class Tab(Settings, FileHandler, Launcher, Path): def __init__(self): - self.logger = None self._id_length: int = 10 self._id: str = "" @@ -168,33 +166,6 @@ class Tab(Settings, FileHandler, Launcher, Icon, Path): } } - def get_video_icons(self) -> list: - data = [] - dir = self.get_current_directory() - for file in self._vids: - img_hash, hash_img_path = self.create_video_thumbnail(full_path=f"{dir}/{file}", returnHashInstead=True) - data.append([img_hash, hash_img_path]) - - return data - - def get_pixbuf_icon_str_combo(self): - data = [] - dir = self.get_current_directory() - for file in self._files: - icon = self.create_icon(dir, file).get_pixbuf() - data.append([icon, file]) - - return data - - def get_gtk_icon_str_combo(self) -> list: - data = [] - dir = self.get_current_directory() - for file in self._files: - icon = self.create_icon(dir, file) - data.append([icon, file[0]]) - - return data - def get_current_directory(self) -> str: return self.get_path() @@ -264,7 +235,7 @@ class Tab(Settings, FileHandler, Launcher, Icon, Path): 
return int(text) if text.isdigit() else text def _natural_keys(self, text): - return [ self._atoi(c) for c in re.split('(\d+)',text) ] + return [ self._atoi(c) for c in re.split(r'(\d+)', text) ] def _hash_text(self, text) -> str: return hashlib.sha256(str.encode(text)).hexdigest()[:18] @@ -289,3 +260,7 @@ class Tab(Settings, FileHandler, Launcher, Icon, Path): def _set_error_message(self, text: str): self.error_message = text + + + def create_icon(self, dir, file): + return event_system.emit_and_await("create-thumbnail", (dir, file,)) diff --git a/src/solarfm/shellfm/windows/tabs/utils/launcher.py b/src/solarfm/shellfm/windows/tabs/utils/launcher.py index a7febc3..6e07b4b 100644 --- a/src/solarfm/shellfm/windows/tabs/utils/launcher.py +++ b/src/solarfm/shellfm/windows/tabs/utils/launcher.py @@ -41,11 +41,11 @@ class Launcher: def execute(self, command, start_dir=os.getenv("HOME"), use_shell=False): try: - self.logger.debug(command) + logger.debug(command) subprocess.Popen(command, cwd=start_dir, shell=use_shell, start_new_session=True, stdout=None, stderr=None, close_fds=True) except ShellFMLauncherException as e: - self.logger.error(f"Couldn't execute: {command}") - self.logger.error(e) + logger.error(f"Couldn't execute: {command}") + logger.error(e) # TODO: Return std(out/in/err) handlers along with subprocess instead of sinking to null def execute_and_return_thread_handler(self, command, start_dir=os.getenv("HOME"), use_shell=False): @@ -53,8 +53,8 @@ class Launcher: DEVNULL = open(os.devnull, 'w') return subprocess.Popen(command, cwd=start_dir, shell=use_shell, start_new_session=False, stdout=DEVNULL, stderr=DEVNULL, close_fds=False) except ShellFMLauncherException as e: - self.logger.error(f"Couldn't execute and return thread: {command}") - self.logger.error(e) + logger.error(f"Couldn't execute and return thread: {command}") + logger.error(e) return None @threaded @@ -63,7 +63,7 @@ class Launcher: def remux_video(self, hash, file): remux_vid_pth = "{self.REMUX_FOLDER}/{hash}.mp4" - self.logger.debug(remux_vid_pth) + logger.debug(remux_vid_pth) if not os.path.isfile(remux_vid_pth): self.check_remux_space() @@ -83,8 +83,8 @@ class Launcher: proc = subprocess.Popen(command) proc.wait() except ShellFMLauncherException as e: - self.logger.error(message) - self.logger.error(e) + logger.error(message) + logger.error(e) return False return True @@ -94,7 +94,7 @@ class Launcher: try: limit = int(limit) except ShellFMLauncherException as e: - self.logger.debug(e) + logger.debug(e) return usage = self.get_remux_folder_usage(self.REMUX_FOLDER) @@ -113,4 +113,4 @@ class Launcher: if not os.path.islink(fp): # Skip if it is symbolic link total_size += os.path.getsize(fp) - return total_size \ No newline at end of file + return total_size diff --git a/src/solarfm/shellfm/windows/tabs/utils/settings.py b/src/solarfm/shellfm/windows/tabs/utils/settings.py index f3c0cd2..3be7131 100644 --- a/src/solarfm/shellfm/windows/tabs/utils/settings.py +++ b/src/solarfm/shellfm/windows/tabs/utils/settings.py @@ -14,8 +14,6 @@ class ShellFMSettingsException(Exception): class Settings: - logger = None - # NOTE: app_name should be defined using python 'builtins' app_name_exists = False try: @@ -31,45 +29,13 @@ class Settings: CONFIG_FILE = f"{CONFIG_PATH}/settings.json" HIDE_HIDDEN_FILES = True - DEFAULT_ICONS = f"{CONFIG_PATH}/icons" - DEFAULT_ICON = f"{DEFAULT_ICONS}/text.png" - FFMPG_THUMBNLR = f"{CONFIG_PATH}/ffmpegthumbnailer" # Thumbnail generator binary - BLENDER_THUMBNLR = 
f"{CONFIG_PATH}/blender-thumbnailer" # Blender thumbnail generator binary REMUX_FOLDER = f"{USER_HOME}/.remuxs" # Remuxed files folder - ICON_DIRS = ["/usr/share/icons", f"{USER_HOME}/.icons" "/usr/share/pixmaps"] - BASE_THUMBS_PTH = f"{USER_HOME}/.thumbnails" - ABS_THUMBS_PTH = f"{BASE_THUMBS_PTH}/normal" - STEAM_ICONS_PTH = f"{BASE_THUMBS_PTH}/steam_icons" - - if not os.path.exists(CONFIG_PATH) or not os.path.exists(CONFIG_FILE): - msg = f"No config file located! Aborting loading ShellFM library...\nExpected: {CONFIG_FILE}" - raise ShellFMSettingsException(msg) - - if not path.isdir(REMUX_FOLDER): - os.mkdir(REMUX_FOLDER) - - if not path.isdir(BASE_THUMBS_PTH): - os.mkdir(BASE_THUMBS_PTH) - - if not path.isdir(ABS_THUMBS_PTH): - os.mkdir(ABS_THUMBS_PTH) - - if not path.isdir(STEAM_ICONS_PTH): - os.mkdir(STEAM_ICONS_PTH) - - if not os.path.exists(DEFAULT_ICONS): - DEFAULT_ICONS = f"{USR_APP_CONTEXT}/icons" - DEFAULT_ICON = f"{DEFAULT_ICONS}/text.png" - with open(CONFIG_FILE) as f: settings = json.load(f) config = settings["config"] subpath = config["base_of_home"] - STEAM_CDN_URL = config["steam_cdn_url"] - FFMPG_THUMBNLR = FFMPG_THUMBNLR if config["thumbnailer_path"] == "" else config["thumbnailer_path"] - BLENDER_THUMBNLR = BLENDER_THUMBNLR if config["blender_thumbnailer_path"] == "" else config["blender_thumbnailer_path"] HIDE_HIDDEN_FILES = True if config["hide_hidden_files"] in ["true", ""] else False go_past_home = True if config["go_past_home"] in ["true", ""] else False lock_folder = False if config["lock_folder"] in ["false", ""] else True @@ -83,9 +49,6 @@ class Settings: code_app = config["code_app"] text_app = config["text_app"] terminal_app = config["terminal_app"] - container_icon_wh = config["container_icon_wh"] - video_icon_wh = config["video_icon_wh"] - sys_icon_wh = config["sys_icon_wh"] file_manager_app = config["file_manager_app"] remux_folder_max_disk_usage = config["remux_folder_max_disk_usage"] diff --git a/user_config/usr/share/solarfm/settings.json b/user_config/usr/share/solarfm/settings.json index 8627762..1cd962f 100644 --- a/user_config/usr/share/solarfm/settings.json +++ b/user_config/usr/share/solarfm/settings.json @@ -2,8 +2,6 @@ "config": { "base_of_home": "", "hide_hidden_files": "true", - "thumbnailer_path": "ffmpegthumbnailer", - "blender_thumbnailer_path": "", "go_past_home": "true", "lock_folder": "false", "locked_folders": "venv::::flasks", @@ -16,11 +14,7 @@ "code_app": "newton", "text_app": "mousepad", "terminal_app": "terminator", - "container_icon_wh": [128, 128], - "video_icon_wh": [128, 64], - "sys_icon_wh": [56, 56], "file_manager_app": "solarfm", - "steam_cdn_url": "https://steamcdn-a.akamaihd.net/steam/apps/", "remux_folder_max_disk_usage": "8589934592", "make_transparent":0, "main_window_x":721, @@ -29,6 +23,9 @@ "main_window_min_height":480, "main_window_width":800, "main_window_height":600, + "application_dirs":[ + "/usr/share/applications" + ] }, "filters": { "meshs": [".dae", ".fbx", ".gltf", ".obj", ".stl"], @@ -49,4 +46,4 @@ "ch_log_lvl": 20, "fh_log_lvl": 10 } -} \ No newline at end of file +} -- 2.39.5 From 9d3a5b9f3b80234daff14e4510e06049ba1890b9 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Thu, 4 Jul 2024 17:24:31 -0500 Subject: [PATCH 16/28] Attempting to prompt for gc; About page updates; small non crit errors fixed --- plugins/file_properties/plugin.py | 11 ++- src/solarfm/__main__.py | 2 +- src/solarfm/core/controller.py | 45 +++++----- src/solarfm/core/controller_data.py | 3 + 
.../signals/file_action_signals_mixin.py | 10 ++- src/solarfm/core/mixins/ui/grid_mixin.py | 8 +- src/solarfm/core/mixins/ui/tab_mixin.py | 85 ++++++++++++++++--- src/solarfm/core/mixins/ui/window_mixin.py | 16 +++- src/solarfm/core/ui_mixin.py | 4 +- .../core/widgets/context_menu_widget.py | 72 ++++++++-------- .../core/widgets/dialogs/about_widget.py | 5 +- .../core/widgets/files_view/grid_mixin.py | 72 +++++++++++----- .../core/widgets/files_view/tab_mixin.py | 82 +++++++++++++++--- .../core/widgets/files_view/window_mixin.py | 15 +++- src/solarfm/core/widgets/icon_grid_widget.py | 9 ++ .../widgets/popups/message_popup_widget.py | 3 +- src/solarfm/utils/ipc_server.py | 13 ++- src/solarfm/utils/settings_manager/manager.py | 2 +- user_config/bin/solarfm | 3 + .../share/solarfm/ui_widgets/about_ui.glade | 7 +- 20 files changed, 349 insertions(+), 118 deletions(-) diff --git a/plugins/file_properties/plugin.py b/plugins/file_properties/plugin.py index 1036fcf..a402831 100644 --- a/plugins/file_properties/plugin.py +++ b/plugins/file_properties/plugin.py @@ -122,7 +122,6 @@ class Plugin(PluginBase): uri = state.uris[0] path = state.tab.get_current_directory() - properties = self._set_ui_data(uri, path) response = self._properties_dialog.run() if response in [Gtk.ResponseType.CANCEL, Gtk.ResponseType.DELETE_EVENT]: @@ -168,13 +167,13 @@ class Plugin(PluginBase): def _set_ui_data(self, uri, path): properties = Properties() - file_info = Gio.File.new_for_path(uri).query_info(attributes="standard::*,owner::*,time::access,time::changed", - flags=Gio.FileQueryInfoFlags.NONE, - cancellable=None) + file_info = Gio.File.new_for_path(uri).query_info(attributes = "standard::*,owner::*,time::access,time::changed", + flags = Gio.FileQueryInfoFlags.NONE, + cancellable = None) is_symlink = file_info.get_attribute_as_string("standard::is-symlink") properties.file_uri = uri - properties.file_target = file_info.get_attribute_as_string("standard::symlink-target") if is_symlink else "" + properties.file_target = file_info.get_attribute_as_string("standard::symlink-target") if is_symlink in [True, "TRUE"] else "" properties.file_name = file_info.get_display_name() properties.file_location = path properties.mime_type = file_info.get_content_type() @@ -186,7 +185,7 @@ class Plugin(PluginBase): # NOTE: Read = 4, Write = 2, Exec = 1 command = ["stat", "-c", "%a", uri] - with subprocess.Popen(command, stdout=subprocess.PIPE) as proc: + with subprocess.Popen(command, stdout = subprocess.PIPE) as proc: properties.chmod_stat = list(proc.stdout.read().decode("UTF-8").strip()) owner = self._chmod_map[f"{properties.chmod_stat[0]}"] group = self._chmod_map[f"{properties.chmod_stat[1]}"] diff --git a/src/solarfm/__main__.py b/src/solarfm/__main__.py index f5121f8..bd6cfe0 100644 --- a/src/solarfm/__main__.py +++ b/src/solarfm/__main__.py @@ -51,4 +51,4 @@ if __name__ == "__main__": main(args, unknownargs) except Exception as e: traceback.print_exc() - quit() \ No newline at end of file + quit() diff --git a/src/solarfm/core/controller.py b/src/solarfm/core/controller.py index 79b0796..049bd71 100644 --- a/src/solarfm/core/controller.py +++ b/src/solarfm/core/controller.py @@ -135,46 +135,48 @@ class Controller(UIMixin, SignalsMixins, Controller_Data): event_system.emit("hide_rename_file_menu") if action == "open": - event_system.emit("open_files") + event_system.emit_and_await("open_files") if action == "open_with": - event_system.emit("show_appchooser_menu") + event_system.emit_and_await("show_appchooser_menu") if action == 
"open_2_new_tab": - event_system.emit("open_2_new_tab") + event_system.emit_and_await("open_2_new_tab") if action == "execute": - event_system.emit("execute_files") + event_system.emit_and_await("execute_files") if action == "execute_in_terminal": - event_system.emit("execute_files", (True,)) + event_system.emit_and_await("execute_files", (True,)) if action == "rename": - event_system.emit("rename_files") + event_system.emit_and_await("rename_files") if action == "cut": - event_system.emit("cut_files") + event_system.emit_and_await("cut_files") if action == "copy": - event_system.emit("copy_files") + event_system.emit_and_await("copy_files") if action == "copy_path": - event_system.emit("copy_path") + event_system.emit_and_await("copy_path") if action == "copy_name": - event_system.emit("copy_name") + event_system.emit_and_await("copy_name") if action == "copy_path_name": - event_system.emit("copy_path_name") + event_system.emit_and_await("copy_path_name") if action == "paste": - event_system.emit("paste_files") + event_system.emit_and_await("paste_files") if action == "create": - event_system.emit("create_files") + event_system.emit_and_await("create_files") if action in ["save_session", "save_session_as", "load_session"]: - event_system.emit("save_load_session", (action)) + event_system.emit_and_await("save_load_session", (action)) if action == "about_page": - event_system.emit("show_about_page") + event_system.emit_and_await("show_about_page") if action == "io_popup": - event_system.emit("show_io_popup") + event_system.emit_and_await("show_io_popup") if action == "plugins_popup": - event_system.emit("show_plugins_popup") + event_system.emit_and_await("show_plugins_popup") if action == "messages_popup": - event_system.emit("show_messages_popup") + event_system.emit_and_await("show_messages_popup") if action == "ui_debug": - event_system.emit("load_interactive_debug") + event_system.emit_and_await("load_interactive_debug") if action == "tear_down": - event_system.emit("tear_down") + event_system.emit_and_await("tear_down") + + action = None def go_home(self, widget=None, eve=None): @@ -192,11 +194,14 @@ class Controller(UIMixin, SignalsMixins, Controller_Data): def tggl_top_main_menubar(self, widget=None, eve=None): top_main_menubar = self.builder.get_object("top_main_menubar") top_main_menubar.hide() if top_main_menubar.is_visible() else top_main_menubar.show() + top_main_menubar = None def open_terminal(self, widget=None, eve=None): wid, tid = self.fm_controller.get_active_wid_and_tid() tab = self.get_fm_window(wid).get_tab_by_id(tid) tab.execute([f"{tab.terminal_app}"], start_dir=tab.get_current_directory()) + wid, tid, tab = None, None, None + def go_to_path(self, path: str): self.builder.get_object("path_entry").set_text(path) diff --git a/src/solarfm/core/controller_data.py b/src/solarfm/core/controller_data.py index da7b7b0..35e2618 100644 --- a/src/solarfm/core/controller_data.py +++ b/src/solarfm/core/controller_data.py @@ -114,6 +114,9 @@ class Controller_Data: uris.append(fpath) + tab = None + dir = None + return uris diff --git a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py index a30c862..9acce62 100644 --- a/src/solarfm/core/mixins/signals/file_action_signals_mixin.py +++ b/src/solarfm/core/mixins/signals/file_action_signals_mixin.py @@ -68,6 +68,14 @@ class FileActionSignalsMixin: if [wid, tid] in [state.wid, state.tid]: self.set_bottom_labels(tab) + wid, tid = None, None + notebook = None + tab = 
None + icon_grid = None + store = None + _store, tab_widget_id_label = None, None + state = None + return False def do_file_search(self, widget, eve = None): @@ -90,4 +98,4 @@ class FileActionSignalsMixin: items = icon_grid.get_selected_items() if len(items) > 0: - icon_grid.scroll_to_path(items[0], False, 0.5, 0.5) \ No newline at end of file + icon_grid.scroll_to_path(items[0], False, 0.5, 0.5) diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py index c0c9948..1cdcb4f 100644 --- a/src/solarfm/core/mixins/ui/grid_mixin.py +++ b/src/solarfm/core/mixins/ui/grid_mixin.py @@ -45,7 +45,10 @@ class GridMixin: if save_state and not trace_debug: self.fm_controller.save_state() + dir = None + files = None + @daemon_threaded def generate_icons(self, tab, store, dir, files): for i, file in enumerate(files): # GLib.Thread(f"{i}", self.make_and_load_icon, i, store, tab, dir, file[0]) @@ -54,14 +57,16 @@ class GridMixin: def update_store(self, i, store, icon): itr = store.get_iter(i) GLib.idle_add(self.insert_store, store, itr, icon) + itr = None @daemon_threaded def make_and_load_icon(self, i, store, tab, dir, file): icon = tab.create_icon(dir, file) self.update_store(i, store, icon) + icon = None def get_icon(self, tab, dir, file): - return tab.create_icon(dir, file) + tab.create_icon(dir, file) # @daemon_threaded @@ -159,6 +164,7 @@ class GridMixin: store = icon_grid.get_model() tab_label = notebook.get_tab_label(obj).get_children()[0] + icon_grid = None return store, tab_label def get_icon_grid_from_notebook(self, notebook, _name): diff --git a/src/solarfm/core/mixins/ui/tab_mixin.py b/src/solarfm/core/mixins/ui/tab_mixin.py index 656aafe..79a2617 100644 --- a/src/solarfm/core/mixins/ui/tab_mixin.py +++ b/src/solarfm/core/mixins/ui/tab_mixin.py @@ -41,16 +41,26 @@ class TabMixin(GridMixin): notebook.set_tab_reorderable(scroll, True) self.fm_controller.set_wid_and_tid(wid, tab.get_id()) - path_entry.set_text(tab.get_current_directory()) + # path_entry.set_text(tab.get_current_directory()) + event_system.emit("go_to_path", (tab.get_current_directory(),)) # NOTE: Not efficent if I understand how notebook.show_all() notebook.set_current_page(index) ctx = notebook.get_style_context() ctx.add_class("notebook-unselected-focus") self.load_store(tab, store) - self.set_window_title() + event_system.emit("set_window_title", (tab.get_current_directory(),)) self.set_file_watcher(tab) + tab_widget = None + scroll, store = None, None + index = None + notebook = None + path_entry = None + tab = None + ctx = None + + def get_tab_widget(self, tab): tab_widget = self.create_tab_widget() tab_widget.tab = tab @@ -63,6 +73,7 @@ class TabMixin(GridMixin): def close_tab(self, button, eve = None): notebook = button.get_parent().get_parent() if notebook.get_n_pages() == 1: + notebook = None return tab_box = button.get_parent() @@ -79,25 +90,35 @@ class TabMixin(GridMixin): self.builder.dereference_object(f"{wid}|{tid}|icon_grid") self.builder.dereference_object(f"{wid}|{tid}") - icon_grid.set_model(None) + iter = store.get_iter_first() + while iter: + next_iter = store.iter_next(iter) + store.unref_node(iter) + iter = next_iter + + store.clear() store.run_dispose() + + icon_grid.set_model(None) icon_grid.run_dispose() scroll.run_dispose() tab_box.run_dispose() - del store - del icon_grid - del scroll - del tab_box - del watcher - del tab + iter = None + wid, tid = None, None + store = None + icon_grid = None + scroll = None + tab_box = None + watcher = None + tab = None + 
notebook = None if not settings_manager.is_trace_debug(): self.fm_controller.save_state() self.set_window_title() - gc.collect() # NOTE: Not actually getting called even tho set in the glade file... @@ -121,6 +142,11 @@ class TabMixin(GridMixin): if not settings_manager.is_trace_debug(): self.fm_controller.save_state() + wid, tid = None, None + window = None + tab = None + + def on_tab_switch_update(self, notebook, content = None, index = None): self.selected_files.clear() wid, tid = content.get_children()[0].get_name().split("|") @@ -129,6 +155,8 @@ class TabMixin(GridMixin): self.set_path_text(wid, tid) self.set_window_title() + wid, tid = None, None + def get_id_from_tab_box(self, tab_box): return tab_box.tab.get_id() @@ -146,6 +174,8 @@ class TabMixin(GridMixin): state.tab.load_directory() self.load_store(state.tab, state.store) + state = None + def update_tab(self, tab_label, tab, store, wid, tid): self.load_store(tab, store) self.set_path_text(wid, tid) @@ -186,16 +216,38 @@ class TabMixin(GridMixin): if isinstance(focused_obj, Gtk.Entry): self.process_path_menu(widget, tab, dir) + action = None + store = None + if path.endswith(".") or path == dir: + tab_label = None + notebook = None + wid, tid = None, None + path = None + tab = None return if not tab.set_path(path): + tab_label = None + notebook = None + wid, tid = None, None + path = None + tab = None return icon_grid = self.get_icon_grid_from_notebook(notebook, f"{wid}|{tid}") icon_grid.clear_and_set_new_store() self.update_tab(tab_label, tab, icon_grid.get_store(), wid, tid) + action = None + wid, tid = None, None + notebook = None + store, tab_label = None, None + path = None + tab = None + icon_grid = None + + def process_path_menu(self, gtk_entry, tab, dir): path_menu_buttons = self.builder.get_object("path_menu_buttons") query = gtk_entry.get_text().replace(dir, "") @@ -212,6 +264,10 @@ class TabMixin(GridMixin): path_menu_buttons.add(button) show_path_menu = True + path_menu_buttons = None + query = None + files = None + if not show_path_menu: event_system.emit("hide_path_menu") else: @@ -242,10 +298,17 @@ class TabMixin(GridMixin): path_entry.set_position(-1) event_system.emit("hide_path_menu") + state = None + path = None + path_entry = None + def show_hide_hidden_files(self): wid, tid = self.fm_controller.get_active_wid_and_tid() tab = self.get_fm_window(wid).get_tab_by_id(tid) tab.set_hiding_hidden(not tab.is_hiding_hidden()) tab.load_directory() - self.builder.get_object("refresh_tab").released() \ No newline at end of file + self.builder.get_object("refresh_tab").released() + + wid, tid = None, None + tab = None diff --git a/src/solarfm/core/mixins/ui/window_mixin.py b/src/solarfm/core/mixins/ui/window_mixin.py index 2fc76db..5d6aea7 100644 --- a/src/solarfm/core/mixins/ui/window_mixin.py +++ b/src/solarfm/core/mixins/ui/window_mixin.py @@ -46,11 +46,19 @@ class WindowMixin(TabMixin): self.window.set_title(f"{app_name} ~ {dir}") self.set_bottom_labels(tab) + wid, tid = None, None + notebook = None + tab = None + dir = None + def set_path_text(self, wid, tid): path_entry = self.builder.get_object("path_entry") tab = self.get_fm_window(wid).get_tab_by_id(tid) path_entry.set_text(tab.get_current_directory()) + path_entry = None + tab = None + def grid_set_selected_items(self, icons_grid): new_items = icons_grid.get_selected_items() items_size = len(new_items) @@ -164,6 +172,12 @@ class WindowMixin(TabMixin): if target not in current: self.fm_controller.set_wid_and_tid(wid, tid) + current = None + target = None + wid, 
tid = None, None + store = None + path_at_loc = None + def grid_on_drag_data_received(self, widget, drag_context, x, y, data, info, time): if info == 80: @@ -183,4 +197,4 @@ class WindowMixin(TabMixin): Gtk.drag_finish(drag_context, False, False, time) def create_new_tab_notebook(self, widget=None, wid=None, path=None): - self.create_tab(wid, None, path) \ No newline at end of file + self.create_tab(wid, None, path) diff --git a/src/solarfm/core/ui_mixin.py b/src/solarfm/core/ui_mixin.py index 0c04c7b..22842c9 100644 --- a/src/solarfm/core/ui_mixin.py +++ b/src/solarfm/core/ui_mixin.py @@ -35,7 +35,7 @@ class UIMixin(PaneMixin, WindowMixin): nickname = session["window"]["Nickname"] tabs = session["window"]["tabs"] isHidden = True if session["window"]["isHidden"] == "True" else False - event_system.emit("load_files_view_state", (nickname, tabs, isHidden)) + event_system.emit_and_await("load_files_view_state", (nickname, tabs, isHidden)) def _focus_last_visible_notebook(self, icon_grid): @@ -47,6 +47,8 @@ class UIMixin(PaneMixin, WindowMixin): icon_grid.event(Gdk.Event().new(type = Gdk.EventType.BUTTON_RELEASE)) + window = None + def _current_loading_process(self, session_json = None): if session_json: for j, value in enumerate(session_json): diff --git a/src/solarfm/core/widgets/context_menu_widget.py b/src/solarfm/core/widgets/context_menu_widget.py index bbab184..70745e7 100644 --- a/src/solarfm/core/widgets/context_menu_widget.py +++ b/src/solarfm/core/widgets/context_menu_widget.py @@ -16,10 +16,7 @@ class ContextMenuWidget(Gtk.Menu): def __init__(self): super(ContextMenuWidget, self).__init__() - self.builder = settings_manager.get_builder() - self._builder = Gtk.Builder() - self._context_menu_data = settings_manager.get_context_menu_data() - self._window = settings_manager.get_main_window() + self._builder = Gtk.Builder() self._setup_styling() self._setup_signals() @@ -32,14 +29,48 @@ class ContextMenuWidget(Gtk.Menu): def _setup_signals(self): event_system.subscribe("show_context_menu", self.show_context_menu) event_system.subscribe("hide_context_menu", self.hide_context_menu) - settings_manager.register_signals_to_builder([self,], self._builder) + settings_manager.register_signals_to_builder(self, self._builder) def _load_widgets(self): + self.builder = settings_manager.get_builder() + self._window = settings_manager.get_main_window() + self._context_menu_data = settings_manager.get_context_menu_data() + + self.builder.expose_object("context_menu", self) + self.build_context_menu() def _emit(self, menu_item, type): event_system.emit("do_action_from_menu_controls", type) + + def build_context_menu(self) -> None: + data = self._context_menu_data + plugins_entry = None + + for key, value in data.items(): + entry = self.make_menu_item(key, value) + self.append(entry) + if key == "Plugins": + plugins_entry = entry + + self.attach_to_widget(self._window, None) + self.show_all() + + if plugins_entry: + self.builder.expose_object("context_menu_plugins", plugins_entry.get_submenu()) + + def make_menu_item(self, label, data) -> Gtk.MenuItem: + if isinstance(data, dict): + return self.make_submenu(label, data) + elif isinstance(data, list): + entry = Gtk.ImageMenuItem(label) + icon = getattr(Gtk, f"{data[0]}") + entry.set_image( Gtk.Image(stock=icon) ) + entry.set_always_show_image(True) + entry.connect("activate", self._emit, (data[1])) + return entry + def make_submenu(self, name, data): menu = Gtk.Menu() menu_item = Gtk.MenuItem(name) @@ -57,36 +88,9 @@ class ContextMenuWidget(Gtk.Menu): 
menu_item.set_submenu(menu) return menu_item - def make_menu_item(self, label, data) -> Gtk.MenuItem: - if isinstance(data, dict): - return self.make_submenu(label, data) - elif isinstance(data, list): - entry = Gtk.ImageMenuItem(label) - icon = getattr(Gtk, f"{data[0]}") - entry.set_image( Gtk.Image(stock=icon) ) - entry.set_always_show_image(True) - entry.connect("activate", self._emit, (data[1])) - return entry - def build_context_menu(self) -> None: - data = self._context_menu_data - plugins_entry = None - - for key, value in data.items(): - entry = self.make_menu_item(key, value) - self.append(entry) - if key == "Plugins": - plugins_entry = entry - - self.attach_to_widget(self._window, None) - self.builder.expose_object("context_menu", self) - self.show_all() - - if plugins_entry: - self.builder.expose_object("context_menu_plugins", plugins_entry.get_submenu()) - - def show_context_menu(self, widget=None, eve=None): + def show_context_menu(self, widget = None, eve = None): self.builder.get_object("context_menu").popup_at_pointer(None) - def hide_context_menu(self, widget=None, eve=None): + def hide_context_menu(self, widget = None, eve = None): self.builder.get_object("context_menu").popdown() diff --git a/src/solarfm/core/widgets/dialogs/about_widget.py b/src/solarfm/core/widgets/dialogs/about_widget.py index b5535d9..6f661fd 100644 --- a/src/solarfm/core/widgets/dialogs/about_widget.py +++ b/src/solarfm/core/widgets/dialogs/about_widget.py @@ -39,11 +39,10 @@ class AboutWidget: self.about_page = self._builder.get_object("about_page") builder.expose_object(f"about_page", self.about_page) - - def show_about_page(self, widget=None, eve=None): + def show_about_page(self, widget = None, eve = None): response = self.about_page.run() if response in [Gtk.ResponseType.CANCEL, Gtk.ResponseType.DELETE_EVENT]: self.hide_about_page() - def hide_about_page(self, widget=None, eve=None): + def hide_about_page(self, widget = None, eve = None): self.about_page.hide() diff --git a/src/solarfm/core/widgets/files_view/grid_mixin.py b/src/solarfm/core/widgets/files_view/grid_mixin.py index a6b9ee6..083fb81 100644 --- a/src/solarfm/core/widgets/files_view/grid_mixin.py +++ b/src/solarfm/core/widgets/files_view/grid_mixin.py @@ -31,7 +31,6 @@ class GridMixin: # return - dir = tab.get_current_directory() files = tab.get_files() @@ -46,32 +45,56 @@ class GridMixin: if save_state and not trace_debug: self.fm_controller.save_state() + dir = None + files = None @daemon_threaded def generate_icons(self, tab, store, dir, files): - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = None + for i, file in enumerate(files): + # GLib.Thread(f"{i}", self.make_and_load_icon, i, store, tab, dir, file[0]) + self.make_and_load_icon( i, store, tab, dir, file[0]) - if loop and loop.is_running(): - loop = asyncio.get_event_loop() - loop.create_task( self.create_icons(tab, store, dir, files) ) - else: - asyncio.run( self.create_icons(tab, store, dir, files) ) - - async def create_icons(self, tab, store, dir, files): - icons = [self.get_icon(tab, dir, file[0]) for file in files] - data = await asyncio.gather(*icons) - tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] - asyncio.gather(*tasks) - - async def update_store(self, i, store, icon): + def update_store(self, i, store, icon): itr = store.get_iter(i) GLib.idle_add(self.insert_store, store, itr, icon) + itr = None + + @daemon_threaded + def make_and_load_icon(self, i, store, tab, dir, file): + icon = tab.create_icon(dir, file) 
+ self.update_store(i, store, icon) + icon = None + + def get_icon(self, tab, dir, file): + tab.create_icon(dir, file) + + + # @daemon_threaded + # def generate_icons(self, tab, store, dir, files): + # try: + # loop = asyncio.get_running_loop() + # except RuntimeError: + # loop = None + + # if loop and loop.is_running(): + # loop = asyncio.get_event_loop() + # loop.create_task( self.create_icons(tab, store, dir, files) ) + # else: + # asyncio.run( self.create_icons(tab, store, dir, files) ) + + # async def create_icons(self, tab, store, dir, files): + # icons = [self.get_icon(tab, dir, file[0]) for file in files] + # data = await asyncio.gather(*icons) + # tasks = [self.update_store(i, store, icon) for i, icon in enumerate(data)] + # asyncio.gather(*tasks) + + # async def update_store(self, i, store, icon): + # itr = store.get_iter(i) + # GLib.idle_add(self.insert_store, store, itr, icon) + + # async def get_icon(self, tab, dir, file): + # return tab.create_icon(dir, file) - async def get_icon(self, tab, dir, file): - return tab.create_icon(dir, file) def insert_store(self, store, itr, icon): store.set_value(itr, 0, icon) @@ -141,4 +164,11 @@ class GridMixin: store = icon_grid.get_model() tab_label = notebook.get_tab_label(obj).get_children()[0] - return store, tab_label \ No newline at end of file + return store, tab_label + + def get_icon_grid_from_notebook(self, notebook, _name): + for obj in notebook.get_children(): + icon_grid = obj.get_children()[0] + name = icon_grid.get_name() + if name == _name: + return icon_grid diff --git a/src/solarfm/core/widgets/files_view/tab_mixin.py b/src/solarfm/core/widgets/files_view/tab_mixin.py index 4bb474c..8a49f94 100644 --- a/src/solarfm/core/widgets/files_view/tab_mixin.py +++ b/src/solarfm/core/widgets/files_view/tab_mixin.py @@ -41,18 +41,26 @@ class TabMixin(GridMixin): notebook.set_tab_reorderable(scroll, True) self.fm_controller.set_wid_and_tid(wid, tab.get_id()) - event_system.emit("go_to_path", (tab.get_current_directory(),)) # NOTE: Not efficent if I understand how # path_entry.set_text(tab.get_current_directory()) + event_system.emit("go_to_path", (tab.get_current_directory(),)) # NOTE: Not efficent if I understand how notebook.show_all() notebook.set_current_page(index) ctx = notebook.get_style_context() ctx.add_class("notebook-unselected-focus") self.load_store(tab, store) - # self.set_window_title() event_system.emit("set_window_title", (tab.get_current_directory(),)) self.set_file_watcher(tab) + tab_widget = None + scroll, store = None, None + index = None + notebook = None + # path_entry = None + tab = None + ctx = None + + def get_tab_widget(self, tab): tab_widget = self.create_tab_widget() tab_widget.tab_id = tab.get_id() @@ -65,6 +73,7 @@ class TabMixin(GridMixin): def close_tab(self, button, eve = None): notebook = button.get_parent().get_parent() if notebook.get_n_pages() == 1: + notebook = None return tab_box = button.get_parent() @@ -81,19 +90,30 @@ class TabMixin(GridMixin): self.builder.dereference_object(f"{wid}|{tid}|icon_grid") self.builder.dereference_object(f"{wid}|{tid}") - icon_grid.set_model(None) + iter = store.get_iter_first() + while iter: + next_iter = store.iter_next(iter) + store.unref_node(iter) + iter = next_iter + + store.clear() store.run_dispose() + + icon_grid.set_model(None) icon_grid.run_dispose() scroll.run_dispose() tab_box.run_dispose() - del store - del icon_grid - del scroll - del tab_box - del watcher - del tab + iter = None + wid, tid = None, None + store = None + icon_grid = None + scroll = 
None + tab_box = None + watcher = None + tab = None + notebook = None if not settings_manager.is_trace_debug(): self.fm_controller.save_state() @@ -123,6 +143,11 @@ class TabMixin(GridMixin): if not settings_manager.is_trace_debug(): self.fm_controller.save_state() + wid, tid = None, None + window = None + tab = None + + def on_tab_switch_update(self, notebook, content = None, index = None): self.selected_files.clear() wid, tid = content.get_children()[0].tab.get_name().split("|") @@ -130,6 +155,8 @@ class TabMixin(GridMixin): self.set_path_text(wid, tid) self.set_window_title() + wid, tid = None, None + def get_id_from_tab_box(self, tab_box): return tab_box.tab.get_id() @@ -147,6 +174,8 @@ class TabMixin(GridMixin): state.tab.load_directory() self.load_store(state.tab, state.store) + state = None + def update_tab(self, tab_label, tab, store, wid, tid): self.load_store(tab, store) self.set_path_text(wid, tid) @@ -187,16 +216,38 @@ class TabMixin(GridMixin): if isinstance(focused_obj, Gtk.Entry): self.process_path_menu(widget, tab, dir) + action = None + store = None + if path.endswith(".") or path == dir: + tab_label = None + notebook = None + wid, tid = None, None + path = None + tab = None return if not tab.set_path(path): + tab_label = None + notebook = None + wid, tid = None, None + path = None + tab = None return icon_grid = self.get_icon_grid_from_notebook(notebook, f"{wid}|{tid}") icon_grid.clear_and_set_new_store() self.update_tab(tab_label, tab, store, wid, tid) + action = None + wid, tid = None, None + notebook = None + store, tab_label = None, None + path = None + tab = None + icon_grid = None + + def process_path_menu(self, gtk_entry, tab, dir): path_menu_buttons = self.builder.get_object("path_menu_buttons") query = gtk_entry.get_text().replace(dir, "") @@ -213,6 +264,10 @@ class TabMixin(GridMixin): path_menu_buttons.add(button) show_path_menu = True + path_menu_buttons = None + query = None + files = None + if not show_path_menu: event_system.emit("hide_path_menu") else: @@ -244,9 +299,16 @@ class TabMixin(GridMixin): path_entry.set_position(-1) event_system.emit("hide_path_menu") + state = None + path = None + path_entry = None + def show_hide_hidden_files(self): wid, tid = self.fm_controller.get_active_wid_and_tid() tab = self.get_fm_window(wid).get_tab_by_id(tid) tab.set_hiding_hidden(not tab.is_hiding_hidden()) tab.load_directory() - self.builder.get_object("refresh_tab").released() \ No newline at end of file + self.builder.get_object("refresh_tab").released() + + wid, tid = None, None + tab = None diff --git a/src/solarfm/core/widgets/files_view/window_mixin.py b/src/solarfm/core/widgets/files_view/window_mixin.py index a8eb8f5..23a12ab 100644 --- a/src/solarfm/core/widgets/files_view/window_mixin.py +++ b/src/solarfm/core/widgets/files_view/window_mixin.py @@ -42,10 +42,17 @@ class WindowMixin(TabMixin): event_system.emit("set_window_title", (dir,)) self.set_bottom_labels(tab) + wid, tid = None, None + notebook = None + tab = None + dir = None + def set_path_text(self, wid, tid): tab = self.get_fm_window(wid).get_tab_by_id(tid) event_system.emit("go_to_path", (tab.get_current_directory(),)) + tab = None + def grid_set_selected_items(self, icons_grid): new_items = icons_grid.get_selected_items() items_size = len(new_items) @@ -160,6 +167,12 @@ class WindowMixin(TabMixin): if target not in current: self.fm_controller.set_wid_and_tid(wid, tid) + current = None + target = None + wid, tid = None, None + store = None + path_at_loc = None + def 
grid_on_drag_data_received(self, widget, drag_context, x, y, data, info, time): if info == 80: @@ -179,4 +192,4 @@ class WindowMixin(TabMixin): Gtk.drag_finish(drag_context, False, False, time) def create_new_tab_notebook(self, widget=None, wid=None, path=None): - self.create_tab(wid, None, path) \ No newline at end of file + self.create_tab(wid, None, path) diff --git a/src/solarfm/core/widgets/icon_grid_widget.py b/src/solarfm/core/widgets/icon_grid_widget.py index 2326912..e758ee2 100644 --- a/src/solarfm/core/widgets/icon_grid_widget.py +++ b/src/solarfm/core/widgets/icon_grid_widget.py @@ -77,9 +77,18 @@ class IconGridWidget(Gtk.IconView): def clear_and_set_new_store(self): store = self.get_model() if store: + iter = store.get_iter_first() + while iter: + next_iter = store.iter_next(iter) + store.unref_node(iter) + iter = next_iter + + store.clear() store.run_dispose() + store = None self.set_model(None) store = Gtk.ListStore(GdkPixbuf.Pixbuf or GdkPixbuf.PixbufAnimation or None, str or None) # store = Gtk.ListStore(Gtk.DirectoryList) self.set_model(store) + store = None diff --git a/src/solarfm/core/widgets/popups/message_popup_widget.py b/src/solarfm/core/widgets/popups/message_popup_widget.py index f3517a4..d8bf706 100644 --- a/src/solarfm/core/widgets/popups/message_popup_widget.py +++ b/src/solarfm/core/widgets/popups/message_popup_widget.py @@ -61,7 +61,6 @@ class MessagePopupWidget(Gtk.Popover): scroll_window.set_hexpand(True) vbox.set_orientation(Gtk.Orientation.VERTICAL) - self.builder.expose_object(f"message_popup_widget", self) self.builder.expose_object(f"message_text_view", message_text_view) scroll_window.add(message_text_view) @@ -126,4 +125,4 @@ class MessagePopupWidget(Gtk.Popover): with open(target, "w") as f: f.write(text) - save_location_prompt.destroy() \ No newline at end of file + save_location_prompt.destroy() diff --git a/src/solarfm/utils/ipc_server.py b/src/solarfm/utils/ipc_server.py index eda6dab..8fecbf2 100644 --- a/src/solarfm/utils/ipc_server.py +++ b/src/solarfm/utils/ipc_server.py @@ -60,7 +60,11 @@ class IPCServer(Singleton): try: conn = listener.accept() start_time = time.perf_counter() + GLib.idle_add(self._handle_ipc_message, *(conn, start_time,)) + + conn = None + start_time = None except Exception as e: logger.debug( repr(e) ) @@ -74,19 +78,24 @@ class IPCServer(Singleton): if "FILE|" in msg: file = msg.split("FILE|")[1].strip() if file: - event_system.emit("handle_file_from_ipc", file) + event_system.emit_and_await("handle_file_from_ipc", file) + msg = None + file = None conn.close() break if msg in ['close connection', 'close server']: + msg = None conn.close() break # NOTE: Not perfect but insures we don't lock up the connection for too long. end_time = time.perf_counter() if (end_time - start_time) > self._ipc_timeout: + msg = None + end_time = None conn.close() break @@ -126,4 +135,4 @@ class IPCServer(Singleton): logger.error("IPC Socket no longer valid.... 
Removing.") os.unlink(self._ipc_address) except Exception as e: - logger.error( repr(e) ) \ No newline at end of file + logger.error( repr(e) ) diff --git a/src/solarfm/utils/settings_manager/manager.py b/src/solarfm/utils/settings_manager/manager.py index 0c431bb..2be6222 100644 --- a/src/solarfm/utils/settings_manager/manager.py +++ b/src/solarfm/utils/settings_manager/manager.py @@ -171,4 +171,4 @@ class SettingsManager(StartCheckMixin, Singleton): def save_settings(self): with open(self._CONFIG_FILE, 'w') as outfile: - json.dump(self.settings.as_dict(), outfile, separators=(',', ':'), indent=4) \ No newline at end of file + json.dump(self.settings.as_dict(), outfile, separators=(',', ':'), indent=4) diff --git a/user_config/bin/solarfm b/user_config/bin/solarfm index e833ed3..cb94caf 100755 --- a/user_config/bin/solarfm +++ b/user_config/bin/solarfm @@ -20,6 +20,9 @@ function main() { files[$size]="${target}" done + G_SLICE=always-malloc + G_DEBUG=gc-friendly + GOBJECT_DEBUG=instance-count python /opt/solarfm.zip "${files[@]}" } main "$@"; diff --git a/user_config/usr/share/solarfm/ui_widgets/about_ui.glade b/user_config/usr/share/solarfm/ui_widgets/about_ui.glade index f559f6c..db5df48 100644 --- a/user_config/usr/share/solarfm/ui_widgets/about_ui.glade +++ b/user_config/usr/share/solarfm/ui_widgets/about_ui.glade @@ -8,7 +8,7 @@ False 5 center-on-parent - ../icons/solarfm.png + ../icons/solarfm-64x64.png dialog True True @@ -19,6 +19,7 @@ Copyright (C) 2021 GPL2 by ITDominator https://code.itdominator.com/itdominator/SolarFM + ITDominator SolarFM - Copyright (C) 2021 ITDominator GPL2 @@ -367,7 +368,9 @@ Public License instead of this License. SolarFM is developed on Atom, git, and using Python 3+ with Gtk GObject introspection. - translator-credits + ... + ... + ... ../icons/solarfm-64x64.png True custom -- 2.39.5 From 35456f2bca22b576c0dc7a627bbf20bee28338ed Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Fri, 26 Jul 2024 19:52:00 -0500 Subject: [PATCH 17/28] pyright changes, start.sh changes, misc. --- README.md | 4 ++-- pyrightconfig.json | 4 +++- src/solarfm/core/window.py | 3 +-- user_config/bin/solarfm | 6 +++--- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 1931e24..7a316dd 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Additionally, if not building a .deb then just move the contents of user_config Copy the share/solarfm folder to your user .config/ directory too. `pyrightconfig.json` -

The pyrightconfig file needs to stay on same level as the .git folders in order to have settings detected when using pyright with lsp functionality.

+

The pyrightconfig file needs to stay on the same level as the .git folders in order to have its settings detected when using pyright with LSP functionality. "pyrightconfig.json" can prompt IDEs such as Zed on which settings to use and where imports are located; see the venvPath and venv keys. "venvPath" is the parent path of "venv", and "venv" is just the name of the folder under that parent path that holds the Python-created virtual environment (see the minimal example below).
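For reference, a minimal "pyrightconfig.json" pairing these two keys could look like the snippet below. The values shown are an assumption for a virtual environment created at the repository root as ".venv" (they mirror the pyrightconfig.json change later in this series); point them at wherever your venv actually lives.

```
{
    "venvPath": ".",
    "venv": ".venv"
}
```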

Install Setup
``` @@ -32,4 +32,4 @@ A selected file in the active quad-pane will move to trash since it is the defau ![1 SolarFM single pane. ](images/pic1.png) ![2 SolarFM double pane. ](images/pic2.png) ![3 SolarFM triple pane. ](images/pic3.png) -![4 SolarFM quad pane. ](images/pic4.png) \ No newline at end of file +![4 SolarFM quad pane. ](images/pic4.png) diff --git a/pyrightconfig.json b/pyrightconfig.json index 6c993a3..4d8b8ec 100644 --- a/pyrightconfig.json +++ b/pyrightconfig.json @@ -7,5 +7,7 @@ { "root": "./src/versions/solarfm-0.0.1/solarfm" } - ] + ], + "venvPath": ".", + "venv": ".venv" } diff --git a/src/solarfm/core/window.py b/src/solarfm/core/window.py index d9b8807..6d3f3f5 100644 --- a/src/solarfm/core/window.py +++ b/src/solarfm/core/window.py @@ -1,5 +1,4 @@ # Python imports -import time import signal # Lib imports @@ -123,4 +122,4 @@ class Window(Gtk.ApplicationWindow): Gtk.main_quit() def main(self): - Gtk.main() \ No newline at end of file + Gtk.main() diff --git a/user_config/bin/solarfm b/user_config/bin/solarfm index cb94caf..60cf37e 100755 --- a/user_config/bin/solarfm +++ b/user_config/bin/solarfm @@ -20,9 +20,9 @@ function main() { files[$size]="${target}" done - G_SLICE=always-malloc - G_DEBUG=gc-friendly - GOBJECT_DEBUG=instance-count + export G_SLICE=always-malloc + export G_DEBUG=gc-friendly + export GOBJECT_DEBUG=instance-count python /opt/solarfm.zip "${files[@]}" } main "$@"; -- 2.39.5 From 3a2e8eeb089ae5e80e9225d44e2d610e9fc4953c Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Wed, 11 Sep 2024 02:11:00 -0500 Subject: [PATCH 18/28] Attempted further memory leak prevention; fixed bugs from moving to python 12; misc. --- plugins/searcher/widgets/grep_preview_widget.py | 4 ++-- pyrightconfig.json | 4 ++-- src/solarfm/core/fs_actions/handler_mixin.py | 16 ++++++++++------ src/solarfm/core/mixins/ui/grid_mixin.py | 3 ++- src/solarfm/core/mixins/ui/tab_mixin.py | 9 +++++---- src/solarfm/core/mixins/ui/window_mixin.py | 6 +++++- src/solarfm/core/widgets/io_widget.py | 17 +++++++++-------- .../widgets/popups/path_menu_popup_widget.py | 17 +++++++++-------- .../shellfm/windows/tabs/utils/launcher.py | 2 +- user_config/bin/solarfm | 3 ++- user_config/usr/bin/solarfm | 17 ----------------- 11 files changed, 47 insertions(+), 51 deletions(-) delete mode 100755 user_config/usr/bin/solarfm diff --git a/plugins/searcher/widgets/grep_preview_widget.py b/plugins/searcher/widgets/grep_preview_widget.py index 79a34d6..09e778a 100644 --- a/plugins/searcher/widgets/grep_preview_widget.py +++ b/plugins/searcher/widgets/grep_preview_widget.py @@ -48,7 +48,7 @@ class GrepPreviewWidget(Gtk.Box): return bytes(f"\n{target}", "utf-8").decode("utf-8") def make_utf8_line_highlight(self, buffer, itr, i, color, target, query): - parts = re.split(r"(" + query + ")(?i)", target.replace("\n", "")) + parts = re.split(r"(?i)(" + query + ")", target.replace("\n", "")) for part in parts: itr = buffer.get_end_iter() @@ -57,4 +57,4 @@ class GrepPreviewWidget(Gtk.Box): else: new_s = f"{part}" _part = bytes(new_s, "utf-8").decode("utf-8") - buffer.insert_markup(itr, _part, len(_part)) + buffer.insert_markup(itr, _part, len(_part)) \ No newline at end of file diff --git a/pyrightconfig.json b/pyrightconfig.json index 4d8b8ec..51882b6 100644 --- a/pyrightconfig.json +++ b/pyrightconfig.json @@ -8,6 +8,6 @@ "root": "./src/versions/solarfm-0.0.1/solarfm" } ], - "venvPath": ".", - "venv": ".venv" + "venvPath": "/home/abaddon/Portable_Apps/py-venvs/pylsp-venv/", + "venv": "venv" } diff 
diff --git a/src/solarfm/core/fs_actions/handler_mixin.py b/src/solarfm/core/fs_actions/handler_mixin.py
index 73705cd..b2d2352 100644
--- a/src/solarfm/core/fs_actions/handler_mixin.py
+++ b/src/solarfm/core/fs_actions/handler_mixin.py
@@ -110,26 +110,30 @@ class HandlerMixin:
                     tab.move_file(fPath, tPath)
                 else:
                     io_widget = IOWidget(action, file)
+                    io_list = self._builder.get_object("io_list")
+
+                    io_list.add(io_widget)
+                    io_list.show_all()

                     if action == "copy":
                         file.copy_async(destination=target,
                                         flags=Gio.FileCopyFlags.BACKUP,
-                                        io_priority=98,
+                                        io_priority=45,
                                         cancellable=io_widget.cancle_eve,
                                         progress_callback=io_widget.update_progress,
                                         callback=io_widget.finish_callback)

-                        self._builder.get_object("io_list").add(io_widget)

                     if action == "move" or action == "rename":
                         file.move_async(destination=target,
                                         flags=Gio.FileCopyFlags.BACKUP,
-                                        io_priority=98,
+                                        io_priority=45,
                                         cancellable=io_widget.cancle_eve,
-                                        progress_callback=None, # NOTE: progress_callback here causes seg fault when set
+                                        progress_callback=None,
                                         callback=io_widget.finish_callback)

-                        self._builder.get_object("io_list").add(io_widget)
+                    io_widget = None
+                    io_list = None

        except GObject.GError as e:
            raise OSError(e)

@@ -162,4 +166,4 @@ class HandlerMixin:
            target = Gio.File.new_for_path(f"{base_path}/{file_name}-copy{i}{extension}")
            i += 1

-        return target
+        return target
\ No newline at end of file
diff --git a/src/solarfm/core/mixins/ui/grid_mixin.py b/src/solarfm/core/mixins/ui/grid_mixin.py
index 1cdcb4f..2b3cf94 100644
--- a/src/solarfm/core/mixins/ui/grid_mixin.py
+++ b/src/solarfm/core/mixins/ui/grid_mixin.py
@@ -56,8 +56,9 @@ class GridMixin:

     def update_store(self, i, store, icon):
         itr = store.get_iter(i)
-        GLib.idle_add(self.insert_store, store, itr, icon)
+        GLib.idle_add(self.insert_store, store, itr, icon.copy())
         itr = None
+        del icon

     @daemon_threaded
     def make_and_load_icon(self, i, store, tab, dir, file):
diff --git a/src/solarfm/core/mixins/ui/tab_mixin.py b/src/solarfm/core/mixins/ui/tab_mixin.py
index 79a2617..70e53fe 100644
--- a/src/solarfm/core/mixins/ui/tab_mixin.py
+++ b/src/solarfm/core/mixins/ui/tab_mixin.py
@@ -264,15 +264,16 @@ class TabMixin(GridMixin):
                 path_menu_buttons.add(button)
                 show_path_menu = True

-        path_menu_buttons = None
-        query = None
-        files = None
+        query = None
+        files = None

         if not show_path_menu:
+            path_menu_buttons = None
             event_system.emit("hide_path_menu")
         else:
             event_system.emit("show_path_menu")
             buttons = path_menu_buttons.get_children()
+            path_menu_buttons = None

             if len(buttons) == 1:
                 self.slowed_focus(buttons[0])
@@ -311,4 +312,4 @@ class TabMixin(GridMixin):
         self.builder.get_object("refresh_tab").released()

         wid, tid = None, None
-        tab = None
+        tab = None
\ No newline at end of file
diff --git a/src/solarfm/core/mixins/ui/window_mixin.py b/src/solarfm/core/mixins/ui/window_mixin.py
index 5d6aea7..618b9bc 100644
--- a/src/solarfm/core/mixins/ui/window_mixin.py
+++ b/src/solarfm/core/mixins/ui/window_mixin.py
@@ -130,6 +130,10 @@ class WindowMixin(TabMixin):
                     self.update_tab(tab_label, state.tab, state.icon_grid.get_store(), state.wid, state.tid)
                 else:
                     event_system.emit("open_files")
+
+            state = None
+            notebook = None
+            tab_label = None
         except WindowException as e:
             traceback.print_exc()
             self.display_message(settings.theming.error_color, f"{repr(e)}")
@@ -197,4 +201,4 @@ class WindowMixin(TabMixin):
         Gtk.drag_finish(drag_context, False, False, time)

     def create_new_tab_notebook(self, widget=None, wid=None, path=None):
-        self.create_tab(wid, None, path)
+        self.create_tab(wid, None, path)
\ No newline at end of file
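The grid_mixin hunk above pushes each freshly built icon back to the GTK main loop through `GLib.idle_add`, handing over a `copy()` and dropping the worker-side reference, which appears to be the memory-leak angle named in the commit subject. A minimal sketch of that hand-off, assuming a plain daemon thread and a hypothetical `make_icon()` pixbuf factory in place of the project's tab and store plumbing:

```python
import threading

from gi.repository import GLib


def insert_store(store, itr, icon):
    # Runs on the GTK main loop via idle_add, where the model is safe to touch.
    store.set_value(itr, 0, icon)
    return False  # one-shot idle callback


def load_icons(store, paths, make_icon):
    # Worker thread: build pixbufs off the main loop, queue copies, drop refs early.
    for i, path in enumerate(paths):
        icon = make_icon(path)  # hypothetical pixbuf factory
        GLib.idle_add(insert_store, store, store.get_iter(i), icon.copy())
        del icon


def start_icon_load(store, paths, make_icon):
    threading.Thread(target=load_icons, args=(store, paths, make_icon),
                     daemon=True).start()
```

Returning `False` from the idle callback keeps it one-shot, and copying before `del icon` lets the worker release its pixbuf as soon as the update is queued rather than holding it until the main loop gets around to it.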
diff --git a/src/solarfm/core/widgets/io_widget.py b/src/solarfm/core/widgets/io_widget.py
index c0bcce1..ab8bf41 100644
--- a/src/solarfm/core/widgets/io_widget.py
+++ b/src/solarfm/core/widgets/io_widget.py
@@ -65,19 +65,20 @@ class IOWidget(Gtk.Box):
         logger.info(f"Canceling: [{self._action}] of {self._basename} ...")
         eve.cancel()

-    def update_progress(self, current, total, eve=None):
+    def update_progress(self, current, total, eve = None):
         self.progress.set_fraction(current/total)

-    def finish_callback(self, file, task=None, eve=None):
+    def finish_callback(self, file, task = None, eve = None):
+        if task.had_error():
+            logger.info(f"{self._action} of {self._basename} cancelled/failed...")
+            return
+
         if self._action == "move" or self._action == "rename":
             status = self._file.move_finish(task)

         if self._action == "copy":
             status = self._file.copy_finish(task)

-        if status:
-            self.delete_self()
-        else:
-            logger.info(f"{self._action} of {self._basename} failed...")
+        self.delete_self()

-    def delete_self(self, widget=None, eve=None):
-        self.get_parent().remove(self)
+    def delete_self(self, widget = None, eve = None):
+        self.get_parent().remove(self)
\ No newline at end of file
diff --git a/src/solarfm/core/widgets/popups/path_menu_popup_widget.py b/src/solarfm/core/widgets/popups/path_menu_popup_widget.py
index e3359bf..3f2c507 100644
--- a/src/solarfm/core/widgets/popups/path_menu_popup_widget.py
+++ b/src/solarfm/core/widgets/popups/path_menu_popup_widget.py
@@ -29,16 +29,16 @@ class PathMenuPopupWidget(Gtk.Popover):
         self.set_relative_to(path_entry)
         self.set_modal(False)
         self.set_position(Gtk.PositionType.BOTTOM)
-        self.set_size_request(240, 420)
+        self.set_size_request(480, 420)

     def _setup_signals(self):
         event_system.subscribe("show_path_menu", self.show_path_menu)
         event_system.subscribe("hide_path_menu", self.hide_path_menu)

     def _load_widgets(self):
-        path_menu_buttons = Gtk.ButtonBox()
-        scroll_window = Gtk.ScrolledWindow()
-        view_port = Gtk.Viewport()
+        scroll_window = Gtk.ScrolledWindow()
+        view_port = Gtk.Viewport()
+        path_menu_buttons = Gtk.Box()

         scroll_window.set_vexpand(True)
         scroll_window.set_hexpand(True)
@@ -47,12 +47,13 @@ class PathMenuPopupWidget(Gtk.Popover):
         self.builder.expose_object(f"path_menu_buttons", path_menu_buttons)
         view_port.add(path_menu_buttons)
         scroll_window.add(view_port)
-        scroll_window.show_all()
         self.add(scroll_window)
+        scroll_window.show_all()

-    def show_path_menu(self, widget=None, eve=None):
+
+    def show_path_menu(self, widget = None, eve = None):
         self.popup()

-    def hide_path_menu(self, widget=None, eve=None):
-        self.popdown()
+    def hide_path_menu(self, widget = None, eve = None):
+        self.popdown()
\ No newline at end of file
diff --git a/src/solarfm/shellfm/windows/tabs/utils/launcher.py b/src/solarfm/shellfm/windows/tabs/utils/launcher.py
index 6e07b4b..1b9d772 100644
--- a/src/solarfm/shellfm/windows/tabs/utils/launcher.py
+++ b/src/solarfm/shellfm/windows/tabs/utils/launcher.py
@@ -113,4 +113,4 @@ class Launcher:
                 if not os.path.islink(fp):  # Skip if it is symbolic link
                     total_size += os.path.getsize(fp)

-        return total_size
+        return total_size
\ No newline at end of file
diff --git a/user_config/bin/solarfm b/user_config/bin/solarfm
index 60cf37e..9921548 100755
--- a/user_config/bin/solarfm
+++ b/user_config/bin/solarfm
@@ -23,6 +23,7 @@ function main() {
     export G_SLICE=always-malloc
     export G_DEBUG=gc-friendly
     export GOBJECT_DEBUG=instance-count
-    python /opt/solarfm.zip "${files[@]}"
+    export GSK_RENDERER=cairo
+    python
/opt/solarfm.zip "$@" } main "$@"; diff --git a/user_config/usr/bin/solarfm b/user_config/usr/bin/solarfm deleted file mode 100755 index 8cca2db..0000000 --- a/user_config/usr/bin/solarfm +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -# . CONFIG.sh - -# set -o xtrace ## To debug scripts -# set -o errexit ## To exit on error -# set -o errunset ## To exit if a variable is referenced but not set - - -function main() { - call_path=`pwd` - cd "${call_path}" - echo "Working Dir: " $(pwd) - - python /opt/solarfm.zip "$@" -} -main "$@"; -- 2.39.5 From d68d9ce4f9c8f64fd2847dae36f1ba3fe5ecefc7 Mon Sep 17 00:00:00 2001 From: itdominator <1itdominator@gmail.com> Date: Fri, 2 May 2025 16:11:08 -0500 Subject: [PATCH 19/28] Upgrade yt_dlp and download script --- plugins/youtube_download/download.sh | 29 +- plugins/youtube_download/yt_dlp/YoutubeDL.py | 785 +- plugins/youtube_download/yt_dlp/__init__.py | 186 +- plugins/youtube_download/yt_dlp/__main__.py | 2 +- .../yt_dlp/__pyinstaller/hook-yt_dlp.py | 14 +- plugins/youtube_download/yt_dlp/aes.py | 79 +- plugins/youtube_download/yt_dlp/cache.py | 4 +- plugins/youtube_download/yt_dlp/casefold.py | 5 - .../yt_dlp/compat/__init__.py | 25 +- .../yt_dlp/compat/_deprecated.py | 18 +- .../youtube_download/yt_dlp/compat/_legacy.py | 15 +- .../yt_dlp/compat/compat_utils.py | 4 +- .../yt_dlp/compat/functools.py | 26 - .../youtube_download/yt_dlp/compat/imghdr.py | 26 +- .../yt_dlp/compat/urllib/__init__.py | 2 +- .../yt_dlp/compat/urllib/request.py | 10 +- plugins/youtube_download/yt_dlp/cookies.py | 299 +- .../yt_dlp/dependencies/Cryptodome.py | 2 +- .../yt_dlp/dependencies/__init__.py | 21 +- .../yt_dlp/downloader/__init__.py | 6 +- .../yt_dlp/downloader/bunnycdn.py | 50 + .../yt_dlp/downloader/common.py | 45 +- .../yt_dlp/downloader/dash.py | 11 +- .../yt_dlp/downloader/external.py | 47 +- .../youtube_download/yt_dlp/downloader/f4m.py | 22 +- .../yt_dlp/downloader/fragment.py | 65 +- .../youtube_download/yt_dlp/downloader/hls.py | 83 +- .../yt_dlp/downloader/http.py | 29 +- .../youtube_download/yt_dlp/downloader/ism.py | 2 +- .../yt_dlp/downloader/mhtml.py | 53 +- .../yt_dlp/downloader/niconico.py | 69 +- .../yt_dlp/downloader/rtmp.py | 13 +- .../yt_dlp/downloader/rtsp.py | 4 +- .../yt_dlp/downloader/youtube_live_chat.py | 6 +- .../yt_dlp/extractor/__init__.py | 22 +- .../yt_dlp/extractor/_extractors.py | 1573 ++-- .../youtube_download/yt_dlp/extractor/abc.py | 159 +- .../yt_dlp/extractor/abcnews.py | 2 +- .../yt_dlp/extractor/abcotvs.py | 5 +- .../yt_dlp/extractor/abematv.py | 231 +- .../yt_dlp/extractor/academicearth.py | 2 +- .../yt_dlp/extractor/acast.py | 26 +- .../yt_dlp/extractor/acfun.py | 13 +- .../youtube_download/yt_dlp/extractor/adn.py | 160 +- .../yt_dlp/extractor/adobeconnect.py | 10 +- .../yt_dlp/extractor/adobepass.py | 875 +- .../yt_dlp/extractor/adobetv.py | 9 +- .../yt_dlp/extractor/adultswim.py | 7 +- .../yt_dlp/extractor/aenetworks.py | 60 +- .../yt_dlp/extractor/aeonco.py | 8 +- .../yt_dlp/extractor/afreecatv.py | 628 +- .../yt_dlp/extractor/agora.py | 31 +- .../yt_dlp/extractor/airmozilla.py | 63 - .../yt_dlp/extractor/airtv.py | 6 +- .../yt_dlp/extractor/aitube.py | 2 +- .../yt_dlp/extractor/aliexpress.py | 3 +- .../yt_dlp/extractor/aljazeera.py | 14 +- .../yt_dlp/extractor/allocine.py | 5 +- .../yt_dlp/extractor/allstar.py | 252 + .../yt_dlp/extractor/alphaporno.py | 6 +- .../yt_dlp/extractor/alsace20tv.py | 6 +- .../yt_dlp/extractor/altcensored.py | 104 + .../yt_dlp/extractor/alura.py | 30 +- .../yt_dlp/extractor/amadeustv.py | 77 + 
.../yt_dlp/extractor/amara.py | 12 +- .../yt_dlp/extractor/amazon.py | 8 +- .../yt_dlp/extractor/amazonminitv.py | 11 +- .../yt_dlp/extractor/amcnetworks.py | 13 +- .../yt_dlp/extractor/americastestkitchen.py | 14 +- .../youtube_download/yt_dlp/extractor/amp.py | 6 +- .../yt_dlp/extractor/anchorfm.py | 8 +- .../yt_dlp/extractor/angel.py | 12 +- .../extractor/{ant1newsgr.py => antenna.py} | 65 +- .../yt_dlp/extractor/anvato.py | 57 +- .../youtube_download/yt_dlp/extractor/aol.py | 9 +- .../youtube_download/yt_dlp/extractor/apa.py | 4 +- .../yt_dlp/extractor/appleconnect.py | 5 +- .../yt_dlp/extractor/applepodcasts.py | 80 +- .../yt_dlp/extractor/appletrailers.py | 33 +- .../yt_dlp/extractor/archiveorg.py | 379 +- .../yt_dlp/extractor/arcpublishing.py | 11 +- .../youtube_download/yt_dlp/extractor/ard.py | 710 +- .../yt_dlp/extractor/arkena.py | 6 +- .../yt_dlp/extractor/arnes.py | 14 +- .../yt_dlp/extractor/art19.py | 303 + .../youtube_download/yt_dlp/extractor/arte.py | 113 +- .../yt_dlp/extractor/asiancrush.py | 196 - .../yt_dlp/extractor/asobichannel.py | 168 + .../yt_dlp/extractor/asobistage.py | 155 + .../yt_dlp/extractor/atresplayer.py | 182 +- .../yt_dlp/extractor/atscaleconf.py | 10 +- .../yt_dlp/extractor/atttechchannel.py | 53 - .../yt_dlp/extractor/atvat.py | 26 +- .../yt_dlp/extractor/audimedia.py | 4 +- .../yt_dlp/extractor/audioboom.py | 4 +- .../yt_dlp/extractor/audiodraft.py | 13 +- .../yt_dlp/extractor/audiomack.py | 27 +- .../yt_dlp/extractor/audius.py | 46 +- .../yt_dlp/extractor/awaan.py | 27 +- .../youtube_download/yt_dlp/extractor/aws.py | 28 +- .../youtube_download/yt_dlp/extractor/axs.py | 89 + .../yt_dlp/extractor/azmedien.py | 29 +- .../yt_dlp/extractor/baidu.py | 7 +- .../yt_dlp/extractor/banbye.py | 110 +- .../yt_dlp/extractor/bandcamp.py | 84 +- .../yt_dlp/extractor/bandlab.py | 437 + .../yt_dlp/extractor/bannedvideo.py | 18 +- .../youtube_download/yt_dlp/extractor/bbc.py | 559 +- .../yt_dlp/extractor/beacon.py | 68 + .../yt_dlp/extractor/beatbump.py | 42 +- .../yt_dlp/extractor/beatport.py | 7 +- .../youtube_download/yt_dlp/extractor/beeg.py | 16 +- .../yt_dlp/extractor/behindkink.py | 3 +- .../yt_dlp/extractor/bellmedia.py | 4 +- .../yt_dlp/extractor/berufetv.py | 4 +- .../youtube_download/yt_dlp/extractor/bet.py | 11 +- .../youtube_download/yt_dlp/extractor/bfi.py | 1 + .../yt_dlp/extractor/bfmtv.py | 74 +- .../yt_dlp/extractor/bibeltv.py | 3 +- .../yt_dlp/extractor/bigflix.py | 14 +- .../youtube_download/yt_dlp/extractor/bigo.py | 5 +- .../youtube_download/yt_dlp/extractor/bild.py | 34 +- .../yt_dlp/extractor/bilibili.py | 1454 +++- .../yt_dlp/extractor/biqle.py | 110 - .../yt_dlp/extractor/bitchute.py | 69 +- .../yt_dlp/extractor/bitwave.py | 58 - .../yt_dlp/extractor/blackboardcollaborate.py | 2 +- .../yt_dlp/extractor/bleacherreport.py | 27 +- .../yt_dlp/extractor/blerp.py | 25 +- .../yt_dlp/extractor/blogger.py | 6 +- .../yt_dlp/extractor/bloomberg.py | 2 +- .../yt_dlp/extractor/bluesky.py | 393 + .../yt_dlp/extractor/bokecc.py | 17 +- .../yt_dlp/extractor/bongacams.py | 9 +- .../yt_dlp/extractor/boosty.py | 225 + .../yt_dlp/extractor/bostonglobe.py | 4 +- .../youtube_download/yt_dlp/extractor/box.py | 98 +- .../yt_dlp/extractor/boxcast.py | 16 +- .../youtube_download/yt_dlp/extractor/bpb.py | 161 +- .../youtube_download/yt_dlp/extractor/br.py | 152 +- .../yt_dlp/extractor/brainpop.py | 16 +- .../yt_dlp/extractor/bravotv.py | 11 +- .../yt_dlp/extractor/breakcom.py | 86 - .../yt_dlp/extractor/breitbart.py | 6 +- .../yt_dlp/extractor/brightcove.py | 93 +- 
.../yt_dlp/extractor/brilliantpala.py | 136 + .../yt_dlp/extractor/bundesliga.py | 10 +- .../yt_dlp/extractor/bundestag.py | 124 + .../yt_dlp/extractor/bunnycdn.py | 178 + .../yt_dlp/extractor/businessinsider.py | 4 +- .../yt_dlp/extractor/buzzfeed.py | 6 +- .../yt_dlp/extractor/byutv.py | 18 +- .../youtube_download/yt_dlp/extractor/c56.py | 4 +- .../yt_dlp/extractor/cableav.py | 32 - .../yt_dlp/extractor/caffeinetv.py | 74 + .../yt_dlp/extractor/callin.py | 18 +- .../yt_dlp/extractor/caltrans.py | 2 +- .../youtube_download/yt_dlp/extractor/cam4.py | 4 +- .../yt_dlp/extractor/camdemy.py | 33 +- .../yt_dlp/extractor/camfm.py | 8 +- .../yt_dlp/extractor/cammodels.py | 8 +- .../yt_dlp/extractor/camtasia.py | 6 +- .../yt_dlp/extractor/camwithher.py | 87 - .../yt_dlp/extractor/canal1.py | 39 + .../yt_dlp/extractor/canalalpha.py | 43 +- .../yt_dlp/extractor/canalc2.py | 2 +- .../yt_dlp/extractor/canalplus.py | 5 +- .../yt_dlp/extractor/canalsurmas.py | 84 + .../yt_dlp/extractor/caracoltv.py | 136 + .../yt_dlp/extractor/carambatv.py | 105 - .../yt_dlp/extractor/cartoonnetwork.py | 2 +- .../youtube_download/yt_dlp/extractor/cbc.py | 832 +- .../youtube_download/yt_dlp/extractor/cbs.py | 15 +- .../yt_dlp/extractor/cbsinteractive.py | 98 - .../yt_dlp/extractor/cbsnews.py | 3 +- .../yt_dlp/extractor/cbssports.py | 3 + .../youtube_download/yt_dlp/extractor/ccc.py | 13 +- .../youtube_download/yt_dlp/extractor/ccma.py | 73 +- .../youtube_download/yt_dlp/extractor/cctv.py | 23 +- .../youtube_download/yt_dlp/extractor/cda.py | 195 +- .../yt_dlp/extractor/cellebrite.py | 73 +- .../yt_dlp/extractor/ceskatelevize.py | 26 +- .../youtube_download/yt_dlp/extractor/cgtn.py | 28 +- .../yt_dlp/extractor/channel9.py | 252 - .../yt_dlp/extractor/chaturbate.py | 67 +- .../yt_dlp/extractor/chingari.py | 207 - .../yt_dlp/extractor/chirbit.py | 88 - .../yt_dlp/extractor/chzzk.py | 205 + .../yt_dlp/extractor/cinchcast.py | 56 - .../yt_dlp/extractor/cinemax.py | 3 +- .../yt_dlp/extractor/cinetecamilano.py | 9 +- .../yt_dlp/extractor/cineverse.py | 140 + .../yt_dlp/extractor/ciscolive.py | 4 +- .../yt_dlp/extractor/ciscowebex.py | 4 +- .../youtube_download/yt_dlp/extractor/cjsw.py | 2 +- .../yt_dlp/extractor/cliphunter.py | 76 - .../yt_dlp/extractor/clippit.py | 8 +- .../yt_dlp/extractor/cliprs.py | 3 +- .../yt_dlp/extractor/clipsyndicate.py | 52 - .../yt_dlp/extractor/closertotruth.py | 11 +- .../yt_dlp/extractor/cloudflarestream.py | 64 +- .../yt_dlp/extractor/cloudy.py | 57 - .../yt_dlp/extractor/cloudycdn.py | 98 + .../yt_dlp/extractor/clubic.py | 5 +- .../youtube_download/yt_dlp/extractor/clyp.py | 6 +- .../youtube_download/yt_dlp/extractor/cmt.py | 5 +- .../youtube_download/yt_dlp/extractor/cnbc.py | 143 +- .../youtube_download/yt_dlp/extractor/cnn.py | 315 +- .../yt_dlp/extractor/common.py | 580 +- .../yt_dlp/extractor/commonmistakes.py | 22 +- .../yt_dlp/extractor/commonprotocols.py | 2 +- .../yt_dlp/extractor/condenast.py | 35 +- .../yt_dlp/extractor/contv.py | 2 +- .../yt_dlp/extractor/corus.py | 17 +- .../youtube_download/yt_dlp/extractor/coub.py | 8 +- .../yt_dlp/extractor/cozytv.py | 10 +- .../youtube_download/yt_dlp/extractor/cpac.py | 26 +- .../yt_dlp/extractor/cracked.py | 4 +- .../yt_dlp/extractor/crackle.py | 16 +- .../yt_dlp/extractor/craftsy.py | 53 +- .../yt_dlp/extractor/crooksandliars.py | 9 +- .../yt_dlp/extractor/crowdbunker.py | 41 +- .../yt_dlp/extractor/crtvg.py | 27 +- .../yt_dlp/extractor/crunchyroll.py | 650 -- .../yt_dlp/extractor/cspan.py | 32 +- .../yt_dlp/extractor/ctsnews.py | 6 +- 
.../youtube_download/yt_dlp/extractor/ctv.py | 4 +- .../yt_dlp/extractor/ctvnews.py | 164 +- .../yt_dlp/extractor/cultureunplugged.py | 33 +- .../yt_dlp/extractor/curiositystream.py | 9 +- .../youtube_download/yt_dlp/extractor/cwtv.py | 101 +- .../yt_dlp/extractor/cybrary.py | 28 +- .../yt_dlp/extractor/dacast.py | 30 +- .../yt_dlp/extractor/daftsex.py | 150 - .../yt_dlp/extractor/dailymail.py | 12 +- .../yt_dlp/extractor/dailymotion.py | 315 +- .../yt_dlp/extractor/dailywire.py | 6 +- .../yt_dlp/extractor/damtomo.py | 10 +- .../yt_dlp/extractor/dangalplay.py | 197 + .../youtube_download/yt_dlp/extractor/daum.py | 38 +- .../youtube_download/yt_dlp/extractor/dbtv.py | 2 +- .../youtube_download/yt_dlp/extractor/dctp.py | 11 +- .../yt_dlp/extractor/deezer.py | 142 - .../yt_dlp/extractor/defense.py | 37 - .../yt_dlp/extractor/democracynow.py | 12 +- .../yt_dlp/extractor/detik.py | 20 +- .../yt_dlp/extractor/deuxm.py | 16 +- .../youtube_download/yt_dlp/extractor/dfb.py | 4 +- .../youtube_download/yt_dlp/extractor/dhm.py | 1 + .../youtube_download/yt_dlp/extractor/digg.py | 54 - .../yt_dlp/extractor/digitalconcerthall.py | 243 +- .../yt_dlp/extractor/digiteka.py | 2 +- .../yt_dlp/extractor/digiview.py | 130 + .../yt_dlp/extractor/discovery.py | 115 - .../yt_dlp/extractor/discoverygo.py | 172 - .../yt_dlp/extractor/disney.py | 12 +- .../yt_dlp/extractor/dispeak.py | 10 +- .../youtube_download/yt_dlp/extractor/dlf.py | 36 +- .../yt_dlp/extractor/dlive.py | 8 +- .../yt_dlp/extractor/dotsub.py | 81 - .../yt_dlp/extractor/douyutv.py | 277 +- .../yt_dlp/extractor/dplay.py | 577 +- .../yt_dlp/extractor/drbonanza.py | 2 +- .../yt_dlp/extractor/dreisat.py | 125 +- .../yt_dlp/extractor/drooble.py | 6 +- .../yt_dlp/extractor/dropbox.py | 102 +- .../yt_dlp/extractor/dropout.py | 36 +- .../yt_dlp/extractor/drtalks.py | 51 + .../yt_dlp/extractor/drtuber.py | 12 +- .../youtube_download/yt_dlp/extractor/drtv.py | 414 +- .../yt_dlp/extractor/dtube.py | 11 +- .../yt_dlp/extractor/duboku.py | 43 +- .../yt_dlp/extractor/dumpert.py | 13 +- .../yt_dlp/extractor/duoplay.py | 138 + .../youtube_download/yt_dlp/extractor/dvtv.py | 20 +- .../youtube_download/yt_dlp/extractor/dw.py | 19 +- .../yt_dlp/extractor/eagleplatform.py | 20 +- .../yt_dlp/extractor/ebaumsworld.py | 2 +- .../youtube_download/yt_dlp/extractor/ebay.py | 4 +- .../yt_dlp/extractor/echomsk.py | 43 - .../yt_dlp/extractor/egghead.py | 15 +- .../youtube_download/yt_dlp/extractor/eggs.py | 155 + .../youtube_download/yt_dlp/extractor/ehow.py | 36 - .../yt_dlp/extractor/eighttracks.py | 49 +- .../yt_dlp/extractor/einthusan.py | 105 - .../youtube_download/yt_dlp/extractor/eitb.py | 18 +- .../yt_dlp/extractor/elementorembed.py | 72 + .../yt_dlp/extractor/elevensports.py | 59 - .../yt_dlp/extractor/ellentube.py | 130 - .../yt_dlp/extractor/elpais.py | 4 +- .../yt_dlp/extractor/eltrecetv.py | 62 + .../yt_dlp/extractor/embedly.py | 2 +- .../yt_dlp/extractor/engadget.py | 15 - .../yt_dlp/extractor/epicon.py | 29 +- .../yt_dlp/extractor/epidemicsound.py | 124 + .../yt_dlp/extractor/eplus.py | 205 + .../yt_dlp/extractor/epoch.py | 10 +- .../yt_dlp/extractor/eporner.py | 26 +- .../yt_dlp/extractor/erocast.py | 63 + .../yt_dlp/extractor/eroprofile.py | 6 +- .../youtube_download/yt_dlp/extractor/err.py | 224 + .../yt_dlp/extractor/ertgr.py | 65 +- .../yt_dlp/extractor/escapist.py | 108 - .../youtube_download/yt_dlp/extractor/espn.py | 80 +- .../youtube_download/yt_dlp/extractor/esri.py | 70 - .../yt_dlp/extractor/ettutv.py | 2 +- .../yt_dlp/extractor/europa.py | 45 +- 
.../yt_dlp/extractor/europeantour.py | 8 +- .../yt_dlp/extractor/eurosport.py | 60 +- .../yt_dlp/extractor/euscreen.py | 21 +- .../yt_dlp/extractor/expotv.py | 74 - .../yt_dlp/extractor/expressen.py | 12 +- .../yt_dlp/extractor/extractors.py | 49 +- .../yt_dlp/extractor/extremetube.py | 48 - .../yt_dlp/extractor/eyedotv.py | 16 +- .../yt_dlp/extractor/facebook.py | 436 +- .../yt_dlp/extractor/fancode.py | 45 +- .../yt_dlp/extractor/fathom.py | 54 + .../youtube_download/yt_dlp/extractor/faz.py | 2 +- .../youtube_download/yt_dlp/extractor/fc2.py | 32 +- .../yt_dlp/extractor/fczenit.py | 2 +- .../youtube_download/yt_dlp/extractor/fifa.py | 3 +- .../yt_dlp/extractor/filmmodu.py | 69 - .../yt_dlp/extractor/filmon.py | 15 +- .../yt_dlp/extractor/filmweb.py | 2 +- .../yt_dlp/extractor/firsttv.py | 32 +- .../yt_dlp/extractor/flextv.py | 62 + .../yt_dlp/extractor/flickr.py | 14 +- .../yt_dlp/extractor/floatplane.py | 333 + .../yt_dlp/extractor/folketinget.py | 5 +- .../yt_dlp/extractor/footyroom.py | 2 +- .../yt_dlp/extractor/fourtube.py | 41 +- .../yt_dlp/extractor/fourzerostudio.py | 106 - .../youtube_download/yt_dlp/extractor/fox.py | 15 +- .../yt_dlp/extractor/foxgay.py | 58 - .../yt_dlp/extractor/fptplay.py | 2 +- .../yt_dlp/extractor/francaisfacile.py | 87 + .../yt_dlp/extractor/francetv.py | 329 +- .../yt_dlp/extractor/freesound.py | 2 +- .../yt_dlp/extractor/freetv.py | 10 +- .../yt_dlp/extractor/frontendmasters.py | 31 +- .../yt_dlp/extractor/fujitv.py | 8 +- .../yt_dlp/extractor/funimation.py | 349 - .../youtube_download/yt_dlp/extractor/funk.py | 33 +- .../yt_dlp/extractor/funker530.py | 7 +- .../yt_dlp/extractor/fusion.py | 81 - .../yt_dlp/extractor/fuyintv.py | 2 +- .../youtube_download/yt_dlp/extractor/gab.py | 25 +- .../youtube_download/yt_dlp/extractor/gaia.py | 14 +- .../yt_dlp/extractor/gamedevtv.py | 141 + .../yt_dlp/extractor/gameinformer.py | 46 - .../yt_dlp/extractor/gamejolt.py | 49 +- .../yt_dlp/extractor/gamespot.py | 5 +- .../yt_dlp/extractor/gamestar.py | 6 +- .../yt_dlp/extractor/gaskrank.py | 6 +- .../yt_dlp/extractor/gazeta.py | 5 +- .../yt_dlp/extractor/gbnews.py | 113 + .../yt_dlp/extractor/gdcvault.py | 11 +- .../yt_dlp/extractor/gedidigital.py | 4 +- .../yt_dlp/extractor/generic.py | 343 +- .../yt_dlp/extractor/genericembeds.py | 10 +- .../yt_dlp/extractor/germanupa.py | 91 + .../yt_dlp/extractor/getcourseru.py | 188 + .../yt_dlp/extractor/gettr.py | 16 +- .../yt_dlp/extractor/gfycat.py | 145 - .../yt_dlp/extractor/giantbomb.py | 2 +- .../youtube_download/yt_dlp/extractor/giga.py | 93 - .../yt_dlp/extractor/gigya.py | 20 - .../yt_dlp/extractor/glide.py | 2 +- .../yt_dlp/extractor/globalplayer.py | 4 +- .../yt_dlp/extractor/globo.py | 223 +- .../yt_dlp/extractor/glomex.py | 8 +- .../yt_dlp/extractor/gmanetwork.py | 4 +- .../youtube_download/yt_dlp/extractor/go.py | 36 +- .../yt_dlp/extractor/godresource.py | 79 + .../yt_dlp/extractor/godtube.py | 5 +- .../yt_dlp/extractor/gofile.py | 28 +- .../yt_dlp/extractor/golem.py | 18 +- .../yt_dlp/extractor/goodgame.py | 58 +- .../yt_dlp/extractor/googledrive.py | 66 +- .../yt_dlp/extractor/googlepodcasts.py | 2 +- .../yt_dlp/extractor/goplay.py | 273 +- .../yt_dlp/extractor/gopro.py | 16 +- .../yt_dlp/extractor/goshgay.py | 9 +- .../yt_dlp/extractor/gotostage.py | 28 +- .../yt_dlp/extractor/gputechconf.py | 4 +- .../yt_dlp/extractor/graspop.py | 32 + .../yt_dlp/extractor/gronkh.py | 14 +- .../yt_dlp/extractor/groupon.py | 4 +- .../yt_dlp/extractor/harpodeon.py | 16 +- .../youtube_download/yt_dlp/extractor/hbo.py | 13 +- 
.../yt_dlp/extractor/hearthisat.py | 46 +- .../yt_dlp/extractor/heise.py | 6 +- .../yt_dlp/extractor/helsinki.py | 38 - .../yt_dlp/extractor/hidive.py | 6 +- .../yt_dlp/extractor/historicfilms.py | 2 +- .../yt_dlp/extractor/hitbox.py | 209 - .../yt_dlp/extractor/hitrecord.py | 11 +- .../yt_dlp/extractor/hketv.py | 8 +- .../yt_dlp/extractor/hollywoodreporter.py | 2 +- .../yt_dlp/extractor/holodex.py | 2 +- .../yt_dlp/extractor/hotnewhiphop.py | 10 +- .../yt_dlp/extractor/hotstar.py | 63 +- .../yt_dlp/extractor/howcast.py | 41 - .../yt_dlp/extractor/howstuffworks.py | 86 - .../yt_dlp/extractor/hrfensehen.py | 14 +- .../youtube_download/yt_dlp/extractor/hrti.py | 22 +- .../youtube_download/yt_dlp/extractor/hse.py | 10 +- .../yt_dlp/extractor/huajiao.py | 2 +- .../yt_dlp/extractor/huffpost.py | 2 +- .../yt_dlp/extractor/hungama.py | 116 +- .../youtube_download/yt_dlp/extractor/huya.py | 97 +- .../yt_dlp/extractor/hypem.py | 6 +- .../yt_dlp/extractor/hypergryph.py | 8 +- .../yt_dlp/extractor/hytale.py | 9 +- .../yt_dlp/extractor/icareus.py | 12 +- .../yt_dlp/extractor/ichinanalive.py | 73 +- .../youtube_download/yt_dlp/extractor/ign.py | 25 +- .../yt_dlp/extractor/iheart.py | 4 +- .../yt_dlp/extractor/ilpost.py | 68 + .../yt_dlp/extractor/iltalehti.py | 2 +- .../youtube_download/yt_dlp/extractor/imdb.py | 8 +- .../yt_dlp/extractor/imggaming.py | 2 +- .../yt_dlp/extractor/imgur.py | 466 +- .../youtube_download/yt_dlp/extractor/ina.py | 2 +- .../youtube_download/yt_dlp/extractor/inc.py | 2 +- .../yt_dlp/extractor/indavideo.py | 75 +- .../yt_dlp/extractor/infoq.py | 16 +- .../yt_dlp/extractor/instagram.py | 203 +- .../yt_dlp/extractor/internazionale.py | 4 +- .../yt_dlp/extractor/iprima.py | 48 +- .../yt_dlp/extractor/iqiyi.py | 102 +- .../yt_dlp/extractor/islamchannel.py | 2 +- .../yt_dlp/extractor/israelnationalnews.py | 6 +- .../yt_dlp/extractor/itprotv.py | 21 +- .../youtube_download/yt_dlp/extractor/itv.py | 42 +- .../youtube_download/yt_dlp/extractor/ivi.py | 26 +- .../yt_dlp/extractor/ivideon.py | 16 +- .../yt_dlp/extractor/ivoox.py | 78 + .../yt_dlp/extractor/iwara.py | 11 +- .../yt_dlp/extractor/ixigua.py | 2 +- .../yt_dlp/extractor/izlesene.py | 18 +- .../yt_dlp/extractor/jable.py | 103 - .../yt_dlp/extractor/jamendo.py | 42 +- .../yt_dlp/extractor/japandiet.py | 19 +- .../yt_dlp/extractor/jeuxvideo.py | 2 + .../yt_dlp/extractor/jiocinema.py | 408 + .../yt_dlp/extractor/jiosaavn.py | 201 + .../youtube_download/yt_dlp/extractor/joj.py | 15 +- .../yt_dlp/extractor/joqrag.py | 112 + .../youtube_download/yt_dlp/extractor/jove.py | 9 +- .../yt_dlp/extractor/jstream.py | 2 +- .../youtube_download/yt_dlp/extractor/jtbc.py | 156 + .../yt_dlp/extractor/jwplatform.py | 4 +- .../yt_dlp/extractor/kakao.py | 12 +- .../yt_dlp/extractor/kaltura.py | 81 +- .../yt_dlp/extractor/kanal2.py | 66 - .../yt_dlp/extractor/kankanews.py | 7 +- .../yt_dlp/extractor/karaoketv.py | 4 +- .../yt_dlp/extractor/karrierevideos.py | 96 - .../yt_dlp/extractor/keezmovies.py | 125 - .../yt_dlp/extractor/kelbyone.py | 3 +- .../yt_dlp/extractor/kenh14.py | 160 + .../yt_dlp/extractor/khanacademy.py | 141 +- .../youtube_download/yt_dlp/extractor/kick.py | 232 +- .../yt_dlp/extractor/kicker.py | 6 +- .../youtube_download/yt_dlp/extractor/kika.py | 168 + .../yt_dlp/extractor/kinja.py | 27 +- .../yt_dlp/extractor/kommunetv.py | 12 +- .../yt_dlp/extractor/kompas.py | 2 +- .../yt_dlp/extractor/konserthusetplay.py | 119 - .../youtube_download/yt_dlp/extractor/koo.py | 28 +- .../yt_dlp/extractor/krasview.py | 1 + 
.../youtube_download/yt_dlp/extractor/kth.py | 7 +- .../youtube_download/yt_dlp/extractor/ku6.py | 10 +- .../yt_dlp/extractor/kukululive.py | 140 + .../youtube_download/yt_dlp/extractor/kusi.py | 83 - .../youtube_download/yt_dlp/extractor/kuwo.py | 52 +- .../youtube_download/yt_dlp/extractor/la7.py | 6 +- .../yt_dlp/extractor/laola1tv.py | 261 - .../yt_dlp/extractor/laracasts.py | 114 + .../yt_dlp/extractor/laxarxames.py | 73 + .../youtube_download/yt_dlp/extractor/lbry.py | 231 +- .../youtube_download/yt_dlp/extractor/lci.py | 27 +- .../youtube_download/yt_dlp/extractor/lcp.py | 4 +- .../yt_dlp/extractor/learningonscreen.py | 72 + .../yt_dlp/extractor/lecture2go.py | 5 +- .../yt_dlp/extractor/lecturio.py | 19 +- .../yt_dlp/extractor/leeco.py | 47 +- .../yt_dlp/extractor/lefigaro.py | 11 +- .../youtube_download/yt_dlp/extractor/lego.py | 6 +- .../yt_dlp/extractor/lenta.py | 3 +- .../yt_dlp/extractor/libraryofcongress.py | 3 +- .../yt_dlp/extractor/libsyn.py | 6 +- .../yt_dlp/extractor/lifenews.py | 29 +- .../yt_dlp/extractor/likee.py | 12 +- .../yt_dlp/extractor/limelight.py | 18 +- .../yt_dlp/extractor/linkedin.py | 196 +- .../yt_dlp/extractor/linuxacademy.py | 238 - .../yt_dlp/extractor/liputan6.py | 6 +- .../yt_dlp/extractor/listennotes.py | 28 +- .../youtube_download/yt_dlp/extractor/litv.py | 125 +- .../yt_dlp/extractor/livejournal.py | 5 +- .../yt_dlp/extractor/livestream.py | 42 +- .../yt_dlp/extractor/livestreamfails.py | 4 +- .../youtube_download/yt_dlp/extractor/lnk.py | 87 + .../yt_dlp/extractor/lnkgo.py | 163 - .../yt_dlp/extractor/localnews8.py | 42 - .../youtube_download/yt_dlp/extractor/loco.py | 159 + .../youtube_download/yt_dlp/extractor/loom.py | 466 ++ .../yt_dlp/extractor/lovehomeporn.py | 6 +- .../youtube_download/yt_dlp/extractor/lrt.py | 52 +- .../youtube_download/yt_dlp/extractor/lsm.py | 282 + .../yt_dlp/extractor/lumni.py | 9 +- .../yt_dlp/extractor/lynda.py | 54 +- .../youtube_download/yt_dlp/extractor/m6.py | 22 - .../yt_dlp/extractor/maariv.py | 62 + .../yt_dlp/extractor/magellantv.py | 52 +- .../yt_dlp/extractor/magentamusik.py | 62 + .../yt_dlp/extractor/magentamusik360.py | 58 - .../yt_dlp/extractor/mailru.py | 17 +- .../yt_dlp/extractor/mainstreaming.py | 33 +- .../yt_dlp/extractor/malltv.py | 107 - .../yt_dlp/extractor/mangomolo.py | 13 +- .../yt_dlp/extractor/manoto.py | 19 +- .../yt_dlp/extractor/manyvids.py | 177 +- .../yt_dlp/extractor/markiza.py | 11 +- .../yt_dlp/extractor/massengeschmacktv.py | 13 +- .../yt_dlp/extractor/masters.py | 3 +- .../yt_dlp/extractor/matchtv.py | 40 +- .../youtube_download/yt_dlp/extractor/mbn.py | 89 + .../youtube_download/yt_dlp/extractor/mdr.py | 58 +- .../yt_dlp/extractor/medaltv.py | 30 +- .../yt_dlp/extractor/mediaite.py | 32 +- .../yt_dlp/extractor/mediaklikk.py | 128 +- .../yt_dlp/extractor/mediaset.py | 15 +- .../yt_dlp/extractor/mediasite.py | 101 +- .../yt_dlp/extractor/mediastream.py | 26 +- .../yt_dlp/extractor/mediaworksnz.py | 10 +- .../yt_dlp/extractor/medici.py | 182 +- .../yt_dlp/extractor/megaphone.py | 10 +- .../yt_dlp/extractor/meipai.py | 4 +- .../yt_dlp/extractor/melonvod.py | 4 +- .../youtube_download/yt_dlp/extractor/meta.py | 70 - .../yt_dlp/extractor/metacafe.py | 281 - .../yt_dlp/extractor/metacritic.py | 6 +- .../yt_dlp/extractor/mgoon.py | 81 - .../youtube_download/yt_dlp/extractor/mgtv.py | 8 +- .../yt_dlp/extractor/miaopai.py | 36 - .../yt_dlp/extractor/microsoftembed.py | 305 +- .../yt_dlp/extractor/microsoftstream.py | 12 +- .../extractor/microsoftvirtualacademy.py | 189 - 
.../yt_dlp/extractor/mildom.py | 291 - .../yt_dlp/extractor/minds.py | 11 +- .../yt_dlp/extractor/ministrygrid.py | 55 - .../yt_dlp/extractor/minoto.py | 4 +- .../yt_dlp/extractor/miomio.py | 134 - .../yt_dlp/extractor/mirrativ.py | 6 +- .../youtube_download/yt_dlp/extractor/mit.py | 14 +- .../yt_dlp/extractor/mitele.py | 46 +- .../yt_dlp/extractor/mixch.py | 160 +- .../yt_dlp/extractor/mixcloud.py | 129 +- .../youtube_download/yt_dlp/extractor/mlb.py | 261 +- .../yt_dlp/extractor/mlssoccer.py | 69 +- .../youtube_download/yt_dlp/extractor/mnet.py | 85 - .../yt_dlp/extractor/mocha.py | 6 +- .../yt_dlp/extractor/moevideo.py | 74 - .../yt_dlp/extractor/mofosex.py | 70 - .../yt_dlp/extractor/mojevideo.py | 121 + .../yt_dlp/extractor/mojvideo.py | 6 +- .../yt_dlp/extractor/monstercat.py | 74 + .../yt_dlp/extractor/morningstar.py | 45 - .../yt_dlp/extractor/motherless.py | 41 +- .../yt_dlp/extractor/motorsport.py | 12 +- .../yt_dlp/extractor/movieclips.py | 46 - .../yt_dlp/extractor/moviepilot.py | 12 +- .../yt_dlp/extractor/moview.py | 6 +- .../yt_dlp/extractor/moviezine.py | 2 +- .../yt_dlp/extractor/movingimage.py | 4 +- .../youtube_download/yt_dlp/extractor/msn.py | 316 +- .../youtube_download/yt_dlp/extractor/mtv.py | 54 +- .../yt_dlp/extractor/muenchentv.py | 9 +- .../yt_dlp/extractor/murrtube.py | 159 +- .../yt_dlp/extractor/musescore.py | 12 +- .../yt_dlp/extractor/musicdex.py | 70 +- .../yt_dlp/extractor/mwave.py | 87 - .../youtube_download/yt_dlp/extractor/mx3.py | 171 + .../yt_dlp/extractor/mxplayer.py | 25 +- .../yt_dlp/extractor/mychannels.py | 35 - .../yt_dlp/extractor/myspace.py | 14 +- .../yt_dlp/extractor/myspass.py | 3 +- .../youtube_download/yt_dlp/extractor/myvi.py | 100 - .../yt_dlp/extractor/myvideoge.py | 2 +- .../yt_dlp/extractor/myvidster.py | 2 +- .../yt_dlp/extractor/mzaalo.py | 11 +- .../youtube_download/yt_dlp/extractor/n1.py | 108 +- .../youtube_download/yt_dlp/extractor/nate.py | 24 +- .../yt_dlp/extractor/nationalgeographic.py | 5 +- .../yt_dlp/extractor/naver.py | 210 +- .../youtube_download/yt_dlp/extractor/nba.py | 33 +- .../youtube_download/yt_dlp/extractor/nbc.py | 66 +- .../youtube_download/yt_dlp/extractor/ndr.py | 20 +- .../youtube_download/yt_dlp/extractor/ndtv.py | 27 +- .../yt_dlp/extractor/nebula.py | 650 +- .../yt_dlp/extractor/nekohacker.py | 67 +- .../yt_dlp/extractor/nerdcubed.py | 45 +- .../youtube_download/yt_dlp/extractor/nest.py | 117 + .../yt_dlp/extractor/neteasemusic.py | 708 +- .../yt_dlp/extractor/netverse.py | 14 +- .../yt_dlp/extractor/netzkino.py | 7 +- .../yt_dlp/extractor/newgrounds.py | 181 +- .../yt_dlp/extractor/newspicks.py | 2 +- .../yt_dlp/extractor/newstube.py | 75 - .../yt_dlp/extractor/newsy.py | 4 +- .../yt_dlp/extractor/nextmedia.py | 19 +- .../youtube_download/yt_dlp/extractor/nexx.py | 92 +- .../youtube_download/yt_dlp/extractor/nfb.py | 287 +- .../yt_dlp/extractor/nfhsnetwork.py | 60 +- .../youtube_download/yt_dlp/extractor/nfl.py | 347 +- .../youtube_download/yt_dlp/extractor/nhk.py | 611 +- .../youtube_download/yt_dlp/extractor/nhl.py | 12 +- .../youtube_download/yt_dlp/extractor/nick.py | 38 +- .../yt_dlp/extractor/niconico.py | 623 +- .../yt_dlp/extractor/niconicochannelplus.py | 426 + .../yt_dlp/extractor/ninaprotocol.py | 225 + .../yt_dlp/extractor/ninecninemedia.py | 16 +- .../yt_dlp/extractor/ninegag.py | 6 +- .../yt_dlp/extractor/ninenews.py | 72 + .../yt_dlp/extractor/ninenow.py | 174 +- .../yt_dlp/extractor/nintendo.py | 152 +- .../yt_dlp/extractor/nitter.py | 57 +- .../yt_dlp/extractor/njpwworld.py | 82 - 
.../yt_dlp/extractor/nobelprize.py | 9 +- .../yt_dlp/extractor/noice.py | 6 +- .../yt_dlp/extractor/nonktube.py | 2 +- .../yt_dlp/extractor/noodlemagazine.py | 40 +- .../yt_dlp/extractor/noovo.py | 7 +- .../yt_dlp/extractor/normalboots.py | 51 - .../yt_dlp/extractor/nosnl.py | 6 +- .../yt_dlp/extractor/nosvideo.py | 72 - .../youtube_download/yt_dlp/extractor/nova.py | 143 +- .../yt_dlp/extractor/novaplay.py | 8 +- .../yt_dlp/extractor/nowness.py | 7 +- .../youtube_download/yt_dlp/extractor/noz.py | 14 +- .../youtube_download/yt_dlp/extractor/npo.py | 42 +- .../youtube_download/yt_dlp/extractor/npr.py | 4 +- .../youtube_download/yt_dlp/extractor/nrk.py | 110 +- .../youtube_download/yt_dlp/extractor/nrl.py | 1 + .../youtube_download/yt_dlp/extractor/nts.py | 76 + .../yt_dlp/extractor/ntvde.py | 76 +- .../yt_dlp/extractor/ntvru.py | 10 +- .../yt_dlp/extractor/nubilesporn.py | 19 +- .../yt_dlp/extractor/nuevo.py | 8 +- .../youtube_download/yt_dlp/extractor/nuum.py | 201 + .../yt_dlp/extractor/nuvid.py | 10 +- .../yt_dlp/extractor/nytimes.py | 452 +- .../yt_dlp/extractor/nzherald.py | 26 +- .../yt_dlp/extractor/nzonscreen.py | 4 +- .../youtube_download/yt_dlp/extractor/nzz.py | 22 +- .../yt_dlp/extractor/odatv.py | 47 - .../yt_dlp/extractor/odkmedia.py | 6 +- .../yt_dlp/extractor/odnoklassniki.py | 19 +- .../youtube_download/yt_dlp/extractor/oftv.py | 12 +- .../yt_dlp/extractor/oktoberfesttv.py | 2 +- .../yt_dlp/extractor/olympics.py | 124 +- .../youtube_download/yt_dlp/extractor/on24.py | 31 +- .../yt_dlp/extractor/ondemandkorea.py | 198 +- .../yt_dlp/extractor/onefootball.py | 52 +- .../yt_dlp/extractor/onenewsnz.py | 16 +- .../yt_dlp/extractor/oneplace.py | 4 +- .../youtube_download/yt_dlp/extractor/onet.py | 10 +- .../yt_dlp/extractor/onionstudios.py | 3 +- .../yt_dlp/extractor/ooyala.py | 230 - .../yt_dlp/extractor/opencast.py | 4 +- .../yt_dlp/extractor/openload.py | 10 +- .../yt_dlp/extractor/openrec.py | 24 +- .../youtube_download/yt_dlp/extractor/ora.py | 9 +- .../youtube_download/yt_dlp/extractor/orf.py | 416 +- .../yt_dlp/extractor/outsidetv.py | 2 +- .../yt_dlp/extractor/packtpub.py | 12 +- .../yt_dlp/extractor/palcomp3.py | 11 +- .../yt_dlp/extractor/pandoratv.py | 128 - .../yt_dlp/extractor/panopto.py | 104 +- .../yt_dlp/extractor/paramountplus.py | 10 +- .../yt_dlp/extractor/parler.py | 8 +- .../yt_dlp/extractor/parlview.py | 9 +- .../yt_dlp/extractor/parti.py | 101 + .../yt_dlp/extractor/patreon.py | 379 +- .../youtube_download/yt_dlp/extractor/pbs.py | 95 +- .../yt_dlp/extractor/pearvideo.py | 6 +- .../yt_dlp/extractor/peekvids.py | 3 - .../yt_dlp/extractor/peertube.py | 1053 ++- .../yt_dlp/extractor/peertv.py | 2 +- .../yt_dlp/extractor/peloton.py | 26 +- .../yt_dlp/extractor/people.py | 29 - .../yt_dlp/extractor/performgroup.py | 11 +- .../yt_dlp/extractor/periscope.py | 25 +- .../yt_dlp/extractor/philharmoniedeparis.py | 7 +- .../yt_dlp/extractor/phoenix.py | 68 +- .../yt_dlp/extractor/photobucket.py | 6 +- .../yt_dlp/extractor/pialive.py | 122 + .../yt_dlp/extractor/piapro.py | 56 +- .../yt_dlp/extractor/picarto.py | 30 +- .../yt_dlp/extractor/piksel.py | 29 +- .../yt_dlp/extractor/pinkbike.py | 8 +- .../yt_dlp/extractor/pinterest.py | 38 +- .../yt_dlp/extractor/piramidetv.py | 99 + .../yt_dlp/extractor/pixivsketch.py | 9 +- .../yt_dlp/extractor/pladform.py | 17 +- .../yt_dlp/extractor/planetmarathi.py | 16 +- .../yt_dlp/extractor/platzi.py | 24 +- .../yt_dlp/extractor/playfm.py | 70 - .../yt_dlp/extractor/plays.py | 49 - .../yt_dlp/extractor/playstuff.py | 63 - 
.../yt_dlp/extractor/playsuisse.py | 108 +- .../yt_dlp/extractor/playtvak.py | 24 +- .../yt_dlp/extractor/playvid.py | 90 - .../yt_dlp/extractor/playwire.py | 2 +- .../yt_dlp/extractor/pluralsight.py | 71 +- .../yt_dlp/extractor/plutotv.py | 26 +- .../yt_dlp/extractor/plvideo.py | 130 + .../yt_dlp/extractor/podbayfm.py | 41 +- .../yt_dlp/extractor/podchaser.py | 16 +- .../yt_dlp/extractor/podomatic.py | 12 +- .../yt_dlp/extractor/pokemon.py | 136 - .../yt_dlp/extractor/pokergo.py | 29 +- .../yt_dlp/extractor/polsatgo.py | 12 +- .../yt_dlp/extractor/polskieradio.py | 37 +- .../yt_dlp/extractor/popcorntimes.py | 5 +- .../yt_dlp/extractor/popcorntv.py | 2 +- .../yt_dlp/extractor/porn91.py | 95 - .../yt_dlp/extractor/pornbox.py | 113 + .../yt_dlp/extractor/porncom.py | 99 - .../yt_dlp/extractor/pornez.py | 60 - .../yt_dlp/extractor/pornflip.py | 8 +- .../yt_dlp/extractor/pornhd.py | 116 - .../yt_dlp/extractor/pornhub.py | 75 +- .../yt_dlp/extractor/pornotube.py | 11 +- .../yt_dlp/extractor/pornovoisines.py | 9 +- .../yt_dlp/extractor/pornoxo.py | 3 +- .../yt_dlp/extractor/pr0gramm.py | 266 +- .../yt_dlp/extractor/prankcast.py | 91 +- .../yt_dlp/extractor/premiershiprugby.py | 2 +- .../yt_dlp/extractor/presstv.py | 10 +- .../yt_dlp/extractor/projectveritas.py | 11 +- .../yt_dlp/extractor/prosiebensat1.py | 26 +- .../youtube_download/yt_dlp/extractor/prx.py | 79 +- .../yt_dlp/extractor/puhutv.py | 43 +- .../yt_dlp/extractor/puls4.py | 3 +- .../yt_dlp/extractor/pyvideo.py | 7 +- .../yt_dlp/extractor/qdance.py | 25 +- .../yt_dlp/extractor/qingting.py | 5 +- .../yt_dlp/extractor/qqmusic.py | 551 +- .../youtube_download/yt_dlp/extractor/r7.py | 10 +- .../yt_dlp/extractor/radiko.py | 94 +- .../yt_dlp/extractor/radiobremen.py | 59 - .../yt_dlp/extractor/radiocanada.py | 10 +- .../yt_dlp/extractor/radiocomercial.py | 154 + .../yt_dlp/extractor/radiode.py | 5 +- .../yt_dlp/extractor/radiofrance.py | 385 +- .../yt_dlp/extractor/radiojavan.py | 3 +- .../yt_dlp/extractor/radiokapital.py | 18 +- .../yt_dlp/extractor/radioradicale.py | 105 + .../yt_dlp/extractor/radiozet.py | 4 +- .../yt_dlp/extractor/radlive.py | 18 +- .../youtube_download/yt_dlp/extractor/rai.py | 123 +- .../yt_dlp/extractor/raywenderlich.py | 16 +- .../yt_dlp/extractor/rbgtum.py | 89 +- .../yt_dlp/extractor/rbmaradio.py | 68 - .../youtube_download/yt_dlp/extractor/rcs.py | 30 +- .../youtube_download/yt_dlp/extractor/rcti.py | 48 +- .../youtube_download/yt_dlp/extractor/rds.py | 10 +- .../yt_dlp/extractor/recurbate.py | 42 - .../yt_dlp/extractor/redbee.py | 31 +- .../yt_dlp/extractor/redbulltv.py | 21 +- .../yt_dlp/extractor/reddit.py | 181 +- .../yt_dlp/extractor/redge.py | 134 + .../yt_dlp/extractor/redgifs.py | 76 +- .../yt_dlp/extractor/redtube.py | 16 +- .../yt_dlp/extractor/regiotv.py | 55 - .../yt_dlp/extractor/rentv.py | 9 +- .../yt_dlp/extractor/restudy.py | 5 +- .../yt_dlp/extractor/reuters.py | 11 +- .../yt_dlp/extractor/reverbnation.py | 8 +- .../youtube_download/yt_dlp/extractor/rice.py | 112 - .../yt_dlp/extractor/ridehome.py | 96 + .../yt_dlp/extractor/rinsefm.py | 89 + .../yt_dlp/extractor/rmcdecouverte.py | 10 +- .../yt_dlp/extractor/rockstargames.py | 11 +- .../yt_dlp/extractor/rokfin.py | 36 +- .../yt_dlp/extractor/roosterteeth.py | 205 +- .../yt_dlp/extractor/rottentomatoes.py | 4 +- .../youtube_download/yt_dlp/extractor/roya.py | 43 + .../yt_dlp/extractor/rozhlas.py | 44 +- .../youtube_download/yt_dlp/extractor/rte.py | 4 +- .../youtube_download/yt_dlp/extractor/rtl2.py | 104 +- .../yt_dlp/extractor/rtlnl.py | 26 +- 
.../yt_dlp/extractor/rtnews.py | 60 +- .../youtube_download/yt_dlp/extractor/rtp.py | 231 +- .../yt_dlp/extractor/rtrfm.py | 4 +- .../youtube_download/yt_dlp/extractor/rts.py | 12 +- .../yt_dlp/extractor/rtvcplay.py | 13 +- .../youtube_download/yt_dlp/extractor/rtve.py | 435 +- .../yt_dlp/extractor/rtvnh.py | 58 - .../youtube_download/yt_dlp/extractor/rtvs.py | 22 +- .../yt_dlp/extractor/rtvslo.py | 187 +- .../yt_dlp/extractor/rudovideo.py | 135 + .../youtube_download/yt_dlp/extractor/ruhd.py | 42 - .../yt_dlp/extractor/rule34video.py | 90 +- .../yt_dlp/extractor/rumble.py | 152 +- .../yt_dlp/extractor/rutube.py | 244 +- .../youtube_download/yt_dlp/extractor/rutv.py | 16 +- .../yt_dlp/extractor/ruutu.py | 18 +- .../youtube_download/yt_dlp/extractor/ruv.py | 8 +- .../youtube_download/yt_dlp/extractor/s4c.py | 55 +- .../yt_dlp/extractor/safari.py | 30 +- .../yt_dlp/extractor/saitosan.py | 9 +- .../yt_dlp/extractor/samplefocus.py | 20 +- .../youtube_download/yt_dlp/extractor/sapo.py | 2 +- .../yt_dlp/extractor/savefrom.py | 30 - .../youtube_download/yt_dlp/extractor/sbs.py | 13 +- .../yt_dlp/extractor/sbscokr.py | 200 + .../yt_dlp/extractor/screencast.py | 15 +- .../yt_dlp/extractor/screencastify.py | 20 +- .../yt_dlp/extractor/screencastomatic.py | 2 +- .../yt_dlp/extractor/screenrec.py | 33 + .../yt_dlp/extractor/scrippsnetworks.py | 22 +- .../yt_dlp/extractor/scrolller.py | 14 +- .../youtube_download/yt_dlp/extractor/scte.py | 10 +- .../yt_dlp/extractor/seeker.py | 55 - .../yt_dlp/extractor/sejmpl.py | 218 + .../youtube_download/yt_dlp/extractor/sen.py | 36 + .../yt_dlp/extractor/senalcolombia.py | 1 + .../yt_dlp/extractor/senategov.py | 234 +- .../yt_dlp/extractor/sendtonews.py | 15 +- .../yt_dlp/extractor/servus.py | 31 +- .../yt_dlp/extractor/sevenplus.py | 7 +- .../youtube_download/yt_dlp/extractor/sexu.py | 3 +- .../yt_dlp/extractor/seznamzpravy.py | 14 +- .../yt_dlp/extractor/shahid.py | 20 +- .../yt_dlp/extractor/shared.py | 138 - .../yt_dlp/extractor/sharepoint.py | 112 + .../yt_dlp/extractor/shemaroome.py | 29 +- .../yt_dlp/extractor/showroomlive.py | 9 +- .../yt_dlp/extractor/sibnet.py | 4 +- .../yt_dlp/extractor/simplecast.py | 6 +- .../youtube_download/yt_dlp/extractor/sina.py | 9 +- .../yt_dlp/extractor/sixplay.py | 13 +- .../youtube_download/yt_dlp/extractor/skeb.py | 26 +- .../youtube_download/yt_dlp/extractor/sky.py | 35 +- .../yt_dlp/extractor/skyit.py | 109 +- .../yt_dlp/extractor/skylinewebcams.py | 3 +- .../yt_dlp/extractor/skynewsarabia.py | 15 +- .../yt_dlp/extractor/skynewsau.py | 12 +- .../yt_dlp/extractor/slideshare.py | 8 +- .../yt_dlp/extractor/slideslive.py | 117 +- .../yt_dlp/extractor/slutload.py | 12 +- .../yt_dlp/extractor/snapchat.py | 76 + .../yt_dlp/extractor/snotr.py | 2 +- .../yt_dlp/extractor/softwhiteunderbelly.py | 87 + .../youtube_download/yt_dlp/extractor/sohu.py | 153 +- .../yt_dlp/extractor/sonyliv.py | 84 +- .../yt_dlp/extractor/soundcloud.py | 498 +- .../yt_dlp/extractor/soundgasm.py | 4 +- .../yt_dlp/extractor/southpark.py | 4 +- .../yt_dlp/extractor/sovietscloset.py | 49 +- .../yt_dlp/extractor/spankbang.py | 17 +- .../yt_dlp/extractor/spankwire.py | 174 - .../yt_dlp/extractor/spiegel.py | 4 +- .../yt_dlp/extractor/sport5.py | 6 +- .../yt_dlp/extractor/sportdeutschland.py | 12 +- .../yt_dlp/extractor/spotify.py | 6 +- .../yt_dlp/extractor/spreaker.py | 126 +- .../yt_dlp/extractor/springboardplatform.py | 11 +- .../yt_dlp/extractor/sproutvideo.py | 198 + .../yt_dlp/extractor/srgssr.py | 15 +- .../yt_dlp/extractor/srmediathek.py | 3 +- 
.../yt_dlp/extractor/stacommu.py | 164 +- .../yt_dlp/extractor/stageplus.py | 23 +- .../yt_dlp/extractor/stanfordoc.py | 18 +- .../yt_dlp/extractor/startrek.py | 5 +- .../yt_dlp/extractor/startv.py | 35 +- .../yt_dlp/extractor/steam.py | 42 +- .../yt_dlp/extractor/stitcher.py | 7 +- .../yt_dlp/extractor/storyfire.py | 14 +- .../yt_dlp/extractor/streaks.py | 236 + .../yt_dlp/extractor/streamable.py | 12 +- .../yt_dlp/extractor/streamcloud.py | 75 - .../yt_dlp/extractor/streamcz.py | 14 +- .../yt_dlp/extractor/streamff.py | 30 - .../yt_dlp/extractor/streetvoice.py | 8 +- .../yt_dlp/extractor/stretchinternet.py | 2 +- .../yt_dlp/extractor/stripchat.py | 29 +- .../youtube_download/yt_dlp/extractor/stv.py | 11 +- .../yt_dlp/extractor/subsplash.py | 199 + .../yt_dlp/extractor/substack.py | 53 +- .../yt_dlp/extractor/sunporno.py | 10 +- .../yt_dlp/extractor/sverigesradio.py | 2 +- .../youtube_download/yt_dlp/extractor/svt.py | 163 +- .../yt_dlp/extractor/swearnet.py | 72 +- .../yt_dlp/extractor/swrmediathek.py | 111 - .../youtube_download/yt_dlp/extractor/syfy.py | 8 +- .../yt_dlp/extractor/syvdk.py | 4 +- .../yt_dlp/extractor/tagesschau.py | 5 +- .../yt_dlp/extractor/taptap.py | 275 + .../youtube_download/yt_dlp/extractor/tass.py | 1 + .../youtube_download/yt_dlp/extractor/tbs.py | 19 +- .../yt_dlp/extractor/tbsjp.py | 150 + .../yt_dlp/extractor/tdslifeway.py | 31 - .../yt_dlp/extractor/teachable.py | 40 +- .../yt_dlp/extractor/teachertube.py | 16 +- .../yt_dlp/extractor/teachingchannel.py | 1 + .../yt_dlp/extractor/teamcoco.py | 4 +- .../yt_dlp/extractor/teamtreehouse.py | 2 +- .../yt_dlp/extractor/techtalks.py | 80 - .../youtube_download/yt_dlp/extractor/ted.py | 27 +- .../yt_dlp/extractor/tele13.py | 6 +- .../yt_dlp/extractor/tele5.py | 133 +- .../yt_dlp/extractor/telecaribe.py | 2 +- .../yt_dlp/extractor/telecinco.py | 143 +- .../yt_dlp/extractor/telegraaf.py | 8 +- .../yt_dlp/extractor/telegram.py | 2 +- .../yt_dlp/extractor/telemb.py | 7 +- .../yt_dlp/extractor/telemundo.py | 6 +- .../yt_dlp/extractor/telequebec.py | 9 +- .../yt_dlp/extractor/teletask.py | 9 +- .../yt_dlp/extractor/telewebion.py | 158 +- .../yt_dlp/extractor/tempo.py | 20 +- .../yt_dlp/extractor/tencent.py | 9 +- .../yt_dlp/extractor/tennistv.py | 14 +- .../yt_dlp/extractor/tenplay.py | 162 +- .../yt_dlp/extractor/testurl.py | 2 +- .../youtube_download/yt_dlp/extractor/tf1.py | 21 +- .../youtube_download/yt_dlp/extractor/tfo.py | 4 +- .../yt_dlp/extractor/theguardian.py | 134 + .../yt_dlp/extractor/theholetv.py | 6 +- .../yt_dlp/extractor/theintercept.py | 11 +- .../yt_dlp/extractor/theplatform.py | 73 +- .../yt_dlp/extractor/thestar.py | 2 +- .../yt_dlp/extractor/theta.py | 90 - .../yt_dlp/extractor/theweatherchannel.py | 28 +- .../yt_dlp/extractor/thisamericanlife.py | 4 +- .../yt_dlp/extractor/thisav.py | 66 - .../yt_dlp/extractor/thisoldhouse.py | 117 +- .../yt_dlp/extractor/thisvid.py | 6 +- .../yt_dlp/extractor/threeqsdn.py | 8 +- .../yt_dlp/extractor/threespeak.py | 28 +- .../yt_dlp/extractor/tiktok.py | 998 ++- .../yt_dlp/extractor/tinypic.py | 54 - .../youtube_download/yt_dlp/extractor/tmz.py | 270 +- .../yt_dlp/extractor/tnaflix.py | 14 +- .../yt_dlp/extractor/toggle.py | 48 +- .../yt_dlp/extractor/tokentube.py | 153 - .../yt_dlp/extractor/tonline.py | 6 +- .../yt_dlp/extractor/toongoggles.py | 2 +- .../yt_dlp/extractor/toutv.py | 2 +- .../yt_dlp/extractor/toypics.py | 13 +- .../yt_dlp/extractor/traileraddict.py | 4 +- .../yt_dlp/extractor/triller.py | 2 +- .../yt_dlp/extractor/trilulilu.py | 100 - 
.../yt_dlp/extractor/trovo.py | 4 +- .../yt_dlp/extractor/trtcocuk.py | 6 +- .../yt_dlp/extractor/trtworld.py | 101 + .../yt_dlp/extractor/trueid.py | 10 +- .../yt_dlp/extractor/truth.py | 1 - .../yt_dlp/extractor/trutv.py | 2 +- .../yt_dlp/extractor/tube8.py | 99 +- .../yt_dlp/extractor/tubetugraz.py | 79 +- .../yt_dlp/extractor/tubitv.py | 163 +- .../yt_dlp/extractor/tumblr.py | 401 +- .../yt_dlp/extractor/tunein.py | 4 +- .../yt_dlp/extractor/tunepk.py | 87 - .../yt_dlp/extractor/turbo.py | 64 - .../yt_dlp/extractor/turner.py | 25 +- .../youtube_download/yt_dlp/extractor/tv2.py | 24 +- .../yt_dlp/extractor/tv24ua.py | 6 +- .../yt_dlp/extractor/tv2dk.py | 76 +- .../yt_dlp/extractor/tv2hu.py | 24 +- .../youtube_download/yt_dlp/extractor/tv4.py | 2 +- .../yt_dlp/extractor/tv5mondeplus.py | 207 +- .../yt_dlp/extractor/tv5unis.py | 12 +- .../youtube_download/yt_dlp/extractor/tva.py | 105 +- .../yt_dlp/extractor/tvanouvelles.py | 6 +- .../youtube_download/yt_dlp/extractor/tvc.py | 2 +- .../youtube_download/yt_dlp/extractor/tver.py | 235 +- .../yt_dlp/extractor/tvigle.py | 8 +- .../yt_dlp/extractor/tviplayer.py | 8 +- .../yt_dlp/extractor/tvn24.py | 9 +- .../yt_dlp/extractor/tvnet.py | 138 - .../yt_dlp/extractor/tvnoe.py | 5 +- .../yt_dlp/extractor/tvnow.py | 639 -- .../youtube_download/yt_dlp/extractor/tvp.py | 39 +- .../yt_dlp/extractor/tvplay.py | 16 +- .../yt_dlp/extractor/tvplayer.py | 9 +- .../youtube_download/yt_dlp/extractor/tvw.py | 165 + .../yt_dlp/extractor/tweakers.py | 6 +- .../yt_dlp/extractor/twentyfourvideo.py | 128 - .../yt_dlp/extractor/twentymin.py | 6 +- .../yt_dlp/extractor/twentythreevideo.py | 4 +- .../yt_dlp/extractor/twitcasting.py | 128 +- .../yt_dlp/extractor/twitch.py | 332 +- .../yt_dlp/extractor/twitter.py | 694 +- .../youtube_download/yt_dlp/extractor/txxx.py | 58 +- .../yt_dlp/extractor/udemy.py | 44 +- .../youtube_download/yt_dlp/extractor/udn.py | 8 +- .../yt_dlp/extractor/ukcolumn.py | 11 +- .../yt_dlp/extractor/uktvplay.py | 2 +- .../yt_dlp/extractor/uliza.py | 113 + .../youtube_download/yt_dlp/extractor/umg.py | 5 +- .../yt_dlp/extractor/unistra.py | 8 +- .../yt_dlp/extractor/unity.py | 3 +- .../yt_dlp/extractor/unscripted.py | 53 - .../yt_dlp/extractor/unsupported.py | 47 +- .../youtube_download/yt_dlp/extractor/uol.py | 16 +- .../yt_dlp/extractor/urort.py | 13 +- .../yt_dlp/extractor/urplay.py | 12 +- .../yt_dlp/extractor/usatoday.py | 7 +- .../yt_dlp/extractor/ustream.py | 39 +- .../yt_dlp/extractor/ustudio.py | 14 +- .../yt_dlp/extractor/utreon.py | 43 +- .../yt_dlp/extractor/varzesh3.py | 1 + .../yt_dlp/extractor/vbox7.py | 82 +- .../yt_dlp/extractor/veehd.py | 116 - .../youtube_download/yt_dlp/extractor/veo.py | 7 +- .../youtube_download/yt_dlp/extractor/veoh.py | 188 - .../yt_dlp/extractor/vesti.py | 7 +- .../youtube_download/yt_dlp/extractor/vevo.py | 45 +- .../youtube_download/yt_dlp/extractor/vgtv.py | 19 +- .../youtube_download/yt_dlp/extractor/vh1.py | 2 +- .../youtube_download/yt_dlp/extractor/vice.py | 28 +- .../yt_dlp/extractor/vidbit.py | 82 - .../yt_dlp/extractor/viddler.py | 6 +- .../yt_dlp/extractor/videa.py | 12 +- .../yt_dlp/extractor/videocampus_sachsen.py | 113 +- .../yt_dlp/extractor/videofyme.py | 7 +- .../yt_dlp/extractor/videoken.py | 17 +- .../yt_dlp/extractor/videomore.py | 13 +- .../yt_dlp/extractor/videopress.py | 6 +- .../yt_dlp/extractor/vidflex.py | 148 + .../yt_dlp/extractor/vidio.py | 24 +- .../yt_dlp/extractor/vidlii.py | 6 +- .../yt_dlp/extractor/vidly.py | 83 + .../yt_dlp/extractor/vidyard.py | 425 + 
.../yt_dlp/extractor/viewlift.py | 61 +- .../yt_dlp/extractor/viidea.py | 23 +- .../youtube_download/yt_dlp/extractor/viki.py | 346 - .../yt_dlp/extractor/vimeo.py | 581 +- .../yt_dlp/extractor/vimple.py | 58 - .../youtube_download/yt_dlp/extractor/vine.py | 151 - .../yt_dlp/extractor/viously.py | 60 + .../yt_dlp/extractor/viqeo.py | 3 +- .../youtube_download/yt_dlp/extractor/viu.py | 47 +- .../youtube_download/yt_dlp/extractor/vk.py | 210 +- .../yt_dlp/extractor/vocaroo.py | 2 +- .../yt_dlp/extractor/vodlocker.py | 73 - .../yt_dlp/extractor/vodplatform.py | 2 +- .../yt_dlp/extractor/voicerepublic.py | 59 - .../yt_dlp/extractor/voicy.py | 21 +- .../yt_dlp/extractor/volejtv.py | 4 +- .../youtube_download/yt_dlp/extractor/voot.py | 210 - .../yt_dlp/extractor/voxmedia.py | 17 +- .../youtube_download/yt_dlp/extractor/vrak.py | 77 - .../yt_dlp/extractor/vrsquare.py | 185 + .../youtube_download/yt_dlp/extractor/vrt.py | 525 +- .../youtube_download/yt_dlp/extractor/vrv.py | 269 - .../yt_dlp/extractor/vshare.py | 57 - .../youtube_download/yt_dlp/extractor/vtm.py | 5 +- .../youtube_download/yt_dlp/extractor/vtv.py | 108 + .../yt_dlp/extractor/vuclip.py | 10 +- .../yt_dlp/extractor/vupload.py | 52 - .../yt_dlp/extractor/vvvvid.py | 99 +- .../yt_dlp/extractor/vyborymos.py | 52 - .../yt_dlp/extractor/vzaar.py | 100 - .../yt_dlp/extractor/wakanim.py | 75 - .../yt_dlp/extractor/walla.py | 6 +- .../yt_dlp/extractor/wasdtv.py | 159 - .../yt_dlp/extractor/washingtonpost.py | 7 +- .../youtube_download/yt_dlp/extractor/wat.py | 17 +- .../yt_dlp/extractor/watchbox.py | 153 - .../yt_dlp/extractor/watchindianporn.py | 65 - .../youtube_download/yt_dlp/extractor/wdr.py | 51 +- .../yt_dlp/extractor/webcamerapl.py | 4 +- .../yt_dlp/extractor/webcaster.py | 2 +- .../yt_dlp/extractor/webofstories.py | 18 +- .../yt_dlp/extractor/weibo.py | 359 +- .../yt_dlp/extractor/weiqitv.py | 1 + .../yt_dlp/extractor/weverse.py | 97 +- .../yt_dlp/extractor/wevidi.py | 14 +- .../yt_dlp/extractor/whowatch.py | 23 +- .../yt_dlp/extractor/wikimedia.py | 4 +- .../yt_dlp/extractor/willow.py | 56 - .../yt_dlp/extractor/wimtv.py | 30 +- .../yt_dlp/extractor/wistia.py | 40 +- .../yt_dlp/extractor/wordpress.py | 16 +- .../yt_dlp/extractor/worldstarhiphop.py | 4 +- .../yt_dlp/extractor/wppilot.py | 16 +- .../yt_dlp/extractor/wrestleuniverse.py | 23 +- .../youtube_download/yt_dlp/extractor/wsj.py | 26 +- .../youtube_download/yt_dlp/extractor/wwe.py | 7 +- .../yt_dlp/extractor/wykop.py | 12 +- .../yt_dlp/extractor/xanimu.py | 19 +- .../youtube_download/yt_dlp/extractor/xbef.py | 42 - .../yt_dlp/extractor/xboxclips.py | 4 +- .../yt_dlp/extractor/xfileshare.py | 198 - .../yt_dlp/extractor/xhamster.py | 69 +- .../yt_dlp/extractor/xiaohongshu.py | 109 + .../yt_dlp/extractor/ximalaya.py | 123 +- .../yt_dlp/extractor/xinpianchang.py | 36 +- .../yt_dlp/extractor/xminus.py | 3 +- .../youtube_download/yt_dlp/extractor/xnxx.py | 4 +- .../yt_dlp/extractor/xstream.py | 9 +- .../yt_dlp/extractor/xtube.py | 214 - .../yt_dlp/extractor/xuite.py | 149 - .../yt_dlp/extractor/xvideos.py | 113 +- .../yt_dlp/extractor/xxxymovies.py | 4 +- .../yt_dlp/extractor/yahoo.py | 22 +- .../yt_dlp/extractor/yandexdisk.py | 8 +- .../yt_dlp/extractor/yandexmusic.py | 75 +- .../yt_dlp/extractor/yandexvideo.py | 174 +- .../yt_dlp/extractor/yapfiles.py | 7 +- .../yt_dlp/extractor/yappy.py | 13 +- .../yt_dlp/extractor/yesjapan.py | 56 - .../yt_dlp/extractor/yinyuetai.py | 52 - .../yt_dlp/extractor/yle_areena.py | 182 +- .../youtube_download/yt_dlp/extractor/ynet.py | 48 - 
.../yt_dlp/extractor/youjizz.py | 2 +- .../yt_dlp/extractor/youku.py | 23 +- .../yt_dlp/extractor/younow.py | 50 +- .../yt_dlp/extractor/youporn.py | 417 +- .../yt_dlp/extractor/yourporn.py | 65 - .../yt_dlp/extractor/yourupload.py | 43 - .../yt_dlp/extractor/youtube.py | 7332 ----------------- .../yt_dlp/extractor/youtube/__init__.py | 50 + .../yt_dlp/extractor/youtube/_base.py | 1102 +++ .../yt_dlp/extractor/youtube/_clip.py | 66 + .../yt_dlp/extractor/youtube/_mistakes.py | 69 + .../extractor/youtube/_notifications.py | 98 + .../yt_dlp/extractor/youtube/_redirect.py | 247 + .../yt_dlp/extractor/youtube/_search.py | 167 + .../yt_dlp/extractor/youtube/_tab.py | 2385 ++++++ .../yt_dlp/extractor/youtube/_video.py | 4106 +++++++++ .../yt_dlp/extractor/zaiko.py | 31 +- .../yt_dlp/extractor/zapiks.py | 8 +- .../yt_dlp/extractor/zattoo.py | 51 +- .../youtube_download/yt_dlp/extractor/zdf.py | 918 ++- .../youtube_download/yt_dlp/extractor/zee5.py | 41 +- .../yt_dlp/extractor/zeenews.py | 8 +- .../yt_dlp/extractor/zenporn.py | 118 + .../yt_dlp/extractor/zetland.py | 71 + .../yt_dlp/extractor/zhihu.py | 4 +- .../yt_dlp/extractor/zingmp3.py | 258 +- .../youtube_download/yt_dlp/extractor/zoom.py | 51 +- .../youtube_download/yt_dlp/extractor/zype.py | 8 +- plugins/youtube_download/yt_dlp/globals.py | 30 + plugins/youtube_download/yt_dlp/jsinterp.py | 128 +- .../yt_dlp/networking/__init__.py | 27 +- .../yt_dlp/networking/_curlcffi.py | 296 + .../yt_dlp/networking/_helper.py | 85 +- .../yt_dlp/networking/_requests.py | 426 + .../yt_dlp/networking/_urllib.py | 110 +- .../yt_dlp/networking/_websockets.py | 189 + .../yt_dlp/networking/common.py | 78 +- .../yt_dlp/networking/exceptions.py | 122 +- .../yt_dlp/networking/impersonate.py | 155 + .../yt_dlp/networking/websocket.py | 23 + plugins/youtube_download/yt_dlp/options.py | 214 +- plugins/youtube_download/yt_dlp/plugins.py | 183 +- .../yt_dlp/postprocessor/__init__.py | 35 +- .../yt_dlp/postprocessor/common.py | 12 +- .../yt_dlp/postprocessor/embedthumbnail.py | 54 +- .../yt_dlp/postprocessor/exec.py | 15 +- .../yt_dlp/postprocessor/ffmpeg.py | 99 +- .../yt_dlp/postprocessor/modify_chapters.py | 2 +- .../postprocessor/movefilesafterdownload.py | 23 +- .../yt_dlp/postprocessor/sponskrub.py | 11 +- .../yt_dlp/postprocessor/sponsorblock.py | 12 +- .../yt_dlp/postprocessor/xattrpp.py | 37 +- plugins/youtube_download/yt_dlp/socks.py | 41 +- plugins/youtube_download/yt_dlp/update.py | 536 +- .../yt_dlp/utils/_deprecated.py | 36 +- .../youtube_download/yt_dlp/utils/_legacy.py | 112 +- .../youtube_download/yt_dlp/utils/_utils.py | 854 +- .../yt_dlp/utils/networking.py | 153 +- .../youtube_download/yt_dlp/utils/progress.py | 109 + .../yt_dlp/utils/traversal.py | 279 +- plugins/youtube_download/yt_dlp/version.py | 8 +- plugins/youtube_download/yt_dlp/webvtt.py | 19 +- src/solarfm/__builtins__.py | 14 + src/solarfm/__main__.py | 9 +- src/solarfm/core/fs_actions/handler_mixin.py | 25 +- src/solarfm/core/mixins/ui/grid_mixin.py | 47 +- src/solarfm/core/ui_mixin.py | 3 +- src/solarfm/core/widgets/icon_grid_widget.py | 2 +- src/solarfm/core/window.py | 7 +- 1194 files changed, 60099 insertions(+), 44436 deletions(-) delete mode 100644 plugins/youtube_download/yt_dlp/casefold.py delete mode 100644 plugins/youtube_download/yt_dlp/compat/functools.py create mode 100644 plugins/youtube_download/yt_dlp/downloader/bunnycdn.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/airmozilla.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/allstar.py 
create mode 100644 plugins/youtube_download/yt_dlp/extractor/altcensored.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/amadeustv.py rename plugins/youtube_download/yt_dlp/extractor/{ant1newsgr.py => antenna.py} (67%) create mode 100644 plugins/youtube_download/yt_dlp/extractor/art19.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/asiancrush.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/asobichannel.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/asobistage.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/atttechchannel.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/axs.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/bandlab.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/beacon.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/biqle.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/bitwave.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/bluesky.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/boosty.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/breakcom.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/brilliantpala.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/bundestag.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/bunnycdn.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/cableav.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/caffeinetv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/camwithher.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/canal1.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/canalsurmas.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/caracoltv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/carambatv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/cbsinteractive.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/channel9.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/chingari.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/chirbit.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/chzzk.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/cinchcast.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/cineverse.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/cliphunter.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/clipsyndicate.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/cloudy.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/cloudycdn.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/crunchyroll.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/daftsex.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/dangalplay.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/deezer.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/defense.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/digg.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/digiview.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/discovery.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/discoverygo.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/dotsub.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/drtalks.py create mode 100644 
plugins/youtube_download/yt_dlp/extractor/duoplay.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/echomsk.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/eggs.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/ehow.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/einthusan.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/elementorembed.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/elevensports.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/ellentube.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/eltrecetv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/engadget.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/epidemicsound.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/eplus.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/erocast.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/err.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/escapist.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/esri.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/expotv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/extremetube.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/fathom.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/filmmodu.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/flextv.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/floatplane.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/fourzerostudio.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/foxgay.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/francaisfacile.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/funimation.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/fusion.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/gamedevtv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/gameinformer.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/gbnews.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/germanupa.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/getcourseru.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/gfycat.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/giga.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/gigya.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/godresource.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/graspop.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/helsinki.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/hitbox.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/howcast.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/howstuffworks.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/ilpost.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/ivoox.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/jable.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/jiocinema.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/jiosaavn.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/joqrag.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/jtbc.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/kanal2.py 
delete mode 100644 plugins/youtube_download/yt_dlp/extractor/karrierevideos.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/keezmovies.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/kenh14.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/kika.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/konserthusetplay.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/kukululive.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/kusi.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/laola1tv.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/laracasts.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/laxarxames.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/learningonscreen.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/linuxacademy.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/lnk.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/lnkgo.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/localnews8.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/loco.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/loom.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/lsm.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/m6.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/maariv.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/magentamusik.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/magentamusik360.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/malltv.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/mbn.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/meta.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/metacafe.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/mgoon.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/miaopai.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/microsoftvirtualacademy.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/mildom.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/ministrygrid.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/miomio.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/mnet.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/moevideo.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/mofosex.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/mojevideo.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/monstercat.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/morningstar.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/movieclips.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/mwave.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/mx3.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/mychannels.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/myvi.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/nest.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/newstube.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/niconicochannelplus.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/ninaprotocol.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/ninenews.py delete mode 100644 
plugins/youtube_download/yt_dlp/extractor/njpwworld.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/normalboots.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/nosvideo.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/nts.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/nuum.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/odatv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/ooyala.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/pandoratv.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/parti.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/people.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/pialive.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/piramidetv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/playfm.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/plays.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/playstuff.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/playvid.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/plvideo.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/pokemon.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/porn91.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/pornbox.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/porncom.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/pornez.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/pornhd.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/radiobremen.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/radiocomercial.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/radioradicale.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/rbmaradio.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/recurbate.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/redge.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/regiotv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/rice.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/ridehome.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/rinsefm.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/roya.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/rtvnh.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/rudovideo.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/ruhd.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/savefrom.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/sbscokr.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/screenrec.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/seeker.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/sejmpl.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/sen.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/shared.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/sharepoint.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/snapchat.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/softwhiteunderbelly.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/spankwire.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/sproutvideo.py create mode 100644 
plugins/youtube_download/yt_dlp/extractor/streaks.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/streamcloud.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/streamff.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/subsplash.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/swrmediathek.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/taptap.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/tbsjp.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/tdslifeway.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/techtalks.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/theguardian.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/theta.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/thisav.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/tinypic.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/tokentube.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/trilulilu.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/trtworld.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/tunepk.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/turbo.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/tvnet.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/tvnow.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/tvw.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/twentyfourvideo.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/uliza.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/unscripted.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/veehd.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/veoh.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vidbit.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/vidflex.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/vidly.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/vidyard.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/viki.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vimple.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vine.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/viously.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vodlocker.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/voicerepublic.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/voot.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vrak.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/vrsquare.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vrv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vshare.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/vtv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vupload.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vyborymos.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/vzaar.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/wakanim.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/wasdtv.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/watchbox.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/watchindianporn.py delete mode 100644 
plugins/youtube_download/yt_dlp/extractor/willow.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/xbef.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/xfileshare.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/xiaohongshu.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/xtube.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/xuite.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/yesjapan.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/yinyuetai.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/ynet.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/yourporn.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/yourupload.py delete mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/__init__.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/_base.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/_clip.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/_mistakes.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/_notifications.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/_redirect.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/_search.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/_tab.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/youtube/_video.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/zenporn.py create mode 100644 plugins/youtube_download/yt_dlp/extractor/zetland.py create mode 100644 plugins/youtube_download/yt_dlp/globals.py create mode 100644 plugins/youtube_download/yt_dlp/networking/_curlcffi.py create mode 100644 plugins/youtube_download/yt_dlp/networking/_requests.py create mode 100644 plugins/youtube_download/yt_dlp/networking/_websockets.py create mode 100644 plugins/youtube_download/yt_dlp/networking/impersonate.py create mode 100644 plugins/youtube_download/yt_dlp/networking/websocket.py create mode 100644 plugins/youtube_download/yt_dlp/utils/progress.py diff --git a/plugins/youtube_download/download.sh b/plugins/youtube_download/download.sh index de8e1ea..af03894 100755 --- a/plugins/youtube_download/download.sh +++ b/plugins/youtube_download/download.sh @@ -8,12 +8,29 @@ function main() { - cd "$(dirname "")" - echo "Working Dir: " $(pwd) + _STARGET="${1}" + _SPATH="${HOME}/.config/solarfm/plugins/youtube_download" LINK=`xclip -selection clipboard -o` - python "${HOME}/.config/solarfm/plugins/youtube_download/yt_dlp/__main__.py" \ - --write-sub --embed-sub --sub-langs en \ - -o "${1}/%(title)s.%(ext)s" "${LINK}" + cd "${_SPATH}" + echo "Working Dir: " $(pwd) + + rm "${_SPATH}/../../cookies.txt" + + # Note: Export cookies to file + python "${_SPATH}/yt_dlp/__main__.py" \ + --cookies-from-browser firefox --cookies "${_SPATH}/../../cookies.txt" + + # Note: Use cookies from browser directly + # python "${_SPATH}/yt_dlp/__main__.py" \ + # --cookies-from-browser firefox --write-sub --embed-sub --sub-langs en \ + # -o "${_STARGET}/%(title)s.%(ext)s" "${LINK}" + + # Note: Download video + python "${_SPATH}/yt_dlp/__main__.py" \ + -f "bestvideo[height<=1080][ext=mp4][vcodec^=avc]+bestaudio[ext=m4a]/best[ext=mp4]/best" \ + --cookies "${_SPATH}/../../cookies.txt" --write-sub --embed-sub --sub-langs en \ + -o "${_STARGET}/%(title)s.%(ext)s" "${LINK}" + } -main "$@"; 
+main "$@"; \ No newline at end of file diff --git a/plugins/youtube_download/yt_dlp/YoutubeDL.py b/plugins/youtube_download/yt_dlp/YoutubeDL.py index 666d89b..63e6e11 100644 --- a/plugins/youtube_download/yt_dlp/YoutubeDL.py +++ b/plugins/youtube_download/yt_dlp/YoutubeDL.py @@ -1,9 +1,10 @@ import collections import contextlib import copy -import datetime +import datetime as dt import errno import fileinput +import functools import http.cookiejar import io import itertools @@ -24,14 +25,23 @@ import traceback import unicodedata from .cache import Cache -from .compat import functools, urllib # isort: split -from .compat import compat_os_name, compat_shlex_quote, urllib_req_to_req -from .cookies import LenientSimpleCookie, load_cookies +from .compat import urllib # isort: split +from .compat import urllib_req_to_req +from .cookies import CookieLoadError, LenientSimpleCookie, load_cookies from .downloader import FFmpegFD, get_suitable_downloader, shorten_protocol_name from .downloader.rtmp import rtmpdump_version -from .extractor import gen_extractor_classes, get_info_extractor +from .extractor import gen_extractor_classes, get_info_extractor, import_extractors from .extractor.common import UnsupportedURLIE from .extractor.openload import PhantomJSwrapper +from .globals import ( + IN_CLI, + LAZY_EXTRACTORS, + plugin_ies, + plugin_ies_overrides, + plugin_pps, + all_plugins_loaded, + plugin_dirs, +) from .minicurses import format_text from .networking import HEADRequest, Request, RequestDirector from .networking.common import _REQUEST_HANDLERS, _RH_PREFERENCES @@ -40,11 +50,10 @@ from .networking.exceptions import ( NoSupportingHandlers, RequestError, SSLError, - _CompatHTTPError, network_exceptions, ) -from .plugins import directories as plugin_directories -from .postprocessor import _PLUGIN_CLASSES as plugin_pps +from .networking.impersonate import ImpersonateRequestHandler +from .plugins import directories as plugin_directories, load_all_plugins from .postprocessor import ( EmbedThumbnailPP, FFmpegFixupDuplicateMoovPP, @@ -60,7 +69,13 @@ from .postprocessor import ( get_postprocessor, ) from .postprocessor.ffmpeg import resolve_mapping as resolve_recode_mapping -from .update import REPOSITORY, current_git_head, detect_variant +from .update import ( + REPOSITORY, + _get_system_deprecation, + _make_label, + current_git_head, + detect_variant, +) from .utils import ( DEFAULT_OUTTMPL, IDENTITY, @@ -94,16 +109,14 @@ from .utils import ( SameFileError, UnavailableVideoError, UserNotLive, + YoutubeDLError, age_restricted, - args_to_str, bug_reports_message, date_from_str, deprecation_warning, determine_ext, determine_protocol, encode_compat_str, - encodeFilename, - error_to_compat_str, escapeHTML, expand_path, extract_basic_auth, @@ -134,11 +147,13 @@ from .utils import ( sanitize_filename, sanitize_path, sanitize_url, + shell_quote, str_or_none, strftime_or_none, subtitles_filename, supports_terminal_sequences, system_identifier, + filesize_from_tbr, timetuple_from_msec, to_high_limit_path, traverse_obj, @@ -146,24 +161,37 @@ from .utils import ( try_get, url_basename, variadic, - version_tuple, windows_enable_vt_mode, write_json_file, write_string, ) -from .utils._utils import _YDLLogger +from .utils._utils import _UnsafeExtensionError, _YDLLogger, _ProgressState from .utils.networking import ( HTTPHeaderDict, clean_headers, clean_proxies, std_headers, ) -from .version import CHANNEL, RELEASE_GIT_HEAD, VARIANT, __version__ +from .version import CHANNEL, ORIGIN, RELEASE_GIT_HEAD, VARIANT, 
__version__ -if compat_os_name == 'nt': +if os.name == 'nt': import ctypes +def _catch_unsafe_extension_error(func): + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + try: + return func(self, *args, **kwargs) + except _UnsafeExtensionError as error: + self.report_error( + f'The extracted extension ({error.extension!r}) is unusual ' + 'and will be skipped for safety reasons. ' + f'If you believe this is an error{bug_reports_message(",")}') + + return wrapper + + class YoutubeDL: """YoutubeDL class. @@ -229,7 +257,7 @@ class YoutubeDL: format_sort_force: Force the given format_sort. see "Sorting Formats" for more details. prefer_free_formats: Whether to prefer video formats with free containers - over non-free ones of same quality. + over non-free ones of the same quality. allow_multiple_video_streams: Allow multiple video streams to be merged into a single file allow_multiple_audio_streams: Allow multiple audio streams to be merged @@ -239,14 +267,16 @@ class YoutubeDL: 'selected' (check selected formats), or None (check only if requested by extractor) paths: Dictionary of output paths. The allowed keys are 'home' - 'temp' and the keys of OUTTMPL_TYPES (in utils.py) + 'temp' and the keys of OUTTMPL_TYPES (in utils/_utils.py) outtmpl: Dictionary of templates for output names. Allowed keys - are 'default' and the keys of OUTTMPL_TYPES (in utils.py). + are 'default' and the keys of OUTTMPL_TYPES (in utils/_utils.py). For compatibility with youtube-dl, a single string can also be used outtmpl_na_placeholder: Placeholder for unavailable meta fields. restrictfilenames: Do not allow "&" and spaces in file names trim_file_name: Limit length of filename (extension excluded) - windowsfilenames: Force the filenames to be windows compatible + windowsfilenames: True: Force filenames to be Windows compatible + False: Sanitize filenames only minimally + This option has no effect when running on Windows ignoreerrors: Do not stop on download/postprocessing errors. Can be 'only_download' to ignore only download errors. Default is 'only_download' for CLI, but False for API @@ -261,9 +291,12 @@ class YoutubeDL: lazy_playlist: Process playlist entries as they are received. matchtitle: Download only matching titles. rejecttitle: Reject downloads for matching titles. - logger: Log messages to a logging.Logger instance. + logger: A class having a `debug`, `warning` and `error` function where + each has a single string parameter, the message to be logged. + For compatibility reasons, both debug and info messages are passed to `debug`. + A debug message will have a prefix of `[debug] ` to discern it from info messages. logtostderr: Print everything to stderr instead of stdout. - consoletitle: Display progress in console window's titlebar. + consoletitle: Display progress in the console window's titlebar. writedescription: Write the video description to a .description file writeinfojson: Write the video description to a .info.json file clean_infojson: Remove internal metadata from the infojson @@ -397,6 +430,8 @@ class YoutubeDL: - "detect_or_warn": check whether we can do anything about it, warn otherwise (default) source_address: Client-side IP address to bind to. + impersonate: Client to impersonate for requests. + An ImpersonateTarget (from yt_dlp.networking.impersonate) sleep_interval_requests: Number of seconds to sleep between requests during extraction sleep_interval: Number of seconds to sleep before each download when @@ -422,13 +457,14 @@ class YoutubeDL: asked whether to download the video. 
- Raise utils.DownloadCancelled(msg) to abort remaining downloads when a video is rejected. - match_filter_func in utils.py is one example for this. + match_filter_func in utils/_utils.py is one example for this. color: A Dictionary with output stream names as keys and their respective color policy as values. Can also just be a single color policy, in which case it applies to all outputs. Valid stream names are 'stdout' and 'stderr'. - Valid color policies are one of 'always', 'auto', 'no_color' or 'never'. + Valid color policies are one of 'always', 'auto', + 'no_color', 'never', 'auto-tty' or 'no_color-tty'. geo_bypass: Bypass geographic restriction via faking X-Forwarded-For HTTP header geo_bypass_country: @@ -444,8 +480,9 @@ class YoutubeDL: Set the value to 'native' to use the native downloader compat_opts: Compatibility options. See "Differences in default behavior". The following options do not work when used through the API: - filename, abort-on-error, multistreams, no-live-chat, format-sort - no-clean-infojson, no-playlist-metafiles, no-keep-subs, no-attach-info-json. + filename, abort-on-error, multistreams, no-live-chat, + format-sort, no-clean-infojson, no-playlist-metafiles, + no-keep-subs, no-attach-info-json, allow-unsafe-ext, prefer-vp9-sort. Refer __init__.py for their implementation progress_template: Dictionary of templates for progress outputs. Allowed keys are 'download', 'postprocess', @@ -471,7 +508,7 @@ class YoutubeDL: nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize, max_filesize, test, noresizebuffer, retries, file_access_retries, fragment_retries, continuedl, xattr_set_filesize, hls_use_mpegts, http_chunk_size, - external_downloader_args, concurrent_fragment_downloads. + external_downloader_args, concurrent_fragment_downloads, progress_delta. The following options are used by the post processors: ffmpeg_location: Location of the ffmpeg/avconv binary; either the path @@ -487,7 +524,7 @@ class YoutubeDL: The following options are used by the extractors: extractor_retries: Number of times to retry for known errors (default: 3) dynamic_mpd: Whether to process dynamic DASH manifests (default: True) - hls_split_discontinuity: Split HLS playlists to different formats at + hls_split_discontinuity: Split HLS playlists into different formats at discontinuities such as ad breaks (default: False) extractor_args: A dictionary of arguments to be passed to the extractors. See "EXTRACTOR ARGUMENTS" for details. @@ -527,7 +564,7 @@ class YoutubeDL: include_ads: - Doesn't work Download ads as well call_home: - Not implemented - Boolean, true iff we are allowed to contact the + Boolean, true if we are allowed to contact the yt-dlp servers for debugging. 
post_hooks: - Register a custom postprocessor A list of functions that get called as the final step @@ -569,15 +606,23 @@ class YoutubeDL: # NB: Keep in sync with the docstring of extractor/common.py 'url', 'manifest_url', 'manifest_stream_number', 'ext', 'format', 'format_id', 'format_note', 'width', 'height', 'aspect_ratio', 'resolution', 'dynamic_range', 'tbr', 'abr', 'acodec', 'asr', 'audio_channels', - 'vbr', 'fps', 'vcodec', 'container', 'filesize', 'filesize_approx', 'rows', 'columns', - 'player_url', 'protocol', 'fragment_base_url', 'fragments', 'is_from_start', + 'vbr', 'fps', 'vcodec', 'container', 'filesize', 'filesize_approx', 'rows', 'columns', 'hls_media_playlist_data', + 'player_url', 'protocol', 'fragment_base_url', 'fragments', 'is_from_start', 'is_dash_periods', 'request_data', 'preference', 'language', 'language_preference', 'quality', 'source_preference', 'cookies', - 'http_headers', 'stretched_ratio', 'no_resume', 'has_drm', 'extra_param_to_segment_url', 'hls_aes', 'downloader_options', - 'page_url', 'app', 'play_path', 'tc_url', 'flash_version', 'rtmp_live', 'rtmp_conn', 'rtmp_protocol', 'rtmp_real_time' + 'http_headers', 'stretched_ratio', 'no_resume', 'has_drm', 'extra_param_to_segment_url', 'extra_param_to_key_url', + 'hls_aes', 'downloader_options', 'page_url', 'app', 'play_path', 'tc_url', 'flash_version', + 'rtmp_live', 'rtmp_conn', 'rtmp_protocol', 'rtmp_real_time', + } + _deprecated_multivalue_fields = { + 'album_artist': 'album_artists', + 'artist': 'artists', + 'composer': 'composers', + 'creator': 'creators', + 'genre': 'genres', } _format_selection_exts = { 'audio': set(MEDIA_EXTENSIONS.common_audio), - 'video': set(MEDIA_EXTENSIONS.common_video + ('3gp', )), + 'video': {*MEDIA_EXTENSIONS.common_video, '3gp'}, 'storyboards': set(MEDIA_EXTENSIONS.storyboards), } @@ -605,13 +650,15 @@ class YoutubeDL: self.cache = Cache(self) self.__header_cookies = [] + # compat for API: load plugins if they have not already + if not all_plugins_loaded.value: + load_all_plugins() + stdout = sys.stderr if self.params.get('logtostderr') else sys.stdout self._out_files = Namespace( out=stdout, error=sys.stderr, screen=sys.stderr if self.params.get('quiet') else stdout, - console=None if compat_os_name == 'nt' else next( - filter(supports_terminal_sequences, (sys.stderr, sys.stdout)), None) ) try: @@ -619,19 +666,28 @@ class YoutubeDL: except Exception as e: self.write_debug(f'Failed to enable VT mode: {e}') + # hehe "immutable" namespace + self._out_files.console = next(filter(supports_terminal_sequences, (sys.stderr, sys.stdout)), None) + if self.params.get('no_color'): if self.params.get('color') is not None: self.params.setdefault('_warnings', []).append( 'Overwriting params from "color" with "no_color"') self.params['color'] = 'no_color' - term_allow_color = os.environ.get('TERM', '').lower() != 'dumb' + term_allow_color = os.getenv('TERM', '').lower() != 'dumb' + base_no_color = bool(os.getenv('NO_COLOR')) def process_color_policy(stream): stream_name = {sys.stdout: 'stdout', sys.stderr: 'stderr'}[stream] - policy = traverse_obj(self.params, ('color', (stream_name, None), {str}), get_all=False) - if policy in ('auto', None): - return term_allow_color and supports_terminal_sequences(stream) + policy = traverse_obj(self.params, ('color', (stream_name, None), {str}, any)) or 'auto' + if policy in ('auto', 'auto-tty', 'no_color-tty'): + no_color = base_no_color + if policy.endswith('tty'): + no_color = policy.startswith('no_color') + if term_allow_color and 
supports_terminal_sequences(stream): + return 'no_color' if no_color else True + return False assert policy in ('always', 'never', 'no_color'), policy return {'always': True, 'never': False}.get(policy, policy) @@ -640,17 +696,9 @@ class YoutubeDL: for name, stream in self._out_files.items_ if name != 'console' }) - # The code is left like this to be reused for future deprecations - MIN_SUPPORTED, MIN_RECOMMENDED = (3, 7), (3, 7) - current_version = sys.version_info[:2] - if current_version < MIN_RECOMMENDED: - msg = ('Support for Python version %d.%d has been deprecated. ' - 'See https://github.com/yt-dlp/yt-dlp/issues/3764 for more details.' - '\n You will no longer receive updates on this version') - if current_version < MIN_SUPPORTED: - msg = 'Python version %d.%d is no longer supported' - self.deprecated_feature( - f'{msg}! Please update to Python %d.%d or above' % (*current_version, *MIN_RECOMMENDED)) + system_deprecation = _get_system_deprecation() + if system_deprecation: + self.deprecated_feature(system_deprecation.replace('\n', '\n ')) if self.params.get('allow_unplayable_formats'): self.report_warning( @@ -667,9 +715,9 @@ class YoutubeDL: width_args = [] if width is None else ['-w', str(width)] sp_kwargs = {'stdin': subprocess.PIPE, 'stdout': slave, 'stderr': self._out_files.error} try: - self._output_process = Popen(['bidiv'] + width_args, **sp_kwargs) + self._output_process = Popen(['bidiv', *width_args], **sp_kwargs) except OSError: - self._output_process = Popen(['fribidi', '-c', 'UTF-8'] + width_args, **sp_kwargs) + self._output_process = Popen(['fribidi', '-c', 'UTF-8', *width_args], **sp_kwargs) self._output_channel = os.fdopen(master, 'rb') except OSError as ose: if ose.errno == errno.ENOENT: @@ -683,7 +731,6 @@ class YoutubeDL: self.params['http_headers'] = HTTPHeaderDict(std_headers, self.params.get('http_headers')) self._load_cookies(self.params['http_headers'].get('Cookie')) # compat self.params['http_headers'].pop('Cookie', None) - self._request_director = self.build_request_director(_REQUEST_HANDLERS.values(), _RH_PREFERENCES) if auto_init and auto_init != 'no_verbose_header': self.print_debug_header() @@ -707,6 +754,13 @@ class YoutubeDL: for msg in self.params.get('_deprecation_warnings', []): self.deprecated_feature(msg) + if impersonate_target := self.params.get('impersonate'): + if not self._impersonate_target_available(impersonate_target): + raise YoutubeDLError( + f'Impersonate target "{impersonate_target}" is not available. ' + f'Use --list-impersonate-targets to see available targets. ' + f'You may be missing dependencies required to support this target.') + if 'list-formats' in self.params['compat_opts']: self.params['listformats_table'] = False @@ -804,8 +858,7 @@ class YoutubeDL: ) self.report_warning( 'Long argument string detected. 
' - 'Use -- to separate parameters and URLs, like this:\n%s' % - args_to_str(correct_argv)) + f'Use -- to separate parameters and URLs, like this:\n{shell_quote(correct_argv)}') def add_info_extractor(self, ie): """Add an InfoExtractor object to the end of the list.""" @@ -904,7 +957,7 @@ class YoutubeDL: if (self.params.get('quiet') if quiet is None else quiet) and not self.params.get('verbose'): return self._write_string( - '%s%s' % (self._bidi_workaround(message), ('' if skip_eol else '\n')), + '{}{}'.format(self._bidi_workaround(message), ('' if skip_eol else '\n')), self._out_files.screen, only_once=only_once) def to_stderr(self, message, only_once=False): @@ -916,21 +969,22 @@ class YoutubeDL: self._write_string(f'{self._bidi_workaround(message)}\n', self._out_files.error, only_once=only_once) def _send_console_code(self, code): - if compat_os_name == 'nt' or not self._out_files.console: - return + if not supports_terminal_sequences(self._out_files.console): + return False self._write_string(code, self._out_files.console) + return True - def to_console_title(self, message): - if not self.params.get('consoletitle', False): + def to_console_title(self, message=None, progress_state=None, percent=None): + if not self.params.get('consoletitle'): return - message = remove_terminal_sequences(message) - if compat_os_name == 'nt': - if ctypes.windll.kernel32.GetConsoleWindow(): - # c_wchar_p() might not be necessary if `message` is - # already of type unicode() - ctypes.windll.kernel32.SetConsoleTitleW(ctypes.c_wchar_p(message)) - else: - self._send_console_code(f'\033]0;{message}\007') + + if message: + success = self._send_console_code(f'\033]0;{remove_terminal_sequences(message)}\007') + if not success and os.name == 'nt' and ctypes.windll.kernel32.GetConsoleWindow(): + ctypes.windll.kernel32.SetConsoleTitleW(message) + + if isinstance(progress_state, _ProgressState): + self._send_console_code(progress_state.get_ansi_escape(percent)) def save_console_title(self): if not self.params.get('consoletitle') or self.params.get('simulate'): @@ -944,6 +998,7 @@ class YoutubeDL: def __enter__(self): self.save_console_title() + self.to_console_title(progress_state=_ProgressState.INDETERMINATE) return self def save_cookies(self): @@ -952,11 +1007,14 @@ class YoutubeDL: def __exit__(self, *args): self.restore_console_title() + self.to_console_title(progress_state=_ProgressState.HIDDEN) self.close() def close(self): self.save_cookies() - self._request_director.close() + if '_request_director' in self.__dict__: + self._request_director.close() + del self._request_director def trouble(self, message=None, tb=None, is_error=True): """Determine action to take when a download problem appears. @@ -1025,10 +1083,10 @@ class YoutubeDL: return self._format_text(self._out_files.error, self._allow_colors.error, *args, **kwargs) def report_warning(self, message, only_once=False): - ''' + """ Print the message to stderr, it will be prefixed with 'WARNING:' If stderr is a tty file the 'WARNING:' will be colored - ''' + """ if self.params.get('logger') is not None: self.params['logger'].warning(message) else: @@ -1046,14 +1104,14 @@ class YoutubeDL: self.to_stderr(f'{self._format_err("Deprecated Feature:", self.Styles.ERROR)} {message}', True) def report_error(self, message, *args, **kwargs): - ''' + """ Do the same as trouble, but prefixes the message with 'ERROR:', colored in red if stderr is a tty file. 
- ''' + """ self.trouble(f'{self._format_err("ERROR:", self.Styles.ERROR)} {message}', *args, **kwargs) def write_debug(self, message, only_once=False): - '''Log debug message or Print message to stderr''' + """Log debug message or Print message to stderr""" if not self.params.get('verbose', False): return message = f'[debug] {message}' @@ -1065,21 +1123,21 @@ class YoutubeDL: def report_file_already_downloaded(self, file_name): """Report file has already been fully downloaded.""" try: - self.to_screen('[download] %s has already been downloaded' % file_name) + self.to_screen(f'[download] {file_name} has already been downloaded') except UnicodeEncodeError: self.to_screen('[download] The file has already been downloaded') def report_file_delete(self, file_name): """Report that existing file will be deleted.""" try: - self.to_screen('Deleting existing file %s' % file_name) + self.to_screen(f'Deleting existing file {file_name}') except UnicodeEncodeError: self.to_screen('Deleting existing file') def raise_no_formats(self, info, forced=False, *, msg=None): has_drm = info.get('_has_drm') ignored, expected = self.params.get('ignore_no_formats_error'), bool(msg) - msg = msg or has_drm and 'This video is DRM protected' or 'No video formats found!' + msg = msg or (has_drm and 'This video is DRM protected') or 'No video formats found!' if forced or not ignored: raise ExtractorError(msg, video_id=info['id'], ie=info['extractor'], expected=has_drm or ignored or expected) @@ -1127,7 +1185,7 @@ class YoutubeDL: @staticmethod def escape_outtmpl(outtmpl): - ''' Escape any remaining strings like %s, %abc% etc. ''' + """ Escape any remaining strings like %s, %abc% etc. """ return re.sub( STR_FORMAT_RE_TMPL.format('', '(?![%(\0])'), lambda mobj: ('' if mobj.group('has_key') else '%') + mobj.group(0), @@ -1135,7 +1193,7 @@ class YoutubeDL: @classmethod def validate_outtmpl(cls, outtmpl): - ''' @return None or Exception object ''' + """ @return None or Exception object """ outtmpl = re.sub( STR_FORMAT_RE_TMPL.format('[^)]*', '[ljhqBUDS]'), lambda mobj: f'{mobj.group(0)[:-1]}s', @@ -1155,8 +1213,7 @@ class YoutubeDL: def prepare_outtmpl(self, outtmpl, info_dict, sanitize=False): """ Make the outtmpl and info_dict suitable for substitution: ydl.escape_outtmpl(outtmpl) % info_dict - @param sanitize Whether to sanitize the output as a filename. - For backward compatibility, a function can also be passed + @param sanitize Whether to sanitize the output as a filename """ info_dict.setdefault('epoch', int(time.time())) # keep epoch consistent once set @@ -1184,16 +1241,17 @@ class YoutubeDL: MATH_FUNCTIONS = { '+': float.__add__, '-': float.__sub__, + '*': float.__mul__, } # Field is of the form key1.key2... # where keys (except first) can be string, int, slice or "{field, ...}" - FIELD_INNER_RE = r'(?:\w+|%(num)s|%(num)s?(?::%(num)s?){1,2})' % {'num': r'(?:-?\d+)'} - FIELD_RE = r'\w*(?:\.(?:%(inner)s|{%(field)s(?:,%(field)s)*}))*' % { + FIELD_INNER_RE = r'(?:\w+|%(num)s|%(num)s?(?::%(num)s?){1,2})' % {'num': r'(?:-?\d+)'} # noqa: UP031 + FIELD_RE = r'\w*(?:\.(?:%(inner)s|{%(field)s(?:,%(field)s)*}))*' % { # noqa: UP031 'inner': FIELD_INNER_RE, - 'field': rf'\w*(?:\.{FIELD_INNER_RE})*' + 'field': rf'\w*(?:\.{FIELD_INNER_RE})*', } MATH_FIELD_RE = rf'(?:{FIELD_RE}|-?{NUMBER_RE})' - MATH_OPERATORS_RE = r'(?:%s)' % '|'.join(map(re.escape, MATH_FUNCTIONS.keys())) + MATH_OPERATORS_RE = r'(?:{})'.format('|'.join(map(re.escape, MATH_FUNCTIONS.keys()))) INTERNAL_FORMAT_RE = re.compile(rf'''(?xs) (?P-)? 
(?P{FIELD_RE}) @@ -1205,6 +1263,15 @@ class YoutubeDL: (?:\|(?P.*?))? )$''') + def _from_user_input(field): + if field == ':': + return ... + elif ':' in field: + return slice(*map(int_or_none, field.split(':'))) + elif int_or_none(field) is not None: + return int(field) + return field + def _traverse_infodict(fields): fields = [f for x in re.split(r'\.({.+?})\.?', fields) for f in ([x] if x.startswith('{') else x.split('.'))] @@ -1214,11 +1281,12 @@ class YoutubeDL: for i, f in enumerate(fields): if not f.startswith('{'): + fields[i] = _from_user_input(f) continue assert f.endswith('}'), f'No closing brace for {f} in {fields}' - fields[i] = {k: k.split('.') for k in f[1:-1].split(',')} + fields[i] = {k: list(map(_from_user_input, k.split('.'))) for k in f[1:-1].split(',')} - return traverse_obj(info_dict, fields, is_user_input=True, traverse_string=True) + return traverse_obj(info_dict, fields, traverse_string=True) def get_value(mdict): # Object traversal @@ -1261,14 +1329,23 @@ class YoutubeDL: na = self.params.get('outtmpl_na_placeholder', 'NA') - def filename_sanitizer(key, value, restricted=self.params.get('restrictfilenames')): + def filename_sanitizer(key, value, restricted): return sanitize_filename(str(value), restricted=restricted, is_id=( bool(re.search(r'(^|[_.])id(\.|$)', key)) if 'filename-sanitization' in self.params['compat_opts'] else NO_DEFAULT)) - sanitizer = sanitize if callable(sanitize) else filename_sanitizer - sanitize = bool(sanitize) + if callable(sanitize): + self.deprecation_warning('Passing a callable "sanitize" to YoutubeDL.prepare_outtmpl is deprecated') + elif not sanitize: + pass + elif (sys.platform != 'win32' and not self.params.get('restrictfilenames') + and self.params.get('windowsfilenames') is False): + def sanitize(key, value): + return str(value).replace('/', '\u29F8').replace('\0', '') + else: + def sanitize(key, value): + return filename_sanitizer(key, value, restricted=self.params.get('restrictfilenames')) def _dumpjson_default(obj): if isinstance(obj, (set, LazyList)): @@ -1306,7 +1383,7 @@ class YoutubeDL: value, default = None, na fmt = outer_mobj.group('format') - if fmt == 's' and last_field in field_size_compat_map.keys() and isinstance(value, int): + if fmt == 's' and last_field in field_size_compat_map and isinstance(value, int): fmt = f'0{field_size_compat_map[last_field]:d}d' flags = outer_mobj.group('conversion') or '' @@ -1324,14 +1401,14 @@ class YoutubeDL: value, fmt = escapeHTML(str(value)), str_fmt elif fmt[-1] == 'q': # quoted value = map(str, variadic(value) if '#' in flags else [value]) - value, fmt = ' '.join(map(compat_shlex_quote, value)), str_fmt + value, fmt = shell_quote(value, shell=True), str_fmt elif fmt[-1] == 'B': # bytes value = f'%{str_fmt}'.encode() % str(value).encode() value, fmt = value.decode('utf-8', 'ignore'), 's' elif fmt[-1] == 'U': # unicode normalized value, fmt = unicodedata.normalize( # "+" = compatibility equivalence, "#" = NFD - 'NF%s%s' % ('K' if '+' in flags else '', 'D' if '#' in flags else 'C'), + 'NF{}{}'.format('K' if '+' in flags else '', 'D' if '#' in flags else 'C'), value), str_fmt elif fmt[-1] == 'D': # decimal suffix num_fmt, fmt = fmt[:-1].replace('#', ''), 's' @@ -1351,15 +1428,15 @@ class YoutubeDL: if sanitize: # If value is an object, sanitize might convert it to a string - # So we convert it to repr first + # So we manually convert it before sanitizing if fmt[-1] == 'r': value, fmt = repr(value), str_fmt elif fmt[-1] == 'a': value, fmt = ascii(value), str_fmt if fmt[-1] in 'csra': 
- value = sanitizer(last_field, value) + value = sanitize(last_field, value) - key = '%s\0%s' % (key.replace('%', '%\0'), outer_mobj.group('format')) + key = '{}\0{}'.format(key.replace('%', '%\0'), outer_mobj.group('format')) TMPL_DICT[key] = value return '{prefix}%({key}){fmt}'.format(key=key, fmt=fmt, prefix=outer_mobj.group('prefix')) @@ -1369,6 +1446,7 @@ class YoutubeDL: outtmpl, info_dict = self.prepare_outtmpl(outtmpl, info_dict, *args, **kwargs) return self.escape_outtmpl(outtmpl) % info_dict + @_catch_unsafe_extension_error def _prepare_filename(self, info_dict, *, outtmpl=None, tmpl_type=None): assert None in (outtmpl, tmpl_type), 'outtmpl and tmpl_type are mutually exclusive' if outtmpl is None: @@ -1448,9 +1526,9 @@ class YoutubeDL: date = info_dict.get('upload_date') if date is not None: - dateRange = self.params.get('daterange', DateRange()) - if date not in dateRange: - return f'{date_from_str(date).isoformat()} upload date is not in range {dateRange}' + date_range = self.params.get('daterange', DateRange()) + if date not in date_range: + return f'{date_from_str(date).isoformat()} upload date is not in range {date_range}' view_count = info_dict.get('view_count') if view_count is not None: min_views = self.params.get('min_views') @@ -1460,7 +1538,7 @@ class YoutubeDL: if max_views is not None and view_count > max_views: return 'Skipping %s, because it has exceeded the maximum view count (%d/%d)' % (video_title, view_count, max_views) if age_restricted(info_dict.get('age_limit'), self.params.get('age_limit')): - return 'Skipping "%s" because it is age restricted' % video_title + return f'Skipping "{video_title}" because it is age restricted' match_filter = self.params.get('match_filter') if match_filter is None: @@ -1513,7 +1591,7 @@ class YoutubeDL: @staticmethod def add_extra_info(info_dict, extra_info): - '''Set the keys from extra_info in info dict if they are missing''' + """Set the keys from extra_info in info dict if they are missing""" for key, value in extra_info.items(): info_dict.setdefault(key, value) @@ -1559,7 +1637,7 @@ class YoutubeDL: self.to_screen(f'[download] {self._format_screen(temp_id, self.Styles.ID)}: ' 'has already been recorded in the archive') if self.params.get('break_on_existing', False): - raise ExistingVideoReached() + raise ExistingVideoReached break return self.__extract_info(url, self.get_info_extractor(key), download, extra_info, process) else: @@ -1573,7 +1651,7 @@ class YoutubeDL: while True: try: return func(self, *args, **kwargs) - except (DownloadCancelled, LazyList.IndexError, PagedList.IndexError): + except (CookieLoadError, DownloadCancelled, LazyList.IndexError, PagedList.IndexError): raise except ReExtractInfo as e: if e.expected: @@ -1585,8 +1663,8 @@ class YoutubeDL: except GeoRestrictedError as e: msg = e.msg if e.countries: - msg += '\nThis video is available in %s.' % ', '.join( - map(ISO3166Utils.short2full, e.countries)) + msg += '\nThis video is available in {}.'.format(', '.join( + map(ISO3166Utils.short2full, e.countries))) msg += '\nYou might want to use a VPN or a proxy server (with --proxy) to workaround.' 
self.report_error(msg) except ExtractorError as e: # An error we somewhat expected @@ -1795,8 +1873,8 @@ class YoutubeDL: if isinstance(additional_urls, str): additional_urls = [additional_urls] self.to_screen( - '[info] %s: %d additional URL(s) requested' % (ie_result['id'], len(additional_urls))) - self.write_debug('Additional URLs: "%s"' % '", "'.join(additional_urls)) + '[info] {}: {} additional URL(s) requested'.format(ie_result['id'], len(additional_urls))) + self.write_debug('Additional URLs: "{}"'.format('", "'.join(additional_urls))) ie_result['additional_entries'] = [ self.extract_info( url, download, extra_info=extra_info, @@ -1848,8 +1926,8 @@ class YoutubeDL: webpage_url = ie_result.get('webpage_url') # Playlists maynot have webpage_url if webpage_url and webpage_url in self._playlist_urls: self.to_screen( - '[download] Skipping already downloaded playlist: %s' - % ie_result.get('title') or ie_result.get('id')) + '[download] Skipping already downloaded playlist: {}'.format( + ie_result.get('title')) or ie_result.get('id')) return self._playlist_level += 1 @@ -1864,8 +1942,8 @@ class YoutubeDL: self._playlist_urls.clear() elif result_type == 'compat_list': self.report_warning( - 'Extractor %s returned a compat_list result. ' - 'It needs to be updated.' % ie_result.get('extractor')) + 'Extractor {} returned a compat_list result. ' + 'It needs to be updated.'.format(ie_result.get('extractor'))) def _fixup(r): self.add_extra_info(r, { @@ -1882,7 +1960,7 @@ class YoutubeDL: ] return ie_result else: - raise Exception('Invalid result type: %s' % result_type) + raise Exception(f'Invalid result type: {result_type}') def _ensure_dir_exists(self, path): return make_dir(path, self.report_error) @@ -1896,6 +1974,9 @@ class YoutubeDL: 'playlist_title': ie_result.get('title'), 'playlist_uploader': ie_result.get('uploader'), 'playlist_uploader_id': ie_result.get('uploader_id'), + 'playlist_channel': ie_result.get('channel'), + 'playlist_channel_id': ie_result.get('channel_id'), + 'playlist_webpage_url': ie_result.get('webpage_url'), **kwargs, } if strict: @@ -1998,8 +2079,9 @@ class YoutubeDL: resolved_entries[i] = (playlist_index, NO_DEFAULT) continue - self.to_screen('[download] Downloading item %s of %s' % ( - self._format_screen(i + 1, self.Styles.ID), self._format_screen(n_entries, self.Styles.EMPHASIS))) + self.to_screen( + f'[download] Downloading item {self._format_screen(i + 1, self.Styles.ID)} ' + f'of {self._format_screen(n_entries, self.Styles.EMPHASIS)}') entry_result = self.__process_iterable_entry(entry, download, collections.ChainMap({ 'playlist_index': playlist_index, @@ -2049,20 +2131,20 @@ class YoutubeDL: } operator_rex = re.compile(r'''(?x)\s* (?P[\w.-]+)\s* - (?P%s)(?P\s*\?)?\s* + (?P{})(?P\s*\?)?\s* (?P[0-9.]+(?:[kKmMgGtTpPeEzZyY]i?[Bb]?)?)\s* - ''' % '|'.join(map(re.escape, OPERATORS.keys()))) + '''.format('|'.join(map(re.escape, OPERATORS.keys())))) m = operator_rex.fullmatch(filter_spec) if m: try: - comparison_value = int(m.group('value')) + comparison_value = float(m.group('value')) except ValueError: comparison_value = parse_filesize(m.group('value')) if comparison_value is None: comparison_value = parse_filesize(m.group('value') + 'B') if comparison_value is None: raise ValueError( - 'Invalid value %r in format specification %r' % ( + 'Invalid value {!r} in format specification {!r}'.format( m.group('value'), filter_spec)) op = OPERATORS[m.group('op')] @@ -2072,15 +2154,15 @@ class YoutubeDL: '^=': lambda attr, value: attr.startswith(value), '$=': lambda attr, 
value: attr.endswith(value), '*=': lambda attr, value: value in attr, - '~=': lambda attr, value: value.search(attr) is not None + '~=': lambda attr, value: value.search(attr) is not None, } str_operator_rex = re.compile(r'''(?x)\s* (?P[a-zA-Z0-9._-]+)\s* - (?P!\s*)?(?P%s)\s*(?P\?\s*)? + (?P!\s*)?(?P{})\s*(?P\?\s*)? (?P["'])? (?P(?(quote)(?:(?!(?P=quote))[^\\]|\\.)+|[\w.-]+)) (?(quote)(?P=quote))\s* - ''' % '|'.join(map(re.escape, STR_OPERATORS.keys()))) + '''.format('|'.join(map(re.escape, STR_OPERATORS.keys())))) m = str_operator_rex.fullmatch(filter_spec) if m: if m.group('op') == '~=': @@ -2094,7 +2176,7 @@ class YoutubeDL: op = str_op if not m: - raise SyntaxError('Invalid filter specification %r' % filter_spec) + raise SyntaxError(f'Invalid filter specification {filter_spec!r}') def _filter(f): actual_value = f.get(m.group('key')) @@ -2105,7 +2187,12 @@ class YoutubeDL: def _check_formats(self, formats): for f in formats: - self.to_screen('[info] Testing format %s' % f['format_id']) + working = f.get('__working') + if working is not None: + if working: + yield f + continue + self.to_screen('[info] Testing format {}'.format(f['format_id'])) path = self.get_output_path('temp') if not self._ensure_dir_exists(f'{path}/'): continue @@ -2113,41 +2200,51 @@ class YoutubeDL: temp_file.close() try: success, _ = self.dl(temp_file.name, f, test=True) - except (DownloadError, OSError, ValueError) + network_exceptions: + except (DownloadError, OSError, ValueError, *network_exceptions): success = False finally: if os.path.exists(temp_file.name): try: os.remove(temp_file.name) except OSError: - self.report_warning('Unable to delete temporary file "%s"' % temp_file.name) + self.report_warning(f'Unable to delete temporary file "{temp_file.name}"') + f['__working'] = success if success: yield f else: - self.to_screen('[info] Unable to download format %s. Skipping...' % f['format_id']) + self.to_screen('[info] Unable to download format {}. Skipping...'.format(f['format_id'])) - def _default_format_spec(self, info_dict, download=True): + def _select_formats(self, formats, selector): + return list(selector({ + 'formats': formats, + 'has_merged_format': any('none' not in (f.get('acodec'), f.get('vcodec')) for f in formats), + 'incomplete_formats': (all(f.get('vcodec') == 'none' for f in formats) # No formats with video + or all(f.get('acodec') == 'none' for f in formats)), # OR, No formats with audio + })) + + def _default_format_spec(self, info_dict): + prefer_best = ( + self.params['outtmpl']['default'] == '-' + or (info_dict.get('is_live') and not self.params.get('live_from_start'))) def can_merge(): merger = FFmpegMergerPP(self) return merger.available and merger.can_merge() - prefer_best = ( - not self.params.get('simulate') - and download - and ( - not can_merge() - or info_dict.get('is_live') and not self.params.get('live_from_start') - or self.params['outtmpl']['default'] == '-')) - compat = ( - prefer_best - or self.params.get('allow_multiple_audio_streams', False) - or 'format-spec' in self.params['compat_opts']) + if not prefer_best and not can_merge(): + prefer_best = True + formats = self._get_formats(info_dict) + evaluate_formats = lambda spec: self._select_formats(formats, self.build_format_selector(spec)) + if evaluate_formats('b/bv+ba') != evaluate_formats('bv*+ba/b'): + self.report_warning('ffmpeg not found. The downloaded format may not be the best available. 
' + 'Installing ffmpeg is strongly recommended: https://github.com/yt-dlp/yt-dlp#dependencies') - return ( - 'best/bestvideo+bestaudio' if prefer_best - else 'bestvideo*+bestaudio/best' if not compat - else 'bestvideo+bestaudio/best') + compat = (self.params.get('allow_multiple_audio_streams') + or 'format-spec' in self.params['compat_opts']) + + return ('best/bestvideo+bestaudio' if prefer_best + else 'bestvideo+bestaudio/best' if compat + else 'bestvideo*+bestaudio/best') def build_format_selector(self, format_spec): def syntax_error(note, start): @@ -2167,8 +2264,8 @@ class YoutubeDL: def _parse_filter(tokens): filter_parts = [] - for type, string_, start, _, _ in tokens: - if type == tokenize.OP and string_ == ']': + for type_, string_, _start, _, _ in tokens: + if type_ == tokenize.OP and string_ == ']': return ''.join(filter_parts) else: filter_parts.append(string_) @@ -2178,23 +2275,23 @@ class YoutubeDL: # E.g. 'mp4' '-' 'baseline' '-' '16x9' is converted to 'mp4-baseline-16x9' ALLOWED_OPS = ('/', '+', ',', '(', ')') last_string, last_start, last_end, last_line = None, None, None, None - for type, string_, start, end, line in tokens: - if type == tokenize.OP and string_ == '[': + for type_, string_, start, end, line in tokens: + if type_ == tokenize.OP and string_ == '[': if last_string: yield tokenize.NAME, last_string, last_start, last_end, last_line last_string = None - yield type, string_, start, end, line + yield type_, string_, start, end, line # everything inside brackets will be handled by _parse_filter - for type, string_, start, end, line in tokens: - yield type, string_, start, end, line - if type == tokenize.OP and string_ == ']': + for type_, string_, start, end, line in tokens: + yield type_, string_, start, end, line + if type_ == tokenize.OP and string_ == ']': break - elif type == tokenize.OP and string_ in ALLOWED_OPS: + elif type_ == tokenize.OP and string_ in ALLOWED_OPS: if last_string: yield tokenize.NAME, last_string, last_start, last_end, last_line last_string = None - yield type, string_, start, end, line - elif type in [tokenize.NAME, tokenize.NUMBER, tokenize.OP]: + yield type_, string_, start, end, line + elif type_ in [tokenize.NAME, tokenize.NUMBER, tokenize.OP]: if not last_string: last_string = string_ last_start = start @@ -2207,13 +2304,13 @@ class YoutubeDL: def _parse_format_selection(tokens, inside_merge=False, inside_choice=False, inside_group=False): selectors = [] current_selector = None - for type, string_, start, _, _ in tokens: - # ENCODING is only defined in python 3.x - if type == getattr(tokenize, 'ENCODING', None): + for type_, string_, start, _, _ in tokens: + # ENCODING is only defined in Python 3.x + if type_ == getattr(tokenize, 'ENCODING', None): continue - elif type in [tokenize.NAME, tokenize.NUMBER]: + elif type_ in [tokenize.NAME, tokenize.NUMBER]: current_selector = FormatSelector(SINGLE, string_, []) - elif type == tokenize.OP: + elif type_ == tokenize.OP: if string_ == ')': if not inside_group: # ')' will be handled by the parentheses group @@ -2256,7 +2353,7 @@ class YoutubeDL: current_selector = FormatSelector(MERGE, (selector_1, selector_2), []) else: raise syntax_error(f'Operator not recognized: "{string_}"', start) - elif type == tokenize.ENDMARKER: + elif type_ == tokenize.ENDMARKER: break if current_selector: selectors.append(current_selector) @@ -2297,7 +2394,7 @@ class YoutubeDL: vexts=[f['ext'] for f in video_fmts], aexts=[f['ext'] for f in audio_fmts], preferences=(try_call(lambda: 
self.params['merge_output_format'].split('/')) - or self.params.get('prefer_free_formats') and ('webm', 'mkv'))) + or (self.params.get('prefer_free_formats') and ('webm', 'mkv')))) filtered = lambda *keys: filter(None, (traverse_obj(fmt, *keys) for fmt in formats_info)) @@ -2331,7 +2428,7 @@ class YoutubeDL: 'acodec': the_only_audio.get('acodec'), 'abr': the_only_audio.get('abr'), 'asr': the_only_audio.get('asr'), - 'audio_channels': the_only_audio.get('audio_channels') + 'audio_channels': the_only_audio.get('audio_channels'), }) return new_dict @@ -2346,7 +2443,7 @@ class YoutubeDL: return for f in formats: - if f.get('has_drm'): + if f.get('has_drm') or f.get('__needs_testing'): yield from self._check_formats([f]) else: yield f @@ -2412,9 +2509,9 @@ class YoutubeDL: format_fallback = not format_type and not format_modified # for b, w _filter_f = ( - (lambda f: f.get('%scodec' % format_type) != 'none') + (lambda f: f.get(f'{format_type}codec') != 'none') if format_type and format_modified # bv*, ba*, wv*, wa* - else (lambda f: f.get('%scodec' % not_format_type) == 'none') + else (lambda f: f.get(f'{not_format_type}codec') == 'none') if format_type # bv, ba, wv, wa else (lambda f: f.get('vcodec') != 'none' and f.get('acodec') != 'none') if not format_modified # b, w @@ -2440,7 +2537,7 @@ class YoutubeDL: # for extractors with incomplete formats (audio only (soundcloud) # or video only (imgur)) best/worst will fallback to # best/worst {video,audio}-only format - matches = formats + matches = list(filter(lambda f: f.get('vcodec') != 'none' or f.get('acodec') != 'none', formats)) elif seperate_fallback and not ctx['has_merged_format']: # for compatibility with youtube-dl when there is no pre-merged format matches = list(filter(seperate_fallback, formats)) @@ -2459,9 +2556,16 @@ class YoutubeDL: return selector_function(ctx_copy) return final_selector - stream = io.BytesIO(format_spec.encode()) + # HACK: Python 3.12 changed the underlying parser, rendering '7_a' invalid + # Prefix numbers with random letters to avoid it being classified as a number + # See: https://github.com/yt-dlp/yt-dlp/pulls/8797 + # TODO: Implement parser not reliant on tokenize.tokenize + prefix = ''.join(random.choices(string.ascii_letters, k=32)) + stream = io.BytesIO(re.sub(r'\d[_\d]*', rf'{prefix}\g<0>', format_spec).encode()) try: - tokens = list(_remove_unused_ops(tokenize.tokenize(stream.readline))) + tokens = list(_remove_unused_ops( + token._replace(string=token.string.replace(prefix, '')) + for token in tokenize.tokenize(stream.readline))) except tokenize.TokenError: raise syntax_error('Missing closing/opening brackets or parenthesis', (0, len(format_spec))) @@ -2475,7 +2579,7 @@ class YoutubeDL: def __next__(self): if self.counter >= len(self.tokens): - raise StopIteration() + raise StopIteration value = self.tokens[self.counter] self.counter += 1 return value @@ -2558,7 +2662,7 @@ class YoutubeDL: self._sort_thumbnails(thumbnails) for i, t in enumerate(thumbnails): if t.get('id') is None: - t['id'] = '%d' % i + t['id'] = str(i) if t.get('width') and t.get('height'): t['resolution'] = '%dx%d' % (t['width'], t['height']) t['url'] = sanitize_url(t['url']) @@ -2591,9 +2695,12 @@ class YoutubeDL: # Working around out-of-range timestamp values (e.g. 
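The tokenize HACK in the hunk above masks every number in the format spec with a random letter prefix so the Python 3.12 tokenizer accepts ids such as `7_a`, then strips the prefix from the resulting tokens. A runnable sketch of the same trick (the spec `248+251/7_a` is only an example input):

import io
import random
import re
import string
import tokenize

spec = '248+251/7_a'
prefix = ''.join(random.choices(string.ascii_letters, k=32))
masked = re.sub(r'\d[_\d]*', rf'{prefix}\g<0>', spec)     # numbers become NAME tokens

tokens = [
    tok._replace(string=tok.string.replace(prefix, ''))   # strip the mask again
    for tok in tokenize.tokenize(io.BytesIO(masked.encode()).readline)
]
print([t.string for t in tokens if t.type in (tokenize.NAME, tokenize.NUMBER, tokenize.OP)])
# -> ['248', '+', '251', '/', '7_a']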
negative ones on Windows, # see http://bugs.python.org/issue1646728) with contextlib.suppress(ValueError, OverflowError, OSError): - upload_date = datetime.datetime.utcfromtimestamp(info_dict[ts_key]) + upload_date = dt.datetime.fromtimestamp(info_dict[ts_key], dt.timezone.utc) info_dict[date_key] = upload_date.strftime('%Y%m%d') + if not info_dict.get('release_year'): + info_dict['release_year'] = traverse_obj(info_dict, ('release_date', {lambda x: int(x[:4])})) + live_keys = ('is_live', 'was_live') live_status = info_dict.get('live_status') if live_status is None: @@ -2616,8 +2723,17 @@ class YoutubeDL: # Auto generate title fields corresponding to the *_number fields when missing # in order to always have clean titles. This is very common for TV series. for field in ('chapter', 'season', 'episode'): - if final and info_dict.get('%s_number' % field) is not None and not info_dict.get(field): - info_dict[field] = '%s %d' % (field.capitalize(), info_dict['%s_number' % field]) + if final and info_dict.get(f'{field}_number') is not None and not info_dict.get(field): + info_dict[field] = '%s %d' % (field.capitalize(), info_dict[f'{field}_number']) + + for old_key, new_key in self._deprecated_multivalue_fields.items(): + if new_key in info_dict and old_key in info_dict: + if '_version' not in info_dict: # HACK: Do not warn when using --load-info-json + self.deprecation_warning(f'Do not return {old_key!r} when {new_key!r} is present') + elif old_value := info_dict.get(old_key): + info_dict[new_key] = old_value.split(', ') + elif new_value := info_dict.get(new_key): + info_dict[old_key] = ', '.join(v.replace(',', '\N{FULLWIDTH COMMA}') for v in new_value) def _raise_pending_errors(self, info): err = info.pop('__pending_error', None) @@ -2640,8 +2756,8 @@ class YoutubeDL: def report_force_conversion(field, field_not, conversion): self.report_warning( - '"%s" field is not %s - forcing %s conversion, there is an error in extractor' - % (field, field_not, conversion)) + f'"{field}" field is not {field_not} - forcing {conversion} conversion, ' + 'there is an error in extractor') def sanitize_string_field(info, string_field): field = info.get(string_field) @@ -2733,7 +2849,7 @@ class YoutubeDL: get_from_start = not info_dict.get('is_live') or bool(self.params.get('live_from_start')) if not get_from_start: - info_dict['title'] += ' ' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M') + info_dict['title'] += ' ' + dt.datetime.now().strftime('%Y-%m-%d %H:%M') if info_dict.get('is_live') and formats: formats = [f for f in formats if bool(f.get('is_from_start')) == get_from_start] if get_from_start and not formats: @@ -2758,25 +2874,25 @@ class YoutubeDL: if not formats: self.raise_no_formats(info_dict) - for format in formats: - sanitize_string_field(format, 'format_id') - sanitize_numeric_fields(format) - format['url'] = sanitize_url(format['url']) - if format.get('ext') is None: - format['ext'] = determine_ext(format['url']).lower() - if format.get('protocol') is None: - format['protocol'] = determine_protocol(format) - if format.get('resolution') is None: - format['resolution'] = self.format_resolution(format, default=None) - if format.get('dynamic_range') is None and format.get('vcodec') != 'none': - format['dynamic_range'] = 'SDR' - if format.get('aspect_ratio') is None: - format['aspect_ratio'] = try_call(lambda: round(format['width'] / format['height'], 2)) - if (not format.get('manifest_url') # For fragmented formats, "tbr" is often max bitrate and not average - and info_dict.get('duration') and 
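The deprecated multi-value handling above bridges old comma-joined string fields and their new list-valued counterparts, escaping embedded commas with a fullwidth comma when joining back. A rough sketch, assuming `artist`/`artists` is one such pair (the mapping below is an illustrative subset, and the real code also warns when both keys are returned):

DEPRECATED_MULTIVALUE_FIELDS = {'artist': 'artists'}   # illustrative subset

def bridge_multivalue_fields(info):
    for old_key, new_key in DEPRECATED_MULTIVALUE_FIELDS.items():
        if old_value := info.get(old_key):
            # legacy comma-joined string -> list form
            info.setdefault(new_key, old_value.split(', '))
        elif new_value := info.get(new_key):
            # list form -> legacy string, escaping literal commas
            info[old_key] = ', '.join(v.replace(',', '\N{FULLWIDTH COMMA}') for v in new_value)

info = {'artists': ['AC/DC', 'Foo, Bar']}
bridge_multivalue_fields(info)
print(info['artist'])   # 'AC/DC, Foo， Bar'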
format.get('tbr') - and not format.get('filesize') and not format.get('filesize_approx')): - format['filesize_approx'] = int(info_dict['duration'] * format['tbr'] * (1024 / 8)) - format['http_headers'] = self._calc_headers(collections.ChainMap(format, info_dict), load_cookies=True) + for fmt in formats: + sanitize_string_field(fmt, 'format_id') + sanitize_numeric_fields(fmt) + fmt['url'] = sanitize_url(fmt['url']) + FormatSorter._fill_sorting_fields(fmt) + if fmt['ext'] in ('aac', 'opus', 'mp3', 'flac', 'vorbis'): + if fmt.get('acodec') is None: + fmt['acodec'] = fmt['ext'] + if fmt.get('resolution') is None: + fmt['resolution'] = self.format_resolution(fmt, default=None) + if fmt.get('dynamic_range') is None and fmt.get('vcodec') != 'none': + fmt['dynamic_range'] = 'SDR' + if fmt.get('aspect_ratio') is None: + fmt['aspect_ratio'] = try_call(lambda: round(fmt['width'] / fmt['height'], 2)) + # For fragmented formats, "tbr" is often max bitrate and not average + if (('manifest-filesize-approx' in self.params['compat_opts'] or not fmt.get('manifest_url')) + and not fmt.get('filesize') and not fmt.get('filesize_approx')): + fmt['filesize_approx'] = filesize_from_tbr(fmt.get('tbr'), info_dict.get('duration')) + fmt['http_headers'] = self._calc_headers(collections.ChainMap(fmt, info_dict), load_cookies=True) # Safeguard against old/insecure infojson when using --load-info-json if info_dict.get('http_headers'): @@ -2789,36 +2905,36 @@ class YoutubeDL: self.sort_formats({ 'formats': formats, - '_format_sort_fields': info_dict.get('_format_sort_fields') + '_format_sort_fields': info_dict.get('_format_sort_fields'), }) # Sanitize and group by format_id formats_dict = {} - for i, format in enumerate(formats): - if not format.get('format_id'): - format['format_id'] = str(i) + for i, fmt in enumerate(formats): + if not fmt.get('format_id'): + fmt['format_id'] = str(i) else: # Sanitize format_id from characters used in format selector expression - format['format_id'] = re.sub(r'[\s,/+\[\]()]', '_', format['format_id']) - formats_dict.setdefault(format['format_id'], []).append(format) + fmt['format_id'] = re.sub(r'[\s,/+\[\]()]', '_', fmt['format_id']) + formats_dict.setdefault(fmt['format_id'], []).append(fmt) # Make sure all formats have unique format_id common_exts = set(itertools.chain(*self._format_selection_exts.values())) for format_id, ambiguous_formats in formats_dict.items(): ambigious_id = len(ambiguous_formats) > 1 - for i, format in enumerate(ambiguous_formats): + for i, fmt in enumerate(ambiguous_formats): if ambigious_id: - format['format_id'] = '%s-%d' % (format_id, i) + fmt['format_id'] = f'{format_id}-{i}' # Ensure there is no conflict between id and ext in format selection # See https://github.com/yt-dlp/yt-dlp/issues/1282 - if format['format_id'] != format['ext'] and format['format_id'] in common_exts: - format['format_id'] = 'f%s' % format['format_id'] + if fmt['format_id'] != fmt['ext'] and fmt['format_id'] in common_exts: + fmt['format_id'] = 'f{}'.format(fmt['format_id']) - if format.get('format') is None: - format['format'] = '{id} - {res}{note}'.format( - id=format['format_id'], - res=self.format_resolution(format), - note=format_field(format, 'format_note', ' (%s)'), + if fmt.get('format') is None: + fmt['format'] = '{id} - {res}{note}'.format( + id=fmt['format_id'], + res=self.format_resolution(fmt), + note=format_field(fmt, 'format_note', ' (%s)'), ) if self.params.get('check_formats') is True: @@ -2871,16 +2987,11 @@ class YoutubeDL: continue if format_selector is None: - 
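Where neither `filesize` nor `filesize_approx` is known, the loop above now derives an estimate from the average bitrate via `filesize_from_tbr`. A rough stand-in calculation, assuming `tbr` is in kbit/s (the helper name `approx_filesize` is made up for illustration):

def approx_filesize(tbr_kbps, duration_s):
    if tbr_kbps is None or duration_s is None:
        return None
    return int(duration_s * tbr_kbps * 1000 / 8)   # kbit/s * seconds -> bytes

fmt = {'tbr': 2500, 'filesize': None, 'filesize_approx': None}
info = {'duration': 60}
if not fmt.get('filesize') and not fmt.get('filesize_approx'):
    fmt['filesize_approx'] = approx_filesize(fmt.get('tbr'), info.get('duration'))
print(fmt['filesize_approx'])   # 18750000 bytes for 60 s at 2500 kbit/s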
req_format = self._default_format_spec(info_dict, download=download) + req_format = self._default_format_spec(info_dict) self.write_debug(f'Default format spec: {req_format}') format_selector = self.build_format_selector(req_format) - formats_to_download = list(format_selector({ - 'formats': formats, - 'has_merged_format': any('none' not in (f.get('acodec'), f.get('vcodec')) for f in formats), - 'incomplete_formats': (all(f.get('vcodec') == 'none' for f in formats) # No formats with video - or all(f.get('acodec') == 'none' for f in formats)), # OR, No formats with audio - })) + formats_to_download = self._select_formats(formats, format_selector) if interactive_format_selection and not formats_to_download: self.report_error('Requested format is not available', tb=False, is_error=False) continue @@ -2945,7 +3056,7 @@ class YoutubeDL: info_dict['requested_downloads'] = downloaded_formats info_dict = self.run_all_pps('after_video', info_dict) if max_downloads_reached: - raise MaxDownloadsReached() + raise MaxDownloadsReached # We update the info dict with the selected best quality format (backwards compatibility) info_dict.update(best_format) @@ -3006,8 +3117,8 @@ class YoutubeDL: else: f = formats[-1] self.report_warning( - 'No subtitle format found matching "%s" for language %s, ' - 'using %s' % (formats_query, lang, f['ext'])) + 'No subtitle format found matching "{}" for language {}, ' + 'using {}. Use --list-subs for a list of available subtitles'.format(formats_query, lang, f['ext'])) subs[lang] = f return subs @@ -3086,11 +3197,12 @@ class YoutubeDL: if test: verbose = self.params.get('verbose') + quiet = self.params.get('quiet') or not verbose params = { 'test': True, - 'quiet': self.params.get('quiet') or not verbose, + 'quiet': quiet, 'verbose': verbose, - 'noprogress': not verbose, + 'noprogress': quiet, 'nopart': True, 'skip_unavailable_fragments': False, 'keep_fragments': False, @@ -3125,6 +3237,7 @@ class YoutubeDL: os.remove(file) return None + @_catch_unsafe_extension_error def process_info(self, info_dict): """Process a single resolved IE result. 
(Modifies it in-place)""" @@ -3162,7 +3275,7 @@ class YoutubeDL: def check_max_downloads(): if self._num_downloads >= float(self.params.get('max_downloads') or 'inf'): - raise MaxDownloadsReached() + raise MaxDownloadsReached if self.params.get('simulate'): info_dict['__write_download_archive'] = self.params.get('force_write_download_archive') @@ -3171,9 +3284,9 @@ class YoutubeDL: if full_filename is None: return - if not self._ensure_dir_exists(encodeFilename(full_filename)): + if not self._ensure_dir_exists(full_filename): return - if not self._ensure_dir_exists(encodeFilename(temp_filename)): + if not self._ensure_dir_exists(temp_filename): return if self._write_description('video', info_dict, @@ -3205,16 +3318,16 @@ class YoutubeDL: if self.params.get('writeannotations', False): annofn = self.prepare_filename(info_dict, 'annotation') if annofn: - if not self._ensure_dir_exists(encodeFilename(annofn)): + if not self._ensure_dir_exists(annofn): return - if not self.params.get('overwrites', True) and os.path.exists(encodeFilename(annofn)): + if not self.params.get('overwrites', True) and os.path.exists(annofn): self.to_screen('[info] Video annotations are already present') elif not info_dict.get('annotations'): self.report_warning('There are no annotations to write.') else: try: self.to_screen('[info] Writing video annotations to: ' + annofn) - with open(encodeFilename(annofn), 'w', encoding='utf-8') as annofile: + with open(annofn, 'w', encoding='utf-8') as annofile: annofile.write(info_dict['annotations']) except (KeyError, TypeError): self.report_warning('There are no annotations to write.') @@ -3230,14 +3343,14 @@ class YoutubeDL: f'Cannot write internet shortcut file because the actual URL of "{info_dict["webpage_url"]}" is unknown') return True linkfn = replace_extension(self.prepare_filename(info_dict, 'link'), link_type, info_dict.get('ext')) - if not self._ensure_dir_exists(encodeFilename(linkfn)): + if not self._ensure_dir_exists(linkfn): return False - if self.params.get('overwrites', True) and os.path.exists(encodeFilename(linkfn)): + if self.params.get('overwrites', True) and os.path.exists(linkfn): self.to_screen(f'[info] Internet shortcut (.{link_type}) is already present') return True try: self.to_screen(f'[info] Writing internet shortcut (.{link_type}) to: {linkfn}') - with open(encodeFilename(to_high_limit_path(linkfn)), 'w', encoding='utf-8', + with open(to_high_limit_path(linkfn), 'w', encoding='utf-8', newline='\r\n' if link_type == 'url' else '\n') as linkfile: template_vars = {'url': url} if link_type == 'desktop': @@ -3268,7 +3381,7 @@ class YoutubeDL: if self.params.get('skip_download'): info_dict['filepath'] = temp_filename - info_dict['__finaldir'] = os.path.dirname(os.path.abspath(encodeFilename(full_filename))) + info_dict['__finaldir'] = os.path.dirname(os.path.abspath(full_filename)) info_dict['__files_to_move'] = files_to_move replace_info_dict(self.run_pp(MoveFilesAfterDownloadPP(self, False), info_dict)) info_dict['__write_download_archive'] = self.params.get('force_write_download_archive') @@ -3336,7 +3449,7 @@ class YoutubeDL: for f in info_dict['requested_formats'] if fd != FFmpegFD else []: f['filepath'] = fname = prepend_extension( correct_ext(temp_filename, info_dict['ext']), - 'f%s' % f['format_id'], info_dict['ext']) + 'f{}'.format(f['format_id']), info_dict['ext']) downloaded.append(fname) info_dict['url'] = '\n'.join(f['url'] for f in info_dict['requested_formats']) success, real_download = self.dl(temp_filename, info_dict) @@ -3369,7 +3482,7 
@@ class YoutubeDL: if temp_filename != '-': fname = prepend_extension( correct_ext(temp_filename, new_info['ext']), - 'f%s' % f['format_id'], new_info['ext']) + 'f{}'.format(f['format_id']), new_info['ext']) if not self._ensure_dir_exists(fname): return f['filepath'] = fname @@ -3398,14 +3511,14 @@ class YoutubeDL: self.report_file_already_downloaded(dl_filename) dl_filename = dl_filename or temp_filename - info_dict['__finaldir'] = os.path.dirname(os.path.abspath(encodeFilename(full_filename))) + info_dict['__finaldir'] = os.path.dirname(os.path.abspath(full_filename)) except network_exceptions as err: - self.report_error('unable to download video data: %s' % error_to_compat_str(err)) + self.report_error(f'unable to download video data: {err}') return except OSError as err: raise UnavailableVideoError(err) - except (ContentTooShortError, ) as err: + except ContentTooShortError as err: self.report_error(f'content too short (expected {err.expected} bytes and served {err.downloaded})') return @@ -3457,11 +3570,12 @@ class YoutubeDL: and info_dict.get('container') == 'm4a_dash', 'writing DASH m4a. Only some players support this container', FFmpegFixupM4aPP) - ffmpeg_fixup(downloader == 'hlsnative' and not self.params.get('hls_use_mpegts') - or info_dict.get('is_live') and self.params.get('hls_use_mpegts') is None, + ffmpeg_fixup((downloader == 'hlsnative' and not self.params.get('hls_use_mpegts')) + or (info_dict.get('is_live') and self.params.get('hls_use_mpegts') is None), 'Possible MPEG-TS in MP4 container or malformed AAC timestamps', FFmpegFixupM3u8PP) - ffmpeg_fixup(info_dict.get('is_live') and downloader == 'dashsegments', + ffmpeg_fixup(downloader == 'dashsegments' + and (info_dict.get('is_live') or info_dict.get('is_dash_periods')), 'Possible duplicate MOOV atoms', FFmpegFixupDuplicateMoovPP) ffmpeg_fixup(downloader == 'web_socket_fragment', 'Malformed timestamps detected', FFmpegFixupTimestampPP) @@ -3471,13 +3585,13 @@ class YoutubeDL: try: replace_info_dict(self.post_process(dl_filename, info_dict, files_to_move)) except PostProcessingError as err: - self.report_error('Postprocessing: %s' % str(err)) + self.report_error(f'Postprocessing: {err}') return try: for ph in self._post_hooks: ph(info_dict['filepath']) except Exception as err: - self.report_error('post hooks: %s' % str(err)) + self.report_error(f'post hooks: {err}') return info_dict['__write_download_archive'] = True @@ -3491,6 +3605,8 @@ class YoutubeDL: def wrapper(*args, **kwargs): try: res = func(*args, **kwargs) + except CookieLoadError: + raise except UnavailableVideoError as e: self.report_error(e) except DownloadCancelled as e: @@ -3538,11 +3654,13 @@ class YoutubeDL: raise self.report_warning(f'The info failed to download: {e}; trying with URL {webpage_url}') self.download([webpage_url]) + except ExtractorError as e: + self.report_error(e) return self._download_retcode @staticmethod def sanitize_info(info_dict, remove_private_keys=False): - ''' Sanitize the infodict for converting to json ''' + """ Sanitize the infodict for converting to json """ if info_dict is None: return info_dict info_dict.setdefault('epoch', int(time.time())) @@ -3551,14 +3669,14 @@ class YoutubeDL: 'version': __version__, 'current_git_head': current_git_head(), 'release_git_head': RELEASE_GIT_HEAD, - 'repository': REPOSITORY, + 'repository': ORIGIN, }) if remove_private_keys: reject = lambda k, v: v is None or k.startswith('__') or k in { 'requested_downloads', 'requested_formats', 'requested_subtitles', 'requested_entries', 'entries', 
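`sanitize_info` above prepares the infodict for JSON output by dropping `None` values, internal `__`-prefixed keys and a fixed set of private keys. A minimal sketch of that filtering (the `PRIVATE_KEYS` set is an illustrative subset; the real method also handles nested entries):

import json

PRIVATE_KEYS = {'requested_formats', 'requested_downloads', 'filepath'}  # illustrative subset

def sanitize_info(info, remove_private_keys=True):
    if remove_private_keys:
        reject = lambda k, v: v is None or k.startswith('__') or k in PRIVATE_KEYS
    else:
        reject = lambda k, v: False
    return {k: v for k, v in info.items() if not reject(k, v)}

info = {'id': 'abc', '__real_download': True, 'filepath': '/tmp/x.mp4', 'like_count': None}
print(json.dumps(sanitize_info(info)))   # {"id": "abc"}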
'filepath', '_filename', 'filename', 'infojson_filename', 'original_url', - 'playlist_autonumber', '_format_sort_fields', + 'playlist_autonumber', } else: reject = lambda k, v: False @@ -3577,7 +3695,7 @@ class YoutubeDL: @staticmethod def filter_requested_info(info_dict, actually_filter=True): - ''' Alias of sanitize_info for backward compatibility ''' + """ Alias of sanitize_info for backward compatibility """ return YoutubeDL.sanitize_info(info_dict, actually_filter) def _delete_downloaded_files(self, *files_to_delete, info={}, msg=None): @@ -3599,7 +3717,7 @@ class YoutubeDL: actual_post_extract(video_dict or {}) return - post_extractor = info_dict.pop('__post_extractor', None) or (lambda: {}) + post_extractor = info_dict.pop('__post_extractor', None) or dict info_dict.update(post_extractor()) actual_post_extract(info_dict or {}) @@ -3704,7 +3822,7 @@ class YoutubeDL: if format.get('width') and format.get('height'): return '%dx%d' % (format['width'], format['height']) elif format.get('height'): - return '%sp' % format['height'] + return '{}p'.format(format['height']) elif format.get('width'): return '%dx?' % format['width'] return default @@ -3721,7 +3839,7 @@ class YoutubeDL: if fdict.get('language'): if res: res += ' ' - res += '[%s]' % fdict['language'] + res += '[{}]'.format(fdict['language']) if fdict.get('format_note') is not None: if res: res += ' ' @@ -3733,7 +3851,7 @@ class YoutubeDL: if fdict.get('container') is not None: if res: res += ', ' - res += '%s container' % fdict['container'] + res += '{} container'.format(fdict['container']) if (fdict.get('vcodec') is not None and fdict.get('vcodec') != 'none'): if res: @@ -3748,7 +3866,7 @@ class YoutubeDL: if fdict.get('fps') is not None: if res: res += ', ' - res += '%sfps' % fdict['fps'] + res += '{}fps'.format(fdict['fps']) if fdict.get('acodec') is not None: if res: res += ', ' @@ -3791,7 +3909,7 @@ class YoutubeDL: format_field(f, 'format_id'), format_field(f, 'ext'), self.format_resolution(f), - self._format_note(f) + self._format_note(f), ] for f in formats if (f.get('preference') or 0) >= -1000] return render_table(['format code', 'extension', 'resolution', 'note'], table, extra_gap=1) @@ -3822,8 +3940,8 @@ class YoutubeDL: delim, ( format_field(f, 'filesize', ' \t%s', func=format_bytes) or format_field(f, 'filesize_approx', '≈\t%s', func=format_bytes) - or format_field(try_call(lambda: format_bytes(int(info_dict['duration'] * f['tbr'] * (1024 / 8)))), - None, self._format_out('~\t%s', self.Styles.SUPPRESS))), + or format_field(filesize_from_tbr(f.get('tbr'), info_dict.get('duration')), None, + self._format_out('~\t%s', self.Styles.SUPPRESS), func=format_bytes)), format_field(f, 'tbr', '\t%dk', func=round), shorten_protocol_name(f.get('protocol', '')), delim, @@ -3891,17 +4009,8 @@ class YoutubeDL: if not self.params.get('verbose'): return - from . import _IN_CLI # Must be delayed import - - # These imports can be slow. 
So import them only as needed - from .extractor.extractors import _LAZY_LOADER - from .extractor.extractors import ( - _PLUGIN_CLASSES as plugin_ies, - _PLUGIN_OVERRIDES as plugin_ie_overrides - ) - def get_encoding(stream): - ret = str(getattr(stream, 'encoding', 'missing (%s)' % type(stream).__name__)) + ret = str(getattr(stream, 'encoding', f'missing ({type(stream).__name__})')) additional_info = [] if os.environ.get('TERM', '').lower() == 'dumb': additional_info.append('dumb') @@ -3912,13 +4021,13 @@ class YoutubeDL: ret = f'{ret} ({",".join(additional_info)})' return ret - encoding_str = 'Encodings: locale %s, fs %s, pref %s, %s' % ( + encoding_str = 'Encodings: locale {}, fs {}, pref {}, {}'.format( locale.getpreferredencoding(), sys.getfilesystemencoding(), self.get_encoding(), ', '.join( f'{key} {get_encoding(stream)}' for key, stream in self._out_files.items_ - if stream is not None and key != 'console') + if stream is not None and key != 'console'), ) logger = self.params.get('logger') @@ -3934,23 +4043,24 @@ class YoutubeDL: source += '*' klass = type(self) write_debug(join_nonempty( - f'{"yt-dlp" if REPOSITORY == "yt-dlp/yt-dlp" else REPOSITORY} version', - f'{CHANNEL}@{__version__}', + f'{REPOSITORY.rpartition("/")[2]} version', + _make_label(ORIGIN, CHANNEL.partition('@')[2] or __version__, __version__), f'[{RELEASE_GIT_HEAD[:9]}]' if RELEASE_GIT_HEAD else '', '' if source == 'unknown' else f'({source})', - '' if _IN_CLI else 'API' if klass == YoutubeDL else f'API:{self.__module__}.{klass.__qualname__}', + '' if IN_CLI.value else 'API' if klass == YoutubeDL else f'API:{self.__module__}.{klass.__qualname__}', delim=' ')) - if not _IN_CLI: + if not IN_CLI.value: write_debug(f'params: {self.params}') - if not _LAZY_LOADER: - if os.environ.get('YTDLP_NO_LAZY_EXTRACTORS'): - write_debug('Lazy loading extractors is forcibly disabled') - else: - write_debug('Lazy loading extractors is disabled') + import_extractors() + lazy_extractors = LAZY_EXTRACTORS.value + if lazy_extractors is None: + write_debug('Lazy loading extractors is disabled') + elif not lazy_extractors: + write_debug('Lazy loading extractors is forcibly disabled') if self.params['compat_opts']: - write_debug('Compatibility options: %s' % ', '.join(self.params['compat_opts'])) + write_debug('Compatibility options: {}'.format(', '.join(self.params['compat_opts']))) if current_git_head(): write_debug(f'Git HEAD: {current_git_head()}') @@ -3959,14 +4069,14 @@ class YoutubeDL: exe_versions, ffmpeg_features = FFmpegPostProcessor.get_versions_and_features(self) ffmpeg_features = {key for key, val in ffmpeg_features.items() if val} if ffmpeg_features: - exe_versions['ffmpeg'] += ' (%s)' % ','.join(sorted(ffmpeg_features)) + exe_versions['ffmpeg'] += ' ({})'.format(','.join(sorted(ffmpeg_features))) exe_versions['rtmpdump'] = rtmpdump_version() exe_versions['phantomjs'] = PhantomJSwrapper._version() exe_str = ', '.join( f'{exe} {v}' for exe, v in sorted(exe_versions.items()) if v ) or 'none' - write_debug('exe versions: %s' % exe_str) + write_debug(f'exe versions: {exe_str}') from .compat.compat_utils import get_package_info from .dependencies import available_dependencies @@ -3976,33 +4086,28 @@ class YoutubeDL: })) or 'none')) write_debug(f'Proxy map: {self.proxies}') - # write_debug(f'Request Handlers: {", ".join(rh.RH_NAME for rh in self._request_director.handlers.values())}') - for plugin_type, plugins in {'Extractor': plugin_ies, 'Post-Processor': plugin_pps}.items(): - display_list = ['%s%s' % ( - klass.__name__, '' if 
klass.__name__ == name else f' as {name}') - for name, klass in plugins.items()] + write_debug(f'Request Handlers: {", ".join(rh.RH_NAME for rh in self._request_director.handlers.values())}') + + for plugin_type, plugins in (('Extractor', plugin_ies), ('Post-Processor', plugin_pps)): + display_list = [ + klass.__name__ if klass.__name__ == name else f'{klass.__name__} as {name}' + for name, klass in plugins.value.items()] if plugin_type == 'Extractor': display_list.extend(f'{plugins[-1].IE_NAME.partition("+")[2]} ({parent.__name__})' - for parent, plugins in plugin_ie_overrides.items()) + for parent, plugins in plugin_ies_overrides.value.items()) if not display_list: continue write_debug(f'{plugin_type} Plugins: {", ".join(sorted(display_list))}') - plugin_dirs = plugin_directories() - if plugin_dirs: - write_debug(f'Plugin directories: {plugin_dirs}') + plugin_dirs_msg = 'none' + if not plugin_dirs.value: + plugin_dirs_msg = 'none (disabled)' + else: + found_plugin_directories = plugin_directories() + if found_plugin_directories: + plugin_dirs_msg = ', '.join(found_plugin_directories) - # Not implemented - if False and self.params.get('call_home'): - ipaddr = self.urlopen('https://yt-dl.org/ip').read().decode() - write_debug('Public IP address: %s' % ipaddr) - latest_version = self.urlopen( - 'https://yt-dl.org/latest/version').read().decode() - if version_tuple(latest_version) > version_tuple(__version__): - self.report_warning( - 'You are using an outdated version (newest version: %s)! ' - 'See https://yt-dl.org/update if you need help updating.' % - latest_version) + write_debug(f'Plugin directories: {plugin_dirs_msg}') @functools.cached_property def proxies(self): @@ -4023,8 +4128,14 @@ class YoutubeDL: @functools.cached_property def cookiejar(self): """Global cookiejar instance""" - return load_cookies( - self.params.get('cookiefile'), self.params.get('cookiesfrombrowser'), self) + try: + return load_cookies( + self.params.get('cookiefile'), self.params.get('cookiesfrombrowser'), self) + except CookieLoadError as error: + cause = error.__context__ + # compat: <=py3.9: `traceback.format_exception` has a different signature + self.report_error(str(cause), tb=''.join(traceback.format_exception(None, cause, cause.__traceback__))) + raise @property def _opener(self): @@ -4035,6 +4146,22 @@ class YoutubeDL: handler = self._request_director.handlers['Urllib'] return handler._get_instance(cookiejar=self.cookiejar, proxies=self.proxies) + def _get_available_impersonate_targets(self): + # TODO(future): make available as public API + return [ + (target, rh.RH_NAME) + for rh in self._request_director.handlers.values() + if isinstance(rh, ImpersonateRequestHandler) + for target in reversed(rh.supported_targets) + ] + + def _impersonate_target_available(self, target): + # TODO(future): make available as public API + return any( + rh.is_supported_target(target) + for rh in self._request_director.handlers.values() + if isinstance(rh, ImpersonateRequestHandler)) + def urlopen(self, req): """ Start an HTTP download """ if isinstance(req, str): @@ -4059,12 +4186,36 @@ class YoutubeDL: return self._request_director.send(req) except NoSupportingHandlers as e: for ue in e.unsupported_errors: + # FIXME: This depends on the order of errors. if not (ue.handler and ue.msg): continue if ue.handler.RH_KEY == 'Urllib' and 'unsupported url scheme: "file"' in ue.msg.lower(): raise RequestError( 'file:// URLs are disabled by default in yt-dlp for security reasons. 
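`_impersonate_target_available` above simply asks every registered request handler whether it supports the requested impersonation target. A toy version with stand-in handler classes (not yt-dlp's real networking API):

class FakeHandler:
    # Pretend handler advertising a couple of (client, version) targets.
    _targets = {('chrome', '110'), ('safari', '15.5')}

    def is_supported_target(self, target):
        return target in self._targets

def impersonate_target_available(handlers, target):
    return any(h.is_supported_target(target) for h in handlers)

handlers = [FakeHandler()]
print(impersonate_target_available(handlers, ('chrome', '110')))   # True
print(impersonate_target_available(handlers, ('firefox', '120')))  # False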
' 'Use --enable-file-urls to enable at your own risk.', cause=ue) from ue + if ( + 'unsupported proxy type: "https"' in ue.msg.lower() + and 'requests' not in self._request_director.handlers + and 'curl_cffi' not in self._request_director.handlers + ): + raise RequestError( + 'To use an HTTPS proxy for this request, one of the following dependencies needs to be installed: requests, curl_cffi') + + elif ( + re.match(r'unsupported url scheme: "wss?"', ue.msg.lower()) + and 'websockets' not in self._request_director.handlers + ): + raise RequestError( + 'This request requires WebSocket support. ' + 'Ensure one of the following dependencies are installed: websockets', + cause=ue) from ue + + elif re.match(r'unsupported (?:extensions: impersonate|impersonate target)', ue.msg.lower()): + raise RequestError( + f'Impersonate target "{req.extensions["impersonate"]}" is not available.' + f' See --list-impersonate-targets for available targets.' + f' This request requires browser impersonation, however you may be missing dependencies' + f' required to support this target.') raise except SSLError as e: if 'UNSAFE_LEGACY_RENEGOTIATION_DISABLED' in str(e): @@ -4074,8 +4225,6 @@ class YoutubeDL: 'SSLV3_ALERT_HANDSHAKE_FAILURE: The server may not support the current cipher list. ' 'Try using --legacy-server-connect', cause=e) from e raise - except HTTPError as e: # TODO: Remove in a future release - raise _CompatHTTPError(e) from e def build_request_director(self, handlers, preferences=None): logger = _YDLLogger(self) @@ -4099,6 +4248,7 @@ class YoutubeDL: 'timeout': 'socket_timeout', 'legacy_ssl_support': 'legacyserverconnect', 'enable_file_urls': 'enable_file_urls', + 'impersonate': 'impersonate', 'client_cert': { 'client_certificate': 'client_certificate', 'client_certificate_key': 'client_certificate_key', @@ -4107,8 +4257,14 @@ class YoutubeDL: }), )) director.preferences.update(preferences or []) + if 'prefer-legacy-http-handler' in self.params['compat_opts']: + director.preferences.add(lambda rh, _: 500 if rh.RH_KEY == 'Urllib' else 0) return director + @functools.cached_property + def _request_director(self): + return self.build_request_director(_REQUEST_HANDLERS.values(), _RH_PREFERENCES) + def encode(self, s): if isinstance(s, bytes): return s # Already encoded @@ -4126,7 +4282,7 @@ class YoutubeDL: return encoding def _write_info_json(self, label, ie_result, infofn, overwrite=None): - ''' Write infojson and returns True = written, 'exists' = Already exists, False = skip, None = error ''' + """ Write infojson and returns True = written, 'exists' = Already exists, False = skip, None = error """ if overwrite is None: overwrite = self.params.get('overwrites', True) if not self.params.get('writeinfojson'): @@ -4149,7 +4305,7 @@ class YoutubeDL: return None def _write_description(self, label, ie_result, descfn): - ''' Write description and returns True = written, False = skip, None = error ''' + """ Write description and returns True = written, False = skip, None = error """ if not self.params.get('writedescription'): return False elif not descfn: @@ -4165,7 +4321,7 @@ class YoutubeDL: else: try: self.to_screen(f'[info] Writing {label} description to: {descfn}') - with open(encodeFilename(descfn), 'w', encoding='utf-8') as descfile: + with open(descfn, 'w', encoding='utf-8') as descfile: descfile.write(ie_result['description']) except OSError: self.report_error(f'Cannot write {label} description file {descfn}') @@ -4173,7 +4329,7 @@ class YoutubeDL: return True def _write_subtitles(self, info_dict, 
filename): - ''' Write subtitles to file and return list of (sub_filename, final_sub_filename); or None if error''' + """ Write subtitles to file and return list of (sub_filename, final_sub_filename); or None if error""" ret = [] subtitles = info_dict.get('requested_subtitles') if not (self.params.get('writesubtitles') or self.params.get('writeautomaticsub')): @@ -4219,7 +4375,7 @@ class YoutubeDL: self.dl(sub_filename, sub_copy, subtitle=True) sub_info['filepath'] = sub_filename ret.append((sub_filename, sub_filename_final)) - except (DownloadError, ExtractorError, IOError, OSError, ValueError) + network_exceptions as err: + except (DownloadError, ExtractorError, OSError, ValueError, *network_exceptions) as err: msg = f'Unable to download video subtitles for {sub_lang!r}: {err}' if self.params.get('ignoreerrors') is not True: # False or 'only_download' if not self.params.get('ignoreerrors'): @@ -4229,7 +4385,7 @@ class YoutubeDL: return ret def _write_thumbnails(self, label, info_dict, filename, thumb_filename_base=None): - ''' Write thumbnails to file and return list of (thumb_filename, final_thumb_filename) ''' + """ Write thumbnails to file and return list of (thumb_filename, final_thumb_filename); or None if error """ write_all = self.params.get('write_all_thumbnails', False) thumbnails, ret = [], [] if write_all or self.params.get('writethumbnail', False): @@ -4245,16 +4401,21 @@ class YoutubeDL: self.write_debug(f'Skipping writing {label} thumbnail') return ret + if thumbnails and not self._ensure_dir_exists(filename): + return None + for idx, t in list(enumerate(thumbnails))[::-1]: - thumb_ext = (f'{t["id"]}.' if multiple else '') + determine_ext(t['url'], 'jpg') + thumb_ext = t.get('ext') or determine_ext(t['url'], 'jpg') + if multiple: + thumb_ext = f'{t["id"]}.{thumb_ext}' thumb_display_id = f'{label} thumbnail {t["id"]}' thumb_filename = replace_extension(filename, thumb_ext, info_dict.get('ext')) thumb_filename_final = replace_extension(thumb_filename_base, thumb_ext, info_dict.get('ext')) existing_thumb = self.existing_file((thumb_filename_final, thumb_filename)) if existing_thumb: - self.to_screen('[info] %s is already present' % ( - thumb_display_id if multiple else f'{label} thumbnail').capitalize()) + self.to_screen('[info] {} is already present'.format(( + thumb_display_id if multiple else f'{label} thumbnail').capitalize())) t['filepath'] = existing_thumb ret.append((existing_thumb, thumb_filename_final)) else: @@ -4262,7 +4423,7 @@ class YoutubeDL: try: uf = self.urlopen(Request(t['url'], headers=t.get('http_headers', {}))) self.to_screen(f'[info] Writing {thumb_display_id} to: {thumb_filename}') - with open(encodeFilename(thumb_filename), 'wb') as thumbf: + with open(thumb_filename, 'wb') as thumbf: shutil.copyfileobj(uf, thumbf) ret.append((thumb_filename, thumb_filename_final)) t['filepath'] = thumb_filename diff --git a/plugins/youtube_download/yt_dlp/__init__.py b/plugins/youtube_download/yt_dlp/__init__.py index 991dbcd..714d9ad 100644 --- a/plugins/youtube_download/yt_dlp/__init__.py +++ b/plugins/youtube_download/yt_dlp/__init__.py @@ -1,10 +1,10 @@ -try: - import contextvars # noqa: F401 -except Exception: - raise Exception( - f'You are using an unsupported version of Python. Only Python versions 3.7 and above are supported by yt-dlp') # noqa: F541 +import sys -__license__ = 'Public Domain' +if sys.version_info < (3, 9): + raise ImportError( + f'You are using an unsupported version of Python. 
Only Python versions 3.9 and above are supported by yt-dlp') # noqa: F541 + +__license__ = 'The Unlicense' import collections import getpass @@ -12,15 +12,16 @@ import itertools import optparse import os import re -import sys import traceback -from .compat import compat_shlex_quote -from .cookies import SUPPORTED_BROWSERS, SUPPORTED_KEYRINGS +from .cookies import SUPPORTED_BROWSERS, SUPPORTED_KEYRINGS, CookieLoadError from .downloader.external import get_external_downloader from .extractor import list_extractor_classes from .extractor.adobepass import MSO_INFO +from .networking.impersonate import ImpersonateTarget +from .globals import IN_CLI, plugin_dirs from .options import parseOpts +from .plugins import load_all_plugins as _load_all_plugins from .postprocessor import ( FFmpegExtractAudioPP, FFmpegMergerPP, @@ -43,12 +44,12 @@ from .utils import ( GeoUtils, PlaylistEntries, SameFileError, - decodeOption, download_range_func, expand_path, float_or_none, format_field, int_or_none, + join_nonempty, match_filter_func, parse_bytes, parse_duration, @@ -57,15 +58,15 @@ from .utils import ( read_stdin, render_table, setproctitle, + shell_quote, traverse_obj, variadic, write_string, ) from .utils.networking import std_headers +from .utils._utils import _UnsafeExtensionError from .YoutubeDL import YoutubeDL -_IN_CLI = False - def _exit(status=0, *args): for msg in args: @@ -74,14 +75,16 @@ def _exit(status=0, *args): def get_urls(urls, batchfile, verbose): - # Batch file verification + """ + @param verbose -1: quiet, 0: normal, 1: verbose + """ batch_urls = [] if batchfile is not None: try: batch_urls = read_batch_urls( - read_stdin('URLs') if batchfile == '-' + read_stdin(None if verbose == -1 else 'URLs') if batchfile == '-' else open(expand_path(batchfile), encoding='utf-8', errors='ignore')) - if verbose: + if verbose == 1: write_string('[debug] Batch file urls: ' + repr(batch_urls) + '\n') except OSError: _exit(f'ERROR: batch file {batchfile} could not be read') @@ -112,9 +115,9 @@ def print_extractor_information(opts, urls): ie.description(markdown=False, search_examples=_SEARCHES) for ie in list_extractor_classes(opts.age_limit) if ie.working() and ie.IE_DESC is not False) elif opts.ap_list_mso: - out = 'Supported TV Providers:\n%s\n' % render_table( + out = 'Supported TV Providers:\n{}\n'.format(render_table( ['mso', 'mso name'], - [[mso_id, mso_info['name']] for mso_id, mso_info in MSO_INFO.items()]) + [[mso_id, mso_info['name']] for mso_id, mso_info in MSO_INFO.items()])) else: return False write_string(out, out=sys.stdout) @@ -126,7 +129,7 @@ def set_compat_opts(opts): if name not in opts.compat_opts: return False opts.compat_opts.discard(name) - opts.compat_opts.update(['*%s' % name]) + opts.compat_opts.update([f'*{name}']) return True def set_default_compat(compat_name, opt_name, default=True, remove_compat=True): @@ -153,6 +156,9 @@ def set_compat_opts(opts): opts.embed_infojson = False if 'format-sort' in opts.compat_opts: opts.format_sort.extend(FormatSorter.ytdl_default) + elif 'prefer-vp9-sort' in opts.compat_opts: + opts.format_sort.extend(FormatSorter._prefer_vp9_sort) + _video_multistreams_set = set_default_compat('multistreams', 'allow_multiple_video_streams', False, remove_compat=False) _audio_multistreams_set = set_default_compat('multistreams', 'allow_multiple_audio_streams', False, remove_compat=False) if _video_multistreams_set is False and _audio_multistreams_set is False: @@ -219,7 +225,7 @@ def validate_options(opts): validate_minmax(opts.sleep_interval, 
opts.max_sleep_interval, 'sleep interval') if opts.wait_for_video is not None: - min_wait, max_wait, *_ = map(parse_duration, opts.wait_for_video.split('-', 1) + [None]) + min_wait, max_wait, *_ = map(parse_duration, [*opts.wait_for_video.split('-', 1), None]) validate(min_wait is not None and not (max_wait is None and '-' in opts.wait_for_video), 'time range to wait for video', opts.wait_for_video) validate_minmax(min_wait, max_wait, 'time range to wait for video') @@ -230,6 +236,11 @@ def validate_options(opts): validate_regex('format sorting', f, FormatSorter.regex) # Postprocessor formats + if opts.convertsubtitles == 'none': + opts.convertsubtitles = None + if opts.convertthumbnails == 'none': + opts.convertthumbnails = None + validate_regex('merge output format', opts.merge_output_format, r'({0})(/({0}))*'.format('|'.join(map(re.escape, FFmpegMergerPP.SUPPORTED_EXTS)))) validate_regex('audio format', opts.audioformat, FFmpegExtractAudioPP.FORMAT_RE) @@ -249,9 +260,11 @@ def validate_options(opts): elif value in ('inf', 'infinite'): return float('inf') try: - return int(value) + int_value = int(value) except (TypeError, ValueError): validate(False, f'{name} retry count', value) + validate_positive(f'{name} retry count', int_value) + return int_value opts.retries = parse_retries('download', opts.retries) opts.fragment_retries = parse_retries('fragment', opts.fragment_retries) @@ -261,9 +274,9 @@ def validate_options(opts): # Retry sleep function def parse_sleep_func(expr): NUMBER_RE = r'\d+(?:\.\d+)?' - op, start, limit, step, *_ = tuple(re.fullmatch( + op, start, limit, step, *_ = (*tuple(re.fullmatch( rf'(?:(linear|exp)=)?({NUMBER_RE})(?::({NUMBER_RE})?)?(?::({NUMBER_RE}))?', - expr.strip()).groups()) + (None, None) + expr.strip()).groups()), None, None) if op == 'exp': return lambda n: min(float(start) * (float(step or 2) ** n), float(limit or 'inf')) @@ -281,18 +294,20 @@ def validate_options(opts): raise ValueError(f'invalid {key} retry sleep expression {expr!r}') # Bytes - def validate_bytes(name, value): + def validate_bytes(name, value, strict_positive=False): if value is None: return None numeric_limit = parse_bytes(value) - validate(numeric_limit is not None, 'rate limit', value) + validate(numeric_limit is not None, name, value) + if strict_positive: + validate_positive(name, numeric_limit, True) return numeric_limit - opts.ratelimit = validate_bytes('rate limit', opts.ratelimit) + opts.ratelimit = validate_bytes('rate limit', opts.ratelimit, True) opts.throttledratelimit = validate_bytes('throttled rate limit', opts.throttledratelimit) opts.min_filesize = validate_bytes('min filesize', opts.min_filesize) opts.max_filesize = validate_bytes('max filesize', opts.max_filesize) - opts.buffersize = validate_bytes('buffer size', opts.buffersize) + opts.buffersize = validate_bytes('buffer size', opts.buffersize, True) opts.http_chunk_size = validate_bytes('http chunk size', opts.http_chunk_size) # Output templates @@ -387,16 +402,19 @@ def validate_options(opts): f'Supported keyrings are: {", ".join(sorted(SUPPORTED_KEYRINGS))}') opts.cookiesfrombrowser = (browser_name, profile, keyring, container) + if opts.impersonate is not None: + opts.impersonate = ImpersonateTarget.from_str(opts.impersonate.lower()) + # MetadataParser def metadataparser_actions(f): if isinstance(f, str): - cmd = '--parse-metadata %s' % compat_shlex_quote(f) + cmd = f'--parse-metadata {shell_quote(f)}' try: actions = [MetadataFromFieldPP.to_action(f)] except Exception as err: raise ValueError(f'{cmd} is 
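The retry-sleep expression parser above accepts `linear=START[:LIMIT[:STEP]]` or `exp=START[:LIMIT[:STEP]]` and turns it into a function of the attempt number. A self-contained version of the same parser, runnable on its own:

import re

NUMBER_RE = r'\d+(?:\.\d+)?'

def parse_sleep_func(expr):
    op, start, limit, step, *_ = (*re.fullmatch(
        rf'(?:(linear|exp)=)?({NUMBER_RE})(?::({NUMBER_RE})?)?(?::({NUMBER_RE}))?',
        expr.strip()).groups(), None, None)
    if op == 'exp':
        return lambda n: min(float(start) * (float(step or 2) ** n), float(limit or 'inf'))
    else:  # default is linear growth
        return lambda n: min(float(start) + float(step or 1) * n, float(limit or 'inf'))

exp = parse_sleep_func('exp=1:120:2')
print([exp(n) for n in range(4)])      # [1.0, 2.0, 4.0, 8.0]
lin = parse_sleep_func('linear=3')
print([lin(n) for n in range(4)])      # [3.0, 4.0, 5.0, 6.0]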
invalid; {err}') else: - cmd = '--replace-in-metadata %s' % ' '.join(map(compat_shlex_quote, f)) + cmd = f'--replace-in-metadata {shell_quote(f)}' actions = ((MetadataParserPP.Actions.REPLACE, x, *f[1:]) for x in f[0].split(',')) for action in actions: @@ -407,13 +425,17 @@ def validate_options(opts): yield action if opts.metafromtitle is not None: - opts.parse_metadata.setdefault('pre_process', []).append('title:%s' % opts.metafromtitle) + opts.parse_metadata.setdefault('pre_process', []).append(f'title:{opts.metafromtitle}') opts.parse_metadata = { k: list(itertools.chain(*map(metadataparser_actions, v))) for k, v in opts.parse_metadata.items() } # Other options + opts.plugin_dirs = opts.plugin_dirs + if opts.plugin_dirs is None: + opts.plugin_dirs = ['default'] + if opts.playlist_items is not None: try: tuple(PlaylistEntries.parse_playlist_items(opts.playlist_items)) @@ -460,7 +482,7 @@ def validate_options(opts): default_downloader = ed.get_basename() for policy in opts.color.values(): - if policy not in ('always', 'auto', 'no_color', 'never'): + if policy not in ('always', 'auto', 'auto-tty', 'no_color', 'no_color-tty', 'never'): raise ValueError(f'"{policy}" is not a valid color policy') warnings, deprecation_warnings = [], [] @@ -586,6 +608,13 @@ def validate_options(opts): if opts.ap_username is not None and opts.ap_password is None: opts.ap_password = getpass.getpass('Type TV provider account password and press [Return]: ') + # compat option changes global state destructively; only allow from cli + if 'allow-unsafe-ext' in opts.compat_opts: + warnings.append( + 'Using allow-unsafe-ext opens you up to potential attacks. ' + 'Use with great care!') + _UnsafeExtensionError.sanitize_extension = lambda x, prepend=False: x + return warnings, deprecation_warnings @@ -596,7 +625,7 @@ def get_postprocessors(opts): yield { 'key': 'MetadataParser', 'actions': actions, - 'when': when + 'when': when, } sponsorblock_query = opts.sponsorblock_mark | opts.sponsorblock_remove if sponsorblock_query: @@ -604,19 +633,19 @@ def get_postprocessors(opts): 'key': 'SponsorBlock', 'categories': sponsorblock_query, 'api': opts.sponsorblock_api, - 'when': 'after_filter' + 'when': 'after_filter', } if opts.convertsubtitles: yield { 'key': 'FFmpegSubtitlesConvertor', 'format': opts.convertsubtitles, - 'when': 'before_dl' + 'when': 'before_dl', } if opts.convertthumbnails: yield { 'key': 'FFmpegThumbnailsConvertor', 'format': opts.convertthumbnails, - 'when': 'before_dl' + 'when': 'before_dl', } if opts.extractaudio: yield { @@ -641,7 +670,7 @@ def get_postprocessors(opts): yield { 'key': 'FFmpegEmbedSubtitle', # already_have_subtitle = True prevents the file from being deleted after embedding - 'already_have_subtitle': opts.writesubtitles and keep_subs + 'already_have_subtitle': opts.writesubtitles and keep_subs, } if not opts.writeautomaticsub and keep_subs: opts.writesubtitles = True @@ -654,7 +683,7 @@ def get_postprocessors(opts): 'remove_sponsor_segments': opts.sponsorblock_remove, 'remove_ranges': opts.remove_ranges, 'sponsorblock_chapter_title': opts.sponsorblock_chapter_title, - 'force_keyframes': opts.force_keyframes_at_cuts + 'force_keyframes': opts.force_keyframes_at_cuts, } # FFmpegMetadataPP should be run after FFmpegVideoConvertorPP and # FFmpegExtractAudioPP as containers before conversion may not support @@ -688,7 +717,7 @@ def get_postprocessors(opts): yield { 'key': 'EmbedThumbnail', # already_have_thumbnail = True prevents the file from being deleted after embedding - 
'already_have_thumbnail': opts.writethumbnail + 'already_have_thumbnail': opts.writethumbnail, } if not opts.writethumbnail: opts.writethumbnail = True @@ -722,7 +751,7 @@ ParsedOptions = collections.namedtuple('ParsedOptions', ('parser', 'options', 'u def parse_options(argv=None): """@returns ParsedOptions(parser, opts, urls, ydl_opts)""" parser, opts, urls = parseOpts(argv) - urls = get_urls(urls, opts.batchfile, opts.verbose) + urls = get_urls(urls, opts.batchfile, -1 if opts.quiet and not opts.verbose else opts.verbose) set_compat_opts(opts) try: @@ -735,7 +764,7 @@ def parse_options(argv=None): print_only = bool(opts.forceprint) and all(k not in opts.forceprint for k in POSTPROCESS_WHEN[3:]) any_getting = any(getattr(opts, k) for k in ( 'dumpjson', 'dump_single_json', 'getdescription', 'getduration', 'getfilename', - 'getformat', 'getid', 'getthumbnail', 'gettitle', 'geturl' + 'getformat', 'getid', 'getthumbnail', 'gettitle', 'geturl', )) if opts.quiet is None: opts.quiet = any_getting or opts.print_json or bool(opts.forceprint) @@ -830,6 +859,7 @@ def parse_options(argv=None): 'noprogress': opts.quiet if opts.noprogress is None else opts.noprogress, 'progress_with_newline': opts.progress_with_newline, 'progress_template': opts.progress_template, + 'progress_delta': opts.progress_delta, 'playliststart': opts.playliststart, 'playlistend': opts.playlistend, 'playlistreverse': opts.playlist_reverse, @@ -858,8 +888,8 @@ def parse_options(argv=None): 'listsubtitles': opts.listsubtitles, 'subtitlesformat': opts.subtitlesformat, 'subtitleslangs': opts.subtitleslangs, - 'matchtitle': decodeOption(opts.matchtitle), - 'rejecttitle': decodeOption(opts.rejecttitle), + 'matchtitle': opts.matchtitle, + 'rejecttitle': opts.rejecttitle, 'max_downloads': opts.max_downloads, 'prefer_free_formats': opts.prefer_free_formats, 'trim_file_name': opts.trim_file_name, @@ -910,6 +940,7 @@ def parse_options(argv=None): 'postprocessors': postprocessors, 'fixup': opts.fixup, 'source_address': opts.source_address, + 'impersonate': opts.impersonate, 'call_home': opts.call_home, 'sleep_interval_requests': opts.sleep_interval_requests, 'sleep_interval': opts.sleep_interval, @@ -959,6 +990,11 @@ def _real_main(argv=None): if opts.ffmpeg_location: FFmpegPostProcessor._ffmpeg_location.set(opts.ffmpeg_location) + # load all plugins into the global lookup + plugin_dirs.value = opts.plugin_dirs + if plugin_dirs.value: + _load_all_plugins() + with YoutubeDL(ydl_opts) as ydl: pre_process = opts.update_self or opts.rm_cachedir actual_use = all_urls or opts.load_info_filename @@ -979,11 +1015,68 @@ def _real_main(argv=None): traceback.print_exc() ydl._download_retcode = 100 + if opts.list_impersonate_targets: + + known_targets = [ + # List of simplified targets we know are supported, + # to help users know what dependencies may be required. 
+ (ImpersonateTarget('chrome'), 'curl_cffi'), + (ImpersonateTarget('safari'), 'curl_cffi'), + (ImpersonateTarget('firefox'), 'curl_cffi>=0.10'), + (ImpersonateTarget('edge'), 'curl_cffi'), + ] + + available_targets = ydl._get_available_impersonate_targets() + + def make_row(target, handler): + return [ + join_nonempty(target.client.title(), target.version, delim='-') or '-', + join_nonempty((target.os or '').title(), target.os_version, delim='-') or '-', + handler, + ] + + rows = [make_row(target, handler) for target, handler in available_targets] + + for known_target, known_handler in known_targets: + if not any( + known_target in target and known_handler.startswith(handler) + for target, handler in available_targets + ): + rows.insert(0, [ + ydl._format_out(text, ydl.Styles.SUPPRESS) + for text in make_row(known_target, f'{known_handler} (unavailable)') + ]) + + ydl.to_screen('[info] Available impersonate targets') + ydl.to_stdout(render_table(['Client', 'OS', 'Source'], rows, extra_gap=2, delim='-')) + return + if not actual_use: if pre_process: return ydl._download_retcode - ydl.warn_if_short_id(sys.argv[1:] if argv is None else argv) + args = sys.argv[1:] if argv is None else argv + ydl.warn_if_short_id(args) + + # Show a useful error message and wait for keypress if not launched from shell on Windows + if not args and os.name == 'nt' and getattr(sys, 'frozen', False): + import ctypes.wintypes + import msvcrt + + kernel32 = ctypes.WinDLL('Kernel32') + + buffer = (1 * ctypes.wintypes.DWORD)() + attached_processes = kernel32.GetConsoleProcessList(buffer, 1) + # If we only have a single process attached, then the executable was double clicked + # When using `pyinstaller` with `--onefile`, two processes get attached + is_onefile = hasattr(sys, '_MEIPASS') and os.path.basename(sys._MEIPASS).startswith('_MEI') + if attached_processes == 1 or (is_onefile and attached_processes == 2): + print(parser._generate_error_message( + 'Do not double-click the executable, instead call it from a command line.\n' + 'Please read the README for further information on how to use yt-dlp: ' + 'https://github.com/yt-dlp/yt-dlp#readme')) + msvcrt.getch() + _exit(2) parser.error( 'You must provide at least one URL.\n' 'Type yt-dlp --help to see a list of all options.') @@ -1002,11 +1095,10 @@ def _real_main(argv=None): def main(argv=None): - global _IN_CLI - _IN_CLI = True + IN_CLI.value = True try: _exit(*variadic(_real_main(argv))) - except DownloadError: + except (CookieLoadError, DownloadError): _exit(1) except SameFileError as e: _exit(f'ERROR: {e}') @@ -1024,9 +1116,9 @@ def main(argv=None): from .extractor import gen_extractors, list_extractors __all__ = [ - 'main', 'YoutubeDL', - 'parse_options', 'gen_extractors', 'list_extractors', + 'main', + 'parse_options', ] diff --git a/plugins/youtube_download/yt_dlp/__main__.py b/plugins/youtube_download/yt_dlp/__main__.py index 78701df..06c3920 100644 --- a/plugins/youtube_download/yt_dlp/__main__.py +++ b/plugins/youtube_download/yt_dlp/__main__.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # Execute with -# $ python -m yt_dlp +# $ python3 -m yt_dlp import sys diff --git a/plugins/youtube_download/yt_dlp/__pyinstaller/hook-yt_dlp.py b/plugins/youtube_download/yt_dlp/__pyinstaller/hook-yt_dlp.py index 88c2b8b..8e7f42f 100644 --- a/plugins/youtube_download/yt_dlp/__pyinstaller/hook-yt_dlp.py +++ b/plugins/youtube_download/yt_dlp/__pyinstaller/hook-yt_dlp.py @@ -1,6 +1,6 @@ import sys -from PyInstaller.utils.hooks import collect_submodules +from 
PyInstaller.utils.hooks import collect_submodules, collect_data_files def pycryptodome_module(): @@ -10,7 +10,7 @@ def pycryptodome_module(): try: import Crypto # noqa: F401 print('WARNING: Using Crypto since Cryptodome is not available. ' - 'Install with: pip install pycryptodomex', file=sys.stderr) + 'Install with: python3 -m pip install pycryptodomex', file=sys.stderr) return 'Crypto' except ImportError: pass @@ -21,12 +21,16 @@ def get_hidden_imports(): yield from ('yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated') yield from ('yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated') yield pycryptodome_module() - yield from collect_submodules('websockets') + # Only `websockets` is required, others are collected just in case + for module in ('websockets', 'requests', 'urllib3'): + yield from collect_submodules(module) # These are auto-detected, but explicitly add them just in case - yield from ('mutagen', 'brotli', 'certifi') + yield from ('mutagen', 'brotli', 'certifi', 'secretstorage', 'curl_cffi') hiddenimports = list(get_hidden_imports()) print(f'Adding imports: {hiddenimports}') -excludedimports = ['youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts'] +excludedimports = ['youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts', 'bundle'] + +datas = collect_data_files('curl_cffi', includes=['cacert.pem']) diff --git a/plugins/youtube_download/yt_dlp/aes.py b/plugins/youtube_download/yt_dlp/aes.py index b3a383c..065901d 100644 --- a/plugins/youtube_download/yt_dlp/aes.py +++ b/plugins/youtube_download/yt_dlp/aes.py @@ -3,7 +3,6 @@ from math import ceil from .compat import compat_ord from .dependencies import Cryptodome -from .utils import bytes_to_intlist, intlist_to_bytes if Cryptodome.AES: def aes_cbc_decrypt_bytes(data, key, iv): @@ -17,15 +16,15 @@ if Cryptodome.AES: else: def aes_cbc_decrypt_bytes(data, key, iv): """ Decrypt bytes with AES-CBC using native implementation since pycryptodome is unavailable """ - return intlist_to_bytes(aes_cbc_decrypt(*map(bytes_to_intlist, (data, key, iv)))) + return bytes(aes_cbc_decrypt(*map(list, (data, key, iv)))) def aes_gcm_decrypt_and_verify_bytes(data, key, tag, nonce): """ Decrypt bytes with AES-GCM using native implementation since pycryptodome is unavailable """ - return intlist_to_bytes(aes_gcm_decrypt_and_verify(*map(bytes_to_intlist, (data, key, tag, nonce)))) + return bytes(aes_gcm_decrypt_and_verify(*map(list, (data, key, tag, nonce)))) def aes_cbc_encrypt_bytes(data, key, iv, **kwargs): - return intlist_to_bytes(aes_cbc_encrypt(*map(bytes_to_intlist, (data, key, iv)), **kwargs)) + return bytes(aes_cbc_encrypt(*map(list, (data, key, iv)), **kwargs)) BLOCK_SIZE_BYTES = 16 @@ -68,7 +67,7 @@ def pad_block(block, padding_mode): raise NotImplementedError(f'Padding mode {padding_mode} is not implemented') if padding_mode == 'iso7816' and padding_size: - block = block + [0x80] # NB: += mutates list + block = [*block, 0x80] # NB: += mutates list padding_size -= 1 return block + [PADDING_BYTE[padding_mode]] * padding_size @@ -84,7 +83,7 @@ def aes_ecb_encrypt(data, key, iv=None): @returns {int[]} encrypted data """ expanded_key = key_expansion(key) - block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES)) + block_count = ceil(len(data) / BLOCK_SIZE_BYTES) encrypted_data = [] for i in range(block_count): @@ -104,15 +103,13 @@ def aes_ecb_decrypt(data, key, iv=None): @returns {int[]} decrypted data """ expanded_key = key_expansion(key) - block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES)) + block_count = 
ceil(len(data) / BLOCK_SIZE_BYTES) encrypted_data = [] for i in range(block_count): block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES] encrypted_data += aes_decrypt(block, expanded_key) - encrypted_data = encrypted_data[:len(data)] - - return encrypted_data + return encrypted_data[:len(data)] def aes_ctr_decrypt(data, key, iv): @@ -137,7 +134,7 @@ def aes_ctr_encrypt(data, key, iv): @returns {int[]} encrypted data """ expanded_key = key_expansion(key) - block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES)) + block_count = ceil(len(data) / BLOCK_SIZE_BYTES) counter = iter_vector(iv) encrypted_data = [] @@ -148,9 +145,7 @@ def aes_ctr_encrypt(data, key, iv): cipher_counter_block = aes_encrypt(counter_block, expanded_key) encrypted_data += xor(block, cipher_counter_block) - encrypted_data = encrypted_data[:len(data)] - - return encrypted_data + return encrypted_data[:len(data)] def aes_cbc_decrypt(data, key, iv): @@ -163,7 +158,7 @@ def aes_cbc_decrypt(data, key, iv): @returns {int[]} decrypted data """ expanded_key = key_expansion(key) - block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES)) + block_count = ceil(len(data) / BLOCK_SIZE_BYTES) decrypted_data = [] previous_cipher_block = iv @@ -174,9 +169,7 @@ def aes_cbc_decrypt(data, key, iv): decrypted_block = aes_decrypt(block, expanded_key) decrypted_data += xor(decrypted_block, previous_cipher_block) previous_cipher_block = block - decrypted_data = decrypted_data[:len(data)] - - return decrypted_data + return decrypted_data[:len(data)] def aes_cbc_encrypt(data, key, iv, *, padding_mode='pkcs7'): @@ -190,7 +183,7 @@ def aes_cbc_encrypt(data, key, iv, *, padding_mode='pkcs7'): @returns {int[]} encrypted data """ expanded_key = key_expansion(key) - block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES)) + block_count = ceil(len(data) / BLOCK_SIZE_BYTES) encrypted_data = [] previous_cipher_block = iv @@ -224,10 +217,10 @@ def aes_gcm_decrypt_and_verify(data, key, tag, nonce): hash_subkey = aes_encrypt([0] * BLOCK_SIZE_BYTES, key_expansion(key)) if len(nonce) == 12: - j0 = nonce + [0, 0, 0, 1] + j0 = [*nonce, 0, 0, 0, 1] else: fill = (BLOCK_SIZE_BYTES - (len(nonce) % BLOCK_SIZE_BYTES)) % BLOCK_SIZE_BYTES + 8 - ghash_in = nonce + [0] * fill + bytes_to_intlist((8 * len(nonce)).to_bytes(8, 'big')) + ghash_in = nonce + [0] * fill + list((8 * len(nonce)).to_bytes(8, 'big')) j0 = ghash(hash_subkey, ghash_in) # TODO: add nonce support to aes_ctr_decrypt @@ -236,17 +229,17 @@ def aes_gcm_decrypt_and_verify(data, key, tag, nonce): iv_ctr = inc(j0) decrypted_data = aes_ctr_decrypt(data, key, iv_ctr + [0] * (BLOCK_SIZE_BYTES - len(iv_ctr))) - pad_len = len(data) // 16 * 16 + pad_len = (BLOCK_SIZE_BYTES - (len(data) % BLOCK_SIZE_BYTES)) % BLOCK_SIZE_BYTES s_tag = ghash( hash_subkey, data - + [0] * (BLOCK_SIZE_BYTES - len(data) + pad_len) # pad - + bytes_to_intlist((0 * 8).to_bytes(8, 'big') # length of associated data - + ((len(data) * 8).to_bytes(8, 'big'))) # length of data + + [0] * pad_len # pad + + list((0 * 8).to_bytes(8, 'big') # length of associated data + + ((len(data) * 8).to_bytes(8, 'big'))), # length of data ) if tag != aes_ctr_encrypt(s_tag, key, j0): - raise ValueError("Mismatching authentication tag") + raise ValueError('Mismatching authentication tag') return decrypted_data @@ -288,9 +281,7 @@ def aes_decrypt(data, expanded_key): data = list(iter_mix_columns(data, MIX_COLUMN_MATRIX_INV)) data = shift_rows_inv(data) data = sub_bytes_inv(data) - data = xor(data, expanded_key[:BLOCK_SIZE_BYTES]) - - return data 
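The aes.py hunks in this patch drop the bytes_to_intlist/intlist_to_bytes helpers in favour of the built-in conversions. A minimal standalone sketch (illustrative, not part of the patch) of the equivalence the rewrite relies on:

    # Iterating a bytes object yields ints, so list() replaces bytes_to_intlist(),
    # and bytes() over a list of ints replaces intlist_to_bytes().
    data = b'secret'
    as_ints = list(data)
    assert as_ints == [115, 101, 99, 114, 101, 116]
    assert bytes(as_ints) == data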
+ return xor(data, expanded_key[:BLOCK_SIZE_BYTES]) def aes_decrypt_text(data, password, key_size_bytes): @@ -308,8 +299,8 @@ def aes_decrypt_text(data, password, key_size_bytes): """ NONCE_LENGTH_BYTES = 8 - data = bytes_to_intlist(base64.b64decode(data)) - password = bytes_to_intlist(password.encode()) + data = list(base64.b64decode(data)) + password = list(password.encode()) key = password[:key_size_bytes] + [0] * (key_size_bytes - len(password)) key = aes_encrypt(key[:BLOCK_SIZE_BYTES], key_expansion(key)) * (key_size_bytes // BLOCK_SIZE_BYTES) @@ -318,9 +309,7 @@ def aes_decrypt_text(data, password, key_size_bytes): cipher = data[NONCE_LENGTH_BYTES:] decrypted_data = aes_ctr_decrypt(cipher, key, nonce + [0] * (BLOCK_SIZE_BYTES - NONCE_LENGTH_BYTES)) - plaintext = intlist_to_bytes(decrypted_data) - - return plaintext + return bytes(decrypted_data) RCON = (0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36) @@ -428,9 +417,7 @@ def key_expansion(data): for _ in range(3 if key_size_bytes == 32 else 2 if key_size_bytes == 24 else 0): temp = data[-4:] data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes]) - data = data[:expanded_key_size_bytes] - - return data + return data[:expanded_key_size_bytes] def iter_vector(iv): @@ -511,7 +498,7 @@ def block_product(block_x, block_y): # NIST SP 800-38D, Algorithm 1 if len(block_x) != BLOCK_SIZE_BYTES or len(block_y) != BLOCK_SIZE_BYTES: - raise ValueError("Length of blocks need to be %d bytes" % BLOCK_SIZE_BYTES) + raise ValueError(f'Length of blocks need to be {BLOCK_SIZE_BYTES} bytes') block_r = [0xE1] + [0] * (BLOCK_SIZE_BYTES - 1) block_v = block_y[:] @@ -534,7 +521,7 @@ def ghash(subkey, data): # NIST SP 800-38D, Algorithm 2 if len(data) % BLOCK_SIZE_BYTES: - raise ValueError("Length of data should be %d bytes" % BLOCK_SIZE_BYTES) + raise ValueError(f'Length of data should be {BLOCK_SIZE_BYTES} bytes') last_y = [0] * BLOCK_SIZE_BYTES for i in range(0, len(data), BLOCK_SIZE_BYTES): @@ -547,19 +534,17 @@ def ghash(subkey, data): __all__ = [ 'aes_cbc_decrypt', 'aes_cbc_decrypt_bytes', - 'aes_ctr_decrypt', - 'aes_decrypt_text', - 'aes_decrypt', - 'aes_ecb_decrypt', - 'aes_gcm_decrypt_and_verify', - 'aes_gcm_decrypt_and_verify_bytes', - 'aes_cbc_encrypt', 'aes_cbc_encrypt_bytes', + 'aes_ctr_decrypt', 'aes_ctr_encrypt', + 'aes_decrypt', + 'aes_decrypt_text', + 'aes_ecb_decrypt', 'aes_ecb_encrypt', 'aes_encrypt', - + 'aes_gcm_decrypt_and_verify', + 'aes_gcm_decrypt_and_verify_bytes', 'key_expansion', 'pad_block', 'pkcs7_padding', diff --git a/plugins/youtube_download/yt_dlp/cache.py b/plugins/youtube_download/yt_dlp/cache.py index 9dd4f2f..71dca82 100644 --- a/plugins/youtube_download/yt_dlp/cache.py +++ b/plugins/youtube_download/yt_dlp/cache.py @@ -81,10 +81,10 @@ class Cache: cachedir = self._get_root_dir() if not any((term in cachedir) for term in ('cache', 'tmp')): - raise Exception('Not removing directory %s - this does not look like a cache dir' % cachedir) + raise Exception(f'Not removing directory {cachedir} - this does not look like a cache dir') self._ydl.to_screen( - 'Removing cache dir %s .' 
% cachedir, skip_eol=True) + f'Removing cache dir {cachedir} .', skip_eol=True) if os.path.exists(cachedir): self._ydl.to_screen('.', skip_eol=True) shutil.rmtree(cachedir) diff --git a/plugins/youtube_download/yt_dlp/casefold.py b/plugins/youtube_download/yt_dlp/casefold.py deleted file mode 100644 index 41a53e5..0000000 --- a/plugins/youtube_download/yt_dlp/casefold.py +++ /dev/null @@ -1,5 +0,0 @@ -import warnings - -warnings.warn(DeprecationWarning(f'{__name__} is deprecated')) - -casefold = str.casefold diff --git a/plugins/youtube_download/yt_dlp/compat/__init__.py b/plugins/youtube_download/yt_dlp/compat/__init__.py index 832a913..d779620 100644 --- a/plugins/youtube_download/yt_dlp/compat/__init__.py +++ b/plugins/youtube_download/yt_dlp/compat/__init__.py @@ -1,5 +1,4 @@ import os -import sys import xml.etree.ElementTree as etree from .compat_utils import passthrough_module @@ -24,36 +23,14 @@ def compat_etree_fromstring(text): return etree.XML(text, parser=etree.XMLParser(target=_TreeBuilder())) -compat_os_name = os._name if os.name == 'java' else os.name - - -if compat_os_name == 'nt': - def compat_shlex_quote(s): - import re - return s if re.match(r'^[-_\w./]+$', s) else '"%s"' % s.replace('"', '\\"') -else: - from shlex import quote as compat_shlex_quote # noqa: F401 - - def compat_ord(c): return c if isinstance(c, int) else ord(c) -if compat_os_name == 'nt' and sys.version_info < (3, 8): - # os.path.realpath on Windows does not follow symbolic links - # prior to Python 3.8 (see https://bugs.python.org/issue9949) - def compat_realpath(path): - while os.path.islink(path): - path = os.path.abspath(os.readlink(path)) - return os.path.realpath(path) -else: - compat_realpath = os.path.realpath - - # Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl # See https://github.com/yt-dlp/yt-dlp/issues/792 # https://docs.python.org/3/library/os.path.html#os.path.expanduser -if compat_os_name in ('nt', 'ce'): +if os.name in ('nt', 'ce'): def compat_expanduser(path): HOME = os.environ.get('HOME') if not HOME: diff --git a/plugins/youtube_download/yt_dlp/compat/_deprecated.py b/plugins/youtube_download/yt_dlp/compat/_deprecated.py index 607bae9..445acc1 100644 --- a/plugins/youtube_download/yt_dlp/compat/_deprecated.py +++ b/plugins/youtube_download/yt_dlp/compat/_deprecated.py @@ -8,16 +8,14 @@ passthrough_module(__name__, '.._legacy', callback=lambda attr: warnings.warn( DeprecationWarning(f'{__name__}.{attr} is deprecated'), stacklevel=6)) del passthrough_module -import base64 -import urllib.error -import urllib.parse +import functools # noqa: F401 +import os -compat_str = str -compat_b64decode = base64.b64decode +compat_os_name = os.name +compat_realpath = os.path.realpath -compat_urlparse = urllib.parse -compat_parse_qs = urllib.parse.parse_qs -compat_urllib_parse_unquote = urllib.parse.unquote -compat_urllib_parse_urlencode = urllib.parse.urlencode -compat_urllib_parse_urlparse = urllib.parse.urlparse + +def compat_shlex_quote(s): + from ..utils import shell_quote + return shell_quote(s) diff --git a/plugins/youtube_download/yt_dlp/compat/_legacy.py b/plugins/youtube_download/yt_dlp/compat/_legacy.py index 90ccf0f..dae2c14 100644 --- a/plugins/youtube_download/yt_dlp/compat/_legacy.py +++ b/plugins/youtube_download/yt_dlp/compat/_legacy.py @@ -30,11 +30,12 @@ from asyncio import run as compat_asyncio_run # noqa: F401 from re import Pattern as compat_Pattern # noqa: F401 from re import match as compat_Match # noqa: F401 -from . 
import compat_expanduser, compat_HTMLParseError, compat_realpath +from . import compat_expanduser, compat_HTMLParseError from .compat_utils import passthrough_module from ..dependencies import brotli as compat_brotli # noqa: F401 from ..dependencies import websockets as compat_websockets # noqa: F401 from ..dependencies.Cryptodome import AES as compat_pycrypto_AES # noqa: F401 +from ..networking.exceptions import HTTPError as compat_HTTPError passthrough_module(__name__, '...utils', ('WINDOWS_VT_MODE', 'windows_enable_vt_mode')) @@ -70,7 +71,6 @@ compat_html_parser_HTMLParseError = compat_HTMLParseError compat_HTMLParser = compat_html_parser_HTMLParser = html.parser.HTMLParser compat_http_client = http.client compat_http_server = http.server -compat_HTTPError = urllib.error.HTTPError compat_input = input compat_integer_types = (int, ) compat_itertools_count = itertools.count @@ -78,7 +78,7 @@ compat_kwargs = lambda kwargs: kwargs compat_map = map compat_numeric_types = (int, float, complex) compat_os_path_expanduser = compat_expanduser -compat_os_path_realpath = compat_realpath +compat_os_path_realpath = os.path.realpath compat_print = print compat_shlex_split = shlex.split compat_socket_create_connection = socket.create_connection @@ -88,7 +88,7 @@ compat_struct_unpack = struct.unpack compat_subprocess_get_DEVNULL = lambda: subprocess.DEVNULL compat_tokenize_tokenize = tokenize.tokenize compat_urllib_error = urllib.error -compat_urllib_HTTPError = urllib.error.HTTPError +compat_urllib_HTTPError = compat_HTTPError compat_urllib_parse = urllib.parse compat_urllib_parse_parse_qs = urllib.parse.parse_qs compat_urllib_parse_quote = urllib.parse.quote @@ -104,5 +104,12 @@ compat_xml_parse_error = compat_xml_etree_ElementTree_ParseError = etree.ParseEr compat_xpath = lambda xpath: xpath compat_zip = zip workaround_optparse_bug9161 = lambda: None +compat_str = str +compat_b64decode = base64.b64decode +compat_urlparse = urllib.parse +compat_parse_qs = urllib.parse.parse_qs +compat_urllib_parse_unquote = urllib.parse.unquote +compat_urllib_parse_urlencode = urllib.parse.urlencode +compat_urllib_parse_urlparse = urllib.parse.urlparse legacy = [] diff --git a/plugins/youtube_download/yt_dlp/compat/compat_utils.py b/plugins/youtube_download/yt_dlp/compat/compat_utils.py index 3ca46d2..d8b3c45 100644 --- a/plugins/youtube_download/yt_dlp/compat/compat_utils.py +++ b/plugins/youtube_download/yt_dlp/compat/compat_utils.py @@ -15,7 +15,7 @@ def get_package_info(module): name=getattr(module, '_yt_dlp__identifier', module.__name__), version=str(next(filter(None, ( getattr(module, attr, None) - for attr in ('__version__', 'version_string', 'version') + for attr in ('_yt_dlp__version', '__version__', 'version_string', 'version') )), None))) @@ -57,7 +57,7 @@ def passthrough_module(parent, child, allowed_attributes=(..., ), *, callback=la callback(attr) return ret - @functools.lru_cache(maxsize=None) + @functools.cache def from_child(attr): nonlocal child if attr not in allowed_attributes: diff --git a/plugins/youtube_download/yt_dlp/compat/functools.py b/plugins/youtube_download/yt_dlp/compat/functools.py deleted file mode 100644 index ec003ea..0000000 --- a/plugins/youtube_download/yt_dlp/compat/functools.py +++ /dev/null @@ -1,26 +0,0 @@ -# flake8: noqa: F405 -from functools import * # noqa: F403 - -from .compat_utils import passthrough_module - -passthrough_module(__name__, 'functools') -del passthrough_module - -try: - cache # >= 3.9 -except NameError: - cache = lru_cache(maxsize=None) - -try: - 
cached_property # >= 3.8 -except NameError: - class cached_property: - def __init__(self, func): - update_wrapper(self, func) - self.func = func - - def __get__(self, instance, _): - if instance is None: - return self - setattr(instance, self.func.__name__, self.func(instance)) - return getattr(instance, self.func.__name__) diff --git a/plugins/youtube_download/yt_dlp/compat/imghdr.py b/plugins/youtube_download/yt_dlp/compat/imghdr.py index 5d64ab0..4ae173f 100644 --- a/plugins/youtube_download/yt_dlp/compat/imghdr.py +++ b/plugins/youtube_download/yt_dlp/compat/imghdr.py @@ -1,16 +1,22 @@ -tests = { - 'webp': lambda h: h[0:4] == b'RIFF' and h[8:] == b'WEBP', - 'png': lambda h: h[:8] == b'\211PNG\r\n\032\n', - 'jpeg': lambda h: h[6:10] in (b'JFIF', b'Exif'), - 'gif': lambda h: h[:6] in (b'GIF87a', b'GIF89a'), -} - - def what(file=None, h=None): """Detect format of image (Currently supports jpeg, png, webp, gif only) - Ref: https://github.com/python/cpython/blob/3.10/Lib/imghdr.py + Ref: https://github.com/python/cpython/blob/3.11/Lib/imghdr.py + Ref: https://www.w3.org/Graphics/JPEG/itu-t81.pdf """ if h is None: with open(file, 'rb') as f: h = f.read(12) - return next((type_ for type_, test in tests.items() if test(h)), None) + + if h.startswith(b'RIFF') and h.startswith(b'WEBP', 8): + return 'webp' + + if h.startswith(b'\x89PNG'): + return 'png' + + if h.startswith(b'\xFF\xD8\xFF'): + return 'jpeg' + + if h.startswith(b'GIF'): + return 'gif' + + return None diff --git a/plugins/youtube_download/yt_dlp/compat/urllib/__init__.py b/plugins/youtube_download/yt_dlp/compat/urllib/__init__.py index b27cc61..9084b3c 100644 --- a/plugins/youtube_download/yt_dlp/compat/urllib/__init__.py +++ b/plugins/youtube_download/yt_dlp/compat/urllib/__init__.py @@ -1,7 +1,7 @@ # flake8: noqa: F405 from urllib import * # noqa: F403 -del request +del request # noqa: F821 from . import request # noqa: F401 from ..compat_utils import passthrough_module diff --git a/plugins/youtube_download/yt_dlp/compat/urllib/request.py b/plugins/youtube_download/yt_dlp/compat/urllib/request.py index ff63b2f..dfc7f4a 100644 --- a/plugins/youtube_download/yt_dlp/compat/urllib/request.py +++ b/plugins/youtube_download/yt_dlp/compat/urllib/request.py @@ -7,13 +7,13 @@ passthrough_module(__name__, 'urllib.request') del passthrough_module -from .. import compat_os_name +import os -if compat_os_name == 'nt': - # On older python versions, proxies are extracted from Windows registry erroneously. [1] +if os.name == 'nt': + # On older Python versions, proxies are extracted from Windows registry erroneously. [1] # If the https proxy in the registry does not have a scheme, urllib will incorrectly add https:// to it. [2] # It is unlikely that the user has actually set it to be https, so we should be fine to safely downgrade - # it to http on these older python versions to avoid issues + # it to http on these older Python versions to avoid issues # This also applies for ftp proxy type, as ftp:// proxy scheme is not supported. 
# 1: https://github.com/python/cpython/issues/86793 # 2: https://github.com/python/cpython/blob/51f1ae5ceb0673316c4e4b0175384e892e33cc6e/Lib/urllib/request.py#L2683-L2698 @@ -37,4 +37,4 @@ if compat_os_name == 'nt': def getproxies(): return getproxies_environment() or getproxies_registry_patched() -del compat_os_name +del os diff --git a/plugins/youtube_download/yt_dlp/cookies.py b/plugins/youtube_download/yt_dlp/cookies.py index a71fbc2..fad323c 100644 --- a/plugins/youtube_download/yt_dlp/cookies.py +++ b/plugins/youtube_download/yt_dlp/cookies.py @@ -1,6 +1,10 @@ import base64 import collections import contextlib +import datetime as dt +import functools +import glob +import hashlib import http.cookiejar import http.cookies import io @@ -14,16 +18,13 @@ import sys import tempfile import time import urllib.request -from datetime import datetime, timedelta, timezone from enum import Enum, auto -from hashlib import pbkdf2_hmac from .aes import ( aes_cbc_decrypt_bytes, aes_gcm_decrypt_and_verify_bytes, unpad_pkcs7, ) -from .compat import functools from .dependencies import ( _SECRETSTORAGE_UNAVAILABLE_REASON, secretstorage, @@ -31,6 +32,8 @@ from .dependencies import ( ) from .minicurses import MultilinePrinter, QuietMultilinePrinter from .utils import ( + DownloadError, + YoutubeDLError, Popen, error_to_str, expand_path, @@ -43,7 +46,7 @@ from .utils import ( from .utils._utils import _YDLLogger from .utils.networking import normalize_url -CHROMIUM_BASED_BROWSERS = {'brave', 'chrome', 'chromium', 'edge', 'opera', 'vivaldi'} +CHROMIUM_BASED_BROWSERS = {'brave', 'chrome', 'chromium', 'edge', 'opera', 'vivaldi', 'whale'} SUPPORTED_BROWSERS = CHROMIUM_BASED_BROWSERS | {'firefox', 'safari'} @@ -83,24 +86,31 @@ def _create_progress_bar(logger): return printer +class CookieLoadError(YoutubeDLError): + pass + + def load_cookies(cookie_file, browser_specification, ydl): - cookie_jars = [] - if browser_specification is not None: - browser_name, profile, keyring, container = _parse_browser_specification(*browser_specification) - cookie_jars.append( - extract_cookies_from_browser(browser_name, profile, YDLLogger(ydl), keyring=keyring, container=container)) + try: + cookie_jars = [] + if browser_specification is not None: + browser_name, profile, keyring, container = _parse_browser_specification(*browser_specification) + cookie_jars.append( + extract_cookies_from_browser(browser_name, profile, YDLLogger(ydl), keyring=keyring, container=container)) - if cookie_file is not None: - is_filename = is_path_like(cookie_file) - if is_filename: - cookie_file = expand_path(cookie_file) + if cookie_file is not None: + is_filename = is_path_like(cookie_file) + if is_filename: + cookie_file = expand_path(cookie_file) - jar = YoutubeDLCookieJar(cookie_file) - if not is_filename or os.access(cookie_file, os.R_OK): - jar.load() - cookie_jars.append(jar) + jar = YoutubeDLCookieJar(cookie_file) + if not is_filename or os.access(cookie_file, os.R_OK): + jar.load() + cookie_jars.append(jar) - return _merge_cookie_jars(cookie_jars) + return _merge_cookie_jars(cookie_jars) + except Exception: + raise CookieLoadError('failed to load cookies') def extract_cookies_from_browser(browser_name, profile=None, logger=YDLLogger(), *, keyring=None, container=None): @@ -118,17 +128,18 @@ def _extract_firefox_cookies(profile, container, logger): logger.info('Extracting cookies from firefox') if not sqlite3: logger.warning('Cannot extract cookies from firefox without sqlite3 support. 
' - 'Please use a python interpreter compiled with sqlite3 support') + 'Please use a Python interpreter compiled with sqlite3 support') return YoutubeDLCookieJar() if profile is None: - search_root = _firefox_browser_dir() + search_roots = list(_firefox_browser_dirs()) elif _is_path(profile): - search_root = profile + search_roots = [profile] else: - search_root = os.path.join(_firefox_browser_dir(), profile) + search_roots = [os.path.join(path, profile) for path in _firefox_browser_dirs()] + search_root = ', '.join(map(repr, search_roots)) - cookie_database_path = _find_most_recently_used_file(search_root, 'cookies.sqlite', logger) + cookie_database_path = _newest(_firefox_cookie_dbs(search_roots)) if cookie_database_path is None: raise FileNotFoundError(f'could not find firefox cookies database in {search_root}') logger.debug(f'Extracting cookies from: "{cookie_database_path}"') @@ -142,7 +153,7 @@ def _extract_firefox_cookies(profile, container, logger): identities = json.load(containers).get('identities', []) container_id = next((context.get('userContextId') for context in identities if container in ( context.get('name'), - try_call(lambda: re.fullmatch(r'userContext([^\.]+)\.label', context['l10nID']).group()) + try_call(lambda: re.fullmatch(r'userContext([^\.]+)\.label', context['l10nID']).group()), )), None) if not isinstance(container_id, int): raise ValueError(f'could not find firefox container "{container}" in containers.json') @@ -182,12 +193,28 @@ def _extract_firefox_cookies(profile, container, logger): cursor.connection.close() -def _firefox_browser_dir(): +def _firefox_browser_dirs(): if sys.platform in ('cygwin', 'win32'): - return os.path.expandvars(R'%APPDATA%\Mozilla\Firefox\Profiles') + yield from map(os.path.expandvars, ( + R'%APPDATA%\Mozilla\Firefox\Profiles', + R'%LOCALAPPDATA%\Packages\Mozilla.Firefox_n80bbvh6b1yt2\LocalCache\Roaming\Mozilla\Firefox\Profiles', + )) + elif sys.platform == 'darwin': - return os.path.expanduser('~/Library/Application Support/Firefox') - return os.path.expanduser('~/.mozilla/firefox') + yield os.path.expanduser('~/Library/Application Support/Firefox/Profiles') + + else: + yield from map(os.path.expanduser, ( + '~/.mozilla/firefox', + '~/snap/firefox/common/.mozilla/firefox', + '~/.var/app/org.mozilla.firefox/.mozilla/firefox', + )) + + +def _firefox_cookie_dbs(roots): + for root in map(os.path.abspath, roots): + for pattern in ('', '*/', 'Profiles/*/'): + yield from glob.iglob(os.path.join(root, pattern, 'cookies.sqlite')) def _get_chromium_based_browser_settings(browser_name): @@ -202,6 +229,7 @@ def _get_chromium_based_browser_settings(browser_name): 'edge': os.path.join(appdata_local, R'Microsoft\Edge\User Data'), 'opera': os.path.join(appdata_roaming, R'Opera Software\Opera Stable'), 'vivaldi': os.path.join(appdata_local, R'Vivaldi\User Data'), + 'whale': os.path.join(appdata_local, R'Naver\Naver Whale\User Data'), }[browser_name] elif sys.platform == 'darwin': @@ -213,6 +241,7 @@ def _get_chromium_based_browser_settings(browser_name): 'edge': os.path.join(appdata, 'Microsoft Edge'), 'opera': os.path.join(appdata, 'com.operasoftware.Opera'), 'vivaldi': os.path.join(appdata, 'Vivaldi'), + 'whale': os.path.join(appdata, 'Naver/Whale'), }[browser_name] else: @@ -224,6 +253,7 @@ def _get_chromium_based_browser_settings(browser_name): 'edge': os.path.join(config, 'microsoft-edge'), 'opera': os.path.join(config, 'opera'), 'vivaldi': os.path.join(config, 'vivaldi'), + 'whale': os.path.join(config, 'naver-whale'), }[browser_name] # Linux 
keyring names can be determined by snooping on dbus while opening the browser in KDE: @@ -235,6 +265,7 @@ def _get_chromium_based_browser_settings(browser_name): 'edge': 'Microsoft Edge' if sys.platform == 'darwin' else 'Chromium', 'opera': 'Opera' if sys.platform == 'darwin' else 'Chromium', 'vivaldi': 'Vivaldi' if sys.platform == 'darwin' else 'Chrome', + 'whale': 'Whale', }[browser_name] browsers_without_profiles = {'opera'} @@ -242,7 +273,7 @@ def _get_chromium_based_browser_settings(browser_name): return { 'browser_dir': browser_dir, 'keyring_name': keyring_name, - 'supports_profiles': browser_name not in browsers_without_profiles + 'supports_profiles': browser_name not in browsers_without_profiles, } @@ -251,7 +282,7 @@ def _extract_chrome_cookies(browser_name, profile, keyring, logger): if not sqlite3: logger.warning(f'Cannot extract cookies from {browser_name} without sqlite3 support. ' - 'Please use a python interpreter compiled with sqlite3 support') + 'Please use a Python interpreter compiled with sqlite3 support') return YoutubeDLCookieJar() config = _get_chromium_based_browser_settings(browser_name) @@ -268,17 +299,23 @@ def _extract_chrome_cookies(browser_name, profile, keyring, logger): logger.error(f'{browser_name} does not support profiles') search_root = config['browser_dir'] - cookie_database_path = _find_most_recently_used_file(search_root, 'Cookies', logger) + cookie_database_path = _newest(_find_files(search_root, 'Cookies', logger)) if cookie_database_path is None: raise FileNotFoundError(f'could not find {browser_name} cookies database in "{search_root}"') logger.debug(f'Extracting cookies from: "{cookie_database_path}"') - decryptor = get_cookie_decryptor(config['browser_dir'], config['keyring_name'], logger, keyring=keyring) - with tempfile.TemporaryDirectory(prefix='yt_dlp') as tmpdir: cursor = None try: cursor = _open_database_copy(cookie_database_path, tmpdir) + + # meta_version is necessary to determine if we need to trim the hash prefix from the cookies + # Ref: https://chromium.googlesource.com/chromium/src/+/b02dcebd7cafab92770734dc2bc317bd07f1d891/net/extras/sqlite/sqlite_persistent_cookie_store.cc#223 + meta_version = int(cursor.execute('SELECT value FROM meta WHERE key = "version"').fetchone()[0]) + decryptor = get_cookie_decryptor( + config['browser_dir'], config['keyring_name'], logger, + keyring=keyring, meta_version=meta_version) + cursor.connection.text_factory = bytes column_names = _get_column_names(cursor, 'cookies') secure_column = 'is_secure' if 'is_secure' in column_names else 'secure' @@ -307,6 +344,12 @@ def _extract_chrome_cookies(browser_name, profile, keyring, logger): counts['unencrypted'] = unencrypted_cookies logger.debug(f'cookie version breakdown: {counts}') return jar + except PermissionError as error: + if os.name == 'nt' and error.errno == 13: + message = 'Could not copy Chrome cookie database. 
See https://github.com/yt-dlp/yt-dlp/issues/7271 for more info' + logger.error(message) + raise DownloadError(message) # force exit + raise finally: if cursor is not None: cursor.connection.close() @@ -324,6 +367,11 @@ def _process_chrome_cookie(decryptor, host_key, name, value, encrypted_value, pa if value is None: return is_encrypted, None + # In chrome, session cookies have expires_utc set to 0 + # In our cookie-store, cookies that do not expire should have expires set to None + if not expires_utc: + expires_utc = None + return is_encrypted, http.cookiejar.Cookie( version=0, name=name, value=value, port=None, port_specified=False, domain=host_key, domain_specified=bool(host_key), domain_initial_dot=host_key.startswith('.'), @@ -365,22 +413,23 @@ class ChromeCookieDecryptor: raise NotImplementedError('Must be implemented by sub classes') -def get_cookie_decryptor(browser_root, browser_keyring_name, logger, *, keyring=None): +def get_cookie_decryptor(browser_root, browser_keyring_name, logger, *, keyring=None, meta_version=None): if sys.platform == 'darwin': - return MacChromeCookieDecryptor(browser_keyring_name, logger) + return MacChromeCookieDecryptor(browser_keyring_name, logger, meta_version=meta_version) elif sys.platform in ('win32', 'cygwin'): - return WindowsChromeCookieDecryptor(browser_root, logger) - return LinuxChromeCookieDecryptor(browser_keyring_name, logger, keyring=keyring) + return WindowsChromeCookieDecryptor(browser_root, logger, meta_version=meta_version) + return LinuxChromeCookieDecryptor(browser_keyring_name, logger, keyring=keyring, meta_version=meta_version) class LinuxChromeCookieDecryptor(ChromeCookieDecryptor): - def __init__(self, browser_keyring_name, logger, *, keyring=None): + def __init__(self, browser_keyring_name, logger, *, keyring=None, meta_version=None): self._logger = logger self._v10_key = self.derive_key(b'peanuts') self._empty_key = self.derive_key(b'') self._cookie_counts = {'v10': 0, 'v11': 0, 'other': 0} self._browser_keyring_name = browser_keyring_name self._keyring = keyring + self._meta_version = meta_version or 0 @functools.cached_property def _v11_key(self): @@ -409,14 +458,18 @@ class LinuxChromeCookieDecryptor(ChromeCookieDecryptor): if version == b'v10': self._cookie_counts['v10'] += 1 - return _decrypt_aes_cbc_multi(ciphertext, (self._v10_key, self._empty_key), self._logger) + return _decrypt_aes_cbc_multi( + ciphertext, (self._v10_key, self._empty_key), self._logger, + hash_prefix=self._meta_version >= 24) elif version == b'v11': self._cookie_counts['v11'] += 1 if self._v11_key is None: self._logger.warning('cannot decrypt v11 cookies: no key found', only_once=True) return None - return _decrypt_aes_cbc_multi(ciphertext, (self._v11_key, self._empty_key), self._logger) + return _decrypt_aes_cbc_multi( + ciphertext, (self._v11_key, self._empty_key), self._logger, + hash_prefix=self._meta_version >= 24) else: self._logger.warning(f'unknown cookie version: "{version}"', only_once=True) @@ -425,11 +478,12 @@ class LinuxChromeCookieDecryptor(ChromeCookieDecryptor): class MacChromeCookieDecryptor(ChromeCookieDecryptor): - def __init__(self, browser_keyring_name, logger): + def __init__(self, browser_keyring_name, logger, meta_version=None): self._logger = logger password = _get_mac_keyring_password(browser_keyring_name, logger) self._v10_key = None if password is None else self.derive_key(password) self._cookie_counts = {'v10': 0, 'other': 0} + self._meta_version = meta_version or 0 @staticmethod def derive_key(password): @@ -447,7 +501,8 
@@ class MacChromeCookieDecryptor(ChromeCookieDecryptor): self._logger.warning('cannot decrypt v10 cookies: no key found', only_once=True) return None - return _decrypt_aes_cbc_multi(ciphertext, (self._v10_key,), self._logger) + return _decrypt_aes_cbc_multi( + ciphertext, (self._v10_key,), self._logger, hash_prefix=self._meta_version >= 24) else: self._cookie_counts['other'] += 1 @@ -457,10 +512,11 @@ class MacChromeCookieDecryptor(ChromeCookieDecryptor): class WindowsChromeCookieDecryptor(ChromeCookieDecryptor): - def __init__(self, browser_root, logger): + def __init__(self, browser_root, logger, meta_version=None): self._logger = logger self._v10_key = _get_windows_v10_key(browser_root, logger) self._cookie_counts = {'v10': 0, 'other': 0} + self._meta_version = meta_version or 0 def decrypt(self, encrypted_value): version = encrypted_value[:3] @@ -484,7 +540,9 @@ class WindowsChromeCookieDecryptor(ChromeCookieDecryptor): ciphertext = raw_ciphertext[nonce_length:-authentication_tag_length] authentication_tag = raw_ciphertext[-authentication_tag_length:] - return _decrypt_aes_gcm(ciphertext, self._v10_key, nonce, authentication_tag, self._logger) + return _decrypt_aes_gcm( + ciphertext, self._v10_key, nonce, authentication_tag, self._logger, + hash_prefix=self._meta_version >= 24) else: self._cookie_counts['other'] += 1 @@ -575,7 +633,7 @@ class DataParser: def _mac_absolute_time_to_posix(timestamp): - return int((datetime(2001, 1, 1, 0, 0, tzinfo=timezone.utc) + timedelta(seconds=timestamp)).timestamp()) + return int((dt.datetime(2001, 1, 1, 0, 0, tzinfo=dt.timezone.utc) + dt.timedelta(seconds=timestamp)).timestamp()) def _parse_safari_cookies_header(data, logger): @@ -708,40 +766,38 @@ def _get_linux_desktop_environment(env, logger): xdg_current_desktop = env.get('XDG_CURRENT_DESKTOP', None) desktop_session = env.get('DESKTOP_SESSION', None) if xdg_current_desktop is not None: - xdg_current_desktop = xdg_current_desktop.split(':')[0].strip() - - if xdg_current_desktop == 'Unity': - if desktop_session is not None and 'gnome-fallback' in desktop_session: + for part in map(str.strip, xdg_current_desktop.split(':')): + if part == 'Unity': + if desktop_session is not None and 'gnome-fallback' in desktop_session: + return _LinuxDesktopEnvironment.GNOME + else: + return _LinuxDesktopEnvironment.UNITY + elif part == 'Deepin': + return _LinuxDesktopEnvironment.DEEPIN + elif part == 'GNOME': return _LinuxDesktopEnvironment.GNOME - else: - return _LinuxDesktopEnvironment.UNITY - elif xdg_current_desktop == 'Deepin': - return _LinuxDesktopEnvironment.DEEPIN - elif xdg_current_desktop == 'GNOME': - return _LinuxDesktopEnvironment.GNOME - elif xdg_current_desktop == 'X-Cinnamon': - return _LinuxDesktopEnvironment.CINNAMON - elif xdg_current_desktop == 'KDE': - kde_version = env.get('KDE_SESSION_VERSION', None) - if kde_version == '5': - return _LinuxDesktopEnvironment.KDE5 - elif kde_version == '6': - return _LinuxDesktopEnvironment.KDE6 - elif kde_version == '4': - return _LinuxDesktopEnvironment.KDE4 - else: - logger.info(f'unknown KDE version: "{kde_version}". 
Assuming KDE4') - return _LinuxDesktopEnvironment.KDE4 - elif xdg_current_desktop == 'Pantheon': - return _LinuxDesktopEnvironment.PANTHEON - elif xdg_current_desktop == 'XFCE': - return _LinuxDesktopEnvironment.XFCE - elif xdg_current_desktop == 'UKUI': - return _LinuxDesktopEnvironment.UKUI - elif xdg_current_desktop == 'LXQt': - return _LinuxDesktopEnvironment.LXQT - else: - logger.info(f'XDG_CURRENT_DESKTOP is set to an unknown value: "{xdg_current_desktop}"') + elif part == 'X-Cinnamon': + return _LinuxDesktopEnvironment.CINNAMON + elif part == 'KDE': + kde_version = env.get('KDE_SESSION_VERSION', None) + if kde_version == '5': + return _LinuxDesktopEnvironment.KDE5 + elif kde_version == '6': + return _LinuxDesktopEnvironment.KDE6 + elif kde_version == '4': + return _LinuxDesktopEnvironment.KDE4 + else: + logger.info(f'unknown KDE version: "{kde_version}". Assuming KDE4') + return _LinuxDesktopEnvironment.KDE4 + elif part == 'Pantheon': + return _LinuxDesktopEnvironment.PANTHEON + elif part == 'XFCE': + return _LinuxDesktopEnvironment.XFCE + elif part == 'UKUI': + return _LinuxDesktopEnvironment.UKUI + elif part == 'LXQt': + return _LinuxDesktopEnvironment.LXQT + logger.info(f'XDG_CURRENT_DESKTOP is set to an unknown value: "{xdg_current_desktop}"') elif desktop_session is not None: if desktop_session == 'deepin': @@ -794,7 +850,7 @@ def _choose_linux_keyring(logger): elif desktop_environment == _LinuxDesktopEnvironment.KDE6: linux_keyring = _LinuxKeyring.KWALLET6 elif desktop_environment in ( - _LinuxDesktopEnvironment.KDE3, _LinuxDesktopEnvironment.LXQT, _LinuxDesktopEnvironment.OTHER + _LinuxDesktopEnvironment.KDE3, _LinuxDesktopEnvironment.LXQT, _LinuxDesktopEnvironment.OTHER, ): linux_keyring = _LinuxKeyring.BASICTEXT else: @@ -829,7 +885,7 @@ def _get_kwallet_network_wallet(keyring, logger): 'dbus-send', '--session', '--print-reply=literal', f'--dest={service_name}', wallet_path, - 'org.kde.KWallet.networkWallet' + 'org.kde.KWallet.networkWallet', ], text=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) if returncode: @@ -859,7 +915,7 @@ def _get_kwallet_password(browser_keyring_name, keyring, logger): 'kwallet-query', '--read-password', f'{browser_keyring_name} Safe Storage', '--folder', f'{browser_keyring_name} Keys', - network_wallet + network_wallet, ], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) if returncode: @@ -899,9 +955,8 @@ def _get_gnome_keyring_password(browser_keyring_name, logger): for item in col.get_all_items(): if item.get_label() == f'{browser_keyring_name} Safe Storage': return item.get_secret() - else: - logger.error('failed to read from keyring') - return b'' + logger.error('failed to read from keyring') + return b'' def _get_linux_keyring_password(browser_keyring_name, keyring, logger): @@ -947,7 +1002,7 @@ def _get_windows_v10_key(browser_root, logger): References: - [1] https://chromium.googlesource.com/chromium/src/+/refs/heads/main/components/os_crypt/sync/os_crypt_win.cc """ - path = _find_most_recently_used_file(browser_root, 'Local State', logger) + path = _newest(_find_files(browser_root, 'Local State', logger)) if path is None: logger.error('could not find local state file') return None @@ -970,13 +1025,15 @@ def _get_windows_v10_key(browser_root, logger): def pbkdf2_sha1(password, salt, iterations, key_length): - return pbkdf2_hmac('sha1', password, salt, iterations, key_length) + return hashlib.pbkdf2_hmac('sha1', password, salt, iterations, key_length) -def _decrypt_aes_cbc_multi(ciphertext, keys, logger, initialization_vector=b' 
' * 16): +def _decrypt_aes_cbc_multi(ciphertext, keys, logger, initialization_vector=b' ' * 16, hash_prefix=False): for key in keys: plaintext = unpad_pkcs7(aes_cbc_decrypt_bytes(ciphertext, key, initialization_vector)) try: + if hash_prefix: + return plaintext[32:].decode() return plaintext.decode() except UnicodeDecodeError: pass @@ -984,7 +1041,7 @@ def _decrypt_aes_cbc_multi(ciphertext, keys, logger, initialization_vector=b' ' return None -def _decrypt_aes_gcm(ciphertext, key, nonce, authentication_tag, logger): +def _decrypt_aes_gcm(ciphertext, key, nonce, authentication_tag, logger, hash_prefix=False): try: plaintext = aes_gcm_decrypt_and_verify_bytes(ciphertext, key, authentication_tag, nonce) except ValueError: @@ -992,6 +1049,8 @@ def _decrypt_aes_gcm(ciphertext, key, nonce, authentication_tag, logger): return None try: + if hash_prefix: + return plaintext[32:].decode() return plaintext.decode() except UnicodeDecodeError: logger.warning('failed to decrypt cookie (AES-GCM) because UTF-8 decoding failed. Possibly the key is wrong?', only_once=True) @@ -1021,11 +1080,12 @@ def _decrypt_windows_dpapi(ciphertext, logger): None, # pvReserved: must be NULL None, # pPromptStruct: information about prompts to display 0, # dwFlags - ctypes.byref(blob_out) # pDataOut + ctypes.byref(blob_out), # pDataOut ) if not ret: - logger.warning('failed to decrypt with DPAPI', only_once=True) - return None + message = 'Failed to decrypt with DPAPI. See https://github.com/yt-dlp/yt-dlp/issues/10927 for more info' + logger.error(message) + raise DownloadError(message) # force exit result = ctypes.string_at(blob_out.pbData, blob_out.cbData) ctypes.windll.kernel32.LocalFree(blob_out.pbData) @@ -1049,17 +1109,20 @@ def _get_column_names(cursor, table_name): return [row[1].decode() for row in table_info] -def _find_most_recently_used_file(root, filename, logger): +def _newest(files): + return max(files, key=lambda path: os.lstat(path).st_mtime, default=None) + + +def _find_files(root, filename, logger): # if there are multiple browser profiles, take the most recently used one - i, paths = 0, [] + i = 0 with _create_progress_bar(logger) as progress_bar: - for curr_root, dirs, files in os.walk(root): + for curr_root, _, files in os.walk(root): for file in files: i += 1 progress_bar.print(f'Searching for "{filename}": {i: 6d} files searched') if file == filename: - paths.append(os.path.join(curr_root, file)) - return None if not paths else max(paths, key=lambda path: os.lstat(path).st_mtime) + yield os.path.join(curr_root, file) def _merge_cookie_jars(jars): @@ -1073,7 +1136,7 @@ def _merge_cookie_jars(jars): def _is_path(value): - return os.path.sep in value + return any(sep in value for sep in (os.path.sep, os.path.altsep) if sep) def _parse_browser_specification(browser_name, profile=None, keyring=None, container=None): @@ -1094,24 +1157,24 @@ class LenientSimpleCookie(http.cookies.SimpleCookie): _LEGAL_VALUE_CHARS = _LEGAL_KEY_CHARS + re.escape('(),/<=>?@[]{}') _RESERVED = { - "expires", - "path", - "comment", - "domain", - "max-age", - "secure", - "httponly", - "version", - "samesite", + 'expires', + 'path', + 'comment', + 'domain', + 'max-age', + 'secure', + 'httponly', + 'version', + 'samesite', } - _FLAGS = {"secure", "httponly"} + _FLAGS = {'secure', 'httponly'} # Added 'bad' group to catch the remaining value - _COOKIE_PATTERN = re.compile(r""" + _COOKIE_PATTERN = re.compile(r''' \s* # Optional whitespace at start of cookie (?P # Start of group 'key' - [""" + _LEGAL_KEY_CHARS + r"""]+?# Any word of at 
least one letter + [''' + _LEGAL_KEY_CHARS + r''']+?# Any word of at least one letter ) # End of group 'key' ( # Optional group: there may not be a value. \s*=\s* # Equal Sign @@ -1121,7 +1184,7 @@ class LenientSimpleCookie(http.cookies.SimpleCookie): | # or \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr | # or - [""" + _LEGAL_VALUE_CHARS + r"""]* # Any word or empty string + [''' + _LEGAL_VALUE_CHARS + r''']* # Any word or empty string ) # End of group 'val' | # or (?P(?:\\;|[^;])*?) # 'bad' group fallback for invalid values @@ -1129,7 +1192,7 @@ class LenientSimpleCookie(http.cookies.SimpleCookie): )? # End of optional value group \s* # Any number of spaces. (\s+|;|$) # Ending either at space, semicolon, or EOS. - """, re.ASCII | re.VERBOSE) + ''', re.ASCII | re.VERBOSE) def load(self, data): # Workaround for https://github.com/yt-dlp/yt-dlp/issues/4776 @@ -1216,8 +1279,8 @@ class YoutubeDLCookieJar(http.cookiejar.MozillaCookieJar): def _really_save(self, f, ignore_discard, ignore_expires): now = time.time() for cookie in self: - if (not ignore_discard and cookie.discard - or not ignore_expires and cookie.is_expired(now)): + if ((not ignore_discard and cookie.discard) + or (not ignore_expires and cookie.is_expired(now))): continue name, value = cookie.name, cookie.value if value is None: @@ -1225,14 +1288,14 @@ class YoutubeDLCookieJar(http.cookiejar.MozillaCookieJar): # with no name, whereas http.cookiejar regards it as a # cookie with no value. name, value = '', name - f.write('%s\n' % '\t'.join(( + f.write('{}\n'.format('\t'.join(( cookie.domain, self._true_or_false(cookie.domain.startswith('.')), cookie.path, self._true_or_false(cookie.secure), str_or_none(cookie.expires, default=''), - name, value - ))) + name, value, + )))) def save(self, filename=None, ignore_discard=True, ignore_expires=True): """ @@ -1271,10 +1334,10 @@ class YoutubeDLCookieJar(http.cookiejar.MozillaCookieJar): return line cookie_list = line.split('\t') if len(cookie_list) != self._ENTRY_LEN: - raise http.cookiejar.LoadError('invalid length %d' % len(cookie_list)) + raise http.cookiejar.LoadError(f'invalid length {len(cookie_list)}') cookie = self._CookieFileEntry(*cookie_list) if cookie.expires_at and not cookie.expires_at.isdigit(): - raise http.cookiejar.LoadError('invalid expires at %s' % cookie.expires_at) + raise http.cookiejar.LoadError(f'invalid expires at {cookie.expires_at}') return line cf = io.StringIO() diff --git a/plugins/youtube_download/yt_dlp/dependencies/Cryptodome.py b/plugins/youtube_download/yt_dlp/dependencies/Cryptodome.py index 2cfa4c9..0e4404d 100644 --- a/plugins/youtube_download/yt_dlp/dependencies/Cryptodome.py +++ b/plugins/youtube_download/yt_dlp/dependencies/Cryptodome.py @@ -24,7 +24,7 @@ try: from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5 # noqa: F401 from Crypto.Hash import CMAC, SHA1 # noqa: F401 from Crypto.PublicKey import RSA # noqa: F401 -except ImportError: +except (ImportError, OSError): __version__ = f'broken {__version__}'.strip() diff --git a/plugins/youtube_download/yt_dlp/dependencies/__init__.py b/plugins/youtube_download/yt_dlp/dependencies/__init__.py index 6e7d29c..0d58da2 100644 --- a/plugins/youtube_download/yt_dlp/dependencies/__init__.py +++ b/plugins/youtube_download/yt_dlp/dependencies/__init__.py @@ -43,19 +43,28 @@ except Exception as _err: try: import sqlite3 + # We need to get the underlying `sqlite` version, see https://github.com/yt-dlp/yt-dlp/issues/8152 + sqlite3._yt_dlp__version = sqlite3.sqlite_version 
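The `_yt_dlp__version` attribute set above pairs with the earlier compat_utils.py change, where get_package_info() now checks `_yt_dlp__version` before `__version__`, so the version reported for sqlite3 is that of the underlying SQLite library rather than the module attribute. A rough sketch of that lookup, assuming only the attribute order matters:

    import sqlite3

    sqlite3._yt_dlp__version = sqlite3.sqlite_version  # version of the SQLite C library, e.g. '3.45.1'

    # what get_package_info() effectively resolves as the version string:
    version = next(filter(None, (
        getattr(sqlite3, attr, None)
        for attr in ('_yt_dlp__version', '__version__', 'version_string', 'version')
    )), None)
    print(version)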
except ImportError: - # although sqlite3 is part of the standard library, it is possible to compile python without + # although sqlite3 is part of the standard library, it is possible to compile Python without # sqlite support. See: https://github.com/yt-dlp/yt-dlp/issues/544 sqlite3 = None try: import websockets -except (ImportError, SyntaxError): - # websockets 3.10 on python 3.6 causes SyntaxError - # See https://github.com/yt-dlp/yt-dlp/issues/2633 +except ImportError: websockets = None +try: + import urllib3 +except ImportError: + urllib3 = None + +try: + import requests +except ImportError: + requests = None try: import xattr # xattr or pyxattr @@ -65,6 +74,10 @@ else: if hasattr(xattr, 'set'): # pyxattr xattr._yt_dlp__identifier = 'pyxattr' +try: + import curl_cffi +except ImportError: + curl_cffi = None from . import Cryptodome diff --git a/plugins/youtube_download/yt_dlp/downloader/__init__.py b/plugins/youtube_download/yt_dlp/downloader/__init__.py index 51a9f28..9c34bd2 100644 --- a/plugins/youtube_download/yt_dlp/downloader/__init__.py +++ b/plugins/youtube_download/yt_dlp/downloader/__init__.py @@ -30,11 +30,12 @@ from .hls import HlsFD from .http import HttpFD from .ism import IsmFD from .mhtml import MhtmlFD -from .niconico import NiconicoDmcFD, NiconicoLiveFD +from .niconico import NiconicoLiveFD from .rtmp import RtmpFD from .rtsp import RtspFD from .websocket import WebSocketFragmentFD from .youtube_live_chat import YoutubeLiveChatFD +from .bunnycdn import BunnyCdnFD PROTOCOL_MAP = { 'rtmp': RtmpFD, @@ -49,12 +50,12 @@ PROTOCOL_MAP = { 'http_dash_segments_generator': DashSegmentsFD, 'ism': IsmFD, 'mhtml': MhtmlFD, - 'niconico_dmc': NiconicoDmcFD, 'niconico_live': NiconicoLiveFD, 'fc2_live': FC2LiveFD, 'websocket_frag': WebSocketFragmentFD, 'youtube_live_chat': YoutubeLiveChatFD, 'youtube_live_chat_replay': YoutubeLiveChatFD, + 'bunnycdn': BunnyCdnFD, } @@ -65,7 +66,6 @@ def shorten_protocol_name(proto, simplify=False): 'rtmp_ffmpeg': 'rtmpF', 'http_dash_segments': 'dash', 'http_dash_segments_generator': 'dashG', - 'niconico_dmc': 'dmc', 'websocket_frag': 'WSfrag', } if simplify: diff --git a/plugins/youtube_download/yt_dlp/downloader/bunnycdn.py b/plugins/youtube_download/yt_dlp/downloader/bunnycdn.py new file mode 100644 index 0000000..e787f69 --- /dev/null +++ b/plugins/youtube_download/yt_dlp/downloader/bunnycdn.py @@ -0,0 +1,50 @@ +import hashlib +import random +import threading + +from .common import FileDownloader +from . import HlsFD +from ..networking import Request +from ..networking.exceptions import network_exceptions + + +class BunnyCdnFD(FileDownloader): + """ + Downloads from BunnyCDN with required pings + Note, this is not a part of public API, and will be removed without notice. + DO NOT USE + """ + + def real_download(self, filename, info_dict): + self.to_screen(f'[{self.FD_NAME}] Downloading from BunnyCDN') + + fd = HlsFD(self.ydl, self.params) + + stop_event = threading.Event() + ping_thread = threading.Thread(target=self.ping_thread, args=(stop_event,), kwargs=info_dict['_bunnycdn_ping_data']) + ping_thread.start() + + try: + return fd.real_download(filename, info_dict) + finally: + stop_event.set() + + def ping_thread(self, stop_event, url, headers, secret, context_id): + # Site sends ping every 4 seconds, but this throttles the download. Pinging every 2 seconds seems to work. 
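The ping loop that follows relies on threading.Event.wait(timeout) doing double duty as an interruptible sleep and as the loop condition: it returns False after each interval while the download is still running, and True as soon as the main thread calls stop_event.set(). A small self-contained sketch of that pattern (illustrative only, not part of the patch):

    import threading
    import time

    stop_event = threading.Event()

    def pinger(stop, interval=2):
        # wait() sleeps up to `interval` seconds but wakes immediately once stop.set() is called
        while not stop.wait(interval):
            print('ping at', round(time.monotonic(), 1))

    thread = threading.Thread(target=pinger, args=(stop_event,))
    thread.start()
    time.sleep(5)      # stand-in for the actual download
    stop_event.set()   # mirrors the finally: stop_event.set() in real_download
    thread.join()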
+ ping_interval = 2 + # Hard coded resolution as it doesn't seem to matter + res = 1080 + paused = 'false' + current_time = 0 + + while not stop_event.wait(ping_interval): + current_time += ping_interval + + time = current_time + round(random.random(), 6) + md5_hash = hashlib.md5(f'{secret}_{context_id}_{time}_{paused}_{res}'.encode()).hexdigest() + ping_url = f'{url}?hash={md5_hash}&time={time}&paused={paused}&resolution={res}' + + try: + self.ydl.urlopen(Request(ping_url, headers=headers)).read() + except network_exceptions as e: + self.to_screen(f'[{self.FD_NAME}] Ping failed: {e}') diff --git a/plugins/youtube_download/yt_dlp/downloader/common.py b/plugins/youtube_download/yt_dlp/downloader/common.py index b71d7ee..bb9303f 100644 --- a/plugins/youtube_download/yt_dlp/downloader/common.py +++ b/plugins/youtube_download/yt_dlp/downloader/common.py @@ -4,6 +4,7 @@ import functools import os import random import re +import threading import time from ..minicurses import ( @@ -19,9 +20,7 @@ from ..utils import ( Namespace, RetryManager, classproperty, - decodeArgument, deprecation_warning, - encodeFilename, format_bytes, join_nonempty, parse_bytes, @@ -32,6 +31,7 @@ from ..utils import ( timetuple_from_msec, try_call, ) +from ..utils._utils import _ProgressState class FileDownloader: @@ -63,6 +63,7 @@ class FileDownloader: min_filesize: Skip files smaller than this size max_filesize: Skip files larger than this size xattr_set_filesize: Set ytdl.filesize user xattribute with expected size. + progress_delta: The minimum time between progress output, in seconds external_downloader_args: A dictionary of downloader keys (in lower case) and a list of additional command-line arguments for the executable. Use 'default' as the name for arguments to be @@ -88,6 +89,9 @@ class FileDownloader: self.params = params self._prepare_multiline_status() self.add_progress_hook(self.report_progress) + if self.params.get('progress_delta'): + self._progress_delta_lock = threading.Lock() + self._progress_delta_time = time.monotonic() def _set_ydl(self, ydl): self.ydl = ydl @@ -214,7 +218,7 @@ class FileDownloader: def temp_name(self, filename): """Returns a temporary filename for the given filename.""" if self.params.get('nopart', False) or filename == '-' or \ - (os.path.exists(encodeFilename(filename)) and not os.path.isfile(encodeFilename(filename))): + (os.path.exists(filename) and not os.path.isfile(filename)): return filename return filename + '.part' @@ -268,7 +272,7 @@ class FileDownloader: """Try to set the last-modified time of the given file.""" if last_modified_hdr is None: return - if not os.path.isfile(encodeFilename(filename)): + if not os.path.isfile(filename): return timestr = last_modified_hdr if timestr is None: @@ -330,7 +334,7 @@ class FileDownloader: progress_dict), s.get('progress_idx') or 0) self.to_console_title(self.ydl.evaluate_outtmpl( progress_template.get('download-title') or 'yt-dlp %(progress._default_template)s', - progress_dict)) + progress_dict), _ProgressState.from_dict(s), s.get('_percent')) def _format_progress(self, *args, **kwargs): return self.ydl._format_text( @@ -354,6 +358,7 @@ class FileDownloader: '_speed_str': self.format_speed(speed).strip(), '_total_bytes_str': _format_bytes('total_bytes'), '_elapsed_str': self.format_seconds(s.get('elapsed')), + '_percent': 100.0, '_percent_str': self.format_percent(100), }) self._report_progress_status(s, join_nonempty( @@ -366,13 +371,21 @@ class FileDownloader: if s['status'] != 'downloading': return + if update_delta := 
self.params.get('progress_delta'): + with self._progress_delta_lock: + if time.monotonic() < self._progress_delta_time: + return + self._progress_delta_time += update_delta + + progress = try_call( + lambda: 100 * s['downloaded_bytes'] / s['total_bytes'], + lambda: 100 * s['downloaded_bytes'] / s['total_bytes_estimate'], + lambda: s['downloaded_bytes'] == 0 and 0) s.update({ '_eta_str': self.format_eta(s.get('eta')).strip(), '_speed_str': self.format_speed(s.get('speed')), - '_percent_str': self.format_percent(try_call( - lambda: 100 * s['downloaded_bytes'] / s['total_bytes'], - lambda: 100 * s['downloaded_bytes'] / s['total_bytes_estimate'], - lambda: s['downloaded_bytes'] == 0 and 0)), + '_percent': progress, + '_percent_str': self.format_percent(progress), '_total_bytes_str': _format_bytes('total_bytes'), '_total_bytes_estimate_str': _format_bytes('total_bytes_estimate'), '_downloaded_bytes_str': _format_bytes('downloaded_bytes'), @@ -393,7 +406,7 @@ class FileDownloader: def report_resuming_byte(self, resume_len): """Report attempt to resume at given byte.""" - self.to_screen('[download] Resuming download at byte %s' % resume_len) + self.to_screen(f'[download] Resuming download at byte {resume_len}') def report_retry(self, err, count, retries, frag_index=NO_DEFAULT, fatal=True): """Report retry""" @@ -421,13 +434,13 @@ class FileDownloader: """ nooverwrites_and_exists = ( not self.params.get('overwrites', True) - and os.path.exists(encodeFilename(filename)) + and os.path.exists(filename) ) if not hasattr(filename, 'write'): continuedl_and_exists = ( self.params.get('continuedl', True) - and os.path.isfile(encodeFilename(filename)) + and os.path.isfile(filename) and not self.params.get('nopart', False) ) @@ -437,7 +450,7 @@ class FileDownloader: self._hook_progress({ 'filename': filename, 'status': 'finished', - 'total_bytes': os.path.getsize(encodeFilename(filename)), + 'total_bytes': os.path.getsize(filename), }, info_dict) self._finish_multiline_status() return True, False @@ -478,9 +491,7 @@ class FileDownloader: if not self.params.get('verbose', False): return - str_args = [decodeArgument(a) for a in args] - if exe is None: - exe = os.path.basename(str_args[0]) + exe = os.path.basename(args[0]) - self.write_debug(f'{exe} command line: {shell_quote(str_args)}') + self.write_debug(f'{exe} command line: {shell_quote(args)}') diff --git a/plugins/youtube_download/yt_dlp/downloader/dash.py b/plugins/youtube_download/yt_dlp/downloader/dash.py index 4328d73..afc79b6 100644 --- a/plugins/youtube_download/yt_dlp/downloader/dash.py +++ b/plugins/youtube_download/yt_dlp/downloader/dash.py @@ -15,12 +15,15 @@ class DashSegmentsFD(FragmentFD): FD_NAME = 'dashsegments' def real_download(self, filename, info_dict): - if info_dict.get('is_live') and set(info_dict['protocol'].split('+')) != {'http_dash_segments_generator'}: - self.report_error('Live DASH videos are not supported') + if 'http_dash_segments_generator' in info_dict['protocol'].split('+'): + real_downloader = None # No external FD can support --live-from-start + else: + if info_dict.get('is_live'): + self.report_error('Live DASH videos are not supported') + real_downloader = get_suitable_downloader( + info_dict, self.params, None, protocol='dash_frag_urls', to_stdout=(filename == '-')) real_start = time.time() - real_downloader = get_suitable_downloader( - info_dict, self.params, None, protocol='dash_frag_urls', to_stdout=(filename == '-')) requested_formats = [{**info_dict, **fmt} for fmt in info_dict.get('requested_formats', [])] 
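The reworked dash.py guard above decides whether an external fragment downloader may be used by inspecting the '+'-joined protocol string; the generator protocol used for --live-from-start always forces the native downloader. A tiny sketch with a made-up protocol value:

    protocol = 'http_dash_segments+http_dash_segments_generator'
    use_external_fd = 'http_dash_segments_generator' not in protocol.split('+')
    print(use_external_fd)  # False: no external downloader supports --live-from-start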
args = [] diff --git a/plugins/youtube_download/yt_dlp/downloader/external.py b/plugins/youtube_download/yt_dlp/downloader/external.py index 4ce8a3b..ee73ac0 100644 --- a/plugins/youtube_download/yt_dlp/downloader/external.py +++ b/plugins/youtube_download/yt_dlp/downloader/external.py @@ -1,4 +1,5 @@ import enum +import functools import json import os import re @@ -9,7 +10,6 @@ import time import uuid from .fragment import FragmentFD -from ..compat import functools from ..networking import Request from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor from ..utils import ( @@ -23,7 +23,6 @@ from ..utils import ( cli_valueless_option, determine_ext, encodeArgument, - encodeFilename, find_available_port, remove_end, traverse_obj, @@ -55,7 +54,7 @@ class ExternalFD(FragmentFD): # correct and expected termination thus all postprocessing # should take place retval = 0 - self.to_screen('[%s] Interrupted by user' % self.get_basename()) + self.to_screen(f'[{self.get_basename()}] Interrupted by user') finally: if self._cookies_tempfile: self.try_remove(self._cookies_tempfile) @@ -67,7 +66,7 @@ class ExternalFD(FragmentFD): 'elapsed': time.time() - started, } if filename != '-': - fsize = os.path.getsize(encodeFilename(tmpfilename)) + fsize = os.path.getsize(tmpfilename) self.try_rename(tmpfilename, filename) status.update({ 'downloaded_bytes': fsize, @@ -108,7 +107,7 @@ class ExternalFD(FragmentFD): return all(( not info_dict.get('to_stdout') or Features.TO_STDOUT in cls.SUPPORTED_FEATURES, '+' not in info_dict['protocol'] or Features.MULTIPLE_FORMATS in cls.SUPPORTED_FEATURES, - not traverse_obj(info_dict, ('hls_aes', ...), 'extra_param_to_segment_url'), + not traverse_obj(info_dict, ('hls_aes', ...), 'extra_param_to_segment_url', 'extra_param_to_key_url'), all(proto in cls.SUPPORTED_PROTOCOLS for proto in info_dict['protocol'].split('+')), )) @@ -172,7 +171,7 @@ class ExternalFD(FragmentFD): decrypt_fragment = self.decrypter(info_dict) dest, _ = self.sanitize_open(tmpfilename, 'wb') for frag_index, fragment in enumerate(info_dict['fragments']): - fragment_filename = '%s-Frag%d' % (tmpfilename, frag_index) + fragment_filename = f'{tmpfilename}-Frag{frag_index}' try: src, _ = self.sanitize_open(fragment_filename, 'rb') except OSError as err: @@ -184,9 +183,9 @@ class ExternalFD(FragmentFD): dest.write(decrypt_fragment(fragment, src.read())) src.close() if not self.params.get('keep_fragments', False): - self.try_remove(encodeFilename(fragment_filename)) + self.try_remove(fragment_filename) dest.close() - self.try_remove(encodeFilename('%s.frag.urls' % tmpfilename)) + self.try_remove(f'{tmpfilename}.frag.urls') return 0 def _call_process(self, cmd, info_dict): @@ -335,12 +334,12 @@ class Aria2cFD(ExternalFD): cmd += ['--auto-file-renaming=false'] if 'fragments' in info_dict: - cmd += ['--file-allocation=none', '--uri-selector=inorder'] - url_list_file = '%s.frag.urls' % tmpfilename + cmd += ['--uri-selector=inorder'] + url_list_file = f'{tmpfilename}.frag.urls' url_list = [] for frag_index, fragment in enumerate(info_dict['fragments']): - fragment_filename = '%s-Frag%d' % (os.path.basename(tmpfilename), frag_index) - url_list.append('%s\n\tout=%s' % (fragment['url'], self._aria2c_filename(fragment_filename))) + fragment_filename = f'{os.path.basename(tmpfilename)}-Frag{frag_index}' + url_list.append('{}\n\tout={}'.format(fragment['url'], self._aria2c_filename(fragment_filename))) stream, _ = self.sanitize_open(url_list_file, 'wb') stream.write('\n'.join(url_list).encode()) 
stream.close() @@ -357,7 +356,7 @@ class Aria2cFD(ExternalFD): 'id': sanitycheck, 'method': method, 'params': [f'token:{rpc_secret}', *params], - }).encode('utf-8') + }).encode() request = Request( f'http://localhost:{rpc_port}/jsonrpc', data=d, headers={ @@ -416,7 +415,7 @@ class Aria2cFD(ExternalFD): 'total_bytes_estimate': total, 'eta': (total - downloaded) / (speed or 1), 'fragment_index': min(frag_count, len(completed) + 1) if fragmented else None, - 'elapsed': time.time() - started + 'elapsed': time.time() - started, }) self._hook_progress(status, info_dict) @@ -458,8 +457,6 @@ class FFmpegFD(ExternalFD): @classmethod def available(cls, path=None): - # TODO: Fix path for ffmpeg - # Fixme: This may be wrong when --ffmpeg-location is used return FFmpegPostProcessor().available def on_process_started(self, proc, stdin): @@ -491,7 +488,7 @@ class FFmpegFD(ExternalFD): if not self.params.get('verbose'): args += ['-hide_banner'] - args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args'), default=[]) + args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args', ...)) # These exists only for compatibility. Extractors should use # info_dict['downloader_options']['ffmpeg_args'] instead @@ -508,13 +505,13 @@ class FFmpegFD(ExternalFD): env = None proxy = self.params.get('proxy') if proxy: - if not re.match(r'^[\da-zA-Z]+://', proxy): - proxy = 'http://%s' % proxy + if not re.match(r'[\da-zA-Z]+://', proxy): + proxy = f'http://{proxy}' if proxy.startswith('socks'): self.report_warning( - '%s does not support SOCKS proxies. Downloading is likely to fail. ' - 'Consider adding --hls-prefer-native to your command.' % self.get_basename()) + f'{self.get_basename()} does not support SOCKS proxies. Downloading is likely to fail. ' + 'Consider adding --hls-prefer-native to your command.') # Since December 2015 ffmpeg supports -http_proxy option (see # http://git.videolan.org/?p=ffmpeg.git;a=commit;h=b4eb1f29ebddd60c41a2eb39f5af701e38e0d3fd) @@ -559,7 +556,7 @@ class FFmpegFD(ExternalFD): selected_formats = info_dict.get('requested_formats') or [info_dict] for i, fmt in enumerate(selected_formats): - is_http = re.match(r'^https?://', fmt['url']) + is_http = re.match(r'https?://', fmt['url']) cookies = self.ydl.cookiejar.get_cookies_for_url(fmt['url']) if is_http else [] if cookies: args.extend(['-cookies', ''.join( @@ -575,7 +572,7 @@ class FFmpegFD(ExternalFD): if end_time: args += ['-t', str(end_time - start_time)] - args += self._configuration_args((f'_i{i + 1}', '_i')) + ['-i', fmt['url']] + args += [*self._configuration_args((f'_i{i + 1}', '_i')), '-i', fmt['url']] if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'): args += ['-c', 'copy'] @@ -615,10 +612,12 @@ class FFmpegFD(ExternalFD): else: args += ['-f', EXT_TO_OUT_FORMATS.get(ext, ext)] + args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args_out', ...)) + args += self._configuration_args(('_o1', '_o', '')) args = [encodeArgument(opt) for opt in args] - args.append(encodeFilename(ffpp._ffmpeg_filename_argument(tmpfilename), True)) + args.append(ffpp._ffmpeg_filename_argument(tmpfilename)) self._debug_cmd(args) piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats) diff --git a/plugins/youtube_download/yt_dlp/downloader/f4m.py b/plugins/youtube_download/yt_dlp/downloader/f4m.py index 28cbba0..22d0ebd 100644 --- a/plugins/youtube_download/yt_dlp/downloader/f4m.py +++ b/plugins/youtube_download/yt_dlp/downloader/f4m.py @@ -67,12 +67,12 @@ class FlvReader(io.BytesIO): 
self.read_bytes(3) quality_entry_count = self.read_unsigned_char() # QualityEntryCount - for i in range(quality_entry_count): + for _ in range(quality_entry_count): self.read_string() segment_run_count = self.read_unsigned_int() segments = [] - for i in range(segment_run_count): + for _ in range(segment_run_count): first_segment = self.read_unsigned_int() fragments_per_segment = self.read_unsigned_int() segments.append((first_segment, fragments_per_segment)) @@ -91,12 +91,12 @@ class FlvReader(io.BytesIO): quality_entry_count = self.read_unsigned_char() # QualitySegmentUrlModifiers - for i in range(quality_entry_count): + for _ in range(quality_entry_count): self.read_string() fragments_count = self.read_unsigned_int() fragments = [] - for i in range(fragments_count): + for _ in range(fragments_count): first = self.read_unsigned_int() first_ts = self.read_unsigned_long_long() duration = self.read_unsigned_int() @@ -135,11 +135,11 @@ class FlvReader(io.BytesIO): self.read_string() # MovieIdentifier server_count = self.read_unsigned_char() # ServerEntryTable - for i in range(server_count): + for _ in range(server_count): self.read_string() quality_count = self.read_unsigned_char() # QualityEntryTable - for i in range(quality_count): + for _ in range(quality_count): self.read_string() # DrmData self.read_string() @@ -148,14 +148,14 @@ class FlvReader(io.BytesIO): segments_count = self.read_unsigned_char() segments = [] - for i in range(segments_count): + for _ in range(segments_count): box_size, box_type, box_data = self.read_box_info() assert box_type == b'asrt' segment = FlvReader(box_data).read_asrt() segments.append(segment) fragments_run_count = self.read_unsigned_char() fragments = [] - for i in range(fragments_run_count): + for _ in range(fragments_run_count): box_size, box_type, box_data = self.read_box_info() assert box_type == b'afrt' fragments.append(FlvReader(box_data).read_afrt()) @@ -309,7 +309,7 @@ class F4mFD(FragmentFD): def real_download(self, filename, info_dict): man_url = info_dict['url'] requested_bitrate = info_dict.get('tbr') - self.to_screen('[%s] Downloading f4m manifest' % self.FD_NAME) + self.to_screen(f'[{self.FD_NAME}] Downloading f4m manifest') urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url)) man_url = urlh.url @@ -326,8 +326,8 @@ class F4mFD(FragmentFD): formats = sorted(formats, key=lambda f: f[0]) rate, media = formats[-1] else: - rate, media = list(filter( - lambda f: int(f[0]) == requested_bitrate, formats))[0] + rate, media = next(filter( + lambda f: int(f[0]) == requested_bitrate, formats)) # Prefer baseURL for relative URLs as per 11.2 of F4M 3.0 spec. 
man_base_url = get_base_url(doc) or man_url diff --git a/plugins/youtube_download/yt_dlp/downloader/fragment.py b/plugins/youtube_download/yt_dlp/downloader/fragment.py index b4b680d..98784e7 100644 --- a/plugins/youtube_download/yt_dlp/downloader/fragment.py +++ b/plugins/youtube_download/yt_dlp/downloader/fragment.py @@ -9,11 +9,11 @@ import time from .common import FileDownloader from .http import HttpFD from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7 -from ..compat import compat_os_name from ..networking import Request from ..networking.exceptions import HTTPError, IncompleteRead -from ..utils import DownloadError, RetryManager, encodeFilename, traverse_obj +from ..utils import DownloadError, RetryManager, traverse_obj from ..utils.networking import HTTPHeaderDict +from ..utils.progress import ProgressCalculator class HttpQuietDownloader(HttpFD): @@ -151,7 +151,7 @@ class FragmentFD(FileDownloader): if self.__do_ytdl_file(ctx): self._write_ytdl_file(ctx) if not self.params.get('keep_fragments', False): - self.try_remove(encodeFilename(ctx['fragment_filename_sanitized'])) + self.try_remove(ctx['fragment_filename_sanitized']) del ctx['fragment_filename_sanitized'] def _prepare_frag_download(self, ctx): @@ -187,7 +187,7 @@ class FragmentFD(FileDownloader): }) if self.__do_ytdl_file(ctx): - ytdl_file_exists = os.path.isfile(encodeFilename(self.ytdl_filename(ctx['filename']))) + ytdl_file_exists = os.path.isfile(self.ytdl_filename(ctx['filename'])) continuedl = self.params.get('continuedl', True) if continuedl and ytdl_file_exists: self._read_ytdl_file(ctx) @@ -198,7 +198,7 @@ class FragmentFD(FileDownloader): '.ytdl file is corrupt' if is_corrupt else 'Inconsistent state of incomplete fragment download') self.report_warning( - '%s. Restarting from the beginning ...' % message) + f'{message}. 
Restarting from the beginning ...') ctx['fragment_index'] = resume_len = 0 if 'ytdl_corrupt' in ctx: del ctx['ytdl_corrupt'] @@ -226,8 +226,7 @@ class FragmentFD(FileDownloader): resume_len = ctx['complete_frags_downloaded_bytes'] total_frags = ctx['total_frags'] ctx_id = ctx.get('ctx_id') - # This dict stores the download progress, it's updated by the progress - # hook + # Stores the download progress, updated by the progress hook state = { 'status': 'downloading', 'downloaded_bytes': resume_len, @@ -237,14 +236,8 @@ class FragmentFD(FileDownloader): 'tmpfilename': ctx['tmpfilename'], } - start = time.time() - ctx.update({ - 'started': start, - 'fragment_started': start, - # Amount of fragment's bytes downloaded by the time of the previous - # frag progress hook invocation - 'prev_frag_downloaded_bytes': 0, - }) + ctx['started'] = time.time() + progress = ProgressCalculator(resume_len) def frag_progress_hook(s): if s['status'] not in ('downloading', 'finished'): @@ -259,38 +252,35 @@ class FragmentFD(FileDownloader): state['max_progress'] = ctx.get('max_progress') state['progress_idx'] = ctx.get('progress_idx') - time_now = time.time() - state['elapsed'] = time_now - start + state['elapsed'] = progress.elapsed frag_total_bytes = s.get('total_bytes') or 0 s['fragment_info_dict'] = s.pop('info_dict', {}) + + # XXX: Fragment resume is not accounted for here if not ctx['live']: estimated_size = ( (ctx['complete_frags_downloaded_bytes'] + frag_total_bytes) / (state['fragment_index'] + 1) * total_frags) - state['total_bytes_estimate'] = estimated_size + progress.total = estimated_size + progress.update(s.get('downloaded_bytes')) + state['total_bytes_estimate'] = progress.total + else: + progress.update(s.get('downloaded_bytes')) if s['status'] == 'finished': state['fragment_index'] += 1 ctx['fragment_index'] = state['fragment_index'] - state['downloaded_bytes'] += frag_total_bytes - ctx['prev_frag_downloaded_bytes'] - ctx['complete_frags_downloaded_bytes'] = state['downloaded_bytes'] - ctx['speed'] = state['speed'] = self.calc_speed( - ctx['fragment_started'], time_now, frag_total_bytes) - ctx['fragment_started'] = time.time() - ctx['prev_frag_downloaded_bytes'] = 0 - else: - frag_downloaded_bytes = s['downloaded_bytes'] - state['downloaded_bytes'] += frag_downloaded_bytes - ctx['prev_frag_downloaded_bytes'] - ctx['speed'] = state['speed'] = self.calc_speed( - ctx['fragment_started'], time_now, frag_downloaded_bytes - ctx.get('frag_resume_len', 0)) - if not ctx['live']: - state['eta'] = self.calc_eta(state['speed'], estimated_size - state['downloaded_bytes']) - ctx['prev_frag_downloaded_bytes'] = frag_downloaded_bytes + progress.thread_reset() + + state['downloaded_bytes'] = ctx['complete_frags_downloaded_bytes'] = progress.downloaded + state['speed'] = ctx['speed'] = progress.speed.smooth + state['eta'] = progress.eta.smooth + self._hook_progress(state, info_dict) ctx['dl'].add_progress_hook(frag_progress_hook) - return start + return ctx['started'] def _finish_frag_download(self, ctx, info_dict): ctx['dest_stream'].close() @@ -375,10 +365,10 @@ class FragmentFD(FileDownloader): return decrypt_fragment def download_and_append_fragments_multiple(self, *args, **kwargs): - ''' + """ @params (ctx1, fragments1, info_dict1), (ctx2, fragments2, info_dict2), ... 
all args must be either tuple or list - ''' + """ interrupt_trigger = [True] max_progress = len(args) if max_progress == 1: @@ -399,7 +389,7 @@ class FragmentFD(FileDownloader): def __exit__(self, exc_type, exc_val, exc_tb): pass - if compat_os_name == 'nt': + if os.name == 'nt': def future_result(future): while True: try: @@ -433,7 +423,7 @@ class FragmentFD(FileDownloader): finally: tpe.shutdown(wait=True) if not interrupt_trigger[0] and not is_live: - raise KeyboardInterrupt() + raise KeyboardInterrupt # we expect the user wants to stop and DO WANT the preceding postprocessors to run; # so returning a intermediate result here instead of KeyboardInterrupt on live return result @@ -500,7 +490,6 @@ class FragmentFD(FileDownloader): download_fragment(fragment, ctx_copy) return fragment, fragment['frag_index'], ctx_copy.get('fragment_filename_sanitized') - self.report_warning('The download speed shown is only of one thread. This is a known issue') with tpe or concurrent.futures.ThreadPoolExecutor(max_workers) as pool: try: for fragment, frag_index, frag_filename in pool.map(_download_fragment, fragments): diff --git a/plugins/youtube_download/yt_dlp/downloader/hls.py b/plugins/youtube_download/yt_dlp/downloader/hls.py index d4b3f03..1f36a07 100644 --- a/plugins/youtube_download/yt_dlp/downloader/hls.py +++ b/plugins/youtube_download/yt_dlp/downloader/hls.py @@ -16,6 +16,7 @@ from ..utils import ( update_url_query, urljoin, ) +from ..utils._utils import _request_dump_filename class HlsFD(FragmentFD): @@ -72,11 +73,23 @@ class HlsFD(FragmentFD): def real_download(self, filename, info_dict): man_url = info_dict['url'] - self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME) - urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url)) - man_url = urlh.url - s = urlh.read().decode('utf-8', 'ignore') + s = info_dict.get('hls_media_playlist_data') + if s: + self.to_screen(f'[{self.FD_NAME}] Using m3u8 manifest from extracted info') + else: + self.to_screen(f'[{self.FD_NAME}] Downloading m3u8 manifest') + urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url)) + man_url = urlh.url + s_bytes = urlh.read() + if self.params.get('write_pages'): + dump_filename = _request_dump_filename( + man_url, info_dict['id'], None, + trim_length=self.params.get('trim_file_name')) + self.to_screen(f'[{self.FD_NAME}] Saving request to {dump_filename}') + with open(dump_filename, 'wb') as outf: + outf.write(s_bytes) + s = s_bytes.decode('utf-8', 'ignore') can_download, message = self.can_download(s, info_dict, self.params.get('allow_unplayable_formats')), None if can_download: @@ -119,12 +132,12 @@ class HlsFD(FragmentFD): self.to_screen(f'[{self.FD_NAME}] Fragment downloads will be delegated to {real_downloader.get_basename()}') def is_ad_fragment_start(s): - return (s.startswith('#ANVATO-SEGMENT-INFO') and 'type=ad' in s - or s.startswith('#UPLYNK-SEGMENT') and s.endswith(',ad')) + return ((s.startswith('#ANVATO-SEGMENT-INFO') and 'type=ad' in s) + or (s.startswith('#UPLYNK-SEGMENT') and s.endswith(',ad'))) def is_ad_fragment_end(s): - return (s.startswith('#ANVATO-SEGMENT-INFO') and 'type=master' in s - or s.startswith('#UPLYNK-SEGMENT') and s.endswith(',segment')) + return ((s.startswith('#ANVATO-SEGMENT-INFO') and 'type=master' in s) + or (s.startswith('#UPLYNK-SEGMENT') and s.endswith(',segment'))) fragments = [] @@ -160,10 +173,12 @@ class HlsFD(FragmentFD): extra_state = ctx.setdefault('extra_state', {}) format_index = info_dict.get('format_index') - extra_query = None - 
extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url') - if extra_param_to_segment_url: - extra_query = urllib.parse.parse_qs(extra_param_to_segment_url) + extra_segment_query = None + if extra_param_to_segment_url := info_dict.get('extra_param_to_segment_url'): + extra_segment_query = urllib.parse.parse_qs(extra_param_to_segment_url) + extra_key_query = None + if extra_param_to_key_url := info_dict.get('extra_param_to_key_url'): + extra_key_query = urllib.parse.parse_qs(extra_param_to_key_url) i = 0 media_sequence = 0 decrypt_info = {'METHOD': 'NONE'} @@ -175,6 +190,7 @@ class HlsFD(FragmentFD): if external_aes_iv: external_aes_iv = binascii.unhexlify(remove_start(external_aes_iv, '0x').zfill(32)) byte_range = {} + byte_range_offset = 0 discontinuity_count = 0 frag_index = 0 ad_frag_next = False @@ -190,8 +206,8 @@ class HlsFD(FragmentFD): if frag_index <= ctx['fragment_index']: continue frag_url = urljoin(man_url, line) - if extra_query: - frag_url = update_url_query(frag_url, extra_query) + if extra_segment_query: + frag_url = update_url_query(frag_url, extra_segment_query) fragments.append({ 'frag_index': frag_index, @@ -202,6 +218,11 @@ class HlsFD(FragmentFD): }) media_sequence += 1 + # If the byte_range is truthy, reset it after appending a fragment that uses it + if byte_range: + byte_range_offset = byte_range['end'] + byte_range = {} + elif line.startswith('#EXT-X-MAP'): if format_index and discontinuity_count != format_index: continue @@ -212,13 +233,15 @@ class HlsFD(FragmentFD): frag_index += 1 map_info = parse_m3u8_attributes(line[11:]) frag_url = urljoin(man_url, map_info.get('URI')) - if extra_query: - frag_url = update_url_query(frag_url, extra_query) + if extra_segment_query: + frag_url = update_url_query(frag_url, extra_segment_query) + + map_byte_range = {} if map_info.get('BYTERANGE'): splitted_byte_range = map_info.get('BYTERANGE').split('@') - sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range['end'] - byte_range = { + sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else 0 + map_byte_range = { 'start': sub_range_start, 'end': sub_range_start + int(splitted_byte_range[0]), } @@ -227,8 +250,8 @@ class HlsFD(FragmentFD): 'frag_index': frag_index, 'url': frag_url, 'decrypt_info': decrypt_info, - 'byte_range': byte_range, - 'media_sequence': media_sequence + 'byte_range': map_byte_range, + 'media_sequence': media_sequence, }) media_sequence += 1 @@ -244,8 +267,10 @@ class HlsFD(FragmentFD): decrypt_info['KEY'] = external_aes_key else: decrypt_info['URI'] = urljoin(man_url, decrypt_info['URI']) - if extra_query: - decrypt_info['URI'] = update_url_query(decrypt_info['URI'], extra_query) + if extra_key_query or extra_segment_query: + # Fall back to extra_segment_query to key for backwards compat + decrypt_info['URI'] = update_url_query( + decrypt_info['URI'], extra_key_query or extra_segment_query) if decrypt_url != decrypt_info['URI']: decrypt_info['KEY'] = None @@ -253,7 +278,7 @@ class HlsFD(FragmentFD): media_sequence = int(line[22:]) elif line.startswith('#EXT-X-BYTERANGE'): splitted_byte_range = line[17:].split('@') - sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range['end'] + sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range_offset byte_range = { 'start': sub_range_start, 'end': sub_range_start + int(splitted_byte_range[0]), @@ -350,9 +375,8 @@ class HlsFD(FragmentFD): # XXX: this should 
probably be silent as well # or verify that all segments contain the same data self.report_warning(bug_reports_message( - 'Discarding a %s block found in the middle of the stream; ' - 'if the subtitles display incorrectly,' - % (type(block).__name__))) + f'Discarding a {type(block).__name__} block found in the middle of the stream; ' + 'if the subtitles display incorrectly,')) continue block.write_into(output) @@ -369,7 +393,10 @@ class HlsFD(FragmentFD): return output.getvalue().encode() - self.download_and_append_fragments( - ctx, fragments, info_dict, pack_func=pack_fragment, finish_func=fin_fragments) + if len(fragments) == 1: + self.download_and_append_fragments(ctx, fragments, info_dict) + else: + self.download_and_append_fragments( + ctx, fragments, info_dict, pack_func=pack_fragment, finish_func=fin_fragments) else: return self.download_and_append_fragments(ctx, fragments, info_dict) diff --git a/plugins/youtube_download/yt_dlp/downloader/http.py b/plugins/youtube_download/yt_dlp/downloader/http.py index f523744..9c6dd8b 100644 --- a/plugins/youtube_download/yt_dlp/downloader/http.py +++ b/plugins/youtube_download/yt_dlp/downloader/http.py @@ -15,7 +15,6 @@ from ..utils import ( ThrottledDownload, XAttrMetadataError, XAttrUnavailableError, - encodeFilename, int_or_none, parse_http_range, try_call, @@ -58,9 +57,8 @@ class HttpFD(FileDownloader): if self.params.get('continuedl', True): # Establish possible resume length - if os.path.isfile(encodeFilename(ctx.tmpfilename)): - ctx.resume_len = os.path.getsize( - encodeFilename(ctx.tmpfilename)) + if os.path.isfile(ctx.tmpfilename): + ctx.resume_len = os.path.getsize(ctx.tmpfilename) ctx.is_resume = ctx.resume_len > 0 @@ -176,7 +174,7 @@ class HttpFD(FileDownloader): 'downloaded_bytes': ctx.resume_len, 'total_bytes': ctx.resume_len, }, info_dict) - raise SucceedDownload() + raise SucceedDownload else: # The length does not match, we start the download over self.report_unable_to_resume() @@ -194,7 +192,7 @@ class HttpFD(FileDownloader): def close_stream(): if ctx.stream is not None: - if not ctx.tmpfilename == '-': + if ctx.tmpfilename != '-': ctx.stream.close() ctx.stream = None @@ -237,8 +235,13 @@ class HttpFD(FileDownloader): def retry(e): close_stream() - ctx.resume_len = (byte_counter if ctx.tmpfilename == '-' - else os.path.getsize(encodeFilename(ctx.tmpfilename))) + if ctx.tmpfilename == '-': + ctx.resume_len = byte_counter + else: + try: + ctx.resume_len = os.path.getsize(ctx.tmpfilename) + except FileNotFoundError: + ctx.resume_len = 0 raise RetryDownload(e) while True: @@ -263,20 +266,20 @@ class HttpFD(FileDownloader): ctx.filename = self.undo_temp_name(ctx.tmpfilename) self.report_destination(ctx.filename) except OSError as err: - self.report_error('unable to open for writing: %s' % str(err)) + self.report_error(f'unable to open for writing: {err}') return False if self.params.get('xattr_set_filesize', False) and data_len is not None: try: write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode()) except (XAttrUnavailableError, XAttrMetadataError) as err: - self.report_error('unable to set filesize xattr: %s' % str(err)) + self.report_error(f'unable to set filesize xattr: {err}') try: ctx.stream.write(data_block) except OSError as err: self.to_stderr('\n') - self.report_error('unable to write data: %s' % str(err)) + self.report_error(f'unable to write data: {err}') return False # Apply rate limit @@ -322,7 +325,7 @@ class HttpFD(FileDownloader): elif now - ctx.throttle_start > 3: if ctx.stream is not None and 
ctx.tmpfilename != '-': ctx.stream.close() - raise ThrottledDownload() + raise ThrottledDownload elif speed: ctx.throttle_start = None @@ -333,7 +336,7 @@ class HttpFD(FileDownloader): if not is_test and ctx.chunk_size and ctx.content_len is not None and byte_counter < ctx.content_len: ctx.resume_len = byte_counter - raise NextFragment() + raise NextFragment if ctx.tmpfilename != '-': ctx.stream.close() diff --git a/plugins/youtube_download/yt_dlp/downloader/ism.py b/plugins/youtube_download/yt_dlp/downloader/ism.py index dd688f5..62c3a3b 100644 --- a/plugins/youtube_download/yt_dlp/downloader/ism.py +++ b/plugins/youtube_download/yt_dlp/downloader/ism.py @@ -251,7 +251,7 @@ class IsmFD(FragmentFD): skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True) frag_index = 0 - for i, segment in enumerate(segments): + for segment in segments: frag_index += 1 if frag_index <= ctx['fragment_index']: continue diff --git a/plugins/youtube_download/yt_dlp/downloader/mhtml.py b/plugins/youtube_download/yt_dlp/downloader/mhtml.py index d977dce..3d4f2d7 100644 --- a/plugins/youtube_download/yt_dlp/downloader/mhtml.py +++ b/plugins/youtube_download/yt_dlp/downloader/mhtml.py @@ -10,7 +10,7 @@ from ..version import __version__ as YT_DLP_VERSION class MhtmlFD(FragmentFD): - _STYLESHEET = """\ + _STYLESHEET = '''\ html, body { margin: 0; padding: 0; @@ -45,7 +45,7 @@ body > figure > img { max-width: 100%; max-height: calc(100vh - 5em); } -""" +''' _STYLESHEET = re.sub(r'\s+', ' ', _STYLESHEET) _STYLESHEET = re.sub(r'\B \B|(?<=[\w\-]) (?=[^\w\-])|(?<=[^\w\-]) (?=[\w\-])', '', _STYLESHEET) @@ -57,24 +57,19 @@ body > figure > img { )).decode('us-ascii') + '?=' def _gen_cid(self, i, fragment, frag_boundary): - return '%u.%s@yt-dlp.github.io.invalid' % (i, frag_boundary) + return f'{i}.{frag_boundary}@yt-dlp.github.io.invalid' def _gen_stub(self, *, fragments, frag_boundary, title): output = io.StringIO() - output.write(( + output.write( '' '' '' - '' '' - '' '{title}' - '' '' - '' - ).format( - version=escapeHTML(YT_DLP_VERSION), - styles=self._STYLESHEET, - title=escapeHTML(title) - )) + f'' + f'{escapeHTML(title)}' + f'' + '') t0 = 0 for i, frag in enumerate(fragments): @@ -87,15 +82,12 @@ body > figure > img { num=i + 1, t0=srt_subtitles_timecode(t0), t1=srt_subtitles_timecode(t1), - duration=formatSeconds(frag['duration'], msec=True) + duration=formatSeconds(frag['duration'], msec=True), )) except (KeyError, ValueError, TypeError): t1 = None - output.write(( - '
<figure><figcaption>Slide #{num}</figcaption>
' - ).format(num=i + 1)) - output.write('<img src="cid:{cid}">'.format( - cid=self._gen_cid(i, frag, frag_boundary))) + output.write(f'
<figure><figcaption>Slide #{i + 1}</figcaption>
') + output.write(f'') output.write('') t0 = t1 @@ -126,31 +118,24 @@ body > figure > img { stub = self._gen_stub( fragments=fragments, frag_boundary=frag_boundary, - title=title + title=title, ) ctx['dest_stream'].write(( 'MIME-Version: 1.0\r\n' 'From: \r\n' 'To: \r\n' - 'Subject: {title}\r\n' + f'Subject: {self._escape_mime(title)}\r\n' 'Content-type: multipart/related; ' - '' 'boundary="{boundary}"; ' - '' 'type="text/html"\r\n' - 'X.yt-dlp.Origin: {origin}\r\n' + f'boundary="{frag_boundary}"; ' + 'type="text/html"\r\n' + f'X.yt-dlp.Origin: {origin}\r\n' '\r\n' - '--{boundary}\r\n' + f'--{frag_boundary}\r\n' 'Content-Type: text/html; charset=utf-8\r\n' - 'Content-Length: {length}\r\n' + f'Content-Length: {len(stub)}\r\n' '\r\n' - '{stub}\r\n' - ).format( - origin=origin, - boundary=frag_boundary, - length=len(stub), - title=self._escape_mime(title), - stub=stub - ).encode()) + f'{stub}\r\n').encode()) extra_state['header_written'] = True for i, fragment in enumerate(fragments): diff --git a/plugins/youtube_download/yt_dlp/downloader/niconico.py b/plugins/youtube_download/yt_dlp/downloader/niconico.py index 5720f6e..33cf15d 100644 --- a/plugins/youtube_download/yt_dlp/downloader/niconico.py +++ b/plugins/youtube_download/yt_dlp/downloader/niconico.py @@ -2,58 +2,10 @@ import json import threading import time -from . import get_suitable_downloader from .common import FileDownloader from .external import FFmpegFD from ..networking import Request -from ..utils import DownloadError, WebSocketsWrapper, str_or_none, try_get - - -class NiconicoDmcFD(FileDownloader): - """ Downloading niconico douga from DMC with heartbeat """ - - def real_download(self, filename, info_dict): - from ..extractor.niconico import NiconicoIE - - self.to_screen('[%s] Downloading from DMC' % self.FD_NAME) - ie = NiconicoIE(self.ydl) - info_dict, heartbeat_info_dict = ie._get_heartbeat_info(info_dict) - - fd = get_suitable_downloader(info_dict, params=self.params)(self.ydl, self.params) - - success = download_complete = False - timer = [None] - heartbeat_lock = threading.Lock() - heartbeat_url = heartbeat_info_dict['url'] - heartbeat_data = heartbeat_info_dict['data'].encode() - heartbeat_interval = heartbeat_info_dict.get('interval', 30) - - request = Request(heartbeat_url, heartbeat_data) - - def heartbeat(): - try: - self.ydl.urlopen(request).read() - except Exception: - self.to_screen('[%s] Heartbeat failed' % self.FD_NAME) - - with heartbeat_lock: - if not download_complete: - timer[0] = threading.Timer(heartbeat_interval, heartbeat) - timer[0].start() - - heartbeat_info_dict['ping']() - self.to_screen('[%s] Heartbeat with %d second interval ...' 
% (self.FD_NAME, heartbeat_interval)) - try: - heartbeat() - if type(fd).__name__ == 'HlsFD': - info_dict.update(ie._extract_m3u8_formats(info_dict['url'], info_dict['id'])[0]) - success = fd.real_download(filename, info_dict) - finally: - if heartbeat_lock: - with heartbeat_lock: - timer[0].cancel() - download_complete = True - return success +from ..utils import DownloadError, str_or_none, try_get class NiconicoLiveFD(FileDownloader): @@ -64,7 +16,6 @@ class NiconicoLiveFD(FileDownloader): ws_url = info_dict['url'] ws_extractor = info_dict['ws'] ws_origin_host = info_dict['origin'] - cookies = info_dict.get('cookies') live_quality = info_dict.get('live_quality', 'high') live_latency = info_dict.get('live_latency', 'high') dl = FFmpegFD(self.ydl, self.params or {}) @@ -76,12 +27,7 @@ class NiconicoLiveFD(FileDownloader): def communicate_ws(reconnect): if reconnect: - ws = WebSocketsWrapper(ws_url, { - 'Cookies': str_or_none(cookies) or '', - 'Origin': f'https://{ws_origin_host}', - 'Accept': '*/*', - 'User-Agent': self.params['http_headers']['User-Agent'], - }) + ws = self.ydl.urlopen(Request(ws_url, headers={'Origin': f'https://{ws_origin_host}'})) if self.ydl.params.get('verbose', False): self.to_screen('[debug] Sending startWatching request') ws.send(json.dumps({ @@ -91,14 +37,15 @@ class NiconicoLiveFD(FileDownloader): 'quality': live_quality, 'protocol': 'hls+fmp4', 'latency': live_latency, - 'chasePlay': False + 'accessRightMethod': 'single_cookie', + 'chasePlay': False, }, 'room': { 'protocol': 'webSocket', - 'commentable': True + 'commentable': True, }, 'reconnect': True, - } + }, })) else: ws = ws_extractor @@ -124,7 +71,7 @@ class NiconicoLiveFD(FileDownloader): elif self.ydl.params.get('verbose', False): if len(recv) > 100: recv = recv[:100] + '...' - self.to_screen('[debug] Server said: %s' % recv) + self.to_screen(f'[debug] Server said: {recv}') def ws_main(): reconnect = False @@ -134,7 +81,7 @@ class NiconicoLiveFD(FileDownloader): if ret is True: return except BaseException as e: - self.to_screen('[%s] %s: Connection error occured, reconnecting after 10 seconds: %s' % ('niconico:live', video_id, str_or_none(e))) + self.to_screen('[{}] {}: Connection error occured, reconnecting after 10 seconds: {}'.format('niconico:live', video_id, str_or_none(e))) time.sleep(10) continue finally: diff --git a/plugins/youtube_download/yt_dlp/downloader/rtmp.py b/plugins/youtube_download/yt_dlp/downloader/rtmp.py index 0e09525..1b831e5 100644 --- a/plugins/youtube_download/yt_dlp/downloader/rtmp.py +++ b/plugins/youtube_download/yt_dlp/downloader/rtmp.py @@ -8,7 +8,6 @@ from ..utils import ( Popen, check_executable, encodeArgument, - encodeFilename, get_exe_version, ) @@ -179,15 +178,15 @@ class RtmpFD(FileDownloader): return False while retval in (RD_INCOMPLETE, RD_FAILED) and not test and not live: - prevsize = os.path.getsize(encodeFilename(tmpfilename)) - self.to_screen('[rtmpdump] Downloaded %s bytes' % prevsize) + prevsize = os.path.getsize(tmpfilename) + self.to_screen(f'[rtmpdump] Downloaded {prevsize} bytes') time.sleep(5.0) # This seems to be needed - args = basic_args + ['--resume'] + args = [*basic_args, '--resume'] if retval == RD_FAILED: args += ['--skip', '1'] args = [encodeArgument(a) for a in args] retval = run_rtmpdump(args) - cursize = os.path.getsize(encodeFilename(tmpfilename)) + cursize = os.path.getsize(tmpfilename) if prevsize == cursize and retval == RD_FAILED: break # Some rtmp streams seem abort after ~ 99.8%. 
Don't complain for those @@ -196,8 +195,8 @@ class RtmpFD(FileDownloader): retval = RD_SUCCESS break if retval == RD_SUCCESS or (test and retval == RD_INCOMPLETE): - fsize = os.path.getsize(encodeFilename(tmpfilename)) - self.to_screen('[rtmpdump] Downloaded %s bytes' % fsize) + fsize = os.path.getsize(tmpfilename) + self.to_screen(f'[rtmpdump] Downloaded {fsize} bytes') self.try_rename(tmpfilename, filename) self._hook_progress({ 'downloaded_bytes': fsize, diff --git a/plugins/youtube_download/yt_dlp/downloader/rtsp.py b/plugins/youtube_download/yt_dlp/downloader/rtsp.py index e89269f..b4b0be7 100644 --- a/plugins/youtube_download/yt_dlp/downloader/rtsp.py +++ b/plugins/youtube_download/yt_dlp/downloader/rtsp.py @@ -2,7 +2,7 @@ import os import subprocess from .common import FileDownloader -from ..utils import check_executable, encodeFilename +from ..utils import check_executable class RtspFD(FileDownloader): @@ -26,7 +26,7 @@ class RtspFD(FileDownloader): retval = subprocess.call(args) if retval == 0: - fsize = os.path.getsize(encodeFilename(tmpfilename)) + fsize = os.path.getsize(tmpfilename) self.to_screen(f'\r[{args[0]}] {fsize} bytes') self.try_rename(tmpfilename, filename) self._hook_progress({ diff --git a/plugins/youtube_download/yt_dlp/downloader/youtube_live_chat.py b/plugins/youtube_download/yt_dlp/downloader/youtube_live_chat.py index c7a8637..ddd912c 100644 --- a/plugins/youtube_download/yt_dlp/downloader/youtube_live_chat.py +++ b/plugins/youtube_download/yt_dlp/downloader/youtube_live_chat.py @@ -18,7 +18,7 @@ class YoutubeLiveChatFD(FragmentFD): def real_download(self, filename, info_dict): video_id = info_dict['video_id'] - self.to_screen('[%s] Downloading live chat' % self.FD_NAME) + self.to_screen(f'[{self.FD_NAME}] Downloading live chat') if not self.params.get('skip_download') and info_dict['protocol'] == 'youtube_live_chat': self.report_warning('Live chat download runs until the livestream ends. ' 'If you wish to download the video simultaneously, run a separate yt-dlp instance') @@ -123,8 +123,8 @@ class YoutubeLiveChatFD(FragmentFD): data, lambda x: x['continuationContents']['liveChatContinuation'], dict) or {} - func = (info_dict['protocol'] == 'youtube_live_chat' and parse_actions_live - or frag_index == 1 and try_refresh_replay_beginning + func = ((info_dict['protocol'] == 'youtube_live_chat' and parse_actions_live) + or (frag_index == 1 and try_refresh_replay_beginning) or parse_actions_replay) return (True, *func(live_chat_continuation)) except HTTPError as err: diff --git a/plugins/youtube_download/yt_dlp/extractor/__init__.py b/plugins/youtube_download/yt_dlp/extractor/__init__.py index 6bfa4bd..a090e94 100644 --- a/plugins/youtube_download/yt_dlp/extractor/__init__.py +++ b/plugins/youtube_download/yt_dlp/extractor/__init__.py @@ -1,16 +1,25 @@ from ..compat.compat_utils import passthrough_module +from ..globals import extractors as _extractors_context +from ..globals import plugin_ies as _plugin_ies_context +from ..plugins import PluginSpec, register_plugin_spec passthrough_module(__name__, '.extractors') del passthrough_module +register_plugin_spec(PluginSpec( + module_name='extractor', + suffix='IE', + destination=_extractors_context, + plugin_destination=_plugin_ies_context, +)) + def gen_extractor_classes(): """ Return a list of supported extractors. The order does matter; the first extractor matched is the one handling the URL. 
""" - from .extractors import _ALL_CLASSES - - return _ALL_CLASSES + import_extractors() + return list(_extractors_context.value.values()) def gen_extractors(): @@ -37,6 +46,9 @@ def list_extractors(age_limit=None): def get_info_extractor(ie_name): """Returns the info extractor class with the given ie_name""" - from . import extractors + import_extractors() + return _extractors_context.value[f'{ie_name}IE'] - return getattr(extractors, f'{ie_name}IE') + +def import_extractors(): + from . import extractors # noqa: F401 diff --git a/plugins/youtube_download/yt_dlp/extractor/_extractors.py b/plugins/youtube_download/yt_dlp/extractor/_extractors.py index 63bb55e..bb1c3db 100644 --- a/plugins/youtube_download/yt_dlp/extractor/_extractors.py +++ b/plugins/youtube_download/yt_dlp/extractor/_extractors.py @@ -1,4 +1,5 @@ # flake8: noqa: F401 +# isort: off from .youtube import ( # Youtube is moved to the top to improve performance YoutubeIE, @@ -24,6 +25,8 @@ from .youtube import ( # Youtube is moved to the top to improve performance YoutubeConsentRedirectIE, ) +# isort: on + from .abc import ( ABCIE, ABCIViewIE, @@ -43,30 +46,37 @@ from .abematv import ( ) from .academicearth import AcademicEarthCourseIE from .acast import ( - ACastIE, ACastChannelIE, + ACastIE, +) +from .acfun import ( + AcFunBangumiIE, + AcFunVideoIE, +) +from .adn import ( + ADNIE, + ADNSeasonIE, ) -from .acfun import AcFunVideoIE, AcFunBangumiIE -from .adn import ADNIE from .adobeconnect import AdobeConnectIE from .adobetv import ( + AdobeTVChannelIE, AdobeTVEmbedIE, AdobeTVIE, AdobeTVShowIE, - AdobeTVChannelIE, AdobeTVVideoIE, ) from .adultswim import AdultSwimIE from .aenetworks import ( - AENetworksIE, AENetworksCollectionIE, + AENetworksIE, AENetworksShowIE, - HistoryTopicIE, - HistoryPlayerIE, BiographyIE, + HistoryPlayerIE, + HistoryTopicIE, ) from .aeonco import AeonCoIE from .afreecatv import ( + AfreecaTVCatchStoryIE, AfreecaTVIE, AfreecaTVLiveIE, AfreecaTVUserIE, @@ -77,74 +87,90 @@ from .agora import ( WyborczaPodcastIE, WyborczaVideoIE, ) -from .airmozilla import AirMozillaIE from .airtv import AirTVIE from .aitube import AitubeKZVideoIE +from .aliexpress import AliExpressLiveIE from .aljazeera import AlJazeeraIE -from .alphaporno import AlphaPornoIE -from .amara import AmaraIE -from .alura import ( - AluraIE, - AluraCourseIE +from .allocine import AllocineIE +from .allstar import ( + AllstarIE, + AllstarProfileIE, ) -from .amcnetworks import AMCNetworksIE +from .alphaporno import AlphaPornoIE +from .alsace20tv import ( + Alsace20TVEmbedIE, + Alsace20TVIE, +) +from .altcensored import ( + AltCensoredChannelIE, + AltCensoredIE, +) +from .alura import ( + AluraCourseIE, + AluraIE, +) +from .amadeustv import AmadeusTVIE +from .amara import AmaraIE from .amazon import ( - AmazonStoreIE, AmazonReviewsIE, + AmazonStoreIE, ) from .amazonminitv import ( AmazonMiniTVIE, AmazonMiniTVSeasonIE, AmazonMiniTVSeriesIE, ) +from .amcnetworks import AMCNetworksIE from .americastestkitchen import ( AmericasTestKitchenIE, AmericasTestKitchenSeasonIE, ) from .anchorfm import AnchorFMEpisodeIE from .angel import AngelIE +from .antenna import ( + Ant1NewsGrArticleIE, + Ant1NewsGrEmbedIE, + AntennaGrWatchIE, +) from .anvato import AnvatoIE from .aol import AolIE -from .allocine import AllocineIE -from .aliexpress import AliExpressLiveIE -from .alsace20tv import ( - Alsace20TVIE, - Alsace20TVEmbedIE, -) from .apa import APAIE from .aparat import AparatIE from .appleconnect import AppleConnectIE +from .applepodcasts import ApplePodcastsIE 
from .appletrailers import ( AppleTrailersIE, AppleTrailersSectionIE, ) -from .applepodcasts import ApplePodcastsIE from .archiveorg import ( ArchiveOrgIE, YoutubeWebArchiveIE, - VLiveWebArchiveIE, ) from .arcpublishing import ArcPublishingIE -from .arkena import ArkenaIE from .ard import ( - ARDBetaMediathekIE, ARDIE, - ARDMediathekIE, + ARDBetaMediathekIE, + ARDMediathekCollectionIE, +) +from .arkena import ArkenaIE +from .arnes import ArnesIE +from .art19 import ( + Art19IE, + Art19ShowIE, ) from .arte import ( - ArteTVIE, - ArteTVEmbedIE, - ArteTVPlaylistIE, ArteTVCategoryIE, + ArteTVEmbedIE, + ArteTVIE, + ArteTVPlaylistIE, ) -from .arnes import ArnesIE -from .asiancrush import ( - AsianCrushIE, - AsianCrushPlaylistIE, +from .asobichannel import ( + AsobiChannelIE, + AsobiChannelTagURLIE, ) +from .asobistage import AsobiStageIE from .atresplayer import AtresPlayerIE from .atscaleconf import AtScaleConfEventIE -from .atttechchannel import ATTTechChannelIE from .atvat import ATVAtIE from .audimedia import AudiMediaIE from .audioboom import AudioBoomIE @@ -152,56 +178,65 @@ from .audiodraft import ( AudiodraftCustomIE, AudiodraftGenericIE, ) -from .audiomack import AudiomackIE, AudiomackAlbumIE +from .audiomack import ( + AudiomackAlbumIE, + AudiomackIE, +) from .audius import ( AudiusIE, - AudiusTrackIE, AudiusPlaylistIE, AudiusProfileIE, + AudiusTrackIE, ) from .awaan import ( AWAANIE, - AWAANVideoIE, AWAANLiveIE, AWAANSeasonIE, + AWAANVideoIE, ) +from .axs import AxsIE from .azmedien import AZMedienIE from .baidu import BaiduVideoIE from .banbye import ( - BanByeIE, BanByeChannelIE, + BanByeIE, ) from .bandaichannel import BandaiChannelIE from .bandcamp import ( - BandcampIE, BandcampAlbumIE, - BandcampWeeklyIE, + BandcampIE, BandcampUserIE, + BandcampWeeklyIE, +) +from .bandlab import ( + BandlabIE, + BandlabPlaylistIE, ) from .bannedvideo import BannedVideoIE from .bbc import ( - BBCCoUkIE, + BBCIE, BBCCoUkArticleIE, + BBCCoUkIE, BBCCoUkIPlayerEpisodesIE, BBCCoUkIPlayerGroupIE, BBCCoUkPlaylistIE, - BBCIE, ) +from .beacon import BeaconTvIE +from .beatbump import ( + BeatBumpPlaylistIE, + BeatBumpVideoIE, +) +from .beatport import BeatportIE from .beeg import BeegIE from .behindkink import BehindKinkIE from .bellmedia import BellMediaIE -from .beatbump import ( - BeatBumpVideoIE, - BeatBumpPlaylistIE, -) -from .beatport import BeatportIE from .berufetv import BerufeTVIE from .bet import BetIE from .bfi import BFIPlayerIE from .bfmtv import ( BFMTVIE, - BFMTVLiveIE, BFMTVArticleIE, + BFMTVLiveIE, ) from .bibeltv import ( BibelTVLiveIE, @@ -212,112 +247,116 @@ from .bigflix import BigflixIE from .bigo import BigoIE from .bild import BildIE from .bilibili import ( - BiliBiliIE, - BiliBiliBangumiIE, - BiliBiliBangumiSeasonIE, - BiliBiliBangumiMediaIE, - BiliBiliSearchIE, - BilibiliCategoryIE, - BilibiliAudioIE, BilibiliAudioAlbumIE, + BilibiliAudioIE, + BiliBiliBangumiIE, + BiliBiliBangumiMediaIE, + BiliBiliBangumiSeasonIE, + BilibiliCategoryIE, + BilibiliCheeseIE, + BilibiliCheeseSeasonIE, + BilibiliCollectionListIE, + BiliBiliDynamicIE, + BilibiliFavoritesListIE, + BiliBiliIE, BiliBiliPlayerIE, - BilibiliSpaceVideoIE, + BilibiliPlaylistIE, + BiliBiliSearchIE, + BilibiliSeriesListIE, BilibiliSpaceAudioIE, - BilibiliSpacePlaylistIE, + BilibiliSpaceVideoIE, + BilibiliWatchlaterIE, BiliIntlIE, BiliIntlSeriesIE, BiliLiveIE, ) from .biobiochiletv import BioBioChileTVIE from .bitchute import ( - BitChuteIE, BitChuteChannelIE, + BitChuteIE, ) -from .bitwave import ( - BitwaveReplayIE, - 
BitwaveStreamIE, -) -from .biqle import BIQLEIE from .blackboardcollaborate import BlackboardCollaborateIE from .bleacherreport import ( - BleacherReportIE, BleacherReportCMSIE, + BleacherReportIE, ) from .blerp import BlerpIE from .blogger import BloggerIE from .bloomberg import BloombergIE +from .bluesky import BlueskyIE from .bokecc import BokeCCIE from .bongacams import BongaCamsIE +from .boosty import BoostyIE from .bostonglobe import BostonGlobeIE from .box import BoxIE from .boxcast import BoxCastVideoIE from .bpb import BpbIE -from .br import ( - BRIE, - BRMediathekIE, -) -from .bravotv import BravoTVIE +from .br import BRIE from .brainpop import ( - BrainPOPIE, - BrainPOPJrIE, BrainPOPELLIE, BrainPOPEspIE, BrainPOPFrIE, + BrainPOPIE, BrainPOPIlIE, + BrainPOPJrIE, ) -from .breakcom import BreakIE +from .bravotv import BravoTVIE from .breitbart import BreitBartIE from .brightcove import ( BrightcoveLegacyIE, BrightcoveNewIE, ) -from .businessinsider import BusinessInsiderIE +from .brilliantpala import ( + BrilliantpalaClassesIE, + BrilliantpalaElearnIE, +) from .bundesliga import BundesligaIE +from .bundestag import BundestagIE +from .bunnycdn import BunnyCdnIE +from .businessinsider import BusinessInsiderIE from .buzzfeed import BuzzFeedIE from .byutv import BYUtvIE from .c56 import C56IE -from .cableav import CableAVIE +from .caffeinetv import CaffeineTVIE from .callin import CallinIE from .caltrans import CaltransIE from .cam4 import CAM4IE from .camdemy import ( + CamdemyFolderIE, CamdemyIE, - CamdemyFolderIE ) from .camfm import ( CamFMEpisodeIE, - CamFMShowIE + CamFMShowIE, ) from .cammodels import CamModelsIE from .camsoda import CamsodaIE from .camtasia import CamtasiaEmbedIE -from .camwithher import CamWithHerIE +from .canal1 import Canal1IE from .canalalpha import CanalAlphaIE -from .canalplus import CanalplusIE from .canalc2 import Canalc2IE -from .carambatv import ( - CarambaTVIE, - CarambaTVPageIE, -) +from .canalplus import CanalplusIE +from .canalsurmas import CanalsurmasIE +from .caracoltv import CaracolTvPlayIE from .cartoonnetwork import CartoonNetworkIE from .cbc import ( CBCIE, - CBCPlayerIE, CBCGemIE, - CBCGemPlaylistIE, CBCGemLiveIE, + CBCGemPlaylistIE, + CBCPlayerIE, + CBCPlayerPlaylistIE, ) from .cbs import ( CBSIE, ParamountPressExpressIE, ) -from .cbsinteractive import CBSInteractiveIE from .cbsnews import ( + CBSLocalArticleIE, + CBSLocalIE, + CBSLocalLiveIE, CBSNewsEmbedIE, CBSNewsIE, - CBSLocalIE, - CBSLocalArticleIE, - CBSLocalLiveIE, CBSNewsLiveIE, CBSNewsLiveVideoIE, ) @@ -332,58 +371,55 @@ from .ccc import ( ) from .ccma import CCMAIE from .cctv import CCTVIE -from .cda import CDAIE +from .cda import ( + CDAIE, + CDAFolderIE, +) from .cellebrite import CellebriteIE from .ceskatelevize import CeskaTelevizeIE from .cgtn import CGTNIE -from .channel9 import Channel9IE from .charlierose import CharlieRoseIE from .chaturbate import ChaturbateIE from .chilloutzone import ChilloutzoneIE -from .chingari import ( - ChingariIE, - ChingariUserIE, +from .chzzk import ( + CHZZKLiveIE, + CHZZKVideoIE, ) -from .chirbit import ( - ChirbitIE, - ChirbitProfileIE, -) -from .cinchcast import CinchcastIE from .cinemax import CinemaxIE from .cinetecamilano import CinetecaMilanoIE +from .cineverse import ( + CineverseDetailsIE, + CineverseIE, +) from .ciscolive import ( - CiscoLiveSessionIE, CiscoLiveSearchIE, + CiscoLiveSessionIE, ) from .ciscowebex import CiscoWebexIE from .cjsw import CJSWIE from .clipchamp import ClipchampIE -from .cliphunter import CliphunterIE from 
.clippit import ClippitIE from .cliprs import ClipRsIE -from .clipsyndicate import ClipsyndicateIE from .closertotruth import CloserToTruthIE from .cloudflarestream import CloudflareStreamIE -from .cloudy import CloudyIE +from .cloudycdn import CloudyCDNIE from .clubic import ClubicIE from .clyp import ClypIE from .cmt import CMTIE -from .cnbc import ( - CNBCIE, - CNBCVideoIE, -) +from .cnbc import CNBCVideoIE from .cnn import ( CNNIE, - CNNBlogsIE, - CNNArticleIE, CNNIndonesiaIE, ) -from .coub import CoubIE from .comedycentral import ( ComedyCentralIE, ComedyCentralTVIE, ) -from .commonmistakes import CommonMistakesIE, UnicodeBOMIE +from .commonmistakes import ( + BlobIE, + CommonMistakesIE, + UnicodeBOMIE, +) from .commonprotocols import ( MmsIE, RtmpIE, @@ -392,50 +428,51 @@ from .commonprotocols import ( from .condenast import CondeNastIE from .contv import CONtvIE from .corus import CorusIE +from .coub import CoubIE +from .cozytv import CozyTVIE from .cpac import ( CPACIE, CPACPlaylistIE, ) -from .cozytv import CozyTVIE from .cracked import CrackedIE from .crackle import CrackleIE from .craftsy import CraftsyIE from .crooksandliars import CrooksAndLiarsIE from .crowdbunker import ( - CrowdBunkerIE, CrowdBunkerChannelIE, + CrowdBunkerIE, ) from .crtvg import CrtvgIE -from .crunchyroll import ( - CrunchyrollBetaIE, - CrunchyrollBetaShowIE, - CrunchyrollMusicIE, - CrunchyrollArtistIE, +from .cspan import ( + CSpanCongressIE, + CSpanIE, ) -from .cspan import CSpanIE, CSpanCongressIE from .ctsnews import CtsNewsIE from .ctv import CTVIE from .ctvnews import CTVNewsIE from .cultureunplugged import CultureUnpluggedIE from .curiositystream import ( - CuriosityStreamIE, CuriosityStreamCollectionsIE, + CuriosityStreamIE, CuriosityStreamSeriesIE, ) -from .cwtv import CWTVIE +from .cwtv import ( + CWTVIE, + CWTVMovieIE, +) from .cybrary import ( + CybraryCourseIE, CybraryIE, - CybraryCourseIE ) from .dacast import ( - DacastVODIE, DacastPlaylistIE, + DacastVODIE, ) -from .daftsex import DaftsexIE from .dailymail import DailyMailIE from .dailymotion import ( DailymotionIE, DailymotionPlaylistIE, + DailymotionSearchIE, DailymotionUserIE, ) from .dailywire import ( @@ -446,61 +483,76 @@ from .damtomo import ( DamtomoRecordIE, DamtomoVideoIE, ) +from .dangalplay import ( + DangalPlayIE, + DangalPlaySeasonIE, +) from .daum import ( - DaumIE, DaumClipIE, + DaumIE, DaumPlaylistIE, DaumUserIE, ) from .daystar import DaystarClipIE from .dbtv import DBTVIE from .dctp import DctpTvIE -from .deezer import ( - DeezerPlaylistIE, - DeezerAlbumIE, -) from .democracynow import DemocracynowIE from .detik import DetikEmbedIE +from .deuxm import ( + DeuxMIE, + DeuxMNewsIE, +) +from .dfb import DFBIE +from .dhm import DHMIE +from .digitalconcerthall import DigitalConcertHallIE +from .digiteka import DigitekaIE +from .digiview import DigiviewIE +from .discogs import DiscogsReleasePlaylistIE +from .disney import DisneyIE +from .dispeak import DigitallySpeakingIE from .dlf import ( DLFIE, DLFCorpusIE, ) -from .dfb import DFBIE -from .dhm import DHMIE -from .digg import DiggIE -from .dotsub import DotsubIE +from .dlive import ( + DLiveStreamIE, + DLiveVODIE, +) from .douyutv import ( DouyuShowIE, DouyuTVIE, ) from .dplay import ( - DPlayIE, - DiscoveryPlusIE, - HGTVDeIE, - GoDiscoveryIE, - TravelChannelIE, - CookingChannelIE, - HGTVUsaIE, - FoodNetworkIE, - InvestigationDiscoveryIE, - DestinationAmericaIE, - AmHistoryChannelIE, - ScienceChannelIE, - DIYNetworkIE, - DiscoveryLifeIE, - AnimalPlanetIE, TLCIE, - 
MotorTrendIE, - MotorTrendOnDemandIE, - DiscoveryPlusIndiaIE, + AmHistoryChannelIE, + AnimalPlanetIE, + CookingChannelIE, + DestinationAmericaIE, + DiscoveryLifeIE, DiscoveryNetworksDeIE, + DiscoveryPlusIE, + DiscoveryPlusIndiaIE, + DiscoveryPlusIndiaShowIE, DiscoveryPlusItalyIE, DiscoveryPlusItalyShowIE, - DiscoveryPlusIndiaShowIE, - GlobalCyclingNetworkPlusIE, + DPlayIE, + FoodNetworkIE, + GoDiscoveryIE, + HGTVDeIE, + HGTVUsaIE, + InvestigationDiscoveryIE, + ScienceChannelIE, + TravelChannelIE, ) -from .dreisat import DreiSatIE from .drbonanza import DRBonanzaIE +from .dreisat import DreiSatIE +from .drooble import DroobleIE +from .dropbox import DropboxIE +from .dropout import ( + DropoutIE, + DropoutSeasonIE, +) +from .drtalks import DrTalksIE from .drtuber import DrTuberIE from .drtv import ( DRTVIE, @@ -509,97 +561,86 @@ from .drtv import ( DRTVSeriesIE, ) from .dtube import DTubeIE -from .dvtv import DVTVIE from .duboku import ( DubokuIE, - DubokuPlaylistIE + DubokuPlaylistIE, ) from .dumpert import DumpertIE -from .defense import DefenseGouvFrIE -from .deuxm import ( - DeuxMIE, - DeuxMNewsIE -) -from .digitalconcerthall import DigitalConcertHallIE -from .discogs import DiscogsReleasePlaylistIE -from .discovery import DiscoveryIE -from .disney import DisneyIE -from .dispeak import DigitallySpeakingIE -from .dropbox import DropboxIE -from .dropout import ( - DropoutSeasonIE, - DropoutIE -) +from .duoplay import DuoplayIE +from .dvtv import DVTVIE from .dw import ( DWIE, DWArticleIE, ) -from .eagleplatform import EaglePlatformIE, ClipYouEmbedIE +from .eagleplatform import ( + ClipYouEmbedIE, + EaglePlatformIE, +) from .ebaumsworld import EbaumsWorldIE from .ebay import EbayIE -from .echomsk import EchoMskIE from .egghead import ( EggheadCourseIE, EggheadLessonIE, ) -from .ehow import EHowIE -from .eighttracks import EightTracksIE -from .einthusan import EinthusanIE -from .eitb import EitbIE -from .elevensports import ElevenSportsIE -from .ellentube import ( - EllenTubeIE, - EllenTubeVideoIE, - EllenTubePlaylistIE, +from .eggs import ( + EggsArtistIE, + EggsIE, ) +from .eighttracks import EightTracksIE +from .eitb import EitbIE +from .elementorembed import ElementorEmbedIE from .elonet import ElonetIE from .elpais import ElPaisIE +from .eltrecetv import ElTreceTVIE from .embedly import EmbedlyIE -from .engadget import EngadgetIE from .epicon import ( EpiconIE, EpiconSeriesIE, ) +from .epidemicsound import EpidemicSoundIE +from .eplus import EplusIbIE from .epoch import EpochIE from .eporner import EpornerIE +from .erocast import ErocastIE from .eroprofile import ( - EroProfileIE, EroProfileAlbumIE, + EroProfileIE, ) +from .err import ERRJupiterIE from .ertgr import ( ERTFlixCodenameIE, ERTFlixIE, ERTWebtvEmbedIE, ) -from .escapist import EscapistIE from .espn import ( ESPNIE, - WatchESPNIE, ESPNArticleIE, - FiveThirtyEightIE, ESPNCricInfoIE, + FiveThirtyEightIE, + WatchESPNIE, ) -from .esri import EsriVideoIE from .ettutv import EttuTvIE -from .europa import EuropaIE, EuroParlWebstreamIE +from .europa import ( + EuropaIE, + EuroParlWebstreamIE, +) from .europeantour import EuropeanTourIE from .eurosport import EurosportIE from .euscreen import EUScreenIE -from .expotv import ExpoTVIE from .expressen import ExpressenIE -from .extremetube import ExtremeTubeIE from .eyedotv import EyedoTVIE from .facebook import ( + FacebookAdsIE, FacebookIE, FacebookPluginsVideoIE, FacebookRedirectURLIE, FacebookReelIE, ) from .fancode import ( + FancodeLiveIE, FancodeVodIE, - FancodeLiveIE ) - +from 
.fathom import FathomIE from .faz import FazIE from .fc2 import ( FC2IE, @@ -608,136 +649,144 @@ from .fc2 import ( ) from .fczenit import FczenitIE from .fifa import FifaIE -from .filmmodu import FilmmoduIE from .filmon import ( - FilmOnIE, FilmOnChannelIE, + FilmOnIE, ) from .filmweb import FilmwebIE from .firsttv import FirstTVIE from .fivetv import FiveTVIE +from .flextv import FlexTVIE from .flickr import FlickrIE +from .floatplane import ( + FloatplaneChannelIE, + FloatplaneIE, +) from .folketinget import FolketingetIE from .footyroom import FootyRoomIE from .formula1 import Formula1IE from .fourtube import ( FourTubeIE, - PornTubeIE, - PornerBrosIE, FuxIE, -) -from .fourzerostudio import ( - FourZeroStudioArchiveIE, - FourZeroStudioClipIE, + PornerBrosIE, + PornTubeIE, ) from .fox import FOXIE from .fox9 import ( FOX9IE, FOX9NewsIE, ) -from .foxgay import FoxgayIE from .foxnews import ( - FoxNewsIE, FoxNewsArticleIE, + FoxNewsIE, FoxNewsVideoIE, ) from .foxsports import FoxSportsIE from .fptplay import FptplayIE +from .francaisfacile import FrancaisFacileIE from .franceinter import FranceInterIE from .francetv import ( FranceTVIE, - FranceTVSiteIE, FranceTVInfoIE, + FranceTVSiteIE, ) from .freesound import FreesoundIE from .freespeech import FreespeechIE -from .frontendmasters import ( - FrontendMastersIE, - FrontendMastersLessonIE, - FrontendMastersCourseIE -) from .freetv import ( FreeTvIE, FreeTvMoviesIE, ) -from .fujitv import FujiTVFODPlus7IE -from .funimation import ( - FunimationIE, - FunimationPageIE, - FunimationShowIE, +from .frontendmasters import ( + FrontendMastersCourseIE, + FrontendMastersIE, + FrontendMastersLessonIE, ) +from .fujitv import FujiTVFODPlus7IE from .funk import FunkIE from .funker530 import Funker530IE -from .fusion import FusionIE from .fuyintv import FuyinTVIE from .gab import ( - GabTVIE, GabIE, + GabTVIE, ) from .gaia import GaiaIE -from .gameinformer import GameInformerIE +from .gamedevtv import GameDevTVDashboardIE from .gamejolt import ( - GameJoltIE, - GameJoltUserIE, + GameJoltCommunityIE, GameJoltGameIE, GameJoltGameSoundtrackIE, - GameJoltCommunityIE, + GameJoltIE, GameJoltSearchIE, + GameJoltUserIE, ) from .gamespot import GameSpotIE from .gamestar import GameStarIE from .gaskrank import GaskrankIE from .gazeta import GazetaIE +from .gbnews import GBNewsIE from .gdcvault import GDCVaultIE from .gedidigital import GediDigitalIE from .generic import GenericIE +from .genericembeds import ( + HTML5MediaEmbedIE, + QuotedHTMLIE, +) from .genius import ( GeniusIE, GeniusLyricsIE, ) +from .germanupa import GermanupaIE +from .getcourseru import ( + GetCourseRuIE, + GetCourseRuPlayerIE, +) from .gettr import ( GettrIE, GettrStreamingIE, ) -from .gfycat import GfycatIE from .giantbomb import GiantBombIE -from .giga import GigaIE from .glide import GlideIE from .globalplayer import ( + GlobalPlayerAudioEpisodeIE, + GlobalPlayerAudioIE, GlobalPlayerLiveIE, GlobalPlayerLivePlaylistIE, - GlobalPlayerAudioIE, - GlobalPlayerAudioEpisodeIE, - GlobalPlayerVideoIE + GlobalPlayerVideoIE, ) from .globo import ( - GloboIE, GloboArticleIE, + GloboIE, +) +from .glomex import ( + GlomexEmbedIE, + GlomexIE, ) from .gmanetwork import GMANetworkVideoIE from .go import GoIE +from .godresource import GodResourceIE from .godtube import GodTubeIE from .gofile import GofileIE from .golem import GolemIE from .goodgame import GoodGameIE from .googledrive import ( - GoogleDriveIE, GoogleDriveFolderIE, + GoogleDriveIE, ) from .googlepodcasts import ( - GooglePodcastsIE, 
GooglePodcastsFeedIE, + GooglePodcastsIE, ) from .googlesearch import GoogleSearchIE -from .gopro import GoProIE from .goplay import GoPlayIE +from .gopro import GoProIE from .goshgay import GoshgayIE from .gotostage import GoToStageIE from .gputechconf import GPUTechConfIE +from .graspop import GraspopIE from .gronkh import ( - GronkhIE, GronkhFeedIE, - GronkhVodsIE + GronkhIE, + GronkhVodsIE, ) from .groupon import GrouponIE from .harpodeon import HarpodeonIE @@ -745,13 +794,11 @@ from .hbo import HBOIE from .hearthisat import HearThisAtIE from .heise import HeiseIE from .hellporno import HellPornoIE -from .helsinki import HelsinkiIE from .hgtv import HGTVComShowIE -from .hketv import HKETVIE from .hidive import HiDiveIE from .historicfilms import HistoricFilmsIE -from .hitbox import HitboxIE, HitboxLiveIE from .hitrecord import HitRecordIE +from .hketv import HKETVIE from .hollywoodreporter import ( HollywoodReporterIE, HollywoodReporterPlaylistIE, @@ -760,13 +807,11 @@ from .holodex import HolodexIE from .hotnewhiphop import HotNewHipHopIE from .hotstar import ( HotStarIE, - HotStarPrefixIE, HotStarPlaylistIE, + HotStarPrefixIE, HotStarSeasonIE, HotStarSeriesIE, ) -from .howcast import HowcastIE -from .howstuffworks import HowStuffWorksIE from .hrefli import HrefLiRedirectIE from .hrfensehen import HRFernsehenIE from .hrti import ( @@ -774,48 +819,49 @@ from .hrti import ( HRTiPlaylistIE, ) from .hse import ( - HSEShowIE, HSEProductIE, -) -from .genericembeds import ( - HTML5MediaEmbedIE, - QuotedHTMLIE, + HSEShowIE, ) from .huajiao import HuajiaoIE -from .huya import HuyaLiveIE from .huffpost import HuffPostIE from .hungama import ( + HungamaAlbumPlaylistIE, HungamaIE, HungamaSongIE, - HungamaAlbumPlaylistIE, +) +from .huya import ( + HuyaLiveIE, + HuyaVideoIE, ) from .hypem import HypemIE from .hypergryph import MonsterSirenHypergryphMusicIE from .hytale import HytaleIE from .icareus import IcareusIE from .ichinanalive import ( - IchinanaLiveIE, IchinanaLiveClipIE, + IchinanaLiveIE, + IchinanaLiveVODIE, ) from .idolplus import IdolPlusIE from .ign import ( IGNIE, - IGNVideoIE, IGNArticleIE, + IGNVideoIE, ) from .iheart import ( IHeartRadioIE, IHeartRadioPodcastIE, ) +from .ilpost import IlPostIE from .iltalehti import IltalehtiIE from .imdb import ( ImdbIE, - ImdbListIE + ImdbListIE, ) from .imgur import ( - ImgurIE, ImgurAlbumIE, ImgurGalleryIE, + ImgurIE, ) from .ina import InaIE from .inc import IncIE @@ -824,20 +870,20 @@ from .infoq import InfoQIE from .instagram import ( InstagramIE, InstagramIOSIE, - InstagramUserIE, - InstagramTagIE, InstagramStoryIE, + InstagramTagIE, + InstagramUserIE, ) from .internazionale import InternazionaleIE from .internetvideoarchive import InternetVideoArchiveIE from .iprima import ( + IPrimaCNNIE, IPrimaIE, - IPrimaCNNIE ) from .iqiyi import ( - IqiyiIE, + IqAlbumIE, IqIE, - IqAlbumIE + IqiyiIE, ) from .islamchannel import ( IslamChannelIE, @@ -845,18 +891,19 @@ from .islamchannel import ( ) from .israelnationalnews import IsraelNationalNewsIE from .itprotv import ( + ITProTVCourseIE, ITProTVIE, - ITProTVCourseIE ) from .itv import ( - ITVIE, ITVBTCCIE, + ITVIE, ) from .ivi import ( + IviCompilationIE, IviIE, - IviCompilationIE ) from .ivideon import IvideonIE +from .ivoox import IvooxIE from .iwara import ( IwaraIE, IwaraPlaylistIE, @@ -864,92 +911,108 @@ from .iwara import ( ) from .ixigua import IxiguaIE from .izlesene import IzleseneIE -from .jable import ( - JableIE, - JablePlaylistIE, -) from .jamendo import ( - JamendoIE, JamendoAlbumIE, + 
JamendoIE, ) from .japandiet import ( + SangiinIE, + SangiinInstructionIE, ShugiinItvLiveIE, ShugiinItvLiveRoomIE, ShugiinItvVodIE, - SangiinInstructionIE, - SangiinIE, ) from .jeuxvideo import JeuxVideoIE -from .jove import JoveIE +from .jiocinema import ( + JioCinemaIE, + JioCinemaSeriesIE, +) +from .jiosaavn import ( + JioSaavnAlbumIE, + JioSaavnPlaylistIE, + JioSaavnSongIE, +) from .joj import JojIE +from .joqrag import JoqrAgIE +from .jove import JoveIE from .jstream import JStreamIE +from .jtbc import ( + JTBCIE, + JTBCProgramIE, +) from .jwplatform import JWPlatformIE from .kakao import KakaoIE from .kaltura import KalturaIE -from .kanal2 import Kanal2IE from .kankanews import KankaNewsIE from .karaoketv import KaraoketvIE -from .karrierevideos import KarriereVideosIE -from .keezmovies import KeezMoviesIE from .kelbyone import KelbyOneIE +from .kenh14 import ( + Kenh14PlaylistIE, + Kenh14VideoIE, +) from .khanacademy import ( KhanAcademyIE, KhanAcademyUnitIE, ) from .kick import ( + KickClipIE, KickIE, KickVODIE, ) from .kicker import KickerIE from .kickstarter import KickStarterIE +from .kika import ( + KikaIE, + KikaPlaylistIE, +) from .kinja import KinjaEmbedIE from .kinopoisk import KinoPoiskIE from .kommunetv import KommunetvIE from .kompas import KompasVideoIE -from .konserthusetplay import KonserthusetPlayIE from .koo import KooIE -from .kth import KTHIE from .krasview import KrasViewIE +from .kth import KTHIE from .ku6 import Ku6IE -from .kusi import KUSIIE +from .kukululive import KukuluLiveIE from .kuwo import ( - KuwoIE, KuwoAlbumIE, - KuwoChartIE, - KuwoSingerIE, KuwoCategoryIE, + KuwoChartIE, + KuwoIE, KuwoMvIE, + KuwoSingerIE, ) from .la7 import ( LA7IE, LA7PodcastEpisodeIE, LA7PodcastIE, ) -from .laola1tv import ( - Laola1TvEmbedIE, - Laola1TvIE, - EHFTVIE, - ITTFIE, +from .laracasts import ( + LaracastsIE, + LaracastsPlaylistIE, ) from .lastfm import ( LastFMIE, LastFMPlaylistIE, LastFMUserIE, ) +from .laxarxames import LaXarxaMesIE from .lbry import ( LBRYIE, LBRYChannelIE, + LBRYPlaylistIE, ) from .lci import LCIIE from .lcp import ( - LcpPlayIE, LcpIE, + LcpPlayIE, ) +from .learningonscreen import LearningOnScreenIE from .lecture2go import Lecture2GoIE from .lecturio import ( - LecturioIE, LecturioCourseIE, LecturioDeCourseIE, + LecturioIE, ) from .leeco import ( LeIE, @@ -966,24 +1029,24 @@ from .lenta import LentaIE from .libraryofcongress import LibraryOfCongressIE from .libsyn import LibsynIE from .lifenews import ( - LifeNewsIE, LifeEmbedIE, + LifeNewsIE, ) from .likee import ( LikeeIE, - LikeeUserIE + LikeeUserIE, ) from .limelight import ( - LimelightMediaIE, LimelightChannelIE, LimelightChannelListIE, + LimelightMediaIE, ) from .linkedin import ( + LinkedInEventsIE, LinkedInIE, - LinkedInLearningIE, LinkedInLearningCourseIE, + LinkedInLearningIE, ) -from .linuxacademy import LinuxAcademyIE from .liputan6 import Liputan6IE from .listennotes import ListenNotesIE from .litv import LiTVIE @@ -994,41 +1057,45 @@ from .livestream import ( LivestreamShortenerIE, ) from .livestreamfails import LivestreamfailsIE -from .lnkgo import ( - LnkGoIE, - LnkIE, +from .lnk import LnkIE +from .loco import LocoIE +from .loom import ( + LoomFolderIE, + LoomIE, ) -from .localnews8 import LocalNews8IE from .lovehomeporn import LoveHomePornIE from .lrt import ( LRTVODIE, - LRTStreamIE + LRTRadioIE, + LRTStreamIE, ) -from .lumni import ( - LumniIE +from .lsm import ( + LSMLREmbedIE, + LSMLTVEmbedIE, + LSMReplayIE, ) +from .lumni import LumniIE from .lynda import ( + LyndaCourseIE, 
LyndaIE, - LyndaCourseIE ) -from .m6 import M6IE +from .maariv import MaarivIE from .magellantv import MagellanTVIE -from .magentamusik360 import MagentaMusik360IE +from .magentamusik import MagentaMusikIE from .mailru import ( MailRuIE, MailRuMusicIE, MailRuMusicSearchIE, ) from .mainstreaming import MainStreamingIE -from .malltv import MallTVIE from .mangomolo import ( - MangomoloVideoIE, MangomoloLiveIE, + MangomoloVideoIE, ) from .manoto import ( ManotoTVIE, - ManotoTVShowIE, ManotoTVLiveIE, + ManotoTVShowIE, ) from .manyvids import ManyVidsIE from .maoritv import MaoriTVIE @@ -1039,17 +1106,19 @@ from .markiza import ( from .massengeschmacktv import MassengeschmackTVIE from .masters import MastersIE from .matchtv import MatchTVIE +from .mbn import MBNIE from .mdr import MDRIE from .medaltv import MedalTVIE from .mediaite import MediaiteIE from .mediaklikk import MediaKlikkIE +from .medialaan import MedialaanIE from .mediaset import ( MediasetIE, MediasetShowIE, ) from .mediasite import ( - MediasiteIE, MediasiteCatalogIE, + MediasiteIE, MediasiteNamedCatalogIE, ) from .mediastream import ( @@ -1059,109 +1128,108 @@ from .mediastream import ( from .mediaworksnz import MediaWorksNZVODIE from .medici import MediciIE from .megaphone import MegaphoneIE +from .megatvcom import ( + MegaTVComEmbedIE, + MegaTVComIE, +) from .meipai import MeipaiIE from .melonvod import MelonVODIE -from .meta import METAIE -from .metacafe import MetacafeIE from .metacritic import MetacriticIE -from .mgoon import MgoonIE from .mgtv import MGTVIE -from .miaopai import MiaoPaiIE +from .microsoftembed import ( + MicrosoftBuildIE, + MicrosoftEmbedIE, + MicrosoftLearnEpisodeIE, + MicrosoftLearnPlaylistIE, + MicrosoftLearnSessionIE, + MicrosoftMediusIE, +) from .microsoftstream import MicrosoftStreamIE -from .microsoftvirtualacademy import ( - MicrosoftVirtualAcademyIE, - MicrosoftVirtualAcademyCourseIE, -) -from .microsoftembed import MicrosoftEmbedIE -from .mildom import ( - MildomIE, - MildomVodIE, - MildomClipIE, - MildomUserVodIE, -) from .minds import ( - MindsIE, MindsChannelIE, MindsGroupIE, + MindsIE, ) -from .ministrygrid import MinistryGridIE from .minoto import MinotoIE -from .miomio import MioMioIE from .mirrativ import ( MirrativIE, MirrativUserIE, ) from .mirrorcouk import MirrorCoUKIE -from .mit import TechTVMITIE, OCWMITIE +from .mit import ( + OCWMITIE, + TechTVMITIE, +) from .mitele import MiTeleIE from .mixch import ( - MixchIE, MixchArchiveIE, + MixchIE, + MixchMovieIE, ) from .mixcloud import ( MixcloudIE, - MixcloudUserIE, MixcloudPlaylistIE, + MixcloudUserIE, ) from .mlb import ( MLBIE, - MLBVideoIE, MLBTVIE, MLBArticleIE, + MLBVideoIE, ) from .mlssoccer import MLSSoccerIE -from .mnet import MnetIE from .mocha import MochaVideoIE -from .moevideo import MoeVideoIE -from .mofosex import ( - MofosexIE, - MofosexEmbedIE, -) +from .mojevideo import MojevideoIE from .mojvideo import MojvideoIE -from .morningstar import MorningstarIE +from .monstercat import MonstercatIE from .motherless import ( - MotherlessIE, - MotherlessGroupIE, MotherlessGalleryIE, + MotherlessGroupIE, + MotherlessIE, + MotherlessUploaderIE, ) from .motorsport import MotorsportIE -from .movieclips import MovieClipsIE from .moviepilot import MoviepilotIE from .moview import MoviewPlayIE from .moviezine import MoviezineIE from .movingimage import MovingImageIE from .msn import MSNIE from .mtv import ( - MTVIE, - MTVVideoIE, - MTVServicesEmbeddedIE, MTVDEIE, - MTVJapanIE, + MTVIE, MTVItaliaIE, MTVItaliaProgrammaIE, + MTVJapanIE, + 
MTVServicesEmbeddedIE, + MTVVideoIE, ) from .muenchentv import MuenchenTVIE -from .murrtube import MurrtubeIE, MurrtubeUserIE +from .murrtube import ( + MurrtubeIE, + MurrtubeUserIE, +) from .museai import MuseAIIE from .musescore import MuseScoreIE from .musicdex import ( - MusicdexSongIE, MusicdexAlbumIE, MusicdexArtistIE, MusicdexPlaylistIE, + MusicdexSongIE, +) +from .mx3 import ( + Mx3IE, + Mx3NeoIE, + Mx3VolksmusikIE, ) -from .mwave import MwaveIE, MwaveMeetGreetIE from .mxplayer import ( MxplayerIE, MxplayerShowIE, ) -from .mychannels import MyChannelsIE -from .myspace import MySpaceIE, MySpaceAlbumIE -from .myspass import MySpassIE -from .myvi import ( - MyviIE, - MyviEmbedIE, +from .myspace import ( + MySpaceAlbumIE, + MySpaceIE, ) +from .myspass import MySpassIE from .myvideoge import MyVideoGeIE from .myvidster import MyVidsterIE from .mzaalo import MzaaloIE @@ -1174,8 +1242,8 @@ from .nate import ( NateProgramIE, ) from .nationalgeographic import ( - NationalGeographicVideoIE, NationalGeographicTVIE, + NationalGeographicVideoIE, ) from .naver import ( NaverIE, @@ -1183,12 +1251,12 @@ from .naver import ( NaverNowIE, ) from .nba import ( - NBAWatchEmbedIE, - NBAWatchIE, - NBAWatchCollectionIE, - NBAEmbedIE, NBAIE, NBAChannelIE, + NBAEmbedIE, + NBAWatchCollectionIE, + NBAWatchEmbedIE, + NBAWatchIE, ) from .nbc import ( NBCIE, @@ -1202,53 +1270,60 @@ from .nbc import ( ) from .ndr import ( NDRIE, - NJoyIE, NDREmbedBaseIE, NDREmbedIE, NJoyEmbedIE, + NJoyIE, ) from .ndtv import NDTVIE from .nebula import ( + NebulaChannelIE, + NebulaClassIE, NebulaIE, NebulaSubscriptionsIE, - NebulaChannelIE, ) from .nekohacker import NekoHackerIE from .nerdcubed import NerdCubedFeedIE -from .netzkino import NetzkinoIE +from .nest import ( + NestClipIE, + NestIE, +) from .neteasemusic import ( - NetEaseMusicIE, NetEaseMusicAlbumIE, - NetEaseMusicSingerIE, + NetEaseMusicDjRadioIE, + NetEaseMusicIE, NetEaseMusicListIE, NetEaseMusicMvIE, NetEaseMusicProgramIE, - NetEaseMusicDjRadioIE, + NetEaseMusicSingerIE, ) from .netverse import ( NetverseIE, NetversePlaylistIE, NetverseSearchIE, ) +from .netzkino import NetzkinoIE from .newgrounds import ( NewgroundsIE, NewgroundsPlaylistIE, NewgroundsUserIE, ) from .newspicks import NewsPicksIE -from .newstube import NewstubeIE from .newsy import NewsyIE from .nextmedia import ( - NextMediaIE, - NextMediaActionNewsIE, AppleDailyIE, + NextMediaActionNewsIE, + NextMediaIE, NextTVIE, ) from .nexx import ( - NexxIE, NexxEmbedIE, + NexxIE, +) +from .nfb import ( + NFBIE, + NFBSeriesIE, ) -from .nfb import NFBIE from .nfhsnetwork import NFHSNetworkIE from .nfl import ( NFLIE, @@ -1257,51 +1332,54 @@ from .nfl import ( NFLPlusReplayIE, ) from .nhk import ( - NhkVodIE, - NhkVodProgramIE, NhkForSchoolBangumiIE, - NhkForSchoolSubjectIE, NhkForSchoolProgramListIE, + NhkForSchoolSubjectIE, NhkRadioNewsPageIE, NhkRadiruIE, NhkRadiruLiveIE, + NhkVodIE, + NhkVodProgramIE, ) from .nhl import NHLIE from .nick import ( - NickIE, NickBrIE, NickDeIE, - NickNightIE, + NickIE, NickRuIE, ) from .niconico import ( - NiconicoIE, - NiconicoPlaylistIE, - NiconicoUserIE, - NiconicoSeriesIE, NiconicoHistoryIE, + NiconicoIE, + NiconicoLiveIE, + NiconicoPlaylistIE, + NiconicoSeriesIE, + NiconicoUserIE, NicovideoSearchDateIE, NicovideoSearchIE, NicovideoSearchURLIE, NicovideoTagURLIE, - NiconicoLiveIE, ) +from .niconicochannelplus import ( + NiconicoChannelPlusChannelLivesIE, + NiconicoChannelPlusChannelVideosIE, + NiconicoChannelPlusIE, +) +from .ninaprotocol import NinaProtocolIE from 
.ninecninemedia import ( - NineCNineMediaIE, CPTwentyFourIE, + NineCNineMediaIE, ) from .ninegag import NineGagIE +from .ninenews import NineNewsIE from .ninenow import NineNowIE from .nintendo import NintendoIE from .nitter import NitterIE -from .njpwworld import NJPWWorldIE from .nobelprize import NobelPrizeIE from .noice import NoicePodcastIE from .nonktube import NonkTubeIE from .noodlemagazine import NoodleMagazineIE from .noovo import NoovoIE -from .normalboots import NormalbootsIE -from .nosvideo import NosVideoIE from .nosnl import NOSNLArticleIE from .nova import ( NovaEmbedIE, @@ -1315,100 +1393,105 @@ from .nowness import ( ) from .noz import NozIE from .npo import ( - AndereTijdenIE, NPOIE, - NPOLiveIE, - NPORadioIE, - NPORadioFragmentIE, - SchoolTVIE, - HetKlokhuisIE, VPROIE, WNLIE, + AndereTijdenIE, + HetKlokhuisIE, + NPOLiveIE, + NPORadioFragmentIE, + NPORadioIE, + SchoolTVIE, ) from .npr import NprIE from .nrk import ( NRKIE, - NRKPlaylistIE, - NRKSkoleIE, NRKTVIE, - NRKTVDirekteIE, + NRKPlaylistIE, NRKRadioPodkastIE, + NRKSkoleIE, + NRKTVDirekteIE, NRKTVEpisodeIE, NRKTVEpisodesIE, NRKTVSeasonIE, NRKTVSeriesIE, ) from .nrl import NRLTVIE +from .nts import NTSLiveIE from .ntvcojp import NTVCoJpCUIE from .ntvde import NTVDeIE from .ntvru import NTVRuIE from .nubilesporn import NubilesPornIE -from .nytimes import ( - NYTimesIE, - NYTimesArticleIE, - NYTimesCookingIE, +from .nuum import ( + NuumLiveIE, + NuumMediaIE, + NuumTabIE, ) from .nuvid import NuvidIE +from .nytimes import ( + NYTimesArticleIE, + NYTimesCookingIE, + NYTimesCookingRecipeIE, + NYTimesIE, +) from .nzherald import NZHeraldIE from .nzonscreen import NZOnScreenIE from .nzz import NZZIE -from .odatv import OdaTVIE from .odkmedia import OnDemandChinaEpisodeIE from .odnoklassniki import OdnoklassnikiIE from .oftv import ( OfTVIE, - OfTVPlaylistIE + OfTVPlaylistIE, ) from .oktoberfesttv import OktoberfestTVIE from .olympics import OlympicsReplayIE from .on24 import On24IE -from .ondemandkorea import OnDemandKoreaIE +from .ondemandkorea import ( + OnDemandKoreaIE, + OnDemandKoreaProgramIE, +) from .onefootball import OneFootballIE from .onenewsnz import OneNewsNZIE from .oneplace import OnePlacePodcastIE from .onet import ( - OnetIE, OnetChannelIE, + OnetIE, OnetMVPIE, OnetPlIE, ) from .onionstudios import OnionStudiosIE -from .ooyala import ( - OoyalaIE, - OoyalaExternalIE, -) from .opencast import ( OpencastIE, OpencastPlaylistIE, ) from .openrec import ( - OpenRecIE, OpenRecCaptureIE, + OpenRecIE, OpenRecMovieIE, ) from .ora import OraTVIE from .orf import ( - ORFTVthekIE, - ORFFM4StoryIE, - ORFRadioIE, ORFIPTVIE, + ORFONIE, + ORFFM4StoryIE, + ORFPodcastIE, + ORFRadioIE, ) from .outsidetv import OutsideTVIE from .owncloud import OwnCloudIE from .packtpub import ( - PacktPubIE, PacktPubCourseIE, + PacktPubIE, ) from .palcomp3 import ( - PalcoMP3IE, PalcoMP3ArtistIE, + PalcoMP3IE, PalcoMP3VideoIE, ) -from .pandoratv import PandoraTVIE from .panopto import ( PanoptoIE, PanoptoListIE, - PanoptoPlaylistIE + PanoptoPlaylistIE, ) from .paramountplus import ( ParamountPlusIE, @@ -1416,13 +1499,23 @@ from .paramountplus import ( ) from .parler import ParlerIE from .parlview import ParlviewIE -from .patreon import ( - PatreonIE, - PatreonCampaignIE +from .parti import ( + PartiLivestreamIE, + PartiVideoIE, +) +from .patreon import ( + PatreonCampaignIE, + PatreonIE, +) +from .pbs import ( + PBSIE, + PBSKidsIE, ) -from .pbs import PBSIE, PBSKidsIE from .pearvideo import PearVideoIE -from .peekvids import PeekVidsIE, 
PlayVidsIE +from .peekvids import ( + PeekVidsIE, + PlayVidsIE, +) from .peertube import ( PeerTubeIE, PeerTubePlaylistIE, @@ -1430,9 +1523,8 @@ from .peertube import ( from .peertv import PeerTVIE from .peloton import ( PelotonIE, - PelotonLiveIE + PelotonLiveIE, ) -from .people import PeopleIE from .performgroup import PerformGroupIE from .periscope import ( PeriscopeIE, @@ -1442,6 +1534,7 @@ from .pgatour import PGATourIE from .philharmoniedeparis import PhilharmonieDeParisIE from .phoenix import PhoenixIE from .photobucket import PhotobucketIE +from .pialive import PiaLiveIE from .piapro import PiaproIE from .picarto import ( PicartoIE, @@ -1450,8 +1543,12 @@ from .picarto import ( from .piksel import PikselIE from .pinkbike import PinkbikeIE from .pinterest import ( - PinterestIE, PinterestCollectionIE, + PinterestIE, +) +from .piramidetv import ( + PiramideTVChannelIE, + PiramideTVIE, ) from .pixivsketch import ( PixivSketchIE, @@ -1460,131 +1557,142 @@ from .pixivsketch import ( from .pladform import PladformIE from .planetmarathi import PlanetMarathiIE from .platzi import ( - PlatziIE, PlatziCourseIE, + PlatziIE, ) -from .playfm import PlayFMIE from .playplustv import PlayPlusTVIE -from .plays import PlaysTVIE -from .playstuff import PlayStuffIE from .playsuisse import PlaySuisseIE from .playtvak import PlaytvakIE -from .playvid import PlayvidIE from .playwire import PlaywireIE -from .plutotv import PlutoTVIE from .pluralsight import ( - PluralsightIE, PluralsightCourseIE, + PluralsightIE, +) +from .plutotv import PlutoTVIE +from .plvideo import PlVideoIE +from .podbayfm import ( + PodbayFMChannelIE, + PodbayFMIE, ) -from .podbayfm import PodbayFMIE, PodbayFMChannelIE from .podchaser import PodchaserIE from .podomatic import PodomaticIE -from .pokemon import ( - PokemonIE, - PokemonWatchIE, -) from .pokergo import ( - PokerGoIE, PokerGoCollectionIE, + PokerGoIE, ) from .polsatgo import PolsatGoIE from .polskieradio import ( - PolskieRadioIE, - PolskieRadioLegacyIE, PolskieRadioAuditionIE, PolskieRadioCategoryIE, + PolskieRadioIE, + PolskieRadioLegacyIE, PolskieRadioPlayerIE, PolskieRadioPodcastIE, PolskieRadioPodcastListIE, ) from .popcorntimes import PopcorntimesIE from .popcorntv import PopcornTVIE -from .porn91 import Porn91IE -from .porncom import PornComIE +from .pornbox import PornboxIE from .pornflip import PornFlipIE -from .pornhd import PornHdIE from .pornhub import ( PornHubIE, - PornHubUserIE, - PornHubPlaylistIE, PornHubPagedVideoListIE, + PornHubPlaylistIE, + PornHubUserIE, PornHubUserVideosUploadIE, ) from .pornotube import PornotubeIE from .pornovoisines import PornoVoisinesIE from .pornoxo import PornoXOIE -from .pornez import PornezIE -from .puhutv import ( - PuhuTVIE, - PuhuTVSerieIE, +from .pr0gramm import Pr0grammIE +from .prankcast import ( + PrankCastIE, + PrankCastPostIE, ) -from .pr0gramm import Pr0grammStaticIE, Pr0grammIE -from .prankcast import PrankCastIE from .premiershiprugby import PremiershipRugbyIE from .presstv import PressTVIE from .projectveritas import ProjectVeritasIE from .prosiebensat1 import ProSiebenSat1IE from .prx import ( - PRXStoryIE, - PRXSeriesIE, PRXAccountIE, + PRXSeriesIE, + PRXSeriesSearchIE, PRXStoriesSearchIE, - PRXSeriesSearchIE + PRXStoryIE, +) +from .puhutv import ( + PuhuTVIE, + PuhuTVSerieIE, ) from .puls4 import Puls4IE from .pyvideo import PyvideoIE from .qdance import QDanceIE from .qingting import QingTingIE from .qqmusic import ( - QQMusicIE, - QQMusicSingerIE, QQMusicAlbumIE, - QQMusicToplistIE, + QQMusicIE, 
QQMusicPlaylistIE, + QQMusicSingerIE, + QQMusicToplistIE, + QQMusicVideoIE, ) from .r7 import ( R7IE, R7ArticleIE, ) -from .radiko import RadikoIE, RadikoRadioIE +from .radiko import ( + RadikoIE, + RadikoRadioIE, +) from .radiocanada import ( - RadioCanadaIE, RadioCanadaAudioVideoIE, + RadioCanadaIE, +) +from .radiocomercial import ( + RadioComercialIE, + RadioComercialPlaylistIE, ) from .radiode import RadioDeIE +from .radiofrance import ( + FranceCultureIE, + RadioFranceIE, + RadioFranceLiveIE, + RadioFrancePodcastIE, + RadioFranceProfileIE, + RadioFranceProgramScheduleIE, +) from .radiojavan import RadioJavanIE -from .radiobremen import RadioBremenIE -from .radiofrance import FranceCultureIE, RadioFranceIE -from .radiozet import RadioZetPodcastIE from .radiokapital import ( RadioKapitalIE, RadioKapitalShowIE, ) +from .radioradicale import RadioRadicaleIE +from .radiozet import RadioZetPodcastIE from .radlive import ( - RadLiveIE, RadLiveChannelIE, + RadLiveIE, RadLiveSeasonIE, ) from .rai import ( - RaiIE, RaiCulturaIE, + RaiIE, + RaiNewsIE, RaiPlayIE, RaiPlayLiveIE, RaiPlayPlaylistIE, RaiPlaySoundIE, RaiPlaySoundLiveIE, RaiPlaySoundPlaylistIE, - RaiNewsIE, RaiSudtirolIE, ) from .raywenderlich import ( - RayWenderlichIE, RayWenderlichCourseIE, + RayWenderlichIE, ) -from .rbmaradio import RBMARadioIE from .rbgtum import ( - RbgTumIE, RbgTumCourseIE, + RbgTumIE, + RbgTumNewCourseIE, ) from .rcs import ( RCSIE, @@ -1597,22 +1705,24 @@ from .rcti import ( RCTIPlusTVIE, ) from .rds import RDSIE -from .recurbate import RecurbateIE -from .redbee import ParliamentLiveUKIE, RTBFIE +from .redbee import ( + RTBFIE, + ParliamentLiveUKIE, +) from .redbulltv import ( - RedBullTVIE, RedBullEmbedIE, - RedBullTVRrnContentIE, RedBullIE, + RedBullTVIE, + RedBullTVRrnContentIE, ) from .reddit import RedditIE +from .redge import RedCDNLivxIE from .redgifs import ( RedGifsIE, RedGifsSearchIE, RedGifsUserIE, ) from .redtube import RedTubeIE -from .regiotv import RegioTVIE from .rentv import ( RENTVIE, RENTVArticleIE, @@ -1621,176 +1731,187 @@ from .restudy import RestudyIE from .reuters import ReutersIE from .reverbnation import ReverbNationIE from .rheinmaintv import RheinMainTVIE -from .rice import RICEIE +from .ridehome import RideHomeIE +from .rinsefm import ( + RinseFMArtistPlaylistIE, + RinseFMIE, +) from .rmcdecouverte import RMCDecouverteIE from .rockstargames import RockstarGamesIE from .rokfin import ( - RokfinIE, - RokfinStackIE, RokfinChannelIE, + RokfinIE, RokfinSearchIE, + RokfinStackIE, +) +from .roosterteeth import ( + RoosterTeethIE, + RoosterTeethSeriesIE, ) -from .roosterteeth import RoosterTeethIE, RoosterTeethSeriesIE from .rottentomatoes import RottenTomatoesIE +from .roya import RoyaLiveIE from .rozhlas import ( + MujRozhlasIE, RozhlasIE, RozhlasVltavaIE, - MujRozhlasIE, ) -from .rte import RteIE, RteRadioIE +from .rte import ( + RteIE, + RteRadioIE, +) +from .rtl2 import RTL2IE from .rtlnl import ( - RtlNlIE, - RTLLuTeleVODIE, RTLLuArticleIE, RTLLuLiveIE, RTLLuRadioIE, -) -from .rtl2 import ( - RTL2IE, - RTL2YouIE, - RTL2YouSeriesIE, + RTLLuTeleVODIE, + RtlNlIE, ) from .rtnews import ( - RTNewsIE, RTDocumentryIE, RTDocumentryPlaylistIE, + RTNewsIE, RuptlyIE, ) from .rtp import RTPIE from .rtrfm import RTRFMIE from .rts import RTSIE from .rtvcplay import ( - RTVCPlayIE, - RTVCPlayEmbedIE, RTVCKalturaIE, + RTVCPlayEmbedIE, + RTVCPlayIE, ) from .rtve import ( RTVEALaCartaIE, RTVEAudioIE, RTVELiveIE, - RTVEInfantilIE, RTVETelevisionIE, ) -from .rtvnh import RTVNHIE from .rtvs import 
RTVSIE -from .rtvslo import RTVSLOIE -from .ruhd import RUHDIE +from .rtvslo import ( + RTVSLOIE, + RTVSLOShowIE, +) +from .rudovideo import RudoVideoIE from .rule34video import Rule34VideoIE from .rumble import ( + RumbleChannelIE, RumbleEmbedIE, RumbleIE, - RumbleChannelIE, ) from .rutube import ( - RutubeIE, RutubeChannelIE, RutubeEmbedIE, + RutubeIE, RutubeMovieIE, RutubePersonIE, RutubePlaylistIE, RutubeTagsIE, ) -from .glomex import ( - GlomexIE, - GlomexEmbedIE, -) -from .megatvcom import ( - MegaTVComIE, - MegaTVComEmbedIE, -) -from .ant1newsgr import ( - Ant1NewsGrWatchIE, - Ant1NewsGrArticleIE, - Ant1NewsGrEmbedIE, -) from .rutv import RUTVIE from .ruutu import RuutuIE from .ruv import ( RuvIE, - RuvSpilaIE + RuvSpilaIE, +) +from .s4c import ( + S4CIE, + S4CSeriesIE, ) -from .s4c import S4CIE from .safari import ( - SafariIE, SafariApiIE, SafariCourseIE, + SafariIE, ) from .saitosan import SaitosanIE from .samplefocus import SampleFocusIE from .sapo import SapoIE -from .savefrom import SaveFromIE from .sbs import SBSIE +from .sbscokr import ( + SBSCoKrAllvodProgramIE, + SBSCoKrIE, + SBSCoKrProgramsVodIE, +) from .screen9 import Screen9IE from .screencast import ScreencastIE from .screencastify import ScreencastifyIE from .screencastomatic import ScreencastOMaticIE +from .screenrec import ScreenRecIE from .scrippsnetworks import ( - ScrippsNetworksWatchIE, ScrippsNetworksIE, + ScrippsNetworksWatchIE, ) +from .scrolller import ScrolllerIE from .scte import ( SCTEIE, SCTECourseIE, ) -from .scrolller import ScrolllerIE -from .seeker import SeekerIE +from .sejmpl import SejmIE +from .sen import SenIE from .senalcolombia import SenalColombiaLiveIE -from .senategov import SenateISVPIE, SenateGovIE +from .senategov import ( + SenateGovIE, + SenateISVPIE, +) from .sendtonews import SendtoNewsIE from .servus import ServusIE from .sevenplus import SevenPlusIE from .sexu import SexuIE from .seznamzpravy import ( - SeznamZpravyIE, SeznamZpravyArticleIE, + SeznamZpravyIE, ) from .shahid import ( ShahidIE, ShahidShowIE, ) -from .shared import ( - SharedIE, - VivoIE, -) +from .sharepoint import SharePointIE from .sharevideos import ShareVideosEmbedIE -from .sibnet import SibnetEmbedIE from .shemaroome import ShemarooMeIE from .showroomlive import ShowRoomLiveIE +from .sibnet import SibnetEmbedIE from .simplecast import ( - SimplecastIE, SimplecastEpisodeIE, + SimplecastIE, SimplecastPodcastIE, ) from .sina import SinaIE from .sixplay import SixPlayIE from .skeb import SkebIE -from .skyit import ( - SkyItPlayerIE, - SkyItVideoIE, - SkyItVideoLiveIE, - SkyItIE, - SkyItArteIE, - CieloTVItIE, - TV8ItIE, -) -from .skylinewebcams import SkylineWebcamsIE -from .skynewsarabia import ( - SkyNewsArabiaIE, - SkyNewsArabiaArticleIE, -) -from .skynewsau import SkyNewsAUIE from .sky import ( SkyNewsIE, SkyNewsStoryIE, SkySportsIE, SkySportsNewsIE, ) +from .skyit import ( + CieloTVItIE, + SkyItArteIE, + SkyItIE, + SkyItPlayerIE, + SkyItVideoIE, + SkyItVideoLiveIE, + TV8ItIE, + TV8ItLiveIE, + TV8ItPlaylistIE, +) +from .skylinewebcams import SkylineWebcamsIE +from .skynewsarabia import ( + SkyNewsArabiaArticleIE, + SkyNewsArabiaIE, +) +from .skynewsau import SkyNewsAUIE from .slideshare import SlideshareIE from .slideslive import SlidesLiveIE from .slutload import SlutloadIE from .smotrim import SmotrimIE +from .snapchat import SnapchatSpotlightIE from .snotr import SnotrIE -from .sohu import SohuIE +from .softwhiteunderbelly import SoftWhiteUnderbellyIE +from .sohu import ( + SohuIE, + SohuVIE, +) from .sonyliv 
import ( SonyLIVIE, SonyLIVSeriesIE, @@ -1798,46 +1919,39 @@ from .sonyliv import ( from .soundcloud import ( SoundcloudEmbedIE, SoundcloudIE, - SoundcloudSetIE, + SoundcloudPlaylistIE, SoundcloudRelatedIE, + SoundcloudSearchIE, + SoundcloudSetIE, + SoundcloudTrackStationIE, SoundcloudUserIE, SoundcloudUserPermalinkIE, - SoundcloudTrackStationIE, - SoundcloudPlaylistIE, - SoundcloudSearchIE, ) from .soundgasm import ( SoundgasmIE, - SoundgasmProfileIE + SoundgasmProfileIE, ) from .southpark import ( - SouthParkIE, SouthParkDeIE, SouthParkDkIE, SouthParkEsIE, + SouthParkIE, SouthParkLatIE, - SouthParkNlIE + SouthParkNlIE, ) from .sovietscloset import ( SovietsClosetIE, - SovietsClosetPlaylistIE + SovietsClosetPlaylistIE, ) from .spankbang import ( SpankBangIE, SpankBangPlaylistIE, ) -from .spankwire import SpankwireIE from .spiegel import SpiegelIE from .spike import ( BellatorIE, ParamountNetworkIE, ) -from .stageplus import StagePlusVODConcertIE -from .startrek import StarTrekIE -from .stitcher import ( - StitcherIE, - StitcherShowIE, -) from .sport5 import Sport5IE from .sportbox import SportBoxIE from .sportdeutschland import SportDeutschlandIE @@ -1847,12 +1961,14 @@ from .spotify import ( ) from .spreaker import ( SpreakerIE, - SpreakerPageIE, SpreakerShowIE, - SpreakerShowPageIE, ) from .springboardplatform import SpringboardPlatformIE from .sprout import SproutIE +from .sproutvideo import ( + SproutVideoIE, + VidsIoIE, +) from .srgssr import ( SRGSSRIE, SRGSSRPlayIE, @@ -1861,26 +1977,37 @@ from .srmediathek import SRMediathekIE from .stacommu import ( StacommuLiveIE, StacommuVODIE, + TheaterComplexTownPPVIE, + TheaterComplexTownVODIE, ) +from .stageplus import StagePlusVODConcertIE from .stanfordoc import StanfordOpenClassroomIE +from .startrek import StarTrekIE from .startv import StarTVIE from .steam import ( - SteamIE, SteamCommunityBroadcastIE, + SteamIE, +) +from .stitcher import ( + StitcherIE, + StitcherShowIE, ) from .storyfire import ( StoryFireIE, - StoryFireUserIE, StoryFireSeriesIE, + StoryFireUserIE, ) +from .streaks import StreaksIE from .streamable import StreamableIE -from .streamcloud import StreamcloudIE from .streamcz import StreamCZIE -from .streamff import StreamFFIE from .streetvoice import StreetVoiceIE from .stretchinternet import StretchInternetIE from .stripchat import StripchatIE from .stv import STVPlayerIE +from .subsplash import ( + SubsplashIE, + SubsplashPlaylistIE, +) from .substack import SubstackIE from .sunporno import SunPornoIE from .sverigesradio import ( @@ -1894,17 +2021,26 @@ from .svt import ( SVTSeriesIE, ) from .swearnet import SwearnetEpisodeIE -from .swrmediathek import SWRMediathekIE -from .syvdk import SYVDKIE from .syfy import SyfyIE +from .syvdk import SYVDKIE from .sztvhu import SztvHuIE from .tagesschau import TagesschauIE +from .taptap import ( + TapTapAppIE, + TapTapAppIntlIE, + TapTapMomentIE, + TapTapPostIntlIE, +) from .tass import TassIE from .tbs import TBSIE -from .tdslifeway import TDSLifewayIE +from .tbsjp import ( + TBSJPEpisodeIE, + TBSJPPlaylistIE, + TBSJPProgramIE, +) from .teachable import ( - TeachableIE, TeachableCourseIE, + TeachableIE, ) from .teachertube import ( TeacherTubeIE, @@ -1912,11 +2048,10 @@ from .teachertube import ( ) from .teachingchannel import TeachingChannelIE from .teamcoco import ( - TeamcocoIE, ConanClassicIE, + TeamcocoIE, ) from .teamtreehouse import TeamTreeHouseIE -from .techtalks import TechTalksIE from .ted import ( TedEmbedIE, TedPlaylistIE, @@ -1933,15 +2068,18 @@ from .telegram 
import TelegramEmbedIE from .telemb import TeleMBIE from .telemundo import TelemundoIE from .telequebec import ( - TeleQuebecIE, - TeleQuebecSquatIE, TeleQuebecEmissionIE, + TeleQuebecIE, TeleQuebecLiveIE, + TeleQuebecSquatIE, TeleQuebecVideoIE, ) from .teletask import TeleTaskIE from .telewebion import TelewebionIE -from .tempo import TempoIE, IVXPlayerIE +from .tempo import ( + IVXPlayerIE, + TempoIE, +) from .tencent import ( IflixEpisodeIE, IflixSeriesIE, @@ -1951,110 +2089,107 @@ from .tencent import ( WeTvSeriesIE, ) from .tennistv import TennisTVIE -from .tenplay import TenPlayIE +from .tenplay import ( + TenPlayIE, + TenPlaySeasonIE, +) from .testurl import TestURLIE from .tf1 import TF1IE from .tfo import TFOIE +from .theguardian import ( + TheGuardianPodcastIE, + TheGuardianPodcastPlaylistIE, +) from .theholetv import TheHoleTvIE from .theintercept import TheInterceptIE from .theplatform import ( - ThePlatformIE, ThePlatformFeedIE, + ThePlatformIE, ) from .thestar import TheStarIE from .thesun import TheSunIE -from .theta import ( - ThetaVideoIE, - ThetaStreamIE, -) from .theweatherchannel import TheWeatherChannelIE from .thisamericanlife import ThisAmericanLifeIE -from .thisav import ThisAVIE from .thisoldhouse import ThisOldHouseIE from .thisvid import ( ThisVidIE, ThisVidMemberIE, ThisVidPlaylistIE, ) +from .threeqsdn import ThreeQSDNIE from .threespeak import ( ThreeSpeakIE, ThreeSpeakUserIE, ) -from .threeqsdn import ThreeQSDNIE from .tiktok import ( - TikTokIE, - TikTokUserIE, - TikTokSoundIE, - TikTokEffectIE, - TikTokTagIE, - TikTokVMIE, - TikTokLiveIE, DouyinIE, + TikTokCollectionIE, + TikTokEffectIE, + TikTokIE, + TikTokLiveIE, + TikTokSoundIE, + TikTokTagIE, + TikTokUserIE, + TikTokVMIE, ) -from .tinypic import TinyPicIE from .tmz import TMZIE from .tnaflix import ( - TNAFlixNetworkEmbedIE, - TNAFlixIE, EMPFlixIE, MovieFapIE, + TNAFlixIE, + TNAFlixNetworkEmbedIE, ) from .toggle import ( - ToggleIE, MeWatchIE, + ToggleIE, ) -from .toggo import ( - ToggoIE, -) -from .tokentube import ( - TokentubeIE, - TokentubeChannelIE -) +from .toggo import ToggoIE from .tonline import TOnlineIE from .toongoggles import ToonGogglesIE from .toutv import TouTvIE -from .toypics import ToypicsUserIE, ToypicsIE +from .toypics import ( + ToypicsIE, + ToypicsUserIE, +) from .traileraddict import TrailerAddictIE from .triller import ( TrillerIE, - TrillerUserIE, TrillerShortIE, + TrillerUserIE, ) -from .trilulilu import TriluliluIE from .trovo import ( + TrovoChannelClipIE, + TrovoChannelVodIE, TrovoIE, TrovoVodIE, - TrovoChannelVodIE, - TrovoChannelClipIE, ) from .trtcocuk import TrtCocukVideoIE +from .trtworld import TrtWorldIE from .trueid import TrueIDIE from .trunews import TruNewsIE from .truth import TruthIE from .trutv import TruTVIE from .tube8 import Tube8IE -from .tubetugraz import TubeTuGrazIE, TubeTuGrazSeriesIE +from .tubetugraz import ( + TubeTuGrazIE, + TubeTuGrazSeriesIE, +) from .tubitv import ( TubiTvIE, TubiTvShowIE, ) from .tumblr import TumblrIE from .tunein import ( - TuneInStationIE, - TuneInPodcastIE, TuneInPodcastEpisodeIE, + TuneInPodcastIE, TuneInShortenerIE, + TuneInStationIE, ) -from .tunepk import TunePkIE -from .turbo import TurboIE from .tv2 import ( TV2IE, - TV2ArticleIE, KatsomoIE, MTVUutisetArticleIE, -) -from .tv24ua import ( - TV24UAVideoIE, + TV2ArticleIE, ) from .tv2dk import ( TV2DKIE, @@ -2067,16 +2202,14 @@ from .tv2hu import ( from .tv4 import TV4IE from .tv5mondeplus import TV5MondePlusIE from .tv5unis import ( - TV5UnisVideoIE, TV5UnisIE, + 
TV5UnisVideoIE, ) -from .tva import ( - TVAIE, - QubIE, -) +from .tv24ua import TV24UAVideoIE +from .tva import TVAIE from .tvanouvelles import ( - TVANouvellesIE, TVANouvellesArticleIE, + TVANouvellesIE, ) from .tvc import ( TVCIE, @@ -2087,34 +2220,28 @@ from .tvigle import TvigleIE from .tviplayer import TVIPlayerIE from .tvland import TVLandIE from .tvn24 import TVN24IE -from .tvnet import TVNetIE from .tvnoe import TVNoeIE -from .tvnow import ( - TVNowIE, - TVNowFilmIE, - TVNowNewIE, - TVNowSeasonIE, - TVNowAnnualIE, - TVNowShowIE, -) from .tvopengr import ( - TVOpenGrWatchIE, TVOpenGrEmbedIE, + TVOpenGrWatchIE, ) from .tvp import ( - TVPEmbedIE, TVPIE, + TVPEmbedIE, TVPStreamIE, TVPVODSeriesIE, TVPVODVideoIE, ) from .tvplay import ( - TVPlayIE, TVPlayHomeIE, + TVPlayIE, ) from .tvplayer import TVPlayerIE +from .tvw import ( + TvwIE, + TvwTvChannelsIE, +) from .tweakers import TweakersIE -from .twentyfourvideo import TwentyFourVideoIE from .twentymin import TwentyMinutenIE from .twentythreevideo import TwentyThreeVideoIE from .twitcasting import ( @@ -2123,29 +2250,29 @@ from .twitcasting import ( TwitCastingUserIE, ) from .twitch import ( - TwitchVodIE, + TwitchClipsIE, TwitchCollectionIE, - TwitchVideosIE, + TwitchStreamIE, TwitchVideosClipsIE, TwitchVideosCollectionsIE, - TwitchStreamIE, - TwitchClipsIE, + TwitchVideosIE, + TwitchVodIE, ) from .twitter import ( - TwitterCardIE, - TwitterIE, TwitterAmplifyIE, TwitterBroadcastIE, - TwitterSpacesIE, + TwitterCardIE, + TwitterIE, TwitterShortenerIE, + TwitterSpacesIE, ) from .txxx import ( - TxxxIE, PornTopIE, + TxxxIE, ) from .udemy import ( + UdemyCourseIE, UdemyIE, - UdemyCourseIE ) from .udn import UDNEmbedIE from .ufctv import ( @@ -2154,17 +2281,17 @@ from .ufctv import ( ) from .ukcolumn import UkColumnIE from .uktvplay import UKTVPlayIE -from .digiteka import DigitekaIE -from .dlive import ( - DLiveVODIE, - DLiveStreamIE, +from .uliza import ( + UlizaPlayerIE, + UlizaPortalIE, ) -from .drooble import DroobleIE from .umg import UMGDeIE from .unistra import UnistraIE from .unity import UnityIE -from .unscripted import UnscriptedNewsVideoIE -from .unsupported import KnownDRMIE, KnownPiracyIE +from .unsupported import ( + KnownDRMIE, + KnownPiracyIE, +) from .uol import UOLIE from .uplynk import ( UplynkIE, @@ -2174,37 +2301,34 @@ from .urort import UrortIE from .urplay import URPlayIE from .usanetwork import USANetworkIE from .usatoday import USATodayIE -from .ustream import UstreamIE, UstreamChannelIE +from .ustream import ( + UstreamChannelIE, + UstreamIE, +) from .ustudio import ( - UstudioIE, UstudioEmbedIE, + UstudioIE, ) from .utreon import UtreonIE from .varzesh3 import Varzesh3IE from .vbox7 import Vbox7IE -from .veehd import VeeHDIE from .veo import VeoIE -from .veoh import ( - VeohIE, - VeohUserIE -) from .vesti import VestiIE from .vevo import ( VevoIE, VevoPlaylistIE, ) from .vgtv import ( + VGTVIE, BTArticleIE, BTVestlendingenIE, - VGTVIE, ) from .vh1 import VH1IE from .vice import ( - ViceIE, ViceArticleIE, + ViceIE, ViceShowIE, ) -from .vidbit import VidbitIE from .viddler import ViddlerIE from .videa import VideaIE from .videocampus_sachsen import ( @@ -2214,133 +2338,114 @@ from .videocampus_sachsen import ( from .videodetective import VideoDetectiveIE from .videofyme import VideofyMeIE from .videoken import ( + VideoKenCategoryIE, VideoKenIE, VideoKenPlayerIE, VideoKenPlaylistIE, - VideoKenCategoryIE, VideoKenTopicIE, ) from .videomore import ( VideomoreIE, - VideomoreVideoIE, VideomoreSeasonIE, + 
VideomoreVideoIE, ) from .videopress import VideoPressIE +from .vidflex import VidflexIE from .vidio import ( VidioIE, + VidioLiveIE, VidioPremierIE, - VidioLiveIE ) from .vidlii import VidLiiIE +from .vidly import VidlyIE +from .vidyard import VidyardIE from .viewlift import ( - ViewLiftIE, ViewLiftEmbedIE, + ViewLiftIE, ) from .viidea import ViideaIE from .vimeo import ( - VimeoIE, + VHXEmbedIE, VimeoAlbumIE, VimeoChannelIE, VimeoGroupsIE, + VimeoIE, VimeoLikesIE, VimeoOndemandIE, VimeoProIE, VimeoReviewIE, VimeoUserIE, VimeoWatchLaterIE, - VHXEmbedIE, ) from .vimm import ( VimmIE, VimmRecordingIE, ) -from .vimple import VimpleIE -from .vine import ( - VineIE, - VineUserIE, -) -from .viki import ( - VikiIE, - VikiChannelIE, -) +from .viously import ViouslyIE from .viqeo import ViqeoIE from .viu import ( ViuIE, - ViuPlaylistIE, ViuOTTIE, ViuOTTIndonesiaIE, + ViuPlaylistIE, ) from .vk import ( VKIE, - VKUserVideosIE, - VKWallPostIE, VKPlayIE, VKPlayLiveIE, + VKUserVideosIE, + VKWallPostIE, ) from .vocaroo import VocarooIE -from .vodlocker import VodlockerIE from .vodpl import VODPlIE from .vodplatform import VODPlatformIE -from .voicerepublic import VoiceRepublicIE from .voicy import ( - VoicyIE, VoicyChannelIE, + VoicyIE, ) from .volejtv import VolejTVIE -from .voot import ( - VootIE, - VootSeriesIE, -) from .voxmedia import ( - VoxMediaVolumeIE, VoxMediaIE, + VoxMediaVolumeIE, +) +from .vrsquare import ( + VrSquareChannelIE, + VrSquareIE, + VrSquareSearchIE, + VrSquareSectionIE, ) from .vrt import ( VRTIE, - VrtNUIE, - KetnetIE, DagelijkseKostIE, + Radio1BeIE, + VrtNUIE, ) -from .vrak import VrakIE -from .vrv import ( - VRVIE, - VRVSeriesIE, -) -from .vshare import VShareIE from .vtm import VTMIE -from .medialaan import MedialaanIE +from .vtv import ( + VTVIE, + VTVGoIE, +) from .vuclip import VuClipIE -from .vupload import VuploadIE from .vvvvid import ( VVVVIDIE, VVVVIDShowIE, ) -from .vyborymos import VyboryMosIE -from .vzaar import VzaarIE -from .wakanim import WakanimIE from .walla import WallaIE from .washingtonpost import ( - WashingtonPostIE, WashingtonPostArticleIE, -) -from .wasdtv import ( - WASDTVStreamIE, - WASDTVRecordIE, - WASDTVClipIE, + WashingtonPostIE, ) from .wat import WatIE -from .watchbox import WatchBoxIE -from .watchindianporn import WatchIndianPornIE from .wdr import ( WDRIE, - WDRPageIE, WDRElefantIE, WDRMobileIE, + WDRPageIE, ) from .webcamerapl import WebcameraplIE from .webcaster import ( - WebcasterIE, WebcasterFeedIE, + WebcasterIE, ) from .webofstories import ( WebOfStoriesIE, @@ -2348,42 +2453,42 @@ from .webofstories import ( ) from .weibo import ( WeiboIE, - WeiboMobileIE + WeiboUserIE, + WeiboVideoIE, ) from .weiqitv import WeiqiTVIE from .weverse import ( WeverseIE, - WeverseMediaIE, - WeverseMomentIE, - WeverseLiveTabIE, - WeverseMediaTabIE, WeverseLiveIE, + WeverseLiveTabIE, + WeverseMediaIE, + WeverseMediaTabIE, + WeverseMomentIE, ) from .wevidi import WeVidiIE from .weyyak import WeyyakIE +from .whowatch import WhoWatchIE from .whyp import WhypIE from .wikimedia import WikimediaIE -from .willow import WillowIE from .wimbledon import WimbledonIE from .wimtv import WimTVIE -from .whowatch import WhoWatchIE from .wistia import ( + WistiaChannelIE, WistiaIE, WistiaPlaylistIE, - WistiaChannelIE, ) from .wordpress import ( - WordpressPlaylistEmbedIE, WordpressMiniAudioPlayerEmbedIE, + WordpressPlaylistEmbedIE, ) from .worldstarhiphop import WorldStarHipHopIE from .wppilot import ( - WPPilotIE, WPPilotChannelsIE, + WPPilotIE, ) from .wrestleuniverse 
import ( - WrestleUniverseVODIE, WrestleUniversePPVIE, + WrestleUniverseVODIE, ) from .wsj import ( WSJIE, @@ -2391,89 +2496,91 @@ from .wsj import ( ) from .wwe import WWEIE from .wykop import ( - WykopDigIE, WykopDigCommentIE, - WykopPostIE, + WykopDigIE, WykopPostCommentIE, + WykopPostIE, ) from .xanimu import XanimuIE -from .xbef import XBefIE from .xboxclips import XboxClipsIE -from .xfileshare import XFileShareIE from .xhamster import ( - XHamsterIE, XHamsterEmbedIE, + XHamsterIE, XHamsterUserIE, ) +from .xiaohongshu import XiaoHongShuIE from .ximalaya import ( + XimalayaAlbumIE, XimalayaIE, - XimalayaAlbumIE ) from .xinpianchang import XinpianchangIE from .xminus import XMinusIE from .xnxx import XNXXIE from .xstream import XstreamIE -from .xtube import XTubeUserIE, XTubeIE -from .xuite import XuiteIE from .xvideos import ( XVideosIE, - XVideosQuickiesIE + XVideosQuickiesIE, ) from .xxxymovies import XXXYMoviesIE from .yahoo import ( YahooIE, - YahooSearchIE, YahooJapanNewsIE, + YahooSearchIE, ) from .yandexdisk import YandexDiskIE from .yandexmusic import ( - YandexMusicTrackIE, YandexMusicAlbumIE, - YandexMusicPlaylistIE, - YandexMusicArtistTracksIE, YandexMusicArtistAlbumsIE, + YandexMusicArtistTracksIE, + YandexMusicPlaylistIE, + YandexMusicTrackIE, ) from .yandexvideo import ( YandexVideoIE, YandexVideoPreviewIE, - ZenYandexIE, ZenYandexChannelIE, + ZenYandexIE, ) from .yapfiles import YapFilesIE from .yappy import ( YappyIE, YappyProfileIE, ) -from .yesjapan import YesJapanIE -from .yinyuetai import YinYueTaiIE from .yle_areena import YleAreenaIE -from .ynet import YnetIE from .youjizz import YouJizzIE from .youku import ( YoukuIE, YoukuShowIE, ) from .younow import ( - YouNowLiveIE, YouNowChannelIE, + YouNowLiveIE, YouNowMomentIE, ) -from .youporn import YouPornIE -from .yourporn import YourPornIE -from .yourupload import YourUploadIE +from .youporn import ( + YouPornCategoryIE, + YouPornChannelIE, + YouPornCollectionIE, + YouPornIE, + YouPornStarIE, + YouPornTagIE, + YouPornVideosIE, +) from .zaiko import ( - ZaikoIE, ZaikoETicketIE, + ZaikoIE, ) from .zapiks import ZapiksIE from .zattoo import ( BBVTVIE, + EWETVIE, + SAKTVIE, + VTXTVIE, BBVTVLiveIE, BBVTVRecordingsIE, EinsUndEinsTVIE, EinsUndEinsTVLiveIE, EinsUndEinsTVRecordingsIE, - EWETVIE, EWETVLiveIE, EWETVRecordingsIE, GlattvisionTVIE, @@ -2491,13 +2598,11 @@ from .zattoo import ( QuantumTVIE, QuantumTVLiveIE, QuantumTVRecordingsIE, + SAKTVLiveIE, + SAKTVRecordingsIE, SaltTVIE, SaltTVLiveIE, SaltTVRecordingsIE, - SAKTVIE, - SAKTVLiveIE, - SAKTVRecordingsIE, - VTXTVIE, VTXTVLiveIE, VTXTVRecordingsIE, WalyTVIE, @@ -2508,21 +2613,29 @@ from .zattoo import ( ZattooMoviesIE, ZattooRecordingsIE, ) -from .zdf import ZDFIE, ZDFChannelIE +from .zdf import ( + ZDFIE, + ZDFChannelIE, +) from .zee5 import ( Zee5IE, Zee5SeriesIE, ) from .zeenews import ZeeNewsIE +from .zenporn import ZenPornIE +from .zetland import ZetlandDKArticleIE from .zhihu import ZhihuIE from .zingmp3 import ( - ZingMp3IE, ZingMp3AlbumIE, ZingMp3ChartHomeIE, - ZingMp3WeekChartIE, ZingMp3ChartMusicVideoIE, - ZingMp3UserIE, ZingMp3HubIE, + ZingMp3IE, + ZingMp3LiveRadioIE, + ZingMp3PodcastEpisodeIE, + ZingMp3PodcastIE, + ZingMp3UserIE, + ZingMp3WeekChartIE, ) from .zoom import ZoomIE from .zype import ZypeIE diff --git a/plugins/youtube_download/yt_dlp/extractor/abc.py b/plugins/youtube_download/yt_dlp/extractor/abc.py index f56133e..7296be7 100644 --- a/plugins/youtube_download/yt_dlp/extractor/abc.py +++ b/plugins/youtube_download/yt_dlp/extractor/abc.py @@ 
-4,18 +4,18 @@ import re import time from .common import InfoExtractor -from ..compat import compat_str from ..utils import ( - dict_get, ExtractorError, - js_to_json, + dict_get, int_or_none, + js_to_json, parse_iso8601, str_or_none, traverse_obj, try_get, unescapeHTML, update_url_query, + url_or_none, ) @@ -66,7 +66,7 @@ class ABCIE(InfoExtractor): 'ext': 'mp4', 'title': 'WWI Centenary', 'description': 'md5:c2379ec0ca84072e86b446e536954546', - } + }, }, { 'url': 'https://www.abc.net.au/news/programs/the-world/2020-06-10/black-lives-matter-protests-spawn-support-for/12342074', 'info_dict': { @@ -74,7 +74,7 @@ class ABCIE(InfoExtractor): 'ext': 'mp4', 'title': 'Black Lives Matter protests spawn support for Papuans in Indonesia', 'description': 'md5:2961a17dc53abc558589ccd0fb8edd6f', - } + }, }, { 'url': 'https://www.abc.net.au/btn/newsbreak/btn-newsbreak-20200814/12560476', 'info_dict': { @@ -85,7 +85,7 @@ class ABCIE(InfoExtractor): 'upload_date': '20200813', 'uploader': 'Behind the News', 'uploader_id': 'behindthenews', - } + }, }, { 'url': 'https://www.abc.net.au/news/2023-06-25/wagner-boss-orders-troops-back-to-bases-to-avoid-bloodshed/102520540', 'info_dict': { @@ -94,7 +94,7 @@ class ABCIE(InfoExtractor): 'ext': 'mp4', 'description': 'Wagner troops leave Rostov-on-Don and\xa0Yevgeny Prigozhin will move to Belarus under a deal brokered by Belarusian President Alexander Lukashenko to end the mutiny.', 'thumbnail': 'https://live-production.wcms.abc-cdn.net.au/0c170f5b57f0105c432f366c0e8e267b?impolicy=wcms_crop_resize&cropH=2813&cropW=5000&xPos=0&yPos=249&width=862&height=485', - } + }, }] def _real_extract(self, url): @@ -125,7 +125,7 @@ class ABCIE(InfoExtractor): if mobj is None: expired = self._html_search_regex(r'(?s)class="expired-(?:video|audio)".+?(.+?)', webpage, 'expired', None) if expired: - raise ExtractorError('%s said: %s' % (self.IE_NAME, expired), expected=True) + raise ExtractorError(f'{self.IE_NAME} said: {expired}', expected=True) raise ExtractorError('Unable to extract video urls') urls_info = self._parse_json( @@ -163,7 +163,7 @@ class ABCIE(InfoExtractor): 'height': height, 'tbr': bitrate, 'filesize': int_or_none(url_info.get('filesize')), - 'format_id': format_id + 'format_id': format_id, }) return { @@ -180,20 +180,100 @@ class ABCIViewIE(InfoExtractor): _VALID_URL = r'https?://iview\.abc\.net\.au/(?:[^/]+/)*video/(?P[^/?#]+)' _GEO_COUNTRIES = ['AU'] - # ABC iview programs are normally available for 14 days only. 
_TESTS = [{ + 'url': 'https://iview.abc.net.au/show/utopia/series/1/video/CO1211V001S00', + 'md5': '52a942bfd7a0b79a6bfe9b4ce6c9d0ed', + 'info_dict': { + 'id': 'CO1211V001S00', + 'ext': 'mp4', + 'title': 'Series 1 Ep 1 Wood For The Trees', + 'series': 'Utopia', + 'description': 'md5:0cfb2c183c1b952d1548fd65c8a95c00', + 'upload_date': '20230726', + 'uploader_id': 'abc1', + 'series_id': 'CO1211V', + 'episode_id': 'CO1211V001S00', + 'season_number': 1, + 'season': 'Season 1', + 'episode_number': 1, + 'episode': 'Wood For The Trees', + 'thumbnail': 'https://cdn.iview.abc.net.au/thumbs/i/co/CO1211V001S00_5ad8353f4df09_1280.jpg', + 'timestamp': 1690403700, + }, + 'params': { + 'skip_download': True, + }, + }, { + 'note': 'No episode name', 'url': 'https://iview.abc.net.au/show/gruen/series/11/video/LE1927H001S00', 'md5': '67715ce3c78426b11ba167d875ac6abf', 'info_dict': { 'id': 'LE1927H001S00', 'ext': 'mp4', - 'title': "Series 11 Ep 1", - 'series': "Gruen", + 'title': 'Series 11 Ep 1', + 'series': 'Gruen', 'description': 'md5:52cc744ad35045baf6aded2ce7287f67', 'upload_date': '20190925', 'uploader_id': 'abc1', + 'series_id': 'LE1927H', + 'episode_id': 'LE1927H001S00', + 'season_number': 11, + 'season': 'Season 11', + 'episode_number': 1, + 'episode': 'Episode 1', + 'thumbnail': 'https://cdn.iview.abc.net.au/thumbs/i/le/LE1927H001S00_5d954fbd79e25_1280.jpg', 'timestamp': 1569445289, }, + 'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest'], + 'params': { + 'skip_download': True, + }, + }, { + 'note': 'No episode number', + 'url': 'https://iview.abc.net.au/show/four-corners/series/2022/video/NC2203H039S00', + 'md5': '77cb7d8434440e3b28fbebe331c2456a', + 'info_dict': { + 'id': 'NC2203H039S00', + 'ext': 'mp4', + 'title': 'Series 2022 Locking Up Kids', + 'series': 'Four Corners', + 'description': 'md5:54829ca108846d1a70e1fcce2853e720', + 'upload_date': '20221114', + 'uploader_id': 'abc1', + 'series_id': 'NC2203H', + 'episode_id': 'NC2203H039S00', + 'season_number': 2022, + 'season': 'Season 2022', + 'episode': 'Locking Up Kids', + 'thumbnail': 'https://cdn.iview.abc.net.au/thumbs/i/nc/NC2203H039S00_636d8a0944a22_1920.jpg', + 'timestamp': 1668460497, + + }, + 'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest'], + 'params': { + 'skip_download': True, + }, + }, { + 'note': 'No episode name or number', + 'url': 'https://iview.abc.net.au/show/landline/series/2021/video/RF2004Q043S00', + 'md5': '2e17dec06b13cc81dc119d2565289396', + 'info_dict': { + 'id': 'RF2004Q043S00', + 'ext': 'mp4', + 'title': 'Series 2021', + 'series': 'Landline', + 'description': 'md5:c9f30d9c0c914a7fd23842f6240be014', + 'upload_date': '20211205', + 'uploader_id': 'abc1', + 'series_id': 'RF2004Q', + 'episode_id': 'RF2004Q043S00', + 'season_number': 2021, + 'season': 'Season 2021', + 'thumbnail': 'https://cdn.iview.abc.net.au/thumbs/i/rf/RF2004Q043S00_61a950639dbc0_1920.jpg', + 'timestamp': 1638710705, + + }, + 'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest'], 'params': { 'skip_download': True, }, @@ -207,13 +287,12 @@ class ABCIViewIE(InfoExtractor): stream = next(s for s in video_params['playlist'] if s.get('type') in ('program', 'livestream')) house_number = video_params.get('episodeHouseNumber') or video_id - path = '/auth/hls/sign?ts={0}&hn={1}&d=android-tablet'.format( - int(time.time()), house_number) + path = f'/auth/hls/sign?ts={int(time.time())}&hn={house_number}&d=android-tablet' sig = hmac.new( b'android.content.res.Resources', - path.encode('utf-8'), 
hashlib.sha256).hexdigest() + path.encode(), hashlib.sha256).hexdigest() token = self._download_webpage( - 'http://iview.abc.net.au{0}&sig={1}'.format(path, sig), video_id) + f'http://iview.abc.net.au{path}&sig={sig}', video_id) def tokenize_url(url, token): return update_url_query(url, { @@ -222,7 +301,7 @@ class ABCIViewIE(InfoExtractor): for sd in ('1080', '720', 'sd', 'sd-low'): sd_url = try_get( - stream, lambda x: x['streams']['hls'][sd], compat_str) + stream, lambda x: x['streams']['hls'][sd], str) if not sd_url: continue formats = self._extract_m3u8_formats( @@ -255,6 +334,8 @@ class ABCIViewIE(InfoExtractor): 'episode_number': int_or_none(self._search_regex( r'\bEp\s+(\d+)\b', title, 'episode number', default=None)), 'episode_id': house_number, + 'episode': self._search_regex( + r'^(?:Series\s+\d+)?\s*(?:Ep\s+\d+)?\s*(.*)$', title, 'episode', default='') or None, 'uploader_id': video_params.get('channel'), 'formats': formats, 'subtitles': subtitles, @@ -275,7 +356,7 @@ class ABCIViewShowSeriesIE(InfoExtractor): 'description': 'md5:93119346c24a7c322d446d8eece430ff', 'series': 'Upper Middle Bogan', 'season': 'Series 1', - 'thumbnail': r're:^https?://cdn\.iview\.abc\.net\.au/thumbs/.*\.jpg$' + 'thumbnail': r're:^https?://cdn\.iview\.abc\.net\.au/thumbs/.*\.jpg$', }, 'playlist_count': 8, }, { @@ -294,17 +375,39 @@ class ABCIViewShowSeriesIE(InfoExtractor): 'noplaylist': True, 'skip_download': 'm3u8', }, + }, { + # 'videoEpisodes' is a dict with `items` key + 'url': 'https://iview.abc.net.au/show/7-30-mark-humphries-satire', + 'info_dict': { + 'id': '178458-0', + 'title': 'Episodes', + 'description': 'Satirist Mark Humphries brings his unique perspective on current political events for 7.30.', + 'series': '7.30 Mark Humphries Satire', + 'season': 'Episodes', + 'thumbnail': r're:^https?://cdn\.iview\.abc\.net\.au/thumbs/.*\.jpg$', + }, + 'playlist_count': 15, + 'skip': 'This program is not currently available in ABC iview', + }, { + 'url': 'https://iview.abc.net.au/show/inbestigators', + 'info_dict': { + 'id': '175343-1', + 'title': 'Series 1', + 'description': 'md5:b9976935a6450e5b78ce2a940a755685', + 'series': 'The Inbestigators', + 'season': 'Series 1', + 'thumbnail': r're:^https?://cdn\.iview\.abc\.net\.au/thumbs/.+\.jpg', + }, + 'playlist_count': 17, }] def _real_extract(self, url): show_id = self._match_id(url) webpage = self._download_webpage(url, show_id) - webpage_data = self._search_regex( - r'window\.__INITIAL_STATE__\s*=\s*[\'"](.+?)[\'"]\s*;', - webpage, 'initial state') - video_data = self._parse_json( - unescapeHTML(webpage_data).encode('utf-8').decode('unicode_escape'), show_id) - video_data = video_data['route']['pageData']['_embedded'] + video_data = self._search_json( + r'window\.__INITIAL_STATE__\s*=\s*[\'"]', webpage, 'initial state', show_id, + transform_source=lambda x: x.encode().decode('unicode_escape'), + end_pattern=r'[\'"]\s*;')['route']['pageData']['_embedded'] highlight = try_get(video_data, lambda x: x['highlightVideo']['shareUrl']) if not self._yes_playlist(show_id, bool(highlight), video_label='highlight video'): @@ -313,12 +416,14 @@ class ABCIViewShowSeriesIE(InfoExtractor): series = video_data['selectedSeries'] return { '_type': 'playlist', - 'entries': [self.url_result(episode['shareUrl']) - for episode in series['_embedded']['videoEpisodes']], + 'entries': [self.url_result(episode_url, ABCIViewIE) + for episode_url in traverse_obj(series, ( + '_embedded', 'videoEpisodes', (None, 'items'), ..., 'shareUrl', {url_or_none}))], 'id': series.get('id'), 
'title': dict_get(series, ('title', 'displaySubtitle')), 'description': series.get('description'), 'series': dict_get(series, ('showTitle', 'displayTitle')), 'season': dict_get(series, ('title', 'displaySubtitle')), - 'thumbnail': series.get('thumbnail'), + 'thumbnail': traverse_obj( + series, 'thumbnail', ('images', lambda _, v: v['name'] == 'seriesThumbnail', 'url'), get_all=False), } diff --git a/plugins/youtube_download/yt_dlp/extractor/abcnews.py b/plugins/youtube_download/yt_dlp/extractor/abcnews.py index a57295b..7215500 100644 --- a/plugins/youtube_download/yt_dlp/extractor/abcnews.py +++ b/plugins/youtube_download/yt_dlp/extractor/abcnews.py @@ -58,7 +58,7 @@ class AbcNewsVideoIE(AMPIE): display_id = mobj.group('display_id') video_id = mobj.group('id') info_dict = self._extract_feed_info( - 'http://abcnews.go.com/video/itemfeed?id=%s' % video_id) + f'http://abcnews.go.com/video/itemfeed?id={video_id}') info_dict.update({ 'id': video_id, 'display_id': display_id, diff --git a/plugins/youtube_download/yt_dlp/extractor/abcotvs.py b/plugins/youtube_download/yt_dlp/extractor/abcotvs.py index 6dca19d..ea5882b 100644 --- a/plugins/youtube_download/yt_dlp/extractor/abcotvs.py +++ b/plugins/youtube_download/yt_dlp/extractor/abcotvs.py @@ -1,5 +1,4 @@ from .common import InfoExtractor -from ..compat import compat_str from ..utils import ( dict_get, int_or_none, @@ -57,11 +56,11 @@ class ABCOTVSIE(InfoExtractor): data = self._download_json( 'https://api.abcotvs.com/v2/content', display_id, query={ 'id': video_id, - 'key': 'otv.web.%s.story' % station, + 'key': f'otv.web.{station}.story', 'station': station, })['data'] video = try_get(data, lambda x: x['featuredMedia']['video'], dict) or data - video_id = compat_str(dict_get(video, ('id', 'publishedKey'), video_id)) + video_id = str(dict_get(video, ('id', 'publishedKey'), video_id)) title = video.get('title') or video['linkText'] formats = [] diff --git a/plugins/youtube_download/yt_dlp/extractor/abematv.py b/plugins/youtube_download/yt_dlp/extractor/abematv.py index 163b83c..8f2fc4c 100644 --- a/plugins/youtube_download/yt_dlp/extractor/abematv.py +++ b/plugins/youtube_download/yt_dlp/extractor/abematv.py @@ -6,53 +6,54 @@ import hmac import io import json import re -import struct import time import urllib.parse -import urllib.request -import urllib.response import uuid from .common import InfoExtractor from ..aes import aes_ecb_decrypt +from ..networking import RequestHandler, Response +from ..networking.exceptions import TransportError from ..utils import ( ExtractorError, - bytes_to_intlist, + OnDemandPagedList, decode_base_n, int_or_none, - intlist_to_bytes, - OnDemandPagedList, time_seconds, traverse_obj, + update_url, update_url_query, ) -def add_opener(ydl, handler): # FIXME: Create proper API in .networking - """Add a handler for opening URLs, like _download_webpage""" - # https://github.com/python/cpython/blob/main/Lib/urllib/request.py#L426 - # https://github.com/python/cpython/blob/main/Lib/urllib/request.py#L605 - rh = ydl._request_director.handlers['Urllib'] - if 'abematv-license' in rh._SUPPORTED_URL_SCHEMES: - return - opener = rh._get_instance(cookiejar=ydl.cookiejar, proxies=ydl.proxies) - assert isinstance(opener, urllib.request.OpenerDirector) - opener.add_handler(handler) - rh._SUPPORTED_URL_SCHEMES = (*rh._SUPPORTED_URL_SCHEMES, 'abematv-license') +class AbemaLicenseRH(RequestHandler): + _SUPPORTED_URL_SCHEMES = ('abematv-license',) + _SUPPORTED_PROXY_SCHEMES = None + _SUPPORTED_FEATURES = None + RH_NAME = 
'abematv_license' + _STRTABLE = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' + _HKEY = b'3AF0298C219469522A313570E8583005A642E73EDD58E3EA2FB7339D3DF1597E' -class AbemaLicenseHandler(urllib.request.BaseHandler): - handler_order = 499 - STRTABLE = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' - HKEY = b'3AF0298C219469522A313570E8583005A642E73EDD58E3EA2FB7339D3DF1597E' - - def __init__(self, ie: 'AbemaTVIE'): - # the protocol that this should really handle is 'abematv-license://' - # abematv_license_open is just a placeholder for development purposes - # ref. https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Lib/urllib/request.py#L510 - setattr(self, 'abematv-license_open', getattr(self, 'abematv_license_open')) + def __init__(self, *, ie: 'AbemaTVIE', **kwargs): + super().__init__(**kwargs) self.ie = ie + def _send(self, request): + url = request.url + ticket = urllib.parse.urlparse(url).netloc + + try: + response_data = self._get_videokey_from_ticket(ticket) + except ExtractorError as e: + raise TransportError(cause=e.cause) from e + except (IndexError, KeyError, TypeError) as e: + raise TransportError(cause=repr(e)) from e + + return Response( + io.BytesIO(response_data), url, + headers={'Content-Length': str(len(response_data))}) + def _get_videokey_from_ticket(self, ticket): to_show = self.ie.get_param('verbose', False) media_token = self.ie._get_media_token(to_show=to_show) @@ -62,33 +63,27 @@ class AbemaLicenseHandler(urllib.request.BaseHandler): query={'t': media_token}, data=json.dumps({ 'kv': 'a', - 'lt': ticket - }).encode('utf-8'), + 'lt': ticket, + }).encode(), headers={ 'Content-Type': 'application/json', }) - res = decode_base_n(license_response['k'], table=self.STRTABLE) - encvideokey = bytes_to_intlist(struct.pack('>QQ', res >> 64, res & 0xffffffffffffffff)) + res = decode_base_n(license_response['k'], table=self._STRTABLE) + encvideokey = list(res.to_bytes(16, 'big')) h = hmac.new( - binascii.unhexlify(self.HKEY), - (license_response['cid'] + self.ie._DEVICE_ID).encode('utf-8'), + binascii.unhexlify(self._HKEY), + (license_response['cid'] + self.ie._DEVICE_ID).encode(), digestmod=hashlib.sha256) - enckey = bytes_to_intlist(h.digest()) + enckey = list(h.digest()) - return intlist_to_bytes(aes_ecb_decrypt(encvideokey, enckey)) - - def abematv_license_open(self, url): - url = url.get_full_url() if isinstance(url, urllib.request.Request) else url - ticket = urllib.parse.urlparse(url).netloc - response_data = self._get_videokey_from_ticket(ticket) - return urllib.response.addinfourl(io.BytesIO(response_data), headers={ - 'Content-Length': str(len(response_data)), - }, url=url, code=200) + return bytes(aes_ecb_decrypt(encvideokey, enckey)) class AbemaTVBaseIE(InfoExtractor): + _NETRC_MACHINE = 'abematv' + _USERTOKEN = None _DEVICE_ID = None _MEDIATOKEN = None @@ -97,11 +92,11 @@ class AbemaTVBaseIE(InfoExtractor): @classmethod def _generate_aks(cls, deviceid): - deviceid = deviceid.encode('utf-8') + deviceid = deviceid.encode() # add 1 hour and then drop minute and secs ts_1hour = int((time_seconds() // 3600 + 1) * 3600) time_struct = time.gmtime(ts_1hour) - ts_1hour_str = str(ts_1hour).encode('utf-8') + ts_1hour_str = str(ts_1hour).encode() tmp = None @@ -113,7 +108,7 @@ class AbemaTVBaseIE(InfoExtractor): def mix_tmp(count): nonlocal tmp - for i in range(count): + for _ in range(count): mix_once(tmp) def mix_twist(nonce): @@ -133,11 +128,15 @@ class AbemaTVBaseIE(InfoExtractor): if self._USERTOKEN: return 
self._USERTOKEN + self._downloader._request_director.add_handler(AbemaLicenseRH(ie=self, logger=None)) + username, _ = self._get_login_info() - AbemaTVBaseIE._USERTOKEN = username and self.cache.load(self._NETRC_MACHINE, username) + auth_cache = username and self.cache.load(self._NETRC_MACHINE, username, min_ver='2024.01.19') + AbemaTVBaseIE._USERTOKEN = auth_cache and auth_cache.get('usertoken') if AbemaTVBaseIE._USERTOKEN: # try authentication with locally stored token try: + AbemaTVBaseIE._DEVICE_ID = auth_cache.get('device_id') self._get_media_token(True) return except ExtractorError as e: @@ -150,13 +149,12 @@ class AbemaTVBaseIE(InfoExtractor): data=json.dumps({ 'deviceId': self._DEVICE_ID, 'applicationKeySecret': aks, - }).encode('utf-8'), + }).encode(), headers={ 'Content-Type': 'application/json', }) AbemaTVBaseIE._USERTOKEN = user_data['token'] - add_opener(self._downloader, AbemaLicenseHandler(self)) return self._USERTOKEN def _get_media_token(self, invalidate=False, to_show=True): @@ -171,13 +169,44 @@ class AbemaTVBaseIE(InfoExtractor): 'osLang': 'ja_JP', 'osTimezone': 'Asia/Tokyo', 'appId': 'tv.abema', - 'appVersion': '3.27.1' + 'appVersion': '3.27.1', }, headers={ 'Authorization': f'bearer {self._get_device_token()}', })['token'] return self._MEDIATOKEN + def _perform_login(self, username, password): + self._get_device_token() + if self.cache.load(self._NETRC_MACHINE, username, min_ver='2024.01.19') and self._get_media_token(): + self.write_debug('Skipping logging in') + return + + if '@' in username: # don't strictly check if it's email address or not + ep, method = 'user/email', 'email' + else: + ep, method = 'oneTimePassword', 'userId' + + login_response = self._download_json( + f'https://api.abema.io/v1/auth/{ep}', None, note='Logging in', + data=json.dumps({ + method: username, + 'password': password, + }).encode(), headers={ + 'Authorization': f'bearer {self._get_device_token()}', + 'Origin': 'https://abema.tv', + 'Referer': 'https://abema.tv/', + 'Content-Type': 'application/json', + }) + + AbemaTVBaseIE._USERTOKEN = login_response['token'] + self._get_media_token(True) + auth_cache = { + 'device_id': AbemaTVBaseIE._DEVICE_ID, + 'usertoken': AbemaTVBaseIE._USERTOKEN, + } + self.cache.store(self._NETRC_MACHINE, username, auth_cache) + def _call_api(self, endpoint, video_id, query=None, note='Downloading JSON metadata'): return self._download_json( f'https://api.abema.io/{endpoint}', video_id, query=query or {}, @@ -201,14 +230,14 @@ class AbemaTVBaseIE(InfoExtractor): class AbemaTVIE(AbemaTVBaseIE): _VALID_URL = r'https?://abema\.tv/(?Pnow-on-air|video/episode|channels/.+?/slots)/(?P[^?/]+)' - _NETRC_MACHINE = 'abematv' _TESTS = [{ 'url': 'https://abema.tv/video/episode/194-25_s2_p1', 'info_dict': { 'id': '194-25_s2_p1', 'title': '第1話 「チーズケーキ」 「モーニング再び」', 'series': '異世界食堂2', - 'series_number': 2, + 'season': 'シーズン2', + 'season_number': 2, 'episode': '第1話 「チーズケーキ」 「モーニング再び」', 'episode_number': 1, }, @@ -220,7 +249,7 @@ class AbemaTVIE(AbemaTVBaseIE): 'title': 'ゆるキャン△ SEASON2 全話一挙【無料ビデオ72時間】', 'series': 'ゆるキャン△ SEASON2', 'episode': 'ゆるキャン△ SEASON2 全話一挙【無料ビデオ72時間】', - 'series_number': 2, + 'season_number': 2, 'episode_number': 1, 'description': 'md5:9c5a3172ae763278f9303922f0ea5b17', }, @@ -249,33 +278,6 @@ class AbemaTVIE(AbemaTVBaseIE): }] _TIMETABLE = None - def _perform_login(self, username, password): - self._get_device_token() - if self.cache.load(self._NETRC_MACHINE, username) and self._get_media_token(): - self.write_debug('Skipping logging in') - return - - if 
'@' in username: # don't strictly check if it's email address or not - ep, method = 'user/email', 'email' - else: - ep, method = 'oneTimePassword', 'userId' - - login_response = self._download_json( - f'https://api.abema.io/v1/auth/{ep}', None, note='Logging in', - data=json.dumps({ - method: username, - 'password': password - }).encode('utf-8'), headers={ - 'Authorization': f'bearer {self._get_device_token()}', - 'Origin': 'https://abema.tv', - 'Referer': 'https://abema.tv/', - 'Content-Type': 'application/json', - }) - - AbemaTVBaseIE._USERTOKEN = login_response['token'] - self._get_media_token(True) - self.cache.store(self._NETRC_MACHINE, username, AbemaTVBaseIE._USERTOKEN) - def _real_extract(self, url): # starting download using infojson from this extractor is undefined behavior, # and never be fixed in the future; you must trigger downloads by directly specifying URL. @@ -331,7 +333,7 @@ class AbemaTVIE(AbemaTVBaseIE): description = self._html_search_regex( (r'(.+?)

(.+?)(.+?)[^?/]+)' + _VALID_URL = r'https?://abema\.tv/video/title/(?P[^?/#]+)/?(?:\?(?:[^#]+&)?s=(?P[^&#]+))?' _PAGE_SIZE = 25 _TESTS = [{ - 'url': 'https://abema.tv/video/title/90-1597', + 'url': 'https://abema.tv/video/title/90-1887', 'info_dict': { - 'id': '90-1597', + 'id': '90-1887', 'title': 'シャッフルアイランド', + 'description': 'md5:61b2425308f41a5282a926edda66f178', }, 'playlist_mincount': 2, }, { @@ -432,41 +442,54 @@ class AbemaTVTitleIE(AbemaTVBaseIE): 'info_dict': { 'id': '193-132', 'title': '真心が届く~僕とスターのオフィス・ラブ!?~', + 'description': 'md5:9b59493d1f3a792bafbc7319258e7af8', }, 'playlist_mincount': 16, }, { - 'url': 'https://abema.tv/video/title/25-102', + 'url': 'https://abema.tv/video/title/25-1nzan-whrxe', 'info_dict': { - 'id': '25-102', - 'title': 'ソードアート・オンライン アリシゼーション', + 'id': '25-1nzan-whrxe', + 'title': 'ソードアート・オンライン', + 'description': 'md5:c094904052322e6978495532bdbf06e6', }, - 'playlist_mincount': 24, + 'playlist_mincount': 25, + }, { + 'url': 'https://abema.tv/video/title/26-2mzbynr-cph?s=26-2mzbynr-cph_s40', + 'info_dict': { + 'title': '〈物語〉シリーズ', + 'id': '26-2mzbynr-cph', + 'description': 'md5:e67873de1c88f360af1f0a4b84847a52', + }, + 'playlist_count': 59, }] - def _fetch_page(self, playlist_id, series_version, page): + def _fetch_page(self, playlist_id, series_version, season_id, page): + query = { + 'seriesVersion': series_version, + 'offset': str(page * self._PAGE_SIZE), + 'order': 'seq', + 'limit': str(self._PAGE_SIZE), + } + if season_id: + query['seasonId'] = season_id programs = self._call_api( f'v1/video/series/{playlist_id}/programs', playlist_id, note=f'Downloading page {page + 1}', - query={ - 'seriesVersion': series_version, - 'offset': str(page * self._PAGE_SIZE), - 'order': 'seq', - 'limit': str(self._PAGE_SIZE), - }) + query=query) yield from ( self.url_result(f'https://abema.tv/video/episode/{x}') for x in traverse_obj(programs, ('programs', ..., 'id'))) - def _entries(self, playlist_id, series_version): + def _entries(self, playlist_id, series_version, season_id): return OnDemandPagedList( - functools.partial(self._fetch_page, playlist_id, series_version), + functools.partial(self._fetch_page, playlist_id, series_version, season_id), self._PAGE_SIZE) def _real_extract(self, url): - playlist_id = self._match_id(url) + playlist_id, season_id = self._match_valid_url(url).group('id', 'season') series_info = self._call_api(f'v1/video/series/{playlist_id}', playlist_id) return self.playlist_result( - self._entries(playlist_id, series_info['version']), playlist_id=playlist_id, + self._entries(playlist_id, series_info['version'], season_id), playlist_id=playlist_id, playlist_title=series_info.get('title'), playlist_description=series_info.get('content')) diff --git a/plugins/youtube_download/yt_dlp/extractor/academicearth.py b/plugins/youtube_download/yt_dlp/extractor/academicearth.py index d9691cb..b997a02 100644 --- a/plugins/youtube_download/yt_dlp/extractor/academicearth.py +++ b/plugins/youtube_download/yt_dlp/extractor/academicearth.py @@ -4,7 +4,7 @@ from .common import InfoExtractor class AcademicEarthCourseIE(InfoExtractor): - _VALID_URL = r'^https?://(?:www\.)?academicearth\.org/playlists/(?P[^?#/]+)' + _VALID_URL = r'https?://(?:www\.)?academicearth\.org/playlists/(?P[^?#/]+)' IE_NAME = 'AcademicEarth:Course' _TEST = { 'url': 'http://academicearth.org/playlists/laws-of-nature/', diff --git a/plugins/youtube_download/yt_dlp/extractor/acast.py b/plugins/youtube_download/yt_dlp/extractor/acast.py index 427d04c..eb467cb 100644 --- 
a/plugins/youtube_download/yt_dlp/extractor/acast.py +++ b/plugins/youtube_download/yt_dlp/extractor/acast.py @@ -43,14 +43,14 @@ class ACastIE(ACastBaseIE): _VALID_URL = r'''(?x: https?:// (?: - (?:(?:embed|www)\.)?acast\.com/| + (?:(?:embed|www|shows)\.)?acast\.com/| play\.acast\.com/s/ ) - (?P[^/]+)/(?P[^/#?"]+) + (?P[^/?#]+)/(?:episodes/)?(?P[^/#?"]+) )''' _EMBED_REGEX = [rf'(?x)]+\bsrc=[\'"](?P{_VALID_URL})'] _TESTS = [{ - 'url': 'https://www.acast.com/sparpodcast/2.raggarmordet-rosterurdetforflutna', + 'url': 'https://shows.acast.com/sparpodcast/episodes/2.raggarmordet-rosterurdetforflutna', 'info_dict': { 'id': '2a92b283-1a75-4ad8-8396-499c641de0d9', 'ext': 'mp3', @@ -59,7 +59,7 @@ class ACastIE(ACastBaseIE): 'timestamp': 1477346700, 'upload_date': '20161024', 'duration': 2766, - 'creator': 'Third Ear Studio', + 'creators': ['Third Ear Studio'], 'series': 'Spår', 'episode': '2. Raggarmordet - Röster ur det förflutna', 'thumbnail': 'https://assets.pippa.io/shows/616ebe1886d7b1398620b943/616ebe33c7e6e70013cae7da.jpg', @@ -67,13 +67,16 @@ class ACastIE(ACastBaseIE): 'display_id': '2.raggarmordet-rosterurdetforflutna', 'season_number': 4, 'season': 'Season 4', - } + }, }, { 'url': 'http://embed.acast.com/adambuxton/ep.12-adam-joeschristmaspodcast2015', 'only_matching': True, }, { 'url': 'https://play.acast.com/s/rattegangspodden/s04e09styckmordetihelenelund-del2-2', 'only_matching': True, + }, { + 'url': 'https://www.acast.com/sparpodcast/2.raggarmordet-rosterurdetforflutna', + 'only_matching': True, }, { 'url': 'https://play.acast.com/s/sparpodcast/2a92b283-1a75-4ad8-8396-499c641de0d9', 'only_matching': True, @@ -93,13 +96,13 @@ class ACastIE(ACastBaseIE): 'series': 'Democracy Sausage with Mark Kenny', 'timestamp': 1684826362, 'description': 'md5:feabe1fc5004c78ee59c84a46bf4ba16', - } + }, }] def _real_extract(self, url): channel, display_id = self._match_valid_url(url).groups() episode = self._call_api( - '%s/episodes/%s' % (channel, display_id), + f'{channel}/episodes/{display_id}', display_id, {'showInfo': 'true'}) return self._extract_episode( episode, self._extract_show_info(episode.get('show') or {})) @@ -110,7 +113,7 @@ class ACastChannelIE(ACastBaseIE): _VALID_URL = r'''(?x) https?:// (?: - (?:www\.)?acast\.com/| + (?:(?:www|shows)\.)?acast\.com/| play\.acast\.com/s/ ) (?P[^/#?]+) @@ -120,17 +123,20 @@ class ACastChannelIE(ACastBaseIE): 'info_dict': { 'id': '4efc5294-5385-4847-98bd-519799ce5786', 'title': 'Today in Focus', - 'description': 'md5:c09ce28c91002ce4ffce71d6504abaae', + 'description': 'md5:feca253de9947634605080cd9eeea2bf', }, 'playlist_mincount': 200, }, { 'url': 'http://play.acast.com/s/ft-banking-weekly', 'only_matching': True, + }, { + 'url': 'https://shows.acast.com/sparpodcast', + 'only_matching': True, }] @classmethod def suitable(cls, url): - return False if ACastIE.suitable(url) else super(ACastChannelIE, cls).suitable(url) + return False if ACastIE.suitable(url) else super().suitable(url) def _real_extract(self, url): show_slug = self._match_id(url) diff --git a/plugins/youtube_download/yt_dlp/extractor/acfun.py b/plugins/youtube_download/yt_dlp/extractor/acfun.py index dc57929..28559ba 100644 --- a/plugins/youtube_download/yt_dlp/extractor/acfun.py +++ b/plugins/youtube_download/yt_dlp/extractor/acfun.py @@ -3,9 +3,10 @@ from ..utils import ( float_or_none, format_field, int_or_none, - traverse_obj, parse_codecs, parse_qs, + str_or_none, + traverse_obj, ) @@ -24,7 +25,7 @@ class AcFunVideoBaseIE(InfoExtractor): 'width': int_or_none(video.get('width')), 
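# Illustrative sketch (hypothetical, not part of this patch): the AcFunVideoBaseIE
# format dict being assembled here spreads parse_codecs() into the mapping, so the
# single 'codecs' string from the API is split into separate vcodec/acodec fields.
# The `video` payload below is made up for demonstration only.
from yt_dlp.utils import float_or_none, int_or_none, parse_codecs

video = {'width': 1920, 'height': 1080, 'avgBitrate': 2500, 'codecs': 'avc1.64001f,mp4a.40.2'}
fmt = {
    'width': int_or_none(video.get('width')),
    'height': int_or_none(video.get('height')),
    'tbr': float_or_none(video.get('avgBitrate')),
    # parse_codecs() yields keys such as 'vcodec' and 'acodec', merged in via **
    **parse_codecs(video.get('codecs', '')),
}
print(fmt)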
'height': int_or_none(video.get('height')), 'tbr': float_or_none(video.get('avgBitrate')), - **parse_codecs(video.get('codecs', '')) + **parse_codecs(video.get('codecs', '')), }) return { @@ -76,7 +77,7 @@ class AcFunVideoIE(AcFunVideoBaseIE): 'comment_count': int, 'thumbnail': r're:^https?://.*\.(jpg|jpeg)', 'description': 'md5:67583aaf3a0f933bd606bc8a2d3ebb17', - } + }, }] def _real_extract(self, url): @@ -129,7 +130,7 @@ class AcFunBangumiIE(AcFunVideoBaseIE): 'title': '红孩儿之趴趴蛙寻石记 第5话 ', 'duration': 760.0, 'season': '红孩儿之趴趴蛙寻石记', - 'season_id': 5023171, + 'season_id': '5023171', 'season_number': 1, # series has only 1 season 'episode': 'Episode 5', 'episode_number': 5, @@ -146,7 +147,7 @@ class AcFunBangumiIE(AcFunVideoBaseIE): 'title': '叽歪老表(第二季) 第5话 坚不可摧', 'season': '叽歪老表(第二季)', 'season_number': 2, - 'season_id': 6065485, + 'season_id': '6065485', 'episode': '坚不可摧', 'episode_number': 5, 'upload_date': '20220324', @@ -191,7 +192,7 @@ class AcFunBangumiIE(AcFunVideoBaseIE): 'title': json_bangumi_data.get('showTitle'), 'thumbnail': json_bangumi_data.get('image'), 'season': json_bangumi_data.get('bangumiTitle'), - 'season_id': season_id, + 'season_id': str_or_none(season_id), 'season_number': season_number, 'episode': json_bangumi_data.get('title'), 'episode_number': episode_number, diff --git a/plugins/youtube_download/yt_dlp/extractor/adn.py b/plugins/youtube_download/yt_dlp/extractor/adn.py index b59dbc8..7dff405 100644 --- a/plugins/youtube_download/yt_dlp/extractor/adn.py +++ b/plugins/youtube_download/yt_dlp/extractor/adn.py @@ -3,33 +3,53 @@ import binascii import json import os import random +import time from .common import InfoExtractor from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7 -from ..compat import compat_b64decode from ..networking.exceptions import HTTPError from ..utils import ( - ass_subtitles_timecode, - bytes_to_intlist, - bytes_to_long, ExtractorError, + ass_subtitles_timecode, + bytes_to_long, float_or_none, int_or_none, - intlist_to_bytes, + join_nonempty, long_to_bytes, + parse_iso8601, pkcs1pad, + str_or_none, strip_or_none, try_get, unified_strdate, urlencode_postdata, ) +from ..utils.traversal import traverse_obj -class ADNIE(InfoExtractor): +class ADNBaseIE(InfoExtractor): IE_DESC = 'Animation Digital Network' - _VALID_URL = r'https?://(?:www\.)?(?:animation|anime)digitalnetwork\.fr/video/[^/]+/(?P\d+)' + _NETRC_MACHINE = 'animationdigitalnetwork' + _BASE = 'animationdigitalnetwork.fr' + _API_BASE_URL = f'https://gw.api.{_BASE}/' + _PLAYER_BASE_URL = f'{_API_BASE_URL}player/' + _HEADERS = {} + _LOGIN_ERR_MESSAGE = 'Unable to log in' + _RSA_KEY = (0x9B42B08905199A5CCE2026274399CA560ECB209EE9878A708B1C0812E1BB8CB5D1FB7441861147C1A1F2F3A0476DD63A9CAC20D3E983613346850AA6CB38F16DC7D720FD7D86FC6E5B3D5BBC72E14CD0BF9E869F2CEA2CCAD648F1DCE38F1FF916CEFB2D339B64AA0264372344BC775E265E8A852F88144AB0BD9AA06C1A4ABB, 65537) + _POS_ALIGN_MAP = { + 'start': 1, + 'end': 3, + } + _LINE_ALIGN_MAP = { + 'middle': 8, + 'end': 4, + } + + +class ADNIE(ADNBaseIE): + _VALID_URL = r'https?://(?:www\.)?animationdigitalnetwork\.com/(?:(?Pde)/)?video/[^/?#]+/(?P\d+)' _TESTS = [{ - 'url': 'https://animationdigitalnetwork.fr/video/fruits-basket/9841-episode-1-a-ce-soir', + 'url': 'https://animationdigitalnetwork.com/video/558-fruits-basket/9841-episode-1-a-ce-soir', 'md5': '1c9ef066ceb302c86f80c2b371615261', 'info_dict': { 'id': '9841', @@ -44,29 +64,32 @@ class ADNIE(InfoExtractor): 'season_number': 1, 'episode': 'À ce soir !', 'episode_number': 1, + 'thumbnail': str, + 'season': 
'Season 1', }, - 'skip': 'Only available in region (FR, ...)', + 'skip': 'Only available in French and German speaking Europe', }, { - 'url': 'http://animedigitalnetwork.fr/video/blue-exorcist-kyoto-saga/7778-episode-1-debut-des-hostilites', - 'only_matching': True, + 'url': 'https://animationdigitalnetwork.com/de/video/973-the-eminence-in-shadow/23550-folge-1', + 'md5': '5c5651bf5791fa6fcd7906012b9d94e8', + 'info_dict': { + 'id': '23550', + 'ext': 'mp4', + 'episode_number': 1, + 'duration': 1417, + 'release_date': '20231004', + 'series': 'The Eminence in Shadow', + 'season_number': 2, + 'episode': str, + 'title': str, + 'thumbnail': str, + 'season': 'Season 2', + 'comment_count': int, + 'average_rating': float, + 'description': str, + }, + # 'skip': 'Only available in French and German speaking Europe', }] - _NETRC_MACHINE = 'animationdigitalnetwork' - _BASE = 'animationdigitalnetwork.fr' - _API_BASE_URL = 'https://gw.api.' + _BASE + '/' - _PLAYER_BASE_URL = _API_BASE_URL + 'player/' - _HEADERS = {} - _LOGIN_ERR_MESSAGE = 'Unable to log in' - _RSA_KEY = (0x9B42B08905199A5CCE2026274399CA560ECB209EE9878A708B1C0812E1BB8CB5D1FB7441861147C1A1F2F3A0476DD63A9CAC20D3E983613346850AA6CB38F16DC7D720FD7D86FC6E5B3D5BBC72E14CD0BF9E869F2CEA2CCAD648F1DCE38F1FF916CEFB2D339B64AA0264372344BC775E265E8A852F88144AB0BD9AA06C1A4ABB, 65537) - _POS_ALIGN_MAP = { - 'start': 1, - 'end': 3, - } - _LINE_ALIGN_MAP = { - 'middle': 8, - 'end': 4, - } - def _get_subtitles(self, sub_url, video_id): if not sub_url: return None @@ -83,9 +106,9 @@ class ADNIE(InfoExtractor): # http://animationdigitalnetwork.fr/components/com_vodvideo/videojs/adn-vjs.min.js dec_subtitles = unpad_pkcs7(aes_cbc_decrypt_bytes( - compat_b64decode(enc_subtitles[24:]), + base64.b64decode(enc_subtitles[24:]), binascii.unhexlify(self._K + '7fac1178830cfe0c'), - compat_b64decode(enc_subtitles[:24]))) + base64.b64decode(enc_subtitles[:24]))) subtitles_json = self._parse_json(dec_subtitles.decode(), None, fatal=False) if not subtitles_json: return None @@ -108,7 +131,7 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' if start is None or end is None or text is None: continue alignment = self._POS_ALIGN_MAP.get(position_align, 2) + self._LINE_ALIGN_MAP.get(line_align, 0) - ssa += os.linesep + 'Dialogue: Marked=0,%s,%s,Default,,0,0,0,,%s%s' % ( + ssa += os.linesep + 'Dialogue: Marked=0,{},{},Default,,0,0,0,,{}{}'.format( ass_subtitles_timecode(start), ass_subtitles_timecode(end), '{\\a%d}' % alignment if alignment != 2 else '', @@ -116,6 +139,8 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' if sub_lang == 'vostf': sub_lang = 'fr' + elif sub_lang == 'vostde': + sub_lang = 'de' subtitles.setdefault(sub_lang, []).extend([{ 'ext': 'json', 'data': json.dumps(sub), @@ -137,7 +162,7 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' 'username': username, })) or {}).get('accessToken') if access_token: - self._HEADERS = {'authorization': 'Bearer ' + access_token} + self._HEADERS['Authorization'] = f'Bearer {access_token}' except ExtractorError as e: message = None if isinstance(e.cause, HTTPError) and e.cause.status == 401: @@ -147,8 +172,9 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' self.report_warning(message or self._LOGIN_ERR_MESSAGE) def _real_extract(self, url): - video_id = self._match_id(url) - video_base_url = self._PLAYER_BASE_URL + 'video/%s/' % video_id + lang, video_id = self._match_valid_url(url).group('lang', 'id') + 
self._HEADERS['X-Target-Distribution'] = lang or 'fr' + video_base_url = self._PLAYER_BASE_URL + f'video/{video_id}/' player = self._download_json( video_base_url + 'configuration', video_id, 'Downloading player config JSON metadata', @@ -157,26 +183,29 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' user = options['user'] if not user.get('hasAccess'): - self.raise_login_required() + start_date = traverse_obj(options, ('video', 'startDate', {str})) + if (parse_iso8601(start_date) or 0) > time.time(): + raise ExtractorError(f'This video is not available yet. Release date: {start_date}', expected=True) + self.raise_login_required('This video requires a subscription', method='password') token = self._download_json( user.get('refreshTokenUrl') or (self._PLAYER_BASE_URL + 'refresh/token'), video_id, 'Downloading access token', headers={ - 'x-player-refresh-token': user['refreshToken'] + 'X-Player-Refresh-Token': user['refreshToken'], }, data=b'')['token'] links_url = try_get(options, lambda x: x['video']['url']) or (video_base_url + 'link') self._K = ''.join(random.choices('0123456789abcdef', k=16)) - message = bytes_to_intlist(json.dumps({ + message = list(json.dumps({ 'k': self._K, 't': token, - })) + }).encode()) # Sometimes authentication fails for no good reason, retry with # a different random padding links_data = None for _ in range(3): - padded_message = intlist_to_bytes(pkcs1pad(message, 128)) + padded_message = bytes(pkcs1pad(message, 128)) n, e = self._RSA_KEY encrypted_message = long_to_bytes(pow(bytes_to_long(padded_message), e, n)) authorization = base64.b64encode(encrypted_message).decode() @@ -184,12 +213,13 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' try: links_data = self._download_json( links_url, video_id, 'Downloading links JSON metadata', headers={ - 'X-Player-Token': authorization + 'X-Player-Token': authorization, + **self._HEADERS, }, query={ 'freeWithAds': 'true', 'adaptive': 'false', 'withMetadata': 'true', - 'source': 'Web' + 'source': 'Web', }) break except ExtractorError as e: @@ -202,7 +232,7 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' error = self._parse_json(e.cause.response.read(), video_id) message = error.get('message') - if e.cause.code == 403 and error.get('code') == 'player-bad-geolocation-country': + if e.cause.status == 403 and error.get('code') == 'player-bad-geolocation-country': self.raise_geo_restricted(msg=message) raise ExtractorError(message) else: @@ -221,7 +251,8 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' for quality, load_balancer_url in qualities.items(): load_balancer_data = self._download_json( load_balancer_url, video_id, - 'Downloading %s %s JSON metadata' % (format_id, quality), + f'Downloading {format_id} {quality} JSON metadata', + headers=self._HEADERS, fatal=False) or {} m3u8_url = load_balancer_data.get('location') if not m3u8_url: @@ -232,11 +263,17 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' if format_id == 'vf': for f in m3u8_formats: f['language'] = 'fr' + elif format_id == 'vde': + for f in m3u8_formats: + f['language'] = 'de' formats.extend(m3u8_formats) + if not formats: + self.raise_login_required('This video requires a subscription', method='password') + video = (self._download_json( - self._API_BASE_URL + 'video/%s' % video_id, video_id, - 'Downloading additional video metadata', fatal=False) or {}).get('video') or {} + self._API_BASE_URL + 
f'video/{video_id}', video_id, + 'Downloading additional video metadata', fatal=False, headers=self._HEADERS) or {}).get('video') or {} show = video.get('show') or {} return { @@ -255,3 +292,38 @@ Format: Marked,Start,End,Style,Name,MarginL,MarginR,MarginV,Effect,Text''' 'average_rating': float_or_none(video.get('rating') or metas.get('rating')), 'comment_count': int_or_none(video.get('commentsCount')), } + + +class ADNSeasonIE(ADNBaseIE): + _VALID_URL = r'https?://(?:www\.)?animationdigitalnetwork\.com/(?:(?Pde)/)?video/(?P\d+)[^/?#]*/?(?:$|[#?])' + _TESTS = [{ + 'url': 'https://animationdigitalnetwork.com/video/911-tokyo-mew-mew-new', + 'playlist_count': 12, + 'info_dict': { + 'id': '911', + 'title': 'Tokyo Mew Mew New', + }, + # 'skip': 'Only available in French end German speaking Europe', + }] + + def _real_extract(self, url): + lang, video_show_slug = self._match_valid_url(url).group('lang', 'id') + self._HEADERS['X-Target-Distribution'] = lang or 'fr' + show = self._download_json( + f'{self._API_BASE_URL}show/{video_show_slug}/', video_show_slug, + 'Downloading show JSON metadata', headers=self._HEADERS)['show'] + show_id = str(show['id']) + episodes = self._download_json( + f'{self._API_BASE_URL}video/show/{show_id}', video_show_slug, + 'Downloading episode list', headers=self._HEADERS, query={ + 'order': 'asc', + 'limit': '-1', + }) + + def entries(): + for episode_id in traverse_obj(episodes, ('videos', ..., 'id', {str_or_none})): + yield self.url_result(join_nonempty( + 'https://animationdigitalnetwork.com', lang, 'video', + video_show_slug, episode_id, delim='/'), ADNIE, episode_id) + + return self.playlist_result(entries(), show_id, show.get('title')) diff --git a/plugins/youtube_download/yt_dlp/extractor/adobeconnect.py b/plugins/youtube_download/yt_dlp/extractor/adobeconnect.py index 8963b12..b2934d6 100644 --- a/plugins/youtube_download/yt_dlp/extractor/adobeconnect.py +++ b/plugins/youtube_download/yt_dlp/extractor/adobeconnect.py @@ -1,8 +1,6 @@ +import urllib.parse + from .common import InfoExtractor -from ..compat import ( - compat_parse_qs, - compat_urlparse, -) class AdobeConnectIE(InfoExtractor): @@ -12,13 +10,13 @@ class AdobeConnectIE(InfoExtractor): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) title = self._html_extract_title(webpage) - qs = compat_parse_qs(self._search_regex(r"swfUrl\s*=\s*'([^']+)'", webpage, 'swf url').split('?')[1]) + qs = urllib.parse.parse_qs(self._search_regex(r"swfUrl\s*=\s*'([^']+)'", webpage, 'swf url').split('?')[1]) is_live = qs.get('isLive', ['false'])[0] == 'true' formats = [] for con_string in qs['conStrings'][0].split(','): formats.append({ 'format_id': con_string.split('://')[0], - 'app': compat_urlparse.quote('?' + con_string.split('?')[1] + 'flvplayerapp/' + qs['appInstance'][0]), + 'app': urllib.parse.quote('?' 
+ con_string.split('?')[1] + 'flvplayerapp/' + qs['appInstance'][0]), 'ext': 'flv', 'play_path': 'mp4:' + qs['streamName'][0], 'rtmp_conn': 'S:' + qs['ticket'][0], diff --git a/plugins/youtube_download/yt_dlp/extractor/adobepass.py b/plugins/youtube_download/yt_dlp/extractor/adobepass.py index 5eed0ca..f1b8779 100644 --- a/plugins/youtube_download/yt_dlp/extractor/adobepass.py +++ b/plugins/youtube_download/yt_dlp/extractor/adobepass.py @@ -2,10 +2,10 @@ import getpass import json import re import time +import urllib.parse import xml.etree.ElementTree as etree from .common import InfoExtractor -from ..compat import compat_urlparse from ..networking.exceptions import HTTPError from ..utils import ( NO_DEFAULT, @@ -68,7 +68,7 @@ MSO_INFO = { }, 'Philo': { 'name': 'Philo', - 'username_field': 'ident' + 'username_field': 'ident', }, 'Verizon': { 'name': 'Verizon FiOS', @@ -81,1258 +81,1258 @@ MSO_INFO = { 'password_field': 'j_password', }, 'thr030': { - 'name': '3 Rivers Communications' + 'name': '3 Rivers Communications', }, 'com140': { - 'name': 'Access Montana' + 'name': 'Access Montana', }, 'acecommunications': { - 'name': 'AcenTek' + 'name': 'AcenTek', }, 'acm010': { - 'name': 'Acme Communications' + 'name': 'Acme Communications', }, 'ada020': { - 'name': 'Adams Cable Service' + 'name': 'Adams Cable Service', }, 'alb020': { - 'name': 'Albany Mutual Telephone' + 'name': 'Albany Mutual Telephone', }, 'algona': { - 'name': 'Algona Municipal Utilities' + 'name': 'Algona Municipal Utilities', }, 'allwest': { - 'name': 'All West Communications' + 'name': 'All West Communications', }, 'all025': { - 'name': 'Allen\'s Communications' + 'name': 'Allen\'s Communications', }, 'spl010': { - 'name': 'Alliance Communications' + 'name': 'Alliance Communications', }, 'all070': { - 'name': 'ALLO Communications' + 'name': 'ALLO Communications', }, 'alpine': { - 'name': 'Alpine Communications' + 'name': 'Alpine Communications', }, 'hun015': { - 'name': 'American Broadband' + 'name': 'American Broadband', }, 'nwc010': { - 'name': 'American Broadband Missouri' + 'name': 'American Broadband Missouri', }, 'com130-02': { - 'name': 'American Community Networks' + 'name': 'American Community Networks', }, 'com130-01': { - 'name': 'American Warrior Networks' + 'name': 'American Warrior Networks', }, 'tom020': { - 'name': 'Amherst Telephone/Tomorrow Valley' + 'name': 'Amherst Telephone/Tomorrow Valley', }, 'tvc020': { - 'name': 'Andycable' + 'name': 'Andycable', }, 'arkwest': { - 'name': 'Arkwest Communications' + 'name': 'Arkwest Communications', }, 'art030': { - 'name': 'Arthur Mutual Telephone Company' + 'name': 'Arthur Mutual Telephone Company', }, 'arvig': { - 'name': 'Arvig' + 'name': 'Arvig', }, 'nttcash010': { - 'name': 'Ashland Home Net' + 'name': 'Ashland Home Net', }, 'astound': { - 'name': 'Astound (now Wave)' + 'name': 'Astound (now Wave)', }, 'dix030': { - 'name': 'ATC Broadband' + 'name': 'ATC Broadband', }, 'ara010': { - 'name': 'ATC Communications' + 'name': 'ATC Communications', }, 'she030-02': { - 'name': 'Ayersville Communications' + 'name': 'Ayersville Communications', }, 'baldwin': { - 'name': 'Baldwin Lightstream' + 'name': 'Baldwin Lightstream', }, 'bal040': { - 'name': 'Ballard TV' + 'name': 'Ballard TV', }, 'cit025': { - 'name': 'Bardstown Cable TV' + 'name': 'Bardstown Cable TV', }, 'bay030': { - 'name': 'Bay Country Communications' + 'name': 'Bay Country Communications', }, 'tel095': { - 'name': 'Beaver Creek Cooperative Telephone' + 'name': 'Beaver Creek Cooperative Telephone', }, 
'bea020': { - 'name': 'Beaver Valley Cable' + 'name': 'Beaver Valley Cable', }, 'bee010': { - 'name': 'Bee Line Cable' + 'name': 'Bee Line Cable', }, 'wir030': { - 'name': 'Beehive Broadband' + 'name': 'Beehive Broadband', }, 'bra020': { - 'name': 'BELD' + 'name': 'BELD', }, 'bel020': { - 'name': 'Bellevue Municipal Cable' + 'name': 'Bellevue Municipal Cable', }, 'vol040-01': { - 'name': 'Ben Lomand Connect / BLTV' + 'name': 'Ben Lomand Connect / BLTV', }, 'bev010': { - 'name': 'BEVCOMM' + 'name': 'BEVCOMM', }, 'big020': { - 'name': 'Big Sandy Broadband' + 'name': 'Big Sandy Broadband', }, 'ble020': { - 'name': 'Bledsoe Telephone Cooperative' + 'name': 'Bledsoe Telephone Cooperative', }, 'bvt010': { - 'name': 'Blue Valley Tele-Communications' + 'name': 'Blue Valley Tele-Communications', }, 'bra050': { - 'name': 'Brandenburg Telephone Co.' + 'name': 'Brandenburg Telephone Co.', }, 'bte010': { - 'name': 'Bristol Tennessee Essential Services' + 'name': 'Bristol Tennessee Essential Services', }, 'annearundel': { - 'name': 'Broadstripe' + 'name': 'Broadstripe', }, 'btc010': { - 'name': 'BTC Communications' + 'name': 'BTC Communications', }, 'btc040': { - 'name': 'BTC Vision - Nahunta' + 'name': 'BTC Vision - Nahunta', }, 'bul010': { - 'name': 'Bulloch Telephone Cooperative' + 'name': 'Bulloch Telephone Cooperative', }, 'but010': { - 'name': 'Butler-Bremer Communications' + 'name': 'Butler-Bremer Communications', }, 'tel160-csp': { - 'name': 'C Spire SNAP' + 'name': 'C Spire SNAP', }, 'csicable': { - 'name': 'Cable Services Inc.' + 'name': 'Cable Services Inc.', }, 'cableamerica': { - 'name': 'CableAmerica' + 'name': 'CableAmerica', }, 'cab038': { - 'name': 'CableSouth Media 3' + 'name': 'CableSouth Media 3', }, 'weh010-camtel': { - 'name': 'Cam-Tel Company' + 'name': 'Cam-Tel Company', }, 'car030': { - 'name': 'Cameron Communications' + 'name': 'Cameron Communications', }, 'canbytel': { - 'name': 'Canby Telcom' + 'name': 'Canby Telcom', }, 'crt020': { - 'name': 'CapRock Tv' + 'name': 'CapRock Tv', }, 'car050': { - 'name': 'Carnegie Cable' + 'name': 'Carnegie Cable', }, 'cas': { - 'name': 'CAS Cable' + 'name': 'CAS Cable', }, 'casscomm': { - 'name': 'CASSCOMM' + 'name': 'CASSCOMM', }, 'mid180-02': { - 'name': 'Catalina Broadband Solutions' + 'name': 'Catalina Broadband Solutions', }, 'cccomm': { - 'name': 'CC Communications' + 'name': 'CC Communications', }, 'nttccde010': { - 'name': 'CDE Lightband' + 'name': 'CDE Lightband', }, 'cfunet': { - 'name': 'Cedar Falls Utilities' + 'name': 'Cedar Falls Utilities', }, 'dem010-01': { - 'name': 'Celect-Bloomer Telephone Area' + 'name': 'Celect-Bloomer Telephone Area', }, 'dem010-02': { - 'name': 'Celect-Bruce Telephone Area' + 'name': 'Celect-Bruce Telephone Area', }, 'dem010-03': { - 'name': 'Celect-Citizens Connected Area' + 'name': 'Celect-Citizens Connected Area', }, 'dem010-04': { - 'name': 'Celect-Elmwood/Spring Valley Area' + 'name': 'Celect-Elmwood/Spring Valley Area', }, 'dem010-06': { - 'name': 'Celect-Mosaic Telecom' + 'name': 'Celect-Mosaic Telecom', }, 'dem010-05': { - 'name': 'Celect-West WI Telephone Area' + 'name': 'Celect-West WI Telephone Area', }, 'net010-02': { - 'name': 'Cellcom/Nsight Telservices' + 'name': 'Cellcom/Nsight Telservices', }, 'cen100': { - 'name': 'CentraCom' + 'name': 'CentraCom', }, 'nttccst010': { - 'name': 'Central Scott / CSTV' + 'name': 'Central Scott / CSTV', }, 'cha035': { - 'name': 'Chaparral CableVision' + 'name': 'Chaparral CableVision', }, 'cha050': { - 'name': 'Chariton Valley Communication Corporation, 
Inc.' + 'name': 'Chariton Valley Communication Corporation, Inc.', }, 'cha060': { - 'name': 'Chatmoss Cablevision' + 'name': 'Chatmoss Cablevision', }, 'nttcche010': { - 'name': 'Cherokee Communications' + 'name': 'Cherokee Communications', }, 'che050': { - 'name': 'Chesapeake Bay Communications' + 'name': 'Chesapeake Bay Communications', }, 'cimtel': { - 'name': 'Cim-Tel Cable, LLC.' + 'name': 'Cim-Tel Cable, LLC.', }, 'cit180': { - 'name': 'Citizens Cablevision - Floyd, VA' + 'name': 'Citizens Cablevision - Floyd, VA', }, 'cit210': { - 'name': 'Citizens Cablevision, Inc.' + 'name': 'Citizens Cablevision, Inc.', }, 'cit040': { - 'name': 'Citizens Fiber' + 'name': 'Citizens Fiber', }, 'cit250': { - 'name': 'Citizens Mutual' + 'name': 'Citizens Mutual', }, 'war040': { - 'name': 'Citizens Telephone Corporation' + 'name': 'Citizens Telephone Corporation', }, 'wat025': { - 'name': 'City Of Monroe' + 'name': 'City Of Monroe', }, 'wadsworth': { - 'name': 'CityLink' + 'name': 'CityLink', }, 'nor100': { - 'name': 'CL Tel' + 'name': 'CL Tel', }, 'cla010': { - 'name': 'Clarence Telephone and Cedar Communications' + 'name': 'Clarence Telephone and Cedar Communications', }, 'ser060': { - 'name': 'Clear Choice Communications' + 'name': 'Clear Choice Communications', }, 'tac020': { - 'name': 'Click! Cable TV' + 'name': 'Click! Cable TV', }, 'war020': { - 'name': 'CLICK1.NET' + 'name': 'CLICK1.NET', }, 'cml010': { - 'name': 'CML Telephone Cooperative Association' + 'name': 'CML Telephone Cooperative Association', }, 'cns': { - 'name': 'CNS' + 'name': 'CNS', }, 'com160': { - 'name': 'Co-Mo Connect' + 'name': 'Co-Mo Connect', }, 'coa020': { - 'name': 'Coast Communications' + 'name': 'Coast Communications', }, 'coa030': { - 'name': 'Coaxial Cable TV' + 'name': 'Coaxial Cable TV', }, 'mid055': { - 'name': 'Cobalt TV (Mid-State Community TV)' + 'name': 'Cobalt TV (Mid-State Community TV)', }, 'col070': { - 'name': 'Columbia Power & Water Systems' + 'name': 'Columbia Power & Water Systems', }, 'col080': { - 'name': 'Columbus Telephone' + 'name': 'Columbus Telephone', }, 'nor105': { - 'name': 'Communications 1 Cablevision, Inc.' 
+ 'name': 'Communications 1 Cablevision, Inc.', }, 'com150': { - 'name': 'Community Cable & Broadband' + 'name': 'Community Cable & Broadband', }, 'com020': { - 'name': 'Community Communications Company' + 'name': 'Community Communications Company', }, 'coy010': { - 'name': 'commZoom' + 'name': 'commZoom', }, 'com025': { - 'name': 'Complete Communication Services' + 'name': 'Complete Communication Services', }, 'cat020': { - 'name': 'Comporium' + 'name': 'Comporium', }, 'com071': { - 'name': 'ComSouth Telesys' + 'name': 'ComSouth Telesys', }, 'consolidatedcable': { - 'name': 'Consolidated' + 'name': 'Consolidated', }, 'conwaycorp': { - 'name': 'Conway Corporation' + 'name': 'Conway Corporation', }, 'coo050': { - 'name': 'Coon Valley Telecommunications Inc' + 'name': 'Coon Valley Telecommunications Inc', }, 'coo080': { - 'name': 'Cooperative Telephone Company' + 'name': 'Cooperative Telephone Company', }, 'cpt010': { - 'name': 'CP-TEL' + 'name': 'CP-TEL', }, 'cra010': { - 'name': 'Craw-Kan Telephone' + 'name': 'Craw-Kan Telephone', }, 'crestview': { - 'name': 'Crestview Cable Communications' + 'name': 'Crestview Cable Communications', }, 'cross': { - 'name': 'Cross TV' + 'name': 'Cross TV', }, 'cro030': { - 'name': 'Crosslake Communications' + 'name': 'Crosslake Communications', }, 'ctc040': { - 'name': 'CTC - Brainerd MN' + 'name': 'CTC - Brainerd MN', }, 'phe030': { - 'name': 'CTV-Beam - East Alabama' + 'name': 'CTV-Beam - East Alabama', }, 'cun010': { - 'name': 'Cunningham Telephone & Cable' + 'name': 'Cunningham Telephone & Cable', }, 'dpc010': { - 'name': 'D & P Communications' + 'name': 'D & P Communications', }, 'dak030': { - 'name': 'Dakota Central Telecommunications' + 'name': 'Dakota Central Telecommunications', }, 'nttcdel010': { - 'name': 'Delcambre Telephone LLC' + 'name': 'Delcambre Telephone LLC', }, 'tel160-del': { - 'name': 'Delta Telephone Company' + 'name': 'Delta Telephone Company', }, 'sal040': { - 'name': 'DiamondNet' + 'name': 'DiamondNet', }, 'ind060-dc': { - 'name': 'Direct Communications' + 'name': 'Direct Communications', }, 'doy010': { - 'name': 'Doylestown Cable TV' + 'name': 'Doylestown Cable TV', }, 'dic010': { - 'name': 'DRN' + 'name': 'DRN', }, 'dtc020': { - 'name': 'DTC' + 'name': 'DTC', }, 'dtc010': { - 'name': 'DTC Cable (Delhi)' + 'name': 'DTC Cable (Delhi)', }, 'dum010': { - 'name': 'Dumont Telephone Company' + 'name': 'Dumont Telephone Company', }, 'dun010': { - 'name': 'Dunkerton Telephone Cooperative' + 'name': 'Dunkerton Telephone Cooperative', }, 'cci010': { - 'name': 'Duo County Telecom' + 'name': 'Duo County Telecom', }, 'eagle': { - 'name': 'Eagle Communications' + 'name': 'Eagle Communications', }, 'weh010-east': { - 'name': 'East Arkansas Cable TV' + 'name': 'East Arkansas Cable TV', }, 'eatel': { - 'name': 'EATEL Video, LLC' + 'name': 'EATEL Video, LLC', }, 'ell010': { - 'name': 'ECTA' + 'name': 'ECTA', }, 'emerytelcom': { - 'name': 'Emery Telcom Video LLC' + 'name': 'Emery Telcom Video LLC', }, 'nor200': { - 'name': 'Empire Access' + 'name': 'Empire Access', }, 'endeavor': { - 'name': 'Endeavor Communications' + 'name': 'Endeavor Communications', }, 'sun045': { - 'name': 'Enhanced Telecommunications Corporation' + 'name': 'Enhanced Telecommunications Corporation', }, 'mid030': { - 'name': 'enTouch' + 'name': 'enTouch', }, 'epb020': { - 'name': 'EPB Smartnet' + 'name': 'EPB Smartnet', }, 'jea010': { - 'name': 'EPlus Broadband' + 'name': 'EPlus Broadband', }, 'com065': { - 'name': 'ETC' + 'name': 'ETC', }, 'ete010': { - 'name': 'Etex 
Communications' + 'name': 'Etex Communications', }, 'fbc-tele': { - 'name': 'F&B Communications' + 'name': 'F&B Communications', }, 'fal010': { - 'name': 'Falcon Broadband' + 'name': 'Falcon Broadband', }, 'fam010': { - 'name': 'FamilyView CableVision' + 'name': 'FamilyView CableVision', }, 'far020': { - 'name': 'Farmers Mutual Telephone Company' + 'name': 'Farmers Mutual Telephone Company', }, 'fay010': { - 'name': 'Fayetteville Public Utilities' + 'name': 'Fayetteville Public Utilities', }, 'sal060': { - 'name': 'fibrant' + 'name': 'fibrant', }, 'fid010': { - 'name': 'Fidelity Communications' + 'name': 'Fidelity Communications', }, 'for030': { - 'name': 'FJ Communications' + 'name': 'FJ Communications', }, 'fli020': { - 'name': 'Flint River Communications' + 'name': 'Flint River Communications', }, 'far030': { - 'name': 'FMT - Jesup' + 'name': 'FMT - Jesup', }, 'foo010': { - 'name': 'Foothills Communications' + 'name': 'Foothills Communications', }, 'for080': { - 'name': 'Forsyth CableNet' + 'name': 'Forsyth CableNet', }, 'fbcomm': { - 'name': 'Frankfort Plant Board' + 'name': 'Frankfort Plant Board', }, 'tel160-fra': { - 'name': 'Franklin Telephone Company' + 'name': 'Franklin Telephone Company', }, 'nttcftc010': { - 'name': 'FTC' + 'name': 'FTC', }, 'fullchannel': { - 'name': 'Full Channel, Inc.' + 'name': 'Full Channel, Inc.', }, 'gar040': { - 'name': 'Gardonville Cooperative Telephone Association' + 'name': 'Gardonville Cooperative Telephone Association', }, 'gbt010': { - 'name': 'GBT Communications, Inc.' + 'name': 'GBT Communications, Inc.', }, 'tec010': { - 'name': 'Genuine Telecom' + 'name': 'Genuine Telecom', }, 'clr010': { - 'name': 'Giant Communications' + 'name': 'Giant Communications', }, 'gla010': { - 'name': 'Glasgow EPB' + 'name': 'Glasgow EPB', }, 'gle010': { - 'name': 'Glenwood Telecommunications' + 'name': 'Glenwood Telecommunications', }, 'gra060': { - 'name': 'GLW Broadband Inc.' + 'name': 'GLW Broadband Inc.', }, 'goldenwest': { - 'name': 'Golden West Cablevision' + 'name': 'Golden West Cablevision', }, 'vis030': { - 'name': 'Grantsburg Telcom' + 'name': 'Grantsburg Telcom', }, 'gpcom': { - 'name': 'Great Plains Communications' + 'name': 'Great Plains Communications', }, 'gri010': { - 'name': 'Gridley Cable Inc' + 'name': 'Gridley Cable Inc', }, 'hbc010': { - 'name': 'H&B Cable Services' + 'name': 'H&B Cable Services', }, 'hae010': { - 'name': 'Haefele TV Inc.' + 'name': 'Haefele TV Inc.', }, 'htc010': { - 'name': 'Halstad Telephone Company' + 'name': 'Halstad Telephone Company', }, 'har005': { - 'name': 'Harlan Municipal Utilities' + 'name': 'Harlan Municipal Utilities', }, 'har020': { - 'name': 'Hart Communications' + 'name': 'Hart Communications', }, 'ced010': { - 'name': 'Hartelco TV' + 'name': 'Hartelco TV', }, 'hea040': { - 'name': 'Heart of Iowa Communications Cooperative' + 'name': 'Heart of Iowa Communications Cooperative', }, 'htc020': { - 'name': 'Hickory Telephone Company' + 'name': 'Hickory Telephone Company', }, 'nttchig010': { - 'name': 'Highland Communication Services' + 'name': 'Highland Communication Services', }, 'hig030': { - 'name': 'Highland Media' + 'name': 'Highland Media', }, 'spc010': { - 'name': 'Hilliary Communications' + 'name': 'Hilliary Communications', }, 'hin020': { - 'name': 'Hinton CATV Co.' + 'name': 'Hinton CATV Co.', }, 'hometel': { - 'name': 'HomeTel Entertainment, Inc.' 
+ 'name': 'HomeTel Entertainment, Inc.', }, 'hoodcanal': { - 'name': 'Hood Canal Communications' + 'name': 'Hood Canal Communications', }, 'weh010-hope': { - 'name': 'Hope - Prescott Cable TV' + 'name': 'Hope - Prescott Cable TV', }, 'horizoncable': { - 'name': 'Horizon Cable TV, Inc.' + 'name': 'Horizon Cable TV, Inc.', }, 'hor040': { - 'name': 'Horizon Chillicothe Telephone' + 'name': 'Horizon Chillicothe Telephone', }, 'htc030': { - 'name': 'HTC Communications Co. - IL' + 'name': 'HTC Communications Co. - IL', }, 'htccomm': { - 'name': 'HTC Communications, Inc. - IA' + 'name': 'HTC Communications, Inc. - IA', }, 'wal005': { - 'name': 'Huxley Communications' + 'name': 'Huxley Communications', }, 'imon': { - 'name': 'ImOn Communications' + 'name': 'ImOn Communications', }, 'ind040': { - 'name': 'Independence Telecommunications' + 'name': 'Independence Telecommunications', }, 'rrc010': { - 'name': 'Inland Networks' + 'name': 'Inland Networks', }, 'stc020': { - 'name': 'Innovative Cable TV St Croix' + 'name': 'Innovative Cable TV St Croix', }, 'car100': { - 'name': 'Innovative Cable TV St Thomas-St John' + 'name': 'Innovative Cable TV St Thomas-St John', }, 'icc010': { - 'name': 'Inside Connect Cable' + 'name': 'Inside Connect Cable', }, 'int100': { - 'name': 'Integra Telecom' + 'name': 'Integra Telecom', }, 'int050': { - 'name': 'Interstate Telecommunications Coop' + 'name': 'Interstate Telecommunications Coop', }, 'irv010': { - 'name': 'Irvine Cable' + 'name': 'Irvine Cable', }, 'k2c010': { - 'name': 'K2 Communications' + 'name': 'K2 Communications', }, 'kal010': { - 'name': 'Kalida Telephone Company, Inc.' + 'name': 'Kalida Telephone Company, Inc.', }, 'kal030': { - 'name': 'Kalona Cooperative Telephone Company' + 'name': 'Kalona Cooperative Telephone Company', }, 'kmt010': { - 'name': 'KMTelecom' + 'name': 'KMTelecom', }, 'kpu010': { - 'name': 'KPU Telecommunications' + 'name': 'KPU Telecommunications', }, 'kuh010': { - 'name': 'Kuhn Communications, Inc.' + 'name': 'Kuhn Communications, Inc.', }, 'lak130': { - 'name': 'Lakeland Communications' + 'name': 'Lakeland Communications', }, 'lan010': { - 'name': 'Langco' + 'name': 'Langco', }, 'lau020': { - 'name': 'Laurel Highland Total Communications, Inc.' + 'name': 'Laurel Highland Total Communications, Inc.', }, 'leh010': { - 'name': 'Lehigh Valley Cooperative Telephone' + 'name': 'Lehigh Valley Cooperative Telephone', }, 'bra010': { - 'name': 'Limestone Cable/Bracken Cable' + 'name': 'Limestone Cable/Bracken Cable', }, 'loc020': { - 'name': 'LISCO' + 'name': 'LISCO', }, 'lit020': { - 'name': 'Litestream' + 'name': 'Litestream', }, 'tel140': { - 'name': 'LivCom' + 'name': 'LivCom', }, 'loc010': { - 'name': 'LocalTel Communications' + 'name': 'LocalTel Communications', }, 'weh010-longview': { - 'name': 'Longview - Kilgore Cable TV' + 'name': 'Longview - Kilgore Cable TV', }, 'lon030': { - 'name': 'Lonsdale Video Ventures, LLC' + 'name': 'Lonsdale Video Ventures, LLC', }, 'lns010': { - 'name': 'Lost Nation-Elwood Telephone Co.' + 'name': 'Lost Nation-Elwood Telephone Co.', }, 'nttclpc010': { - 'name': 'LPC Connect' + 'name': 'LPC Connect', }, 'lumos': { - 'name': 'Lumos Networks' + 'name': 'Lumos Networks', }, 'madison': { - 'name': 'Madison Communications' + 'name': 'Madison Communications', }, 'mad030': { - 'name': 'Madison County Cable Inc.' 
+ 'name': 'Madison County Cable Inc.', }, 'nttcmah010': { - 'name': 'Mahaska Communication Group' + 'name': 'Mahaska Communication Group', }, 'mar010': { - 'name': 'Marne & Elk Horn Telephone Company' + 'name': 'Marne & Elk Horn Telephone Company', }, 'mcc040': { - 'name': 'McClure Telephone Co.' + 'name': 'McClure Telephone Co.', }, 'mctv': { - 'name': 'MCTV' + 'name': 'MCTV', }, 'merrimac': { - 'name': 'Merrimac Communications Ltd.' + 'name': 'Merrimac Communications Ltd.', }, 'metronet': { - 'name': 'Metronet' + 'name': 'Metronet', }, 'mhtc': { - 'name': 'MHTC' + 'name': 'MHTC', }, 'midhudson': { - 'name': 'Mid-Hudson Cable' + 'name': 'Mid-Hudson Cable', }, 'midrivers': { - 'name': 'Mid-Rivers Communications' + 'name': 'Mid-Rivers Communications', }, 'mid045': { - 'name': 'Midstate Communications' + 'name': 'Midstate Communications', }, 'mil080': { - 'name': 'Milford Communications' + 'name': 'Milford Communications', }, 'min030': { - 'name': 'MINET' + 'name': 'MINET', }, 'nttcmin010': { - 'name': 'Minford TV' + 'name': 'Minford TV', }, 'san040-02': { - 'name': 'Mitchell Telecom' + 'name': 'Mitchell Telecom', }, 'mlg010': { - 'name': 'MLGC' + 'name': 'MLGC', }, 'mon060': { - 'name': 'Mon-Cre TVE' + 'name': 'Mon-Cre TVE', }, 'mou110': { - 'name': 'Mountain Telephone' + 'name': 'Mountain Telephone', }, 'mou050': { - 'name': 'Mountain Village Cable' + 'name': 'Mountain Village Cable', }, 'mtacomm': { - 'name': 'MTA Communications, LLC' + 'name': 'MTA Communications, LLC', }, 'mtc010': { - 'name': 'MTC Cable' + 'name': 'MTC Cable', }, 'med040': { - 'name': 'MTC Technologies' + 'name': 'MTC Technologies', }, 'man060': { - 'name': 'MTCC' + 'name': 'MTCC', }, 'mtc030': { - 'name': 'MTCO Communications' + 'name': 'MTCO Communications', }, 'mul050': { - 'name': 'Mulberry Telecommunications' + 'name': 'Mulberry Telecommunications', }, 'mur010': { - 'name': 'Murray Electric System' + 'name': 'Murray Electric System', }, 'musfiber': { - 'name': 'MUS FiberNET' + 'name': 'MUS FiberNET', }, 'mpw': { - 'name': 'Muscatine Power & Water' + 'name': 'Muscatine Power & Water', }, 'nttcsli010': { - 'name': 'myEVTV.com' + 'name': 'myEVTV.com', }, 'nor115': { - 'name': 'NCC' + 'name': 'NCC', }, 'nor260': { - 'name': 'NDTC' + 'name': 'NDTC', }, 'nctc': { - 'name': 'Nebraska Central Telecom, Inc.' 
+ 'name': 'Nebraska Central Telecom, Inc.', }, 'nel020': { - 'name': 'Nelsonville TV Cable' + 'name': 'Nelsonville TV Cable', }, 'nem010': { - 'name': 'Nemont' + 'name': 'Nemont', }, 'new075': { - 'name': 'New Hope Telephone Cooperative' + 'name': 'New Hope Telephone Cooperative', }, 'nor240': { - 'name': 'NICP' + 'name': 'NICP', }, 'cic010': { - 'name': 'NineStar Connect' + 'name': 'NineStar Connect', }, 'nktelco': { - 'name': 'NKTelco' + 'name': 'NKTelco', }, 'nortex': { - 'name': 'Nortex Communications' + 'name': 'Nortex Communications', }, 'nor140': { - 'name': 'North Central Telephone Cooperative' + 'name': 'North Central Telephone Cooperative', }, 'nor030': { - 'name': 'Northland Communications' + 'name': 'Northland Communications', }, 'nor075': { - 'name': 'Northwest Communications' + 'name': 'Northwest Communications', }, 'nor125': { - 'name': 'Norwood Light Broadband' + 'name': 'Norwood Light Broadband', }, 'net010': { - 'name': 'Nsight Telservices' + 'name': 'Nsight Telservices', }, 'dur010': { - 'name': 'Ntec' + 'name': 'Ntec', }, 'nts010': { - 'name': 'NTS Communications' + 'name': 'NTS Communications', }, 'new045': { - 'name': 'NU-Telecom' + 'name': 'NU-Telecom', }, 'nulink': { - 'name': 'NuLink' + 'name': 'NuLink', }, 'jam030': { - 'name': 'NVC' + 'name': 'NVC', }, 'far035': { - 'name': 'OmniTel Communications' + 'name': 'OmniTel Communications', }, 'onesource': { - 'name': 'OneSource Communications' + 'name': 'OneSource Communications', }, 'cit230': { - 'name': 'Opelika Power Services' + 'name': 'Opelika Power Services', }, 'daltonutilities': { - 'name': 'OptiLink' + 'name': 'OptiLink', }, 'mid140': { - 'name': 'OPTURA' + 'name': 'OPTURA', }, 'ote010': { - 'name': 'OTEC Communication Company' + 'name': 'OTEC Communication Company', }, 'cci020': { - 'name': 'Packerland Broadband' + 'name': 'Packerland Broadband', }, 'pan010': { - 'name': 'Panora Telco/Guthrie Center Communications' + 'name': 'Panora Telco/Guthrie Center Communications', }, 'otter': { - 'name': 'Park Region Telephone & Otter Tail Telcom' + 'name': 'Park Region Telephone & Otter Tail Telcom', }, 'mid050': { - 'name': 'Partner Communications Cooperative' + 'name': 'Partner Communications Cooperative', }, 'fib010': { - 'name': 'Pathway' + 'name': 'Pathway', }, 'paulbunyan': { - 'name': 'Paul Bunyan Communications' + 'name': 'Paul Bunyan Communications', }, 'pem020': { - 'name': 'Pembroke Telephone Company' + 'name': 'Pembroke Telephone Company', }, 'mck010': { - 'name': 'Peoples Rural Telephone Cooperative' + 'name': 'Peoples Rural Telephone Cooperative', }, 'pul010': { - 'name': 'PES Energize' + 'name': 'PES Energize', }, 'phi010': { - 'name': 'Philippi Communications System' + 'name': 'Philippi Communications System', }, 'phonoscope': { - 'name': 'Phonoscope Cable' + 'name': 'Phonoscope Cable', }, 'pin070': { - 'name': 'Pine Belt Communications, Inc.' + 'name': 'Pine Belt Communications, Inc.', }, 'weh010-pine': { - 'name': 'Pine Bluff Cable TV' + 'name': 'Pine Bluff Cable TV', }, 'pin060': { - 'name': 'Pineland Telephone Cooperative' + 'name': 'Pineland Telephone Cooperative', }, 'cam010': { - 'name': 'Pinpoint Communications' + 'name': 'Pinpoint Communications', }, 'pio060': { - 'name': 'Pioneer Broadband' + 'name': 'Pioneer Broadband', }, 'pioncomm': { - 'name': 'Pioneer Communications' + 'name': 'Pioneer Communications', }, 'pioneer': { - 'name': 'Pioneer DTV' + 'name': 'Pioneer DTV', }, 'pla020': { - 'name': 'Plant TiftNet, Inc.' 
+ 'name': 'Plant TiftNet, Inc.', }, 'par010': { - 'name': 'PLWC' + 'name': 'PLWC', }, 'pro035': { - 'name': 'PMT' + 'name': 'PMT', }, 'vik011': { - 'name': 'Polar Cablevision' + 'name': 'Polar Cablevision', }, 'pottawatomie': { - 'name': 'Pottawatomie Telephone Co.' + 'name': 'Pottawatomie Telephone Co.', }, 'premiercomm': { - 'name': 'Premier Communications' + 'name': 'Premier Communications', }, 'psc010': { - 'name': 'PSC' + 'name': 'PSC', }, 'pan020': { - 'name': 'PTCI' + 'name': 'PTCI', }, 'qco010': { - 'name': 'QCOL' + 'name': 'QCOL', }, 'qua010': { - 'name': 'Quality Cablevision' + 'name': 'Quality Cablevision', }, 'rad010': { - 'name': 'Radcliffe Telephone Company' + 'name': 'Radcliffe Telephone Company', }, 'car040': { - 'name': 'Rainbow Communications' + 'name': 'Rainbow Communications', }, 'rai030': { - 'name': 'Rainier Connect' + 'name': 'Rainier Connect', }, 'ral010': { - 'name': 'Ralls Technologies' + 'name': 'Ralls Technologies', }, 'rct010': { - 'name': 'RC Technologies' + 'name': 'RC Technologies', }, 'red040': { - 'name': 'Red River Communications' + 'name': 'Red River Communications', }, 'ree010': { - 'name': 'Reedsburg Utility Commission' + 'name': 'Reedsburg Utility Commission', }, 'mol010': { - 'name': 'Reliance Connects- Oregon' + 'name': 'Reliance Connects- Oregon', }, 'res020': { - 'name': 'Reserve Telecommunications' + 'name': 'Reserve Telecommunications', }, 'weh010-resort': { - 'name': 'Resort TV Cable' + 'name': 'Resort TV Cable', }, 'rld010': { - 'name': 'Richland Grant Telephone Cooperative, Inc.' + 'name': 'Richland Grant Telephone Cooperative, Inc.', }, 'riv030': { - 'name': 'River Valley Telecommunications Coop' + 'name': 'River Valley Telecommunications Coop', }, 'rockportcable': { - 'name': 'Rock Port Cablevision' + 'name': 'Rock Port Cablevision', }, 'rsf010': { - 'name': 'RS Fiber' + 'name': 'RS Fiber', }, 'rtc': { - 'name': 'RTC Communication Corp' + 'name': 'RTC Communication Corp', }, 'res040': { - 'name': 'RTC-Reservation Telephone Coop.' + 'name': 'RTC-Reservation Telephone Coop.', }, 'rte010': { - 'name': 'RTEC Communications' + 'name': 'RTEC Communications', }, 'stc010': { - 'name': 'S&T' + 'name': 'S&T', }, 'san020': { - 'name': 'San Bruno Cable TV' + 'name': 'San Bruno Cable TV', }, 'san040-01': { - 'name': 'Santel' + 'name': 'Santel', }, 'sav010': { - 'name': 'SCI Broadband-Savage Communications Inc.' + 'name': 'SCI Broadband-Savage Communications Inc.', }, 'sco050': { - 'name': 'Scottsboro Electric Power Board' + 'name': 'Scottsboro Electric Power Board', }, 'scr010': { - 'name': 'Scranton Telephone Company' + 'name': 'Scranton Telephone Company', }, 'selco': { - 'name': 'SELCO' + 'name': 'SELCO', }, 'she010': { - 'name': 'Shentel' + 'name': 'Shentel', }, 'she030': { - 'name': 'Sherwood Mutual Telephone Association, Inc.' + 'name': 'Sherwood Mutual Telephone Association, Inc.', }, 'ind060-ssc': { - 'name': 'Silver Star Communications' + 'name': 'Silver Star Communications', }, 'sjoberg': { - 'name': 'Sjoberg\'s Inc.' + 'name': 'Sjoberg\'s Inc.', }, 'sou025': { - 'name': 'SKT' + 'name': 'SKT', }, 'sky050': { - 'name': 'SkyBest TV' + 'name': 'SkyBest TV', }, 'nttcsmi010': { - 'name': 'Smithville Communications' + 'name': 'Smithville Communications', }, 'woo010': { - 'name': 'Solarus' + 'name': 'Solarus', }, 'sou075': { - 'name': 'South Central Rural Telephone Cooperative' + 'name': 'South Central Rural Telephone Cooperative', }, 'sou065': { - 'name': 'South Holt Cablevision, Inc.' 
+ 'name': 'South Holt Cablevision, Inc.', }, 'sou035': { - 'name': 'South Slope Cooperative Communications' + 'name': 'South Slope Cooperative Communications', }, 'spa020': { - 'name': 'Spanish Fork Community Network' + 'name': 'Spanish Fork Community Network', }, 'spe010': { - 'name': 'Spencer Municipal Utilities' + 'name': 'Spencer Municipal Utilities', }, 'spi005': { - 'name': 'Spillway Communications, Inc.' + 'name': 'Spillway Communications, Inc.', }, 'srt010': { - 'name': 'SRT' + 'name': 'SRT', }, 'cccsmc010': { - 'name': 'St. Maarten Cable TV' + 'name': 'St. Maarten Cable TV', }, 'sta025': { - 'name': 'Star Communications' + 'name': 'Star Communications', }, 'sco020': { - 'name': 'STE' + 'name': 'STE', }, 'uin010': { - 'name': 'STRATA Networks' + 'name': 'STRATA Networks', }, 'sum010': { - 'name': 'Sumner Cable TV' + 'name': 'Sumner Cable TV', }, 'pie010': { - 'name': 'Surry TV/PCSI TV' + 'name': 'Surry TV/PCSI TV', }, 'swa010': { - 'name': 'Swayzee Communications' + 'name': 'Swayzee Communications', }, 'sweetwater': { - 'name': 'Sweetwater Cable Television Co' + 'name': 'Sweetwater Cable Television Co', }, 'weh010-talequah': { - 'name': 'Tahlequah Cable TV' + 'name': 'Tahlequah Cable TV', }, 'tct': { - 'name': 'TCT' + 'name': 'TCT', }, 'tel050': { - 'name': 'Tele-Media Company' + 'name': 'Tele-Media Company', }, 'com050': { - 'name': 'The Community Agency' + 'name': 'The Community Agency', }, 'thr020': { - 'name': 'Three River' + 'name': 'Three River', }, 'cab140': { - 'name': 'Town & Country Technologies' + 'name': 'Town & Country Technologies', }, 'tra010': { - 'name': 'Trans-Video' + 'name': 'Trans-Video', }, 'tre010': { - 'name': 'Trenton TV Cable Company' + 'name': 'Trenton TV Cable Company', }, 'tcc': { - 'name': 'Tri County Communications Cooperative' + 'name': 'Tri County Communications Cooperative', }, 'tri025': { - 'name': 'TriCounty Telecom' + 'name': 'TriCounty Telecom', }, 'tri110': { - 'name': 'TrioTel Communications, Inc.' + 'name': 'TrioTel Communications, Inc.', }, 'tro010': { - 'name': 'Troy Cablevision, Inc.' + 'name': 'Troy Cablevision, Inc.', }, 'tsc': { - 'name': 'TSC' + 'name': 'TSC', }, 'cit220': { - 'name': 'Tullahoma Utilities Board' + 'name': 'Tullahoma Utilities Board', }, 'tvc030': { - 'name': 'TV Cable of Rensselaer' + 'name': 'TV Cable of Rensselaer', }, 'tvc015': { - 'name': 'TVC Cable' + 'name': 'TVC Cable', }, 'cab180': { - 'name': 'TVision' + 'name': 'TVision', }, 'twi040': { - 'name': 'Twin Lakes' + 'name': 'Twin Lakes', }, 'tvtinc': { - 'name': 'Twin Valley' + 'name': 'Twin Valley', }, 'uis010': { - 'name': 'Union Telephone Company' + 'name': 'Union Telephone Company', }, 'uni110': { - 'name': 'United Communications - TN' + 'name': 'United Communications - TN', }, 'uni120': { - 'name': 'United Services' + 'name': 'United Services', }, 'uss020': { - 'name': 'US Sonet' + 'name': 'US Sonet', }, 'cab060': { - 'name': 'USA Communications' + 'name': 'USA Communications', }, 'she005': { - 'name': 'USA Communications/Shellsburg, IA' + 'name': 'USA Communications/Shellsburg, IA', }, 'val040': { - 'name': 'Valley TeleCom Group' + 'name': 'Valley TeleCom Group', }, 'val025': { - 'name': 'Valley Telecommunications' + 'name': 'Valley Telecommunications', }, 'val030': { - 'name': 'Valparaiso Broadband' + 'name': 'Valparaiso Broadband', }, 'cla050': { - 'name': 'Vast Broadband' + 'name': 'Vast Broadband', }, 'sul015': { - 'name': 'Venture Communications Cooperative, Inc.' 
+ 'name': 'Venture Communications Cooperative, Inc.', }, 'ver025': { - 'name': 'Vernon Communications Co-op' + 'name': 'Vernon Communications Co-op', }, 'weh010-vicksburg': { - 'name': 'Vicksburg Video' + 'name': 'Vicksburg Video', }, 'vis070': { - 'name': 'Vision Communications' + 'name': 'Vision Communications', }, 'volcanotel': { - 'name': 'Volcano Vision, Inc.' + 'name': 'Volcano Vision, Inc.', }, 'vol040-02': { - 'name': 'VolFirst / BLTV' + 'name': 'VolFirst / BLTV', }, 'ver070': { - 'name': 'VTel' + 'name': 'VTel', }, 'nttcvtx010': { - 'name': 'VTX1' + 'name': 'VTX1', }, 'bci010-02': { - 'name': 'Vyve Broadband' + 'name': 'Vyve Broadband', }, 'wab020': { - 'name': 'Wabash Mutual Telephone' + 'name': 'Wabash Mutual Telephone', }, 'waitsfield': { - 'name': 'Waitsfield Cable' + 'name': 'Waitsfield Cable', }, 'wal010': { - 'name': 'Walnut Communications' + 'name': 'Walnut Communications', }, 'wavebroadband': { - 'name': 'Wave' + 'name': 'Wave', }, 'wav030': { - 'name': 'Waverly Communications Utility' + 'name': 'Waverly Communications Utility', }, 'wbi010': { - 'name': 'WBI' + 'name': 'WBI', }, 'web020': { - 'name': 'Webster-Calhoun Cooperative Telephone Association' + 'name': 'Webster-Calhoun Cooperative Telephone Association', }, 'wes005': { - 'name': 'West Alabama TV Cable' + 'name': 'West Alabama TV Cable', }, 'carolinata': { - 'name': 'West Carolina Communications' + 'name': 'West Carolina Communications', }, 'wct010': { - 'name': 'West Central Telephone Association' + 'name': 'West Central Telephone Association', }, 'wes110': { - 'name': 'West River Cooperative Telephone Company' + 'name': 'West River Cooperative Telephone Company', }, 'ani030': { - 'name': 'WesTel Systems' + 'name': 'WesTel Systems', }, 'westianet': { - 'name': 'Western Iowa Networks' + 'name': 'Western Iowa Networks', }, 'nttcwhi010': { - 'name': 'Whidbey Telecom' + 'name': 'Whidbey Telecom', }, 'weh010-white': { - 'name': 'White County Cable TV' + 'name': 'White County Cable TV', }, 'wes130': { - 'name': 'Wiatel' + 'name': 'Wiatel', }, 'wik010': { - 'name': 'Wiktel' + 'name': 'Wiktel', }, 'wil070': { - 'name': 'Wilkes Communications, Inc./RiverStreet Networks' + 'name': 'Wilkes Communications, Inc./RiverStreet Networks', }, 'wil015': { - 'name': 'Wilson Communications' + 'name': 'Wilson Communications', }, 'win010': { - 'name': 'Windomnet/SMBS' + 'name': 'Windomnet/SMBS', }, 'win090': { - 'name': 'Windstream Cable TV' + 'name': 'Windstream Cable TV', }, 'wcta': { - 'name': 'Winnebago Cooperative Telecom Association' + 'name': 'Winnebago Cooperative Telecom Association', }, 'wtc010': { - 'name': 'WTC' + 'name': 'WTC', }, 'wil040': { - 'name': 'WTC Communications, Inc.' 
+ 'name': 'WTC Communications, Inc.', }, 'wya010': { - 'name': 'Wyandotte Cable' + 'name': 'Wyandotte Cable', }, 'hin020-02': { - 'name': 'X-Stream Services' + 'name': 'X-Stream Services', }, 'xit010': { - 'name': 'XIT Communications' + 'name': 'XIT Communications', }, 'yel010': { - 'name': 'Yelcot Communications' + 'name': 'Yelcot Communications', }, 'mid180-01': { - 'name': 'yondoo' + 'name': 'yondoo', }, 'cou060': { - 'name': 'Zito Media' + 'name': 'Zito Media', }, 'slingtv': { 'name': 'Sling TV', @@ -1355,15 +1355,16 @@ MSO_INFO = { class AdobePassIE(InfoExtractor): # XXX: Conventionally, base classes should end with BaseIE/InfoExtractor _SERVICE_PROVIDER_TEMPLATE = 'https://sp.auth.adobe.com/adobe-services/%s' _USER_AGENT = 'Mozilla/5.0 (X11; Linux i686; rv:47.0) Gecko/20100101 Firefox/47.0' + _MODERN_USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; rv:131.0) Gecko/20100101 Firefox/131.0' _MVPD_CACHE = 'ap-mvpd' _DOWNLOADING_LOGIN_PAGE = 'Downloading Provider Login Page' def _download_webpage_handle(self, *args, **kwargs): headers = self.geo_verification_headers() - headers.update(kwargs.get('headers', {})) + headers.update(kwargs.get('headers') or {}) kwargs['headers'] = headers - return super(AdobePassIE, self)._download_webpage_handle( + return super()._download_webpage_handle( *args, **kwargs) @staticmethod @@ -1384,7 +1385,7 @@ class AdobePassIE(InfoExtractor): # XXX: Conventionally, base classes should en def _extract_mvpd_auth(self, url, video_id, requestor_id, resource): def xml_text(xml_str, tag): return self._search_regex( - '<%s>(.+?)' % (tag, tag), xml_str, tag) + f'<{tag}>(.+?)', xml_str, tag) def is_expired(token, date_ele): token_expires = unified_timestamp(re.sub(r'[_ ]GMT', '', xml_text(token, date_ele))) @@ -1394,7 +1395,7 @@ class AdobePassIE(InfoExtractor): # XXX: Conventionally, base classes should en form_page, urlh = form_page_res post_url = self._html_search_regex(r']+action=(["\'])(?P.+?)\1', form_page, 'post url', group='url') if not re.match(r'https?://', post_url): - post_url = compat_urlparse.urljoin(urlh.url, post_url) + post_url = urllib.parse.urljoin(urlh.url, post_url) form_data = self._hidden_inputs(form_page) form_data.update(data) return self._download_webpage_handle( @@ -1414,13 +1415,13 @@ class AdobePassIE(InfoExtractor): # XXX: Conventionally, base classes should en REDIRECT_REGEX = r'[0-9]{,2};\s*(?:URL|url)=\'?([^\'"]+)' redirect_url = self._search_regex( r'(?i)]+src="(%s)' % HistoryPlayerIE._VALID_URL, + rf']+src="({HistoryPlayerIE._VALID_URL})', webpage, 'player URL') return self.url_result(player_url, HistoryPlayerIE.ie_key()) diff --git a/plugins/youtube_download/yt_dlp/extractor/aeonco.py b/plugins/youtube_download/yt_dlp/extractor/aeonco.py index 390eae3..22d0266 100644 --- a/plugins/youtube_download/yt_dlp/extractor/aeonco.py +++ b/plugins/youtube_download/yt_dlp/extractor/aeonco.py @@ -16,8 +16,8 @@ class AeonCoIE(InfoExtractor): 'uploader': 'Semiconductor', 'uploader_id': 'semiconductor', 'uploader_url': 'https://vimeo.com/semiconductor', - 'duration': 348 - } + 'duration': 348, + }, }, { 'url': 'https://aeon.co/videos/dazzling-timelapse-shows-how-microbes-spoil-our-food-and-sometimes-enrich-it', 'md5': '03582d795382e49f2fd0b427b55de409', @@ -29,8 +29,8 @@ class AeonCoIE(InfoExtractor): 'uploader': 'Aeon Video', 'uploader_id': 'aeonvideo', 'uploader_url': 'https://vimeo.com/aeonvideo', - 'duration': 1344 - } + 'duration': 1344, + }, }, { 'url': 
'https://aeon.co/videos/chew-over-the-prisoners-dilemma-and-see-if-you-can-find-the-rational-path-out', 'md5': '1cfda0bf3ae24df17d00f2c0cb6cc21b', diff --git a/plugins/youtube_download/yt_dlp/extractor/afreecatv.py b/plugins/youtube_download/yt_dlp/extractor/afreecatv.py index 3d26d9c..aadb4d6 100644 --- a/plugins/youtube_download/yt_dlp/extractor/afreecatv.py +++ b/plugins/youtube_download/yt_dlp/extractor/afreecatv.py @@ -1,142 +1,26 @@ +import datetime as dt import functools -import re from .common import InfoExtractor +from ..networking import Request from ..utils import ( ExtractorError, OnDemandPagedList, - date_from_str, + UserNotLive, determine_ext, + filter_dict, int_or_none, - qualities, - traverse_obj, - unified_strdate, - unified_timestamp, - update_url_query, + orderedSet, + parse_iso8601, url_or_none, urlencode_postdata, - xpath_text, + urljoin, ) +from ..utils.traversal import traverse_obj -class AfreecaTVIE(InfoExtractor): - IE_NAME = 'afreecatv' - IE_DESC = 'afreecatv.com' - _VALID_URL = r'''(?x) - https?:// - (?: - (?:(?:live|afbbs|www)\.)?afreeca(?:tv)?\.com(?::\d+)? - (?: - /app/(?:index|read_ucc_bbs)\.cgi| - /player/[Pp]layer\.(?:swf|html) - )\?.*?\bnTitleNo=| - vod\.afreecatv\.com/(PLAYER/STATION|player)/ - ) - (?P\d+) - ''' +class AfreecaTVBaseIE(InfoExtractor): _NETRC_MACHINE = 'afreecatv' - _TESTS = [{ - 'url': 'http://live.afreecatv.com:8079/app/index.cgi?szType=read_ucc_bbs&szBjId=dailyapril&nStationNo=16711924&nBbsNo=18605867&nTitleNo=36164052&szSkin=', - 'md5': 'f72c89fe7ecc14c1b5ce506c4996046e', - 'info_dict': { - 'id': '36164052', - 'ext': 'mp4', - 'title': '데일리 에이프릴 요정들의 시상식!', - 'thumbnail': 're:^https?://(?:video|st)img.afreecatv.com/.*$', - 'uploader': 'dailyapril', - 'uploader_id': 'dailyapril', - 'upload_date': '20160503', - }, - 'skip': 'Video is gone', - }, { - 'url': 'http://afbbs.afreecatv.com:8080/app/read_ucc_bbs.cgi?nStationNo=16711924&nTitleNo=36153164&szBjId=dailyapril&nBbsNo=18605867', - 'info_dict': { - 'id': '36153164', - 'title': "BJ유트루와 함께하는 '팅커벨 메이크업!'", - 'thumbnail': 're:^https?://(?:video|st)img.afreecatv.com/.*$', - 'uploader': 'dailyapril', - 'uploader_id': 'dailyapril', - }, - 'playlist_count': 2, - 'playlist': [{ - 'md5': 'd8b7c174568da61d774ef0203159bf97', - 'info_dict': { - 'id': '36153164_1', - 'ext': 'mp4', - 'title': "BJ유트루와 함께하는 '팅커벨 메이크업!'", - 'upload_date': '20160502', - }, - }, { - 'md5': '58f2ce7f6044e34439ab2d50612ab02b', - 'info_dict': { - 'id': '36153164_2', - 'ext': 'mp4', - 'title': "BJ유트루와 함께하는 '팅커벨 메이크업!'", - 'upload_date': '20160502', - }, - }], - 'skip': 'Video is gone', - }, { - # non standard key - 'url': 'http://vod.afreecatv.com/PLAYER/STATION/20515605', - 'info_dict': { - 'id': '20170411_BE689A0E_190960999_1_2_h', - 'ext': 'mp4', - 'title': '혼자사는여자집', - 'thumbnail': 're:^https?://(?:video|st)img.afreecatv.com/.*$', - 'uploader': '♥이슬이', - 'uploader_id': 'dasl8121', - 'upload_date': '20170411', - 'duration': 213, - }, - 'params': { - 'skip_download': True, - }, - }, { - # adult content - 'url': 'https://vod.afreecatv.com/player/97267690', - 'info_dict': { - 'id': '20180327_27901457_202289533_1', - 'ext': 'mp4', - 'title': '[생]빨개요♥ (part 1)', - 'thumbnail': 're:^https?://(?:video|st)img.afreecatv.com/.*$', - 'uploader': '[SA]서아', - 'uploader_id': 'bjdyrksu', - 'upload_date': '20180327', - 'duration': 3601, - }, - 'params': { - 'skip_download': True, - }, - 'skip': 'The VOD does not exist', - }, { - 'url': 
'http://www.afreecatv.com/player/Player.swf?szType=szBjId=djleegoon&nStationNo=11273158&nBbsNo=13161095&nTitleNo=36327652', - 'only_matching': True, - }, { - 'url': 'https://vod.afreecatv.com/player/96753363', - 'info_dict': { - 'id': '20230108_9FF5BEE1_244432674_1', - 'ext': 'mp4', - 'uploader_id': 'rlantnghks', - 'uploader': '페이즈으', - 'duration': 10840, - 'thumbnail': 'http://videoimg.afreecatv.com/php/SnapshotLoad.php?rowKey=20230108_9FF5BEE1_244432674_1_r', - 'upload_date': '20230108', - 'title': '젠지 페이즈', - }, - 'params': { - 'skip_download': True, - }, - }] - - @staticmethod - def parse_video_key(key): - video_key = {} - m = re.match(r'^(?P\d{8})_\w+_(?P\d+)$', key) - if m: - video_key['upload_date'] = m.group('upload_date') - video_key['part'] = int(m.group('part')) - return video_key def _perform_login(self, username, password): login_form = { @@ -150,21 +34,21 @@ class AfreecaTVIE(InfoExtractor): } response = self._download_json( - 'https://login.afreecatv.com/app/LoginAction.php', None, + 'https://login.sooplive.co.kr/app/LoginAction.php', None, 'Logging in', data=urlencode_postdata(login_form)) _ERRORS = { -4: 'Your account has been suspended due to a violation of our terms and policies.', - -5: 'https://member.afreecatv.com/app/user_delete_progress.php', - -6: 'https://login.afreecatv.com/membership/changeMember.php', - -8: "Hello! AfreecaTV here.\nThe username you have entered belongs to \n an account that requires a legal guardian's consent. \nIf you wish to use our services without restriction, \nplease make sure to go through the necessary verification process.", - -9: 'https://member.afreecatv.com/app/pop_login_block.php', - -11: 'https://login.afreecatv.com/afreeca/second_login.php', - -12: 'https://member.afreecatv.com/app/user_security.php', + -5: 'https://member.sooplive.co.kr/app/user_delete_progress.php', + -6: 'https://login.sooplive.co.kr/membership/changeMember.php', + -8: "Hello! Soop here.\nThe username you have entered belongs to \n an account that requires a legal guardian's consent. \nIf you wish to use our services without restriction, \nplease make sure to go through the necessary verification process.", + -9: 'https://member.sooplive.co.kr/app/pop_login_block.php', + -11: 'https://login.sooplive.co.kr/afreeca/second_login.php', + -12: 'https://member.sooplive.co.kr/app/user_security.php', 0: 'The username does not exist or you have entered the wrong password.', -1: 'The username does not exist or you have entered the wrong password.', -3: 'You have entered your username/password incorrectly.', - -7: 'You cannot use your Global AfreecaTV account to access Korean AfreecaTV.', + -7: 'You cannot use your Global Soop account to access Korean Soop.', -10: 'Sorry for the inconvenience. \nYour account has been blocked due to an unauthorized access. \nPlease contact our Help Center for assistance.', -32008: 'You have failed to log in. 
Please contact our Help Center.', } @@ -173,169 +57,206 @@ class AfreecaTVIE(InfoExtractor): if result != 1: error = _ERRORS.get(result, 'You have failed to log in.') raise ExtractorError( - 'Unable to login: %s said: %s' % (self.IE_NAME, error), + f'Unable to login: {self.IE_NAME} said: {error}', expected=True) + def _call_api(self, endpoint, display_id, data=None, headers=None, query=None): + return self._download_json(Request( + f'https://api.m.sooplive.co.kr/{endpoint}', + data=data, headers=headers, query=query, + extensions={'legacy_ssl': True}), display_id, + 'Downloading API JSON', 'Unable to download API JSON') + + @staticmethod + def _fixup_thumb(thumb_url): + if not url_or_none(thumb_url): + return None + # Core would determine_ext as 'php' from the url, so we need to provide the real ext + # See: https://github.com/yt-dlp/yt-dlp/issues/11537 + return [{'url': thumb_url, 'ext': 'jpg'}] + + +class AfreecaTVIE(AfreecaTVBaseIE): + IE_NAME = 'soop' + IE_DESC = 'sooplive.co.kr' + _VALID_URL = r'https?://vod\.(?:sooplive\.co\.kr|afreecatv\.com)/(?:PLAYER/STATION|player)/(?P\d+)/?(?:$|[?#&])' + _TESTS = [{ + 'url': 'https://vod.sooplive.co.kr/player/96753363', + 'info_dict': { + 'id': '20230108_9FF5BEE1_244432674_1', + 'ext': 'mp4', + 'uploader_id': 'rlantnghks', + 'uploader': '페이즈으', + 'duration': 10840, + 'thumbnail': r're:https?://videoimg\.(?:sooplive\.co\.kr|afreecatv\.com)/.+', + 'upload_date': '20230108', + 'timestamp': 1673186405, + 'title': '젠지 페이즈', + }, + 'params': { + 'skip_download': True, + }, + }, { + # non standard key + 'url': 'http://vod.sooplive.co.kr/PLAYER/STATION/20515605', + 'info_dict': { + 'id': '20170411_BE689A0E_190960999_1_2_h', + 'ext': 'mp4', + 'title': '혼자사는여자집', + 'thumbnail': r're:https?://(?:video|st)img\.(?:sooplive\.co\.kr|afreecatv\.com)/.+', + 'uploader': '♥이슬이', + 'uploader_id': 'dasl8121', + 'upload_date': '20170411', + 'timestamp': 1491929865, + 'duration': 213, + }, + 'params': { + 'skip_download': True, + }, + }, { + # adult content + 'url': 'https://vod.sooplive.co.kr/player/97267690', + 'info_dict': { + 'id': '20180327_27901457_202289533_1', + 'ext': 'mp4', + 'title': '[생]빨개요♥ (part 1)', + 'thumbnail': r're:https?://(?:video|st)img\.(?:sooplive\.co\.kr|afreecatv\.com)/.+', + 'uploader': '[SA]서아', + 'uploader_id': 'bjdyrksu', + 'upload_date': '20180327', + 'duration': 3601, + }, + 'params': { + 'skip_download': True, + }, + 'skip': 'The VOD does not exist', + }, { + # adult content + 'url': 'https://vod.sooplive.co.kr/player/70395877', + 'only_matching': True, + }, { + # subscribers only + 'url': 'https://vod.sooplive.co.kr/player/104647403', + 'only_matching': True, + }, { + # private + 'url': 'https://vod.sooplive.co.kr/player/81669846', + 'only_matching': True, + }] + def _real_extract(self, url): video_id = self._match_id(url) - - partial_view = False - adult_view = False - for _ in range(2): - data = self._download_json( - 'https://api.m.afreecatv.com/station/video/a/view', - video_id, headers={'Referer': url}, data=urlencode_postdata({ - 'nTitleNo': video_id, - 'nApiLevel': 10, - }))['data'] - if traverse_obj(data, ('code', {int})) == -6221: - raise ExtractorError('The VOD does not exist', expected=True) - query = { + data = self._call_api( + 'station/video/a/view', video_id, headers={'Referer': url}, + data=urlencode_postdata({ 'nTitleNo': video_id, - 'nStationNo': data['station_no'], - 'nBbsNo': data['bbs_no'], - } - if partial_view: - query['partialView'] = 'SKIP_ADULT' - if adult_view: - query['adultView'] = 'ADULT_VIEW' - video_xml 
= self._download_xml( - 'http://afbbs.afreecatv.com:8080/api/video/get_video_info.php', - video_id, 'Downloading video info XML%s' - % (' (skipping adult)' if partial_view else ''), - video_id, headers={ - 'Referer': url, - }, query=query) + 'nApiLevel': 10, + }))['data'] - flag = xpath_text(video_xml, './track/flag', 'flag', default=None) - if flag and flag == 'SUCCEED': - break - if flag == 'PARTIAL_ADULT': - self.report_warning( - 'In accordance with local laws and regulations, underage users are restricted from watching adult content. ' - 'Only content suitable for all ages will be downloaded. ' - 'Provide account credentials if you wish to download restricted content.') - partial_view = True - continue - elif flag == 'ADULT': - if not adult_view: - adult_view = True - continue - error = 'Only users older than 19 are able to watch this video. Provide account credentials to download this content.' - else: - error = flag - raise ExtractorError( - '%s said: %s' % (self.IE_NAME, error), expected=True) - else: - raise ExtractorError('Unable to download video info') + error_code = traverse_obj(data, ('code', {int})) + if error_code == -6221: + raise ExtractorError('The VOD does not exist', expected=True) + elif error_code == -6205: + raise ExtractorError('This VOD is private', expected=True) - video_element = video_xml.findall('./track/video')[-1] - if video_element is None or video_element.text is None: - raise ExtractorError( - 'Video %s does not exist' % video_id, expected=True) - - video_url = video_element.text.strip() - - title = xpath_text(video_xml, './track/title', 'title', fatal=True) - - uploader = xpath_text(video_xml, './track/nickname', 'uploader') - uploader_id = xpath_text(video_xml, './track/bj_id', 'uploader id') - duration = int_or_none(xpath_text( - video_xml, './track/duration', 'duration')) - thumbnail = xpath_text(video_xml, './track/titleImage', 'thumbnail') - - common_entry = { - 'uploader': uploader, - 'uploader_id': uploader_id, - 'thumbnail': thumbnail, - } - - info = common_entry.copy() - info.update({ - 'id': video_id, - 'title': title, - 'duration': duration, + common_info = traverse_obj(data, { + 'title': ('title', {str}), + 'uploader': ('writer_nick', {str}), + 'uploader_id': ('bj_id', {str}), + 'duration': ('total_file_duration', {int_or_none(scale=1000)}), + 'thumbnails': ('thumb', {self._fixup_thumb}), }) - if not video_url: - entries = [] - file_elements = video_element.findall('./file') - one = len(file_elements) == 1 - for file_num, file_element in enumerate(file_elements, start=1): - file_url = url_or_none(file_element.text) - if not file_url: - continue - key = file_element.get('key', '') - upload_date = unified_strdate(self._search_regex( - r'^(\d{8})_', key, 'upload date', default=None)) - if upload_date is not None: - # sometimes the upload date isn't included in the file name - # instead, another random ID is, which may parse as a valid - # date but be wildly out of a reasonable range - parsed_date = date_from_str(upload_date) - if parsed_date.year < 2000 or parsed_date.year >= 2100: - upload_date = None - file_duration = int_or_none(file_element.get('duration')) - format_id = key if key else '%s_%s' % (video_id, file_num) - if determine_ext(file_url) == 'm3u8': - formats = self._extract_m3u8_formats( - file_url, video_id, 'mp4', entry_protocol='m3u8_native', - m3u8_id='hls', - note='Downloading part %d m3u8 information' % file_num) - else: - formats = [{ - 'url': file_url, - 'format_id': 'http', - }] - if not formats and not 
self.get_param('ignore_no_formats'): - continue - file_info = common_entry.copy() - file_info.update({ - 'id': format_id, - 'title': title if one else '%s (part %d)' % (title, file_num), - 'upload_date': upload_date, - 'duration': file_duration, - 'formats': formats, - }) - entries.append(file_info) - entries_info = info.copy() - entries_info.update({ - '_type': 'multi_video', - 'entries': entries, - }) - return entries_info + entries = [] + for file_num, file_element in enumerate( + traverse_obj(data, ('files', lambda _, v: url_or_none(v['file']))), start=1): + file_url = file_element['file'] + if determine_ext(file_url) == 'm3u8': + formats = self._extract_m3u8_formats( + file_url, video_id, 'mp4', m3u8_id='hls', + note=f'Downloading part {file_num} m3u8 information') + else: + formats = [{ + 'url': file_url, + 'format_id': 'http', + }] - info = { - 'id': video_id, - 'title': title, - 'uploader': uploader, - 'uploader_id': uploader_id, - 'duration': duration, - 'thumbnail': thumbnail, - } - - if determine_ext(video_url) == 'm3u8': - info['formats'] = self._extract_m3u8_formats( - video_url, video_id, 'mp4', entry_protocol='m3u8_native', - m3u8_id='hls') - else: - app, playpath = video_url.split('mp4:') - info.update({ - 'url': app, - 'ext': 'flv', - 'play_path': 'mp4:' + playpath, - 'rtmp_live': True, # downloading won't end without this + entries.append({ + **common_info, + 'id': file_element.get('file_info_key') or f'{video_id}_{file_num}', + 'title': f'{common_info.get("title") or "Untitled"} (part {file_num})', + 'formats': formats, + **traverse_obj(file_element, { + 'duration': ('duration', {int_or_none(scale=1000)}), + 'timestamp': ('file_start', {parse_iso8601(delimiter=' ', timezone=dt.timedelta(hours=9))}), + }), }) - return info + if traverse_obj(data, ('adult_status', {str})) == 'notLogin': + if not entries: + self.raise_login_required( + 'Only users older than 19 are able to watch this video', method='password') + self.report_warning( + 'In accordance with local laws and regulations, underage users are ' + 'restricted from watching adult content. Only content suitable for all ' + f'ages will be downloaded. {self._login_hint("password")}') + + if not entries and traverse_obj(data, ('sub_upload_type', {str})): + self.raise_login_required('This VOD is for subscribers only', method='password') + + if len(entries) == 1: + return { + **entries[0], + 'title': common_info.get('title'), + } + + common_info['timestamp'] = traverse_obj(entries, (..., 'timestamp'), get_all=False) + + return self.playlist_result(entries, video_id, multi_video=True, **common_info) -class AfreecaTVLiveIE(AfreecaTVIE): # XXX: Do not subclass from concrete IE - - IE_NAME = 'afreecatv:live' - _VALID_URL = r'https?://play\.afreeca(?:tv)?\.com/(?P[^/]+)(?:/(?P\d+))?' 
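
The rewritten VOD extractor above scales millisecond durations with int_or_none(scale=1000) and reads 'file_start' as a UTC+9 (KST) timestamp via parse_iso8601(delimiter=' ', timezone=dt.timedelta(hours=9)). A stdlib-only sketch of that conversion, using a hypothetical file_element payload whose values are chosen to line up with the test metadata in the hunk (duration 10840, timestamp 1673186405):

import datetime as dt

file_element = {'duration': 10840000, 'file_start': '2023-01-08 23:00:05'}  # hypothetical API values

duration = file_element['duration'] // 1000   # ms -> s, what int_or_none(scale=1000) yields here
kst = dt.timezone(dt.timedelta(hours=9))      # mirrors timezone=dt.timedelta(hours=9) above
timestamp = int(dt.datetime.strptime(file_element['file_start'], '%Y-%m-%d %H:%M:%S')
                .replace(tzinfo=kst).timestamp())

assert (duration, timestamp) == (10840, 1673186405)
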
+class AfreecaTVCatchStoryIE(AfreecaTVBaseIE): + IE_NAME = 'soop:catchstory' + IE_DESC = 'sooplive.co.kr catch story' + _VALID_URL = r'https?://vod\.(?:sooplive\.co\.kr|afreecatv\.com)/player/(?P\d+)/catchstory' _TESTS = [{ - 'url': 'https://play.afreecatv.com/pyh3646/237852185', + 'url': 'https://vod.sooplive.co.kr/player/103247/catchstory', + 'info_dict': { + 'id': '103247', + }, + 'playlist_count': 2, + }] + + def _real_extract(self, url): + video_id = self._match_id(url) + data = self._call_api( + 'catchstory/a/view', video_id, headers={'Referer': url}, + query={'aStoryListIdx': '', 'nStoryIdx': video_id}) + + return self.playlist_result(self._entries(data), video_id) + + def _entries(self, data): + # 'files' is always a list with 1 element + yield from traverse_obj(data, ( + 'data', lambda _, v: v['story_type'] == 'catch', + 'catch_list', lambda _, v: v['files'][0]['file'], { + 'id': ('files', 0, 'file_info_key', {str}), + 'url': ('files', 0, 'file', {url_or_none}), + 'duration': ('files', 0, 'duration', {int_or_none(scale=1000)}), + 'title': ('title', {str}), + 'uploader': ('writer_nick', {str}), + 'uploader_id': ('writer_id', {str}), + 'thumbnails': ('thumb', {self._fixup_thumb}), + 'timestamp': ('write_timestamp', {int_or_none}), + })) + + +class AfreecaTVLiveIE(AfreecaTVBaseIE): + IE_NAME = 'soop:live' + IE_DESC = 'sooplive.co.kr livestreams' + _VALID_URL = r'https?://play\.(?:sooplive\.co\.kr|afreecatv\.com)/(?P[^/?#]+)(?:/(?P\d+))?' + _TESTS = [{ + 'url': 'https://play.sooplive.co.kr/pyh3646/237852185', 'info_dict': { 'id': '237852185', 'ext': 'mp4', @@ -347,94 +268,121 @@ class AfreecaTVLiveIE(AfreecaTVIE): # XXX: Do not subclass from concrete IE }, 'skip': 'Livestream has ended', }, { - 'url': 'http://play.afreeca.com/pyh3646/237852185', + 'url': 'https://play.sooplive.co.kr/pyh3646/237852185', 'only_matching': True, }, { - 'url': 'http://play.afreeca.com/pyh3646', + 'url': 'https://play.sooplive.co.kr/pyh3646', 'only_matching': True, }] - _LIVE_API_URL = 'https://live.afreecatv.com/afreeca/player_live_api.php' + _LIVE_API_URL = 'https://live.sooplive.co.kr/afreeca/player_live_api.php' + _WORKING_CDNS = [ + 'gcp_cdn', # live-global-cdn-v02.sooplive.co.kr + 'gs_cdn_pc_app', # pc-app.stream.sooplive.co.kr + 'gs_cdn_mobile_web', # mobile-web.stream.sooplive.co.kr + 'gs_cdn_pc_web', # pc-web.stream.sooplive.co.kr + ] + _BAD_CDNS = [ + 'gs_cdn', # chromecast.afreeca.gscdn.com (cannot resolve) + 'gs_cdn_chromecast', # chromecast.stream.sooplive.co.kr (HTTP Error 400) + 'azure_cdn', # live-global-cdn-v01.sooplive.co.kr (cannot resolve) + 'aws_cf', # live-global-cdn-v03.sooplive.co.kr (cannot resolve) + 'kt_cdn', # kt.stream.sooplive.co.kr (HTTP Error 400) + ] - _QUALITIES = ('sd', 'hd', 'hd2k', 'original') + def _extract_formats(self, channel_info, broadcast_no, aid): + stream_base_url = channel_info.get('RMD') or 'https://livestream-manager.sooplive.co.kr' + + # If user has not passed CDN IDs, try API-provided CDN ID followed by other working CDN IDs + default_cdn_ids = orderedSet([ + *traverse_obj(channel_info, ('CDN', {str}, all, lambda _, v: v not in self._BAD_CDNS)), + *self._WORKING_CDNS, + ]) + cdn_ids = self._configuration_arg('cdn', default_cdn_ids) + + for attempt, cdn_id in enumerate(cdn_ids, start=1): + m3u8_url = traverse_obj(self._download_json( + urljoin(stream_base_url, 'broad_stream_assign.html'), broadcast_no, + f'Downloading {cdn_id} stream info', f'Unable to download {cdn_id} stream info', + fatal=False, query={ + 'return_type': cdn_id, + 'broad_key': 
f'{broadcast_no}-common-master-hls', + }), ('view_url', {url_or_none})) + try: + return self._extract_m3u8_formats( + m3u8_url, broadcast_no, 'mp4', m3u8_id='hls', query={'aid': aid}, + headers={'Referer': 'https://play.sooplive.co.kr/'}) + except ExtractorError as e: + if attempt == len(cdn_ids): + raise + self.report_warning( + f'{e.cause or e.msg}. Retrying... (attempt {attempt} of {len(cdn_ids)})') def _real_extract(self, url): broadcaster_id, broadcast_no = self._match_valid_url(url).group('id', 'bno') - password = self.get_param('videopassword') + channel_info = traverse_obj(self._download_json( + self._LIVE_API_URL, broadcaster_id, data=urlencode_postdata({'bid': broadcaster_id})), + ('CHANNEL', {dict})) or {} - info = self._download_json(self._LIVE_API_URL, broadcaster_id, fatal=False, - data=urlencode_postdata({'bid': broadcaster_id})) or {} - channel_info = info.get('CHANNEL') or {} broadcaster_id = channel_info.get('BJID') or broadcaster_id broadcast_no = channel_info.get('BNO') or broadcast_no - password_protected = channel_info.get('BPWD') if not broadcast_no: - raise ExtractorError(f'Unable to extract broadcast number ({broadcaster_id} may not be live)', expected=True) - if password_protected == 'Y' and password is None: + result = channel_info.get('RESULT') + if result == 0: + raise UserNotLive(video_id=broadcaster_id) + elif result == -6: + self.raise_login_required( + 'This channel is streaming for subscribers only', method='password') + raise ExtractorError('Unable to extract broadcast number') + + password = self.get_param('videopassword') + if channel_info.get('BPWD') == 'Y' and password is None: raise ExtractorError( 'This livestream is protected by a password, use the --video-password option', expected=True) - formats = [] - quality_key = qualities(self._QUALITIES) - for quality_str in self._QUALITIES: - params = { + token_info = traverse_obj(self._download_json( + self._LIVE_API_URL, broadcast_no, 'Downloading access token for stream', + 'Unable to download access token for stream', data=urlencode_postdata(filter_dict({ 'bno': broadcast_no, 'stream_type': 'common', 'type': 'aid', - 'quality': quality_str, - } - if password is not None: - params['pwd'] = password - aid_response = self._download_json( - self._LIVE_API_URL, broadcast_no, fatal=False, - data=urlencode_postdata(params), - note=f'Downloading access token for {quality_str} stream', - errnote=f'Unable to download access token for {quality_str} stream') - aid = traverse_obj(aid_response, ('CHANNEL', 'AID')) - if not aid: - continue + 'quality': 'master', + 'pwd': password, + }))), ('CHANNEL', {dict})) or {} + aid = token_info.get('AID') + if not aid: + result = token_info.get('RESULT') + if result == 0: + raise ExtractorError('This livestream has ended', expected=True) + elif result == -6: + self.raise_login_required('This livestream is for subscribers only', method='password') + raise ExtractorError('Unable to extract access token') - stream_base_url = channel_info.get('RMD') or 'https://livestream-manager.afreecatv.com' - stream_info = self._download_json( - f'{stream_base_url}/broad_stream_assign.html', broadcast_no, fatal=False, - query={ - 'return_type': channel_info.get('CDN', 'gcp_cdn'), - 'broad_key': f'{broadcast_no}-common-{quality_str}-hls', - }, - note=f'Downloading metadata for {quality_str} stream', - errnote=f'Unable to download metadata for {quality_str} stream') or {} + formats = self._extract_formats(channel_info, broadcast_no, aid) - if stream_info.get('view_url'): - formats.append({ - 
'format_id': quality_str, - 'url': update_url_query(stream_info['view_url'], {'aid': aid}), - 'ext': 'mp4', - 'protocol': 'm3u8', - 'quality': quality_key(quality_str), - }) - - station_info = self._download_json( - 'https://st.afreecatv.com/api/get_station_status.php', broadcast_no, - query={'szBjId': broadcaster_id}, fatal=False, - note='Downloading channel metadata', errnote='Unable to download channel metadata') or {} + station_info = traverse_obj(self._download_json( + 'https://st.sooplive.co.kr/api/get_station_status.php', broadcast_no, + 'Downloading channel metadata', 'Unable to download channel metadata', + query={'szBjId': broadcaster_id}, fatal=False), {dict}) or {} return { 'id': broadcast_no, 'title': channel_info.get('TITLE') or station_info.get('station_title'), 'uploader': channel_info.get('BJNICK') or station_info.get('station_name'), 'uploader_id': broadcaster_id, - 'timestamp': unified_timestamp(station_info.get('broad_start')), + 'timestamp': parse_iso8601(station_info.get('broad_start'), delimiter=' ', timezone=dt.timedelta(hours=9)), 'formats': formats, 'is_live': True, + 'http_headers': {'Referer': url}, } -class AfreecaTVUserIE(InfoExtractor): - IE_NAME = 'afreecatv:user' - _VALID_URL = r'https?://bj\.afreeca(?:tv)?\.com/(?P[^/]+)/vods/?(?P[^/]+)?' +class AfreecaTVUserIE(AfreecaTVBaseIE): + IE_NAME = 'soop:user' + _VALID_URL = r'https?://ch\.(?:sooplive\.co\.kr|afreecatv\.com)/(?P[^/?#]+)/vods/?(?P[^/?#]+)?' _TESTS = [{ - 'url': 'https://bj.afreecatv.com/ryuryu24/vods/review', + 'url': 'https://ch.sooplive.co.kr/ryuryu24/vods/review', 'info_dict': { '_type': 'playlist', 'id': 'ryuryu24', @@ -442,7 +390,7 @@ class AfreecaTVUserIE(InfoExtractor): }, 'playlist_count': 218, }, { - 'url': 'https://bj.afreecatv.com/parang1995/vods/highlight', + 'url': 'https://ch.sooplive.co.kr/parang1995/vods/highlight', 'info_dict': { '_type': 'playlist', 'id': 'parang1995', @@ -450,7 +398,7 @@ class AfreecaTVUserIE(InfoExtractor): }, 'playlist_count': 997, }, { - 'url': 'https://bj.afreecatv.com/ryuryu24/vods', + 'url': 'https://ch.sooplive.co.kr/ryuryu24/vods', 'info_dict': { '_type': 'playlist', 'id': 'ryuryu24', @@ -458,7 +406,7 @@ class AfreecaTVUserIE(InfoExtractor): }, 'playlist_count': 221, }, { - 'url': 'https://bj.afreecatv.com/ryuryu24/vods/balloonclip', + 'url': 'https://ch.sooplive.co.kr/ryuryu24/vods/balloonclip', 'info_dict': { '_type': 'playlist', 'id': 'ryuryu24', @@ -470,12 +418,12 @@ class AfreecaTVUserIE(InfoExtractor): def _fetch_page(self, user_id, user_type, page): page += 1 - info = self._download_json(f'https://bjapi.afreecatv.com/api/{user_id}/vods/{user_type}', user_id, + info = self._download_json(f'https://chapi.sooplive.co.kr/api/{user_id}/vods/{user_type}', user_id, query={'page': page, 'per_page': self._PER_PAGE, 'orderby': 'reg_date'}, note=f'Downloading {user_type} video page {page}') for item in info['data']: yield self.url_result( - f'https://vod.afreecatv.com/player/{item["title_no"]}/', AfreecaTVIE, item['title_no']) + f'https://vod.sooplive.co.kr/player/{item["title_no"]}/', AfreecaTVIE, item['title_no']) def _real_extract(self, url): user_id, user_type = self._match_valid_url(url).group('id', 'slug_type') diff --git a/plugins/youtube_download/yt_dlp/extractor/agora.py b/plugins/youtube_download/yt_dlp/extractor/agora.py index abb2d3f..e040db6 100644 --- a/plugins/youtube_download/yt_dlp/extractor/agora.py +++ b/plugins/youtube_download/yt_dlp/extractor/agora.py @@ -146,7 +146,7 @@ class TokFMPodcastIE(InfoExtractor): 'url': 
'https://audycje.tokfm.pl/podcast/91275,-Systemowy-rasizm-Czy-zamieszki-w-USA-po-morderstwie-w-Minneapolis-doprowadza-do-zmian-w-sluzbach-panstwowych',
         'info_dict': {
             'id': '91275',
-            'ext': 'aac',
+            'ext': 'mp3',
             'title': 'md5:a9b15488009065556900169fb8061cce',
             'episode': 'md5:a9b15488009065556900169fb8061cce',
             'series': 'Analizy',
@@ -164,23 +164,20 @@ class TokFMPodcastIE(InfoExtractor):
             raise ExtractorError('No such podcast', expected=True)
         metadata = metadata[0]

-        formats = []
-        for ext in ('aac', 'mp3'):
-            url_data = self._download_json(
-                f'https://api.podcast.radioagora.pl/api4/getSongUrl?podcast_id={media_id}&device_id={uuid.uuid4()}&ppre=false&audio={ext}',
-                media_id, 'Downloading podcast %s URL' % ext)
-            # prevents inserting the mp3 (default) multiple times
-            if 'link_ssl' in url_data and f'.{ext}' in url_data['link_ssl']:
-                formats.append({
-                    'url': url_data['link_ssl'],
-                    'ext': ext,
-                    'vcodec': 'none',
-                    'acodec': ext,
-                })
+        mp3_url = self._download_json(
+            'https://api.podcast.radioagora.pl/api4/getSongUrl',
+            media_id, 'Downloading podcast mp3 URL', query={
+                'podcast_id': media_id,
+                'device_id': str(uuid.uuid4()),
+                'ppre': 'false',
+                'audio': 'mp3',
+            })['link_ssl']

         return {
             'id': media_id,
-            'formats': formats,
+            'url': mp3_url,
+            'vcodec': 'none',
+            'ext': 'mp3',
             'title': metadata.get('podcast_name'),
             'series': metadata.get('series_name'),
             'episode': metadata.get('podcast_name'),
@@ -206,8 +203,8 @@ class TokFMAuditionIE(InfoExtractor):
     }

     @staticmethod
-    def _create_url(id):
-        return f'https://audycje.tokfm.pl/audycja/{id}'
+    def _create_url(video_id):
+        return f'https://audycje.tokfm.pl/audycja/{video_id}'

     def _real_extract(self, url):
         audition_id = self._match_id(url)
diff --git a/plugins/youtube_download/yt_dlp/extractor/airmozilla.py b/plugins/youtube_download/yt_dlp/extractor/airmozilla.py
deleted file mode 100644
index 669556b..0000000
--- a/plugins/youtube_download/yt_dlp/extractor/airmozilla.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import re
-
-from .common import InfoExtractor
-from ..utils import (
-    int_or_none,
-    parse_duration,
-    parse_iso8601,
-)
-
-
-class AirMozillaIE(InfoExtractor):
-    _VALID_URL = r'https?://air\.mozilla\.org/(?P<id>[0-9a-z-]+)/?'
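
Stepping back to the agora.py hunk above: TokFMPodcastIE no longer probes both aac and mp3, it asks getSongUrl for a single mp3 link via query parameters and uses the response's 'link_ssl' field directly. A stdlib-only sketch of the request it builds; build_getsongurl is illustrative and not part of yt-dlp, only the endpoint and parameter names come from the hunk:

import uuid
from urllib.parse import urlencode

def build_getsongurl(media_id):
    # Same query dict the extractor now passes via `query=` to _download_json()
    params = {
        'podcast_id': media_id,
        'device_id': str(uuid.uuid4()),
        'ppre': 'false',
        'audio': 'mp3',
    }
    return 'https://api.podcast.radioagora.pl/api4/getSongUrl?' + urlencode(params)

print(build_getsongurl('91275'))  # the JSON reply's 'link_ssl' becomes the single mp3 format
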
-    _TEST = {
-        'url': 'https://air.mozilla.org/privacy-lab-a-meetup-for-privacy-minded-people-in-san-francisco/',
-        'md5': '8d02f53ee39cf006009180e21df1f3ba',
-        'info_dict': {
-            'id': '6x4q2w',
-            'ext': 'mp4',
-            'title': 'Privacy Lab - a meetup for privacy minded people in San Francisco',
-            'thumbnail': r're:https?://.*/poster\.jpg',
-            'description': 'Brings together privacy professionals and others interested in privacy at for-profits, non-profits, and NGOs in an effort to contribute to the state of the ecosystem...',
-            'timestamp': 1422487800,
-            'upload_date': '20150128',
-            'location': 'SFO Commons',
-            'duration': 3780,
-            'view_count': int,
-            'categories': ['Main', 'Privacy'],
-        }
-    }
-
-    def _real_extract(self, url):
-        display_id = self._match_id(url)
-        webpage = self._download_webpage(url, display_id)
-        video_id = self._html_search_regex(r'//vid\.ly/(.*?)/embed', webpage, 'id')
-
-        embed_script = self._download_webpage('https://vid.ly/{0}/embed'.format(video_id), video_id)
-        jwconfig = self._parse_json(self._search_regex(
-            r'initCallback\((.*)\);', embed_script, 'metadata'), video_id)['config']
-
-        info_dict = self._parse_jwplayer_data(jwconfig, video_id)
-        view_count = int_or_none(self._html_search_regex(
-            r'Views since archived: ([0-9]+)',
-            webpage, 'view count', fatal=False))
-        timestamp = parse_iso8601(self._html_search_regex(
-            r'
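
For reference, the deleted AirMozillaIE read a vid.ly id out of the page, then pulled the JW Player config from the embed script's initCallback(...) call before handing it to _parse_jwplayer_data(). A rough sketch of that extraction step, with a hypothetical embed_script value:

import json
import re

embed_script = 'initCallback({"config": {"playlist": [{"sources": []}]}});'  # hypothetical embed page JS
jwconfig = json.loads(re.search(r'initCallback\((.*)\);', embed_script).group(1))['config']
print(jwconfig)  # the removed extractor fed this dict to _parse_jwplayer_data()
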