develop #11
							
								
								
									
										15
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										15
									
								
								README.md
									
									
									
									
									
								
							| @@ -4,11 +4,24 @@ SolarFM is a Gtk+ Python file manager. | ||||
| # Notes | ||||
| <b>Still Work in  progress! Use at own risk!</b> | ||||
|  | ||||
| Additionally, if not building a .deb then just move the contents of user_config to their respective folders. | ||||
| Copy the share/solarfm folder to your user .config/ directory too. | ||||
|  | ||||
| `pyrightconfig.json` | ||||
| <p>The pyrightconfig file needs to stay on same level as the .git folders in order to have settings detected when using pyright with lsp functionality.</p> | ||||
|  | ||||
| <h6>Install Setup</h6> | ||||
| ``` | ||||
| sudo apt-get install python3.8 wget python3-setproctitle python3-gi ffmpegthumbnailer steamcmd | ||||
| sudo apt-get install python3.8 python3-setproctitle python3-gi wget ffmpegthumbnailer steamcmd | ||||
| ``` | ||||
|  | ||||
| # Known Issues | ||||
| <ul> | ||||
| <li>There's a memory leak. Still analyzing where exactly.</li> | ||||
| <li>Doing Ctrl+D when in Terminator (maybe other terminals too) somehow propagates the signal to SolarFM too. | ||||
| A selected file in the active quad-pane will move to trash since it is the default key-binding for that action.</li> | ||||
| </ul> | ||||
|  | ||||
| # TODO | ||||
| <ul> | ||||
| <li>Add simpleish preview plugin for various file types.</li> | ||||
|   | ||||
| @@ -1 +0,0 @@ | ||||
| Remove me... | ||||
										
											Binary file not shown.
										
									
								
							| @@ -1,8 +1,5 @@ | ||||
| # Python imports | ||||
| import os | ||||
| import threading | ||||
| import subprocess | ||||
| import inspect | ||||
| import shlex | ||||
|  | ||||
| # Lib imports | ||||
| @@ -14,28 +11,16 @@ from gi.repository import Gtk | ||||
| from plugins.plugin_base import PluginBase | ||||
|  | ||||
|  | ||||
| # NOTE: Threads WILL NOT die with parent's destruction. | ||||
| def threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start() | ||||
|     return wrapper | ||||
|  | ||||
| # NOTE: Threads WILL die with parent's destruction. | ||||
| def daemon_threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start() | ||||
|     return wrapper | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| class Plugin(PluginBase): | ||||
|     def __init__(self): | ||||
|         super().__init__() | ||||
|         self.path               = os.path.dirname(os.path.realpath(__file__)) | ||||
|         self._GLADE_FILE        = f"{self.path}/archiver.glade" | ||||
|  | ||||
|         self.name = "Archiver"  # NOTE: Need to remove after establishing private bidirectional 1-1 message bus | ||||
|                                 #       where self.name should not be needed for message comms | ||||
|         self.path               = os.path.dirname(os.path.realpath(__file__)) | ||||
|         self._GLADE_FILE        = f"{self.path}/archiver.glade" | ||||
|         self._archiver_dialogue  = None | ||||
|         self._arc_command_buffer = None | ||||
|  | ||||
| @@ -67,20 +52,9 @@ class Plugin(PluginBase): | ||||
|  | ||||
|  | ||||
|     def generate_reference_ui_element(self): | ||||
|         self._builder           = Gtk.Builder() | ||||
|         self._builder = Gtk.Builder() | ||||
|         self._builder.add_from_file(self._GLADE_FILE) | ||||
|  | ||||
|         classes  = [self] | ||||
|         handlers = {} | ||||
|         for c in classes: | ||||
|             methods = None | ||||
|             try: | ||||
|                 methods = inspect.getmembers(c, predicate=inspect.ismethod) | ||||
|                 handlers.update(methods) | ||||
|             except Exception as e: | ||||
|                 print(repr(e)) | ||||
|  | ||||
|         self._builder.connect_signals(handlers) | ||||
|         self._connect_builder_signals(self, self._builder) | ||||
|  | ||||
|         self._archiver_dialogue  = self._builder.get_object("archiver_dialogue") | ||||
|         self._arc_command_buffer = self._builder.get_object("arc_command_buffer") | ||||
| @@ -113,7 +87,7 @@ class Plugin(PluginBase): | ||||
|         self._archiver_dialogue.hide() | ||||
|  | ||||
|     def archive_files(self, save_target, state): | ||||
|         paths       = [shlex.quote(p) for p in state.selected_files] | ||||
|         paths       = [shlex.quote(p) for p in state.uris] | ||||
|  | ||||
|         sItr, eItr  = self._arc_command_buffer.get_bounds() | ||||
|         pre_command = self._arc_command_buffer.get_text(sItr, eItr, False) | ||||
|   | ||||
| @@ -60,9 +60,11 @@ | ||||
|             <property name="can-focus">False</property> | ||||
|             <property name="orientation">vertical</property> | ||||
|             <child> | ||||
|               <object class="GtkLabel" id="current_dir_lbl"> | ||||
|               <object class="GtkLabel"> | ||||
|                 <property name="visible">True</property> | ||||
|                 <property name="can-focus">False</property> | ||||
|                 <property name="margin-left">5</property> | ||||
|                 <property name="margin-right">5</property> | ||||
|                 <property name="margin-start">5</property> | ||||
|                 <property name="margin-end">5</property> | ||||
|                 <property name="margin-top">5</property> | ||||
| @@ -76,6 +78,22 @@ | ||||
|                 <property name="position">0</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|             <child> | ||||
|               <object class="GtkLabel" id="current_dir_lbl"> | ||||
|                 <property name="visible">True</property> | ||||
|                 <property name="can-focus">False</property> | ||||
|                 <property name="margin-start">5</property> | ||||
|                 <property name="margin-end">5</property> | ||||
|                 <property name="margin-top">5</property> | ||||
|                 <property name="margin-bottom">5</property> | ||||
|                 <property name="justify">center</property> | ||||
|               </object> | ||||
|               <packing> | ||||
|                 <property name="expand">False</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">1</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|             <child> | ||||
|               <object class="GtkScrolledWindow"> | ||||
|                 <property name="visible">True</property> | ||||
| @@ -91,7 +109,7 @@ | ||||
|                       <object class="GtkTreeSelection"/> | ||||
|                     </child> | ||||
|                     <child> | ||||
|                       <object class="GtkTreeViewColumn"> | ||||
|                       <object class="GtkTreeViewColumn" id="du_tree_view"> | ||||
|                         <property name="title" translatable="yes">Disk Usage</property> | ||||
|                         <child> | ||||
|                           <object class="GtkCellRendererText"/> | ||||
| @@ -113,7 +131,7 @@ | ||||
|               <packing> | ||||
|                 <property name="expand">True</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">1</property> | ||||
|                 <property name="position">2</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|           </object> | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
| import os | ||||
| import subprocess | ||||
| import time | ||||
| import inspect | ||||
|  | ||||
| # Lib imports | ||||
| import gi | ||||
| @@ -29,25 +28,18 @@ class Plugin(PluginBase): | ||||
|  | ||||
|  | ||||
|     def run(self): | ||||
|         self._builder    = Gtk.Builder() | ||||
|         self._builder = Gtk.Builder() | ||||
|         self._builder.add_from_file(self._GLADE_FILE) | ||||
|         self._connect_builder_signals(self, self._builder) | ||||
|  | ||||
|         classes  = [self] | ||||
|         handlers = {} | ||||
|         for c in classes: | ||||
|             methods = None | ||||
|             try: | ||||
|                 methods = inspect.getmembers(c, predicate=inspect.ismethod) | ||||
|                 handlers.update(methods) | ||||
|             except Exception as e: | ||||
|                 print(repr(e)) | ||||
|  | ||||
|         self._builder.connect_signals(handlers) | ||||
|  | ||||
|         self._du_dialog = self._builder.get_object("du_dialog") | ||||
|         self._du_store  = self._builder.get_object("du_store") | ||||
|         self._du_dialog    = self._builder.get_object("du_dialog") | ||||
|         self._du_tree_view = self._builder.get_object("du_tree_view") | ||||
|         self._du_store     = self._builder.get_object("du_store") | ||||
|         self._current_dir_lbl  = self._builder.get_object("current_dir_lbl") | ||||
|  | ||||
|         self._current_dir_lbl.set_line_wrap(False) | ||||
|         self._current_dir_lbl.set_ellipsize(1)  # NONE = 0¶, START = 1¶, MIDDLE = 2¶, END = 3¶ | ||||
|  | ||||
|         self._event_system.subscribe("show_du_menu", self._show_du_menu) | ||||
|  | ||||
|     def generate_reference_ui_element(self): | ||||
| @@ -61,7 +53,9 @@ class Plugin(PluginBase): | ||||
|         self._event_system.emit("get_current_state") | ||||
|  | ||||
|     def _set_current_dir_lbl(self, widget=None, eve=None): | ||||
|         self._current_dir_lbl.set_label(f"Current Directory:\n{self._fm_state.tab.get_current_directory()}") | ||||
|         path = self._fm_state.tab.get_current_directory() | ||||
|         self._current_dir_lbl.set_label(path) | ||||
|         self._current_dir_lbl.set_tooltip_text(path) | ||||
|  | ||||
|     def _show_du_menu(self, widget=None, eve=None): | ||||
|         self._fm_state = None | ||||
| @@ -84,7 +78,7 @@ class Plugin(PluginBase): | ||||
|  | ||||
|         # NOTE: Last entry is curret dir. Move to top of list and pop off... | ||||
|         size, file = parts[-1].split("\t") | ||||
|         self._du_store.append([size, file.split("/")[-1]]) | ||||
|         self._du_tree_view.set_title(f"Disk Usage: {file.split('/')[-1]} ( {size} )") | ||||
|         parts.pop() | ||||
|  | ||||
|         for part in parts: | ||||
|   | ||||
| @@ -1,6 +1,5 @@ | ||||
| # Python imports | ||||
| import os | ||||
| import inspect | ||||
| import json | ||||
|  | ||||
| # Lib imports | ||||
| @@ -31,20 +30,9 @@ class Plugin(PluginBase): | ||||
|  | ||||
|  | ||||
|     def run(self): | ||||
|         self._builder          = Gtk.Builder() | ||||
|         self._builder = Gtk.Builder() | ||||
|         self._builder.add_from_file(self._GLADE_FILE) | ||||
|  | ||||
|         classes  = [self] | ||||
|         handlers = {} | ||||
|         for c in classes: | ||||
|             methods = None | ||||
|             try: | ||||
|                 methods = inspect.getmembers(c, predicate=inspect.ismethod) | ||||
|                 handlers.update(methods) | ||||
|             except Exception as e: | ||||
|                 print(repr(e)) | ||||
|  | ||||
|         self._builder.connect_signals(handlers) | ||||
|         self._connect_builder_signals(self, self._builder) | ||||
|  | ||||
|         self._favorites_dialog = self._builder.get_object("favorites_dialog") | ||||
|         self._favorites_store  = self._builder.get_object("favorites_store") | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| <?xml version="1.0" encoding="UTF-8"?> | ||||
| <!-- Generated with glade 3.38.2 --> | ||||
| <!-- Generated with glade 3.40.0 --> | ||||
| <interface> | ||||
|   <requires lib="gtk+" version="3.16"/> | ||||
|   <object class="GtkDialog" id="file_properties_dialog"> | ||||
| @@ -14,7 +14,6 @@ | ||||
|     <property name="skip-taskbar-hint">True</property> | ||||
|     <property name="skip-pager-hint">True</property> | ||||
|     <property name="gravity">center</property> | ||||
|     <signal name="response" handler="on_filePropertiesDlg_response" swapped="no"/> | ||||
|     <child internal-child="vbox"> | ||||
|       <object class="GtkBox" id="dialog_vbox"> | ||||
|         <property name="visible">True</property> | ||||
|   | ||||
| @@ -24,12 +24,6 @@ def threaded(fn): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start() | ||||
|     return wrapper | ||||
|  | ||||
| # NOTE: Threads WILL die with parent's destruction. | ||||
| def daemon_threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start() | ||||
|     return wrapper | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -51,10 +45,10 @@ class Plugin(PluginBase): | ||||
|     def __init__(self): | ||||
|         super().__init__() | ||||
|  | ||||
|         self.path               = os.path.dirname(os.path.realpath(__file__)) | ||||
|         self._GLADE_FILE        = f"{self.path}/file_properties.glade" | ||||
|         self.name               = "Properties"  # NOTE: Need to remove after establishing private bidirectional 1-1 message bus | ||||
|                                                 #       where self.name should not be needed for message comms | ||||
|         self.path               = os.path.dirname(os.path.realpath(__file__)) | ||||
|         self._GLADE_FILE        = f"{self.path}/file_properties.glade" | ||||
|  | ||||
|         self._properties_dialog = None | ||||
|         self._file_name         = None | ||||
| @@ -91,8 +85,9 @@ class Plugin(PluginBase): | ||||
|  | ||||
|  | ||||
|     def run(self): | ||||
|         self._builder           = Gtk.Builder() | ||||
|         self._builder = Gtk.Builder() | ||||
|         self._builder.add_from_file(self._GLADE_FILE) | ||||
|         self._connect_builder_signals(self, self._builder) | ||||
|  | ||||
|         self._properties_dialog = self._builder.get_object("file_properties_dialog") | ||||
|         self._file_name     = self._builder.get_object("file_name") | ||||
| @@ -123,8 +118,8 @@ class Plugin(PluginBase): | ||||
|         GLib.idle_add(self._process_changes, (state)) | ||||
|  | ||||
|     def _process_changes(self, state): | ||||
|         if len(state.selected_files) == 1: | ||||
|             uri  = state.selected_files[0] | ||||
|         if len(state.uris) == 1: | ||||
|             uri  = state.uris[0] | ||||
|             path = state.tab.get_current_directory() | ||||
|  | ||||
|  | ||||
|   | ||||
							
								
								
									
										3
									
								
								plugins/git_clone/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								plugins/git_clone/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| """ | ||||
|     Pligin Module | ||||
| """ | ||||
							
								
								
									
										3
									
								
								plugins/git_clone/__main__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								plugins/git_clone/__main__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| """ | ||||
|     Pligin Package | ||||
| """ | ||||
							
								
								
									
										12
									
								
								plugins/git_clone/manifest.json
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								plugins/git_clone/manifest.json
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | ||||
| { | ||||
|     "manifest": { | ||||
|         "name": "Git Clone", | ||||
|         "author": "ITDominator", | ||||
|         "version": "0.0.1", | ||||
|         "support": "", | ||||
|         "requests": { | ||||
|             "ui_target": "plugin_control_list", | ||||
|             "pass_fm_events": "true" | ||||
|         } | ||||
|     } | ||||
| } | ||||
							
								
								
									
										351
									
								
								plugins/git_clone/pexpect/ANSI.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										351
									
								
								plugins/git_clone/pexpect/ANSI.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,351 @@ | ||||
| '''This implements an ANSI (VT100) terminal emulator as a subclass of screen. | ||||
|  | ||||
| PEXPECT LICENSE | ||||
|  | ||||
|     This license is approved by the OSI and FSF as GPL-compatible. | ||||
|         http://opensource.org/licenses/isc-license.txt | ||||
|  | ||||
|     Copyright (c) 2012, Noah Spurrier <noah@noah.org> | ||||
|     PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY | ||||
|     PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE | ||||
|     COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. | ||||
|     THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||||
|     WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||||
|     MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||||
|     ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
|     WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
|     ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||||
|     OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
|  | ||||
| ''' | ||||
|  | ||||
| # references: | ||||
| #     http://en.wikipedia.org/wiki/ANSI_escape_code | ||||
| #     http://www.retards.org/terminals/vt102.html | ||||
| #     http://vt100.net/docs/vt102-ug/contents.html | ||||
| #     http://vt100.net/docs/vt220-rm/ | ||||
| #     http://www.termsys.demon.co.uk/vtansi.htm | ||||
|  | ||||
| from . import screen | ||||
| from . import FSM | ||||
| import string | ||||
|  | ||||
| # | ||||
| # The 'Do.*' functions are helper functions for the ANSI class. | ||||
| # | ||||
| def DoEmit (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.write_ch(fsm.input_symbol) | ||||
|  | ||||
| def DoStartNumber (fsm): | ||||
|  | ||||
|     fsm.memory.append (fsm.input_symbol) | ||||
|  | ||||
| def DoBuildNumber (fsm): | ||||
|  | ||||
|     ns = fsm.memory.pop() | ||||
|     ns = ns + fsm.input_symbol | ||||
|     fsm.memory.append (ns) | ||||
|  | ||||
| def DoBackOne (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_back () | ||||
|  | ||||
| def DoBack (fsm): | ||||
|  | ||||
|     count = int(fsm.memory.pop()) | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_back (count) | ||||
|  | ||||
| def DoDownOne (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_down () | ||||
|  | ||||
| def DoDown (fsm): | ||||
|  | ||||
|     count = int(fsm.memory.pop()) | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_down (count) | ||||
|  | ||||
| def DoForwardOne (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_forward () | ||||
|  | ||||
| def DoForward (fsm): | ||||
|  | ||||
|     count = int(fsm.memory.pop()) | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_forward (count) | ||||
|  | ||||
| def DoUpReverse (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_up_reverse() | ||||
|  | ||||
| def DoUpOne (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_up () | ||||
|  | ||||
| def DoUp (fsm): | ||||
|  | ||||
|     count = int(fsm.memory.pop()) | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_up (count) | ||||
|  | ||||
| def DoHome (fsm): | ||||
|  | ||||
|     c = int(fsm.memory.pop()) | ||||
|     r = int(fsm.memory.pop()) | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_home (r,c) | ||||
|  | ||||
| def DoHomeOrigin (fsm): | ||||
|  | ||||
|     c = 1 | ||||
|     r = 1 | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_home (r,c) | ||||
|  | ||||
| def DoEraseDown (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.erase_down() | ||||
|  | ||||
| def DoErase (fsm): | ||||
|  | ||||
|     arg = int(fsm.memory.pop()) | ||||
|     screen = fsm.memory[0] | ||||
|     if arg == 0: | ||||
|         screen.erase_down() | ||||
|     elif arg == 1: | ||||
|         screen.erase_up() | ||||
|     elif arg == 2: | ||||
|         screen.erase_screen() | ||||
|  | ||||
| def DoEraseEndOfLine (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.erase_end_of_line() | ||||
|  | ||||
| def DoEraseLine (fsm): | ||||
|  | ||||
|     arg = int(fsm.memory.pop()) | ||||
|     screen = fsm.memory[0] | ||||
|     if arg == 0: | ||||
|         screen.erase_end_of_line() | ||||
|     elif arg == 1: | ||||
|         screen.erase_start_of_line() | ||||
|     elif arg == 2: | ||||
|         screen.erase_line() | ||||
|  | ||||
| def DoEnableScroll (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.scroll_screen() | ||||
|  | ||||
| def DoCursorSave (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_save_attrs() | ||||
|  | ||||
| def DoCursorRestore (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     screen.cursor_restore_attrs() | ||||
|  | ||||
| def DoScrollRegion (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     r2 = int(fsm.memory.pop()) | ||||
|     r1 = int(fsm.memory.pop()) | ||||
|     screen.scroll_screen_rows (r1,r2) | ||||
|  | ||||
| def DoMode (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     mode = fsm.memory.pop() # Should be 4 | ||||
|     # screen.setReplaceMode () | ||||
|  | ||||
| def DoLog (fsm): | ||||
|  | ||||
|     screen = fsm.memory[0] | ||||
|     fsm.memory = [screen] | ||||
|     fout = open ('log', 'a') | ||||
|     fout.write (fsm.input_symbol + ',' + fsm.current_state + '\n') | ||||
|     fout.close() | ||||
|  | ||||
| class term (screen.screen): | ||||
|  | ||||
|     '''This class is an abstract, generic terminal. | ||||
|     This does nothing. This is a placeholder that | ||||
|     provides a common base class for other terminals | ||||
|     such as an ANSI terminal. ''' | ||||
|  | ||||
|     def __init__ (self, r=24, c=80, *args, **kwargs): | ||||
|  | ||||
|         screen.screen.__init__(self, r,c,*args,**kwargs) | ||||
|  | ||||
| class ANSI (term): | ||||
|     '''This class implements an ANSI (VT100) terminal. | ||||
|     It is a stream filter that recognizes ANSI terminal | ||||
|     escape sequences and maintains the state of a screen object. ''' | ||||
|  | ||||
|     def __init__ (self, r=24,c=80,*args,**kwargs): | ||||
|  | ||||
|         term.__init__(self,r,c,*args,**kwargs) | ||||
|  | ||||
|         #self.screen = screen (24,80) | ||||
|         self.state = FSM.FSM ('INIT',[self]) | ||||
|         self.state.set_default_transition (DoLog, 'INIT') | ||||
|         self.state.add_transition_any ('INIT', DoEmit, 'INIT') | ||||
|         self.state.add_transition ('\x1b', 'INIT', None, 'ESC') | ||||
|         self.state.add_transition_any ('ESC', DoLog, 'INIT') | ||||
|         self.state.add_transition ('(', 'ESC', None, 'G0SCS') | ||||
|         self.state.add_transition (')', 'ESC', None, 'G1SCS') | ||||
|         self.state.add_transition_list ('AB012', 'G0SCS', None, 'INIT') | ||||
|         self.state.add_transition_list ('AB012', 'G1SCS', None, 'INIT') | ||||
|         self.state.add_transition ('7', 'ESC', DoCursorSave, 'INIT') | ||||
|         self.state.add_transition ('8', 'ESC', DoCursorRestore, 'INIT') | ||||
|         self.state.add_transition ('M', 'ESC', DoUpReverse, 'INIT') | ||||
|         self.state.add_transition ('>', 'ESC', DoUpReverse, 'INIT') | ||||
|         self.state.add_transition ('<', 'ESC', DoUpReverse, 'INIT') | ||||
|         self.state.add_transition ('=', 'ESC', None, 'INIT') # Selects application keypad. | ||||
|         self.state.add_transition ('#', 'ESC', None, 'GRAPHICS_POUND') | ||||
|         self.state.add_transition_any ('GRAPHICS_POUND', None, 'INIT') | ||||
|         self.state.add_transition ('[', 'ESC', None, 'ELB') | ||||
|         # ELB means Escape Left Bracket. That is ^[[ | ||||
|         self.state.add_transition ('H', 'ELB', DoHomeOrigin, 'INIT') | ||||
|         self.state.add_transition ('D', 'ELB', DoBackOne, 'INIT') | ||||
|         self.state.add_transition ('B', 'ELB', DoDownOne, 'INIT') | ||||
|         self.state.add_transition ('C', 'ELB', DoForwardOne, 'INIT') | ||||
|         self.state.add_transition ('A', 'ELB', DoUpOne, 'INIT') | ||||
|         self.state.add_transition ('J', 'ELB', DoEraseDown, 'INIT') | ||||
|         self.state.add_transition ('K', 'ELB', DoEraseEndOfLine, 'INIT') | ||||
|         self.state.add_transition ('r', 'ELB', DoEnableScroll, 'INIT') | ||||
|         self.state.add_transition ('m', 'ELB', self.do_sgr, 'INIT') | ||||
|         self.state.add_transition ('?', 'ELB', None, 'MODECRAP') | ||||
|         self.state.add_transition_list (string.digits, 'ELB', DoStartNumber, 'NUMBER_1') | ||||
|         self.state.add_transition_list (string.digits, 'NUMBER_1', DoBuildNumber, 'NUMBER_1') | ||||
|         self.state.add_transition ('D', 'NUMBER_1', DoBack, 'INIT') | ||||
|         self.state.add_transition ('B', 'NUMBER_1', DoDown, 'INIT') | ||||
|         self.state.add_transition ('C', 'NUMBER_1', DoForward, 'INIT') | ||||
|         self.state.add_transition ('A', 'NUMBER_1', DoUp, 'INIT') | ||||
|         self.state.add_transition ('J', 'NUMBER_1', DoErase, 'INIT') | ||||
|         self.state.add_transition ('K', 'NUMBER_1', DoEraseLine, 'INIT') | ||||
|         self.state.add_transition ('l', 'NUMBER_1', DoMode, 'INIT') | ||||
|         ### It gets worse... the 'm' code can have infinite number of | ||||
|         ### number;number;number before it. I've never seen more than two, | ||||
|         ### but the specs say it's allowed. crap! | ||||
|         self.state.add_transition ('m', 'NUMBER_1', self.do_sgr, 'INIT') | ||||
|         ### LED control. Same implementation problem as 'm' code. | ||||
|         self.state.add_transition ('q', 'NUMBER_1', self.do_decsca, 'INIT') | ||||
|  | ||||
|         # \E[?47h switch to alternate screen | ||||
|         # \E[?47l restores to normal screen from alternate screen. | ||||
|         self.state.add_transition_list (string.digits, 'MODECRAP', DoStartNumber, 'MODECRAP_NUM') | ||||
|         self.state.add_transition_list (string.digits, 'MODECRAP_NUM', DoBuildNumber, 'MODECRAP_NUM') | ||||
|         self.state.add_transition ('l', 'MODECRAP_NUM', self.do_modecrap, 'INIT') | ||||
|         self.state.add_transition ('h', 'MODECRAP_NUM', self.do_modecrap, 'INIT') | ||||
|  | ||||
| #RM   Reset Mode                Esc [ Ps l                   none | ||||
|         self.state.add_transition (';', 'NUMBER_1', None, 'SEMICOLON') | ||||
|         self.state.add_transition_any ('SEMICOLON', DoLog, 'INIT') | ||||
|         self.state.add_transition_list (string.digits, 'SEMICOLON', DoStartNumber, 'NUMBER_2') | ||||
|         self.state.add_transition_list (string.digits, 'NUMBER_2', DoBuildNumber, 'NUMBER_2') | ||||
|         self.state.add_transition_any ('NUMBER_2', DoLog, 'INIT') | ||||
|         self.state.add_transition ('H', 'NUMBER_2', DoHome, 'INIT') | ||||
|         self.state.add_transition ('f', 'NUMBER_2', DoHome, 'INIT') | ||||
|         self.state.add_transition ('r', 'NUMBER_2', DoScrollRegion, 'INIT') | ||||
|         ### It gets worse... the 'm' code can have infinite number of | ||||
|         ### number;number;number before it. I've never seen more than two, | ||||
|         ### but the specs say it's allowed. crap! | ||||
|         self.state.add_transition ('m', 'NUMBER_2', self.do_sgr, 'INIT') | ||||
|         ### LED control. Same problem as 'm' code. | ||||
|         self.state.add_transition ('q', 'NUMBER_2', self.do_decsca, 'INIT') | ||||
|         self.state.add_transition (';', 'NUMBER_2', None, 'SEMICOLON_X') | ||||
|  | ||||
|         # Create a state for 'q' and 'm' which allows an infinite number of ignored numbers | ||||
|         self.state.add_transition_any ('SEMICOLON_X', DoLog, 'INIT') | ||||
|         self.state.add_transition_list (string.digits, 'SEMICOLON_X', DoStartNumber, 'NUMBER_X') | ||||
|         self.state.add_transition_list (string.digits, 'NUMBER_X', DoBuildNumber, 'NUMBER_X') | ||||
|         self.state.add_transition_any ('NUMBER_X', DoLog, 'INIT') | ||||
|         self.state.add_transition ('m', 'NUMBER_X', self.do_sgr, 'INIT') | ||||
|         self.state.add_transition ('q', 'NUMBER_X', self.do_decsca, 'INIT') | ||||
|         self.state.add_transition (';', 'NUMBER_X', None, 'SEMICOLON_X') | ||||
|  | ||||
|     def process (self, c): | ||||
|         """Process a single character. Called by :meth:`write`.""" | ||||
|         if isinstance(c, bytes): | ||||
|             c = self._decode(c) | ||||
|         self.state.process(c) | ||||
|  | ||||
|     def process_list (self, l): | ||||
|  | ||||
|         self.write(l) | ||||
|  | ||||
|     def write (self, s): | ||||
|         """Process text, writing it to the virtual screen while handling | ||||
|         ANSI escape codes. | ||||
|         """ | ||||
|         if isinstance(s, bytes): | ||||
|             s = self._decode(s) | ||||
|         for c in s: | ||||
|             self.process(c) | ||||
|  | ||||
|     def flush (self): | ||||
|         pass | ||||
|  | ||||
|     def write_ch (self, ch): | ||||
|         '''This puts a character at the current cursor position. The cursor | ||||
|         position is moved forward with wrap-around, but no scrolling is done if | ||||
|         the cursor hits the lower-right corner of the screen. ''' | ||||
|  | ||||
|         if isinstance(ch, bytes): | ||||
|             ch = self._decode(ch) | ||||
|  | ||||
|         #\r and \n both produce a call to cr() and lf(), respectively. | ||||
|         ch = ch[0] | ||||
|  | ||||
|         if ch == u'\r': | ||||
|             self.cr() | ||||
|             return | ||||
|         if ch == u'\n': | ||||
|             self.crlf() | ||||
|             return | ||||
|         if ch == chr(screen.BS): | ||||
|             self.cursor_back() | ||||
|             return | ||||
|         self.put_abs(self.cur_r, self.cur_c, ch) | ||||
|         old_r = self.cur_r | ||||
|         old_c = self.cur_c | ||||
|         self.cursor_forward() | ||||
|         if old_c == self.cur_c: | ||||
|             self.cursor_down() | ||||
|             if old_r != self.cur_r: | ||||
|                 self.cursor_home (self.cur_r, 1) | ||||
|             else: | ||||
|                 self.scroll_up () | ||||
|                 self.cursor_home (self.cur_r, 1) | ||||
|                 self.erase_line() | ||||
|  | ||||
|     def do_sgr (self, fsm): | ||||
|         '''Select Graphic Rendition, e.g. color. ''' | ||||
|         screen = fsm.memory[0] | ||||
|         fsm.memory = [screen] | ||||
|  | ||||
|     def do_decsca (self, fsm): | ||||
|         '''Select character protection attribute. ''' | ||||
|         screen = fsm.memory[0] | ||||
|         fsm.memory = [screen] | ||||
|  | ||||
|     def do_modecrap (self, fsm): | ||||
|         '''Handler for \x1b[?<number>h and \x1b[?<number>l. If anyone | ||||
|         wanted to actually use these, they'd need to add more states to the | ||||
|         FSM rather than just improve or override this method. ''' | ||||
|         screen = fsm.memory[0] | ||||
|         fsm.memory = [screen] | ||||
							
								
								
									
										334
									
								
								plugins/git_clone/pexpect/FSM.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										334
									
								
								plugins/git_clone/pexpect/FSM.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,334 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| '''This module implements a Finite State Machine (FSM). In addition to state | ||||
| this FSM also maintains a user defined "memory". So this FSM can be used as a | ||||
| Push-down Automata (PDA) since a PDA is a FSM + memory. | ||||
|  | ||||
| The following describes how the FSM works, but you will probably also need to | ||||
| see the example function to understand how the FSM is used in practice. | ||||
|  | ||||
| You define an FSM by building tables of transitions. For a given input symbol | ||||
| the process() method uses these tables to decide what action to call and what | ||||
| the next state will be. The FSM has a table of transitions that associate: | ||||
|  | ||||
|         (input_symbol, current_state) --> (action, next_state) | ||||
|  | ||||
| Where "action" is a function you define. The symbols and states can be any | ||||
| objects. You use the add_transition() and add_transition_list() methods to add | ||||
| to the transition table. The FSM also has a table of transitions that | ||||
| associate: | ||||
|  | ||||
|         (current_state) --> (action, next_state) | ||||
|  | ||||
| You use the add_transition_any() method to add to this transition table. The | ||||
| FSM also has one default transition that is not associated with any specific | ||||
| input_symbol or state. You use the set_default_transition() method to set the | ||||
| default transition. | ||||
|  | ||||
| When an action function is called it is passed a reference to the FSM. The | ||||
| action function may then access attributes of the FSM such as input_symbol, | ||||
| current_state, or "memory". The "memory" attribute can be any object that you | ||||
| want to pass along to the action functions. It is not used by the FSM itself. | ||||
| For parsing you would typically pass a list to be used as a stack. | ||||
|  | ||||
| The processing sequence is as follows. The process() method is given an | ||||
| input_symbol to process. The FSM will search the table of transitions that | ||||
| associate: | ||||
|  | ||||
|         (input_symbol, current_state) --> (action, next_state) | ||||
|  | ||||
| If the pair (input_symbol, current_state) is found then process() will call the | ||||
| associated action function and then set the current state to the next_state. | ||||
|  | ||||
| If the FSM cannot find a match for (input_symbol, current_state) it will then | ||||
| search the table of transitions that associate: | ||||
|  | ||||
|         (current_state) --> (action, next_state) | ||||
|  | ||||
| If the current_state is found then the process() method will call the | ||||
| associated action function and then set the current state to the next_state. | ||||
| Notice that this table lacks an input_symbol. It lets you define transitions | ||||
| for a current_state and ANY input_symbol. Hence, it is called the "any" table. | ||||
| Remember, it is always checked after first searching the table for a specific | ||||
| (input_symbol, current_state). | ||||
|  | ||||
| For the case where the FSM did not match either of the previous two cases the | ||||
| FSM will try to use the default transition. If the default transition is | ||||
| defined then the process() method will call the associated action function and | ||||
| then set the current state to the next_state. This lets you define a default | ||||
| transition as a catch-all case. You can think of it as an exception handler. | ||||
| There can be only one default transition. | ||||
|  | ||||
| Finally, if none of the previous cases are defined for an input_symbol and | ||||
| current_state then the FSM will raise an exception. This may be desirable, but | ||||
| you can always prevent this just by defining a default transition. | ||||
|  | ||||
| Noah Spurrier 20020822 | ||||
|  | ||||
| PEXPECT LICENSE | ||||
|  | ||||
|     This license is approved by the OSI and FSF as GPL-compatible. | ||||
|         http://opensource.org/licenses/isc-license.txt | ||||
|  | ||||
|     Copyright (c) 2012, Noah Spurrier <noah@noah.org> | ||||
|     PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY | ||||
|     PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE | ||||
|     COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. | ||||
|     THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||||
|     WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||||
|     MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||||
|     ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
|     WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
|     ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||||
|     OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
|  | ||||
| ''' | ||||
|  | ||||
class ExceptionFSM(Exception):

    '''Raised by FSM when no transition is defined for the current
    input symbol and state (see FSM.get_transition()).'''

    def __init__(self, value):
        # Forward to Exception.__init__ so that e.args, repr() and pickling
        # behave like a normal exception (the original skipped this, leaving
        # args empty). Matches ExceptionPexpect elsewhere in this package.
        super(ExceptionFSM, self).__init__(value)
        self.value = value

    def __str__(self):
        return 'ExceptionFSM: ' + str(self.value)
|  | ||||
class FSM:

    '''A Finite State Machine (FSM) carrying an optional user-supplied
    "memory" object, which makes it usable as a push-down automaton too.

    Transitions are registered with add_transition()/add_transition_list()
    (exact symbol+state), add_transition_any() (any symbol in a state) and
    set_default_transition() (global catch-all). process() consults them in
    that order of specificity.
    '''

    def __init__(self, initial_state, memory=None):

        '''Create the machine in *initial_state*. *memory* is stored on the
        instance untouched, purely for the benefit of action callbacks (a
        list used as a stack is typical). '''

        # (input_symbol, current_state) --> (action, next_state)
        self.state_transitions = {}
        # current_state --> (action, next_state), matched for ANY symbol.
        self.state_transitions_any = {}
        self.default_transition = None

        self.input_symbol = None
        self.initial_state = initial_state
        self.current_state = self.initial_state
        self.next_state = None
        self.action = None
        self.memory = memory

    def reset(self):

        '''Return to the initial state and forget the last input symbol. '''

        self.current_state = self.initial_state
        self.input_symbol = None

    def add_transition(self, input_symbol, state, action=None, next_state=None):

        '''Register (input_symbol, current_state) --> (action, next_state).

        *action* may be None (no callback; only the state changes) and
        *next_state* may be None (remain in *state*). Use
        add_transition_list() to register many symbols at once. '''

        self.state_transitions[(input_symbol, state)] = (
            action, state if next_state is None else next_state)

    def add_transition_list(self, list_input_symbols, state, action=None, next_state=None):

        '''Register the same transition for every symbol in
        *list_input_symbols* — any iterable of symbols, e.g. a string or
        string.digits / string.whitespace.

        *action* and *next_state* behave exactly as in add_transition(). '''

        for input_symbol in list_input_symbols:
            self.add_transition(input_symbol, state, action, next_state)

    def add_transition_any(self, state, action=None, next_state=None):

        '''Register (current_state) --> (action, next_state), matching ANY
        input symbol. These are consulted only after the exact
        (input_symbol, current_state) table fails to match.

        *action* and *next_state* behave exactly as in add_transition(). '''

        self.state_transitions_any[state] = (
            action, state if next_state is None else next_state)

    def set_default_transition(self, action, next_state):

        '''Install the catch-all transition used when neither the exact nor
        the "any" table matches — a final fall-through for errors and
        undefined states. Remove it again by setting the
        default_transition attribute to None. '''

        self.default_transition = (action, next_state)

    def get_transition(self, input_symbol, state):

        '''Return (action, next_state) for *input_symbol* in *state* without
        modifying the FSM. Normally called by process(), not directly.

        Lookup order, most to least specific:

        1. state_transitions for the exact (input_symbol, state) pair.
        2. state_transitions_any for the state with ANY symbol.
        3. The default transition, if one was set.
        4. Otherwise raise ExceptionFSM.
        '''

        transition = self.state_transitions.get((input_symbol, state))
        if transition is None:
            transition = self.state_transitions_any.get(state)
        if transition is None:
            transition = self.default_transition
        if transition is None:
            raise ExceptionFSM ('Transition is undefined: (%s, %s).' %
                (str(input_symbol), str(state)) )
        return transition

    def process(self, input_symbol):

        '''Consume one input symbol: look up the transition via
        get_transition(), invoke its action (if any) with this FSM as the
        argument, then move to the next state. Use process_list() to feed a
        whole sequence. '''

        self.input_symbol = input_symbol
        (self.action, self.next_state) = self.get_transition(
            input_symbol, self.current_state)
        if self.action is not None:
            self.action(self)
        self.current_state = self.next_state
        self.next_state = None

    def process_list(self, input_symbols):

        '''Feed every element of *input_symbols* (any iterable, including a
        string) through process() in order. '''

        for symbol in input_symbols:
            self.process(symbol)
|  | ||||
| ############################################################################## | ||||
| # The following is an example that demonstrates the use of the FSM class to | ||||
| # process an RPN expression. Run this module from the command line. You will | ||||
| # get a prompt > for input. Enter an RPN Expression. Numbers may be integers. | ||||
| # Operators are * / + - Use the = sign to evaluate and print the expression. | ||||
| # For example: | ||||
| # | ||||
| #    167 3 2 2 * * * 1 - = | ||||
| # | ||||
| # will print: | ||||
| # | ||||
| #    2003 | ||||
| ############################################################################## | ||||
|  | ||||
| import sys | ||||
| import string | ||||
|  | ||||
| PY3 = (sys.version_info[0] >= 3) | ||||
|  | ||||
| # | ||||
| # These define the actions. | ||||
| # Note that "memory" is a list being used as a stack. | ||||
| # | ||||
|  | ||||
def BeginBuildNumber(fsm):
    '''Start a new number: push the first digit onto the stack as a string.'''
    fsm.memory += [fsm.input_symbol]
|  | ||||
def BuildNumber(fsm):
    '''Append the current digit to the number string on top of the stack.'''
    fsm.memory[-1] = fsm.memory[-1] + fsm.input_symbol
|  | ||||
def EndBuildNumber(fsm):
    '''Replace the digit string on top of the stack with its integer value.'''
    fsm.memory[-1] = int(fsm.memory[-1])
|  | ||||
def DoOperator(fsm):
    '''Pop the two topmost operands and push the result of applying the
    operator held in fsm.input_symbol. Unknown operators consume the
    operands and push nothing.'''
    rhs = fsm.memory.pop()
    lhs = fsm.memory.pop()
    handlers = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
        '/': lambda a, b: a / b,
    }
    handler = handlers.get(fsm.input_symbol)
    if handler is not None:
        fsm.memory.append(handler(lhs, rhs))
|  | ||||
def DoEqual(fsm):
    '''Pop the finished result off the stack and display it.'''
    result = fsm.memory.pop()
    print(result)
|  | ||||
def Error(fsm):
    '''Default handler: report the input symbol that failed to match.'''
    print('That does not compute.')
    print(fsm.input_symbol)
|  | ||||
def main():

    '''Demo entry point: wire up the RPN-calculator transitions and evaluate
    one line read interactively from the user.

    States are plain strings ('INIT', 'BUILDING_NUMBER') purely for
    readability; the FSM accepts any hashable objects as states. '''

    f = FSM ('INIT', [])
    # Any symbol with no specific transition resets to INIT via Error.
    f.set_default_transition (Error, 'INIT')
    # Symbols without a more specific INIT entry (e.g. whitespace between
    # tokens) are silently ignored.
    f.add_transition_any  ('INIT', None, 'INIT')
    f.add_transition      ('=',               'INIT',            DoEqual,          'INIT')
    f.add_transition_list (string.digits,     'INIT',            BeginBuildNumber, 'BUILDING_NUMBER')
    f.add_transition_list (string.digits,     'BUILDING_NUMBER', BuildNumber,      'BUILDING_NUMBER')
    f.add_transition_list (string.whitespace, 'BUILDING_NUMBER', EndBuildNumber,   'INIT')
    f.add_transition_list ('+-*/',            'INIT',            DoOperator,       'INIT')

    print()
    print('Enter an RPN Expression.')
    print('Numbers may be integers. Operators are * / + -')
    print('Use the = sign to evaluate and print the expression.')
    print('For example: ')
    print('    167 3 2 2 * * * 1 - =')
    # raw_input exists only on Python 2; the conditional expression keeps the
    # name unevaluated on Python 3.
    inputstr = (input if PY3 else raw_input)('> ')  # analysis:ignore
    f.process_list(inputstr)
|  | ||||
|  | ||||
# Run the interactive RPN-calculator example when executed as a script.
if __name__ == '__main__':
    main()
							
								
								
									
										20
									
								
								plugins/git_clone/pexpect/LICENSE
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										20
									
								
								plugins/git_clone/pexpect/LICENSE
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,20 @@ | ||||
| ISC LICENSE | ||||
|  | ||||
|     This license is approved by the OSI and FSF as GPL-compatible. | ||||
|         http://opensource.org/licenses/isc-license.txt | ||||
|  | ||||
|     Copyright (c) 2013-2014, Pexpect development team | ||||
|     Copyright (c) 2012, Noah Spurrier <noah@noah.org> | ||||
|  | ||||
|     Permission to use, copy, modify, and/or distribute this software for any | ||||
|     purpose with or without fee is hereby granted, provided that the above | ||||
|     copyright notice and this permission notice appear in all copies. | ||||
|      | ||||
|     THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||||
|     WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||||
|     MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||||
|     ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
|     WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
|     ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||||
|     OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
|  | ||||
							
								
								
									
										85
									
								
								plugins/git_clone/pexpect/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										85
									
								
								plugins/git_clone/pexpect/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,85 @@ | ||||
| '''Pexpect is a Python module for spawning child applications and controlling | ||||
| them automatically. Pexpect can be used for automating interactive applications | ||||
| such as ssh, ftp, passwd, telnet, etc. It can be used to a automate setup | ||||
| scripts for duplicating software package installations on different servers. It | ||||
| can be used for automated software testing. Pexpect is in the spirit of Don | ||||
| Libes' Expect, but Pexpect is pure Python. Other Expect-like modules for Python | ||||
| require TCL and Expect or require C extensions to be compiled. Pexpect does not | ||||
| use C, Expect, or TCL extensions. It should work on any platform that supports | ||||
| the standard Python pty module. The Pexpect interface focuses on ease of use so | ||||
| that simple tasks are easy. | ||||
|  | ||||
| There are two main interfaces to the Pexpect system; these are the function, | ||||
| run() and the class, spawn. The spawn class is more powerful. The run() | ||||
| function is simpler than spawn, and is good for quickly calling program. When | ||||
| you call the run() function it executes a given program and then returns the | ||||
| output. This is a handy replacement for os.system(). | ||||
|  | ||||
| For example:: | ||||
|  | ||||
|     pexpect.run('ls -la') | ||||
|  | ||||
| The spawn class is the more powerful interface to the Pexpect system. You can | ||||
| use this to spawn a child program then interact with it by sending input and | ||||
| expecting responses (waiting for patterns in the child's output). | ||||
|  | ||||
| For example:: | ||||
|  | ||||
|     child = pexpect.spawn('scp foo user@example.com:.') | ||||
|     child.expect('Password:') | ||||
|     child.sendline(mypassword) | ||||
|  | ||||
| This works even for commands that ask for passwords or other input outside of | ||||
| the normal stdio streams. For example, ssh reads input directly from the TTY | ||||
| device which bypasses stdin. | ||||
|  | ||||
| Credits: Noah Spurrier, Richard Holden, Marco Molteni, Kimberley Burchett, | ||||
| Robert Stone, Hartmut Goebel, Chad Schroeder, Erick Tryzelaar, Dave Kirby, Ids | ||||
| vander Molen, George Todd, Noel Taylor, Nicolas D. Cesar, Alexander Gattin, | ||||
| Jacques-Etienne Baudoux, Geoffrey Marshall, Francisco Lourenco, Glen Mabey, | ||||
| Karthik Gurusamy, Fernando Perez, Corey Minyard, Jon Cohen, Guillaume | ||||
| Chazarain, Andrew Ryan, Nick Craig-Wood, Andrew Stone, Jorgen Grahn, John | ||||
| Spiegel, Jan Grant, and Shane Kerr. Let me know if I forgot anyone. | ||||
|  | ||||
| Pexpect is free, open source, and all that good stuff. | ||||
| http://pexpect.sourceforge.net/ | ||||
|  | ||||
| PEXPECT LICENSE | ||||
|  | ||||
|     This license is approved by the OSI and FSF as GPL-compatible. | ||||
|         http://opensource.org/licenses/isc-license.txt | ||||
|  | ||||
|     Copyright (c) 2012, Noah Spurrier <noah@noah.org> | ||||
|     PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY | ||||
|     PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE | ||||
|     COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. | ||||
|     THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||||
|     WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||||
|     MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||||
|     ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
|     WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
|     ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||||
|     OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
|  | ||||
| ''' | ||||
|  | ||||
| import sys | ||||
| PY3 = (sys.version_info[0] >= 3) | ||||
|  | ||||
| from .exceptions import ExceptionPexpect, EOF, TIMEOUT | ||||
| from .utils import split_command_line, which, is_executable_file | ||||
| from .expect import Expecter, searcher_re, searcher_string | ||||
|  | ||||
| if sys.platform != 'win32': | ||||
|     # On Unix, these are available at the top level for backwards compatibility | ||||
|     from .pty_spawn import spawn, spawnu | ||||
|     from .run import run, runu | ||||
|  | ||||
| __version__ = '4.8.0' | ||||
| __revision__ = '' | ||||
| __all__ = ['ExceptionPexpect', 'EOF', 'TIMEOUT', 'spawn', 'spawnu', 'run', 'runu', | ||||
|            'which', 'split_command_line', '__version__', '__revision__'] | ||||
|  | ||||
|  | ||||
|  | ||||
| # vim: set shiftround expandtab tabstop=4 shiftwidth=4 ft=python autoindent : | ||||
							
								
								
									
										103
									
								
								plugins/git_clone/pexpect/_async.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										103
									
								
								plugins/git_clone/pexpect/_async.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,103 @@ | ||||
| import asyncio | ||||
| import errno | ||||
| import signal | ||||
|  | ||||
| from pexpect import EOF | ||||
|  | ||||
async def expect_async(expecter, timeout=None):
    '''Asynchronously resolve *expecter* against data arriving on the
    spawn's pty, without blocking the event loop.

    Returns the index of the matched pattern (or the index produced by the
    expecter's timeout/EOF handling). Native ``async def``/``await`` is used
    because the generator-based ``@asyncio.coroutine`` decorator was
    deprecated in Python 3.8 and removed in 3.11.
    '''
    # Data already read and buffered may satisfy the search without touching
    # the event loop at all.
    idx = expecter.existing_data()
    if idx is not None:
        return idx
    if not expecter.spawn.async_pw_transport:
        # First async use of this spawn: hook its fd into the event loop.
        pw = PatternWaiter()
        pw.set_expecter(expecter)
        transport, pw = await asyncio.get_event_loop().connect_read_pipe(
            lambda: pw, expecter.spawn)
        expecter.spawn.async_pw_transport = pw, transport
    else:
        # Reuse the transport created earlier and re-arm it for this expect.
        pw, transport = expecter.spawn.async_pw_transport
        pw.set_expecter(expecter)
        transport.resume_reading()
    try:
        return await asyncio.wait_for(pw.fut, timeout)
    except asyncio.TimeoutError as e:
        # Stop reading until the next expect call; report via the expecter.
        transport.pause_reading()
        return expecter.timeout(e)
|  | ||||
async def repl_run_command_async(repl, cmdlines, timeout=-1):
    '''Asynchronously feed the lines of one command to a REPL and collect
    its output up to the next primary prompt.

    Raises ValueError if the REPL answers with its continuation prompt,
    i.e. the input was incomplete. Native ``async def``/``await`` replaces
    the generator-based ``@asyncio.coroutine`` decorator, which was
    deprecated in Python 3.8 and removed in 3.11.
    '''
    res = []
    repl.child.sendline(cmdlines[0])
    for line in cmdlines[1:]:
        await repl._expect_prompt(timeout=timeout, async_=True)
        res.append(repl.child.before)
        repl.child.sendline(line)

    # Command was fully submitted, now wait for the next prompt
    prompt_idx = await repl._expect_prompt(timeout=timeout, async_=True)
    if prompt_idx == 1:
        # We got the continuation prompt - command was incomplete
        repl.child.kill(signal.SIGINT)
        await repl._expect_prompt(timeout=1, async_=True)
        raise ValueError("Continuation prompt found - input was incomplete:")
    return u''.join(res + [repl.child.before])
|  | ||||
class PatternWaiter(asyncio.Protocol):
    '''asyncio protocol that feeds data read from the spawn's pty into an
    Expecter and resolves a Future once a pattern match, EOF, or error is
    seen.'''

    transport = None

    def set_expecter(self, expecter):
        # A fresh Future per expect call; found()/error() resolve it.
        self.expecter = expecter
        self.fut = asyncio.Future()

    def found(self, result):
        # Resolve the pending expect and stop reading until re-armed.
        if not self.fut.done():
            self.fut.set_result(result)
            self.transport.pause_reading()

    def error(self, exc):
        # Fail the pending expect and stop reading until re-armed.
        if not self.fut.done():
            self.fut.set_exception(exc)
            self.transport.pause_reading()

    def connection_made(self, transport):
        self.transport = transport

    def data_received(self, data):
        spawn = self.expecter.spawn
        s = spawn._decoder.decode(data)
        spawn._log(s, 'read')

        if self.fut.done():
            # A result is already set for this expect; keep buffering the
            # extra output so the next expect call can see it.
            spawn._before.write(s)
            spawn._buffer.write(s)
            return

        try:
            index = self.expecter.new_data(s)
            if index is not None:
                # Found a match
                self.found(index)
        except Exception as e:
            # Let the expecter record the failure, then surface it through
            # the Future so the awaiting caller sees the exception.
            self.expecter.errored()
            self.error(e)

    def eof_received(self):
        # N.B. If this gets called, async will close the pipe (the spawn object)
        # for us
        try:
            self.expecter.spawn.flag_eof = True
            index = self.expecter.eof()
        except EOF as e:
            self.error(e)
        else:
            self.found(index)

    def connection_lost(self, exc):
        if isinstance(exc, OSError) and exc.errno == errno.EIO:
            # We may get here without eof_received being called, e.g on Linux
            self.eof_received()
        elif exc is not None:
            self.error(exc)
							
								
								
									
										16
									
								
								plugins/git_clone/pexpect/bashrc.sh
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										16
									
								
								plugins/git_clone/pexpect/bashrc.sh
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,16 @@ | ||||
| # Different platforms have different names for the systemwide bashrc | ||||
| if [[ -f /etc/bashrc ]]; then | ||||
|   source /etc/bashrc | ||||
| fi | ||||
| if [[ -f /etc/bash.bashrc ]]; then | ||||
|   source /etc/bash.bashrc | ||||
| fi | ||||
| if [[ -f ~/.bashrc ]]; then | ||||
|   source ~/.bashrc | ||||
| fi | ||||
|  | ||||
| # Reset PS1 so pexpect can find it | ||||
| PS1="$" | ||||
|  | ||||
| # Unset PROMPT_COMMAND, so that it can't change PS1 to something unexpected. | ||||
| unset PROMPT_COMMAND | ||||
							
								
								
									
										35
									
								
								plugins/git_clone/pexpect/exceptions.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								plugins/git_clone/pexpect/exceptions.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | ||||
| """Exception classes used by Pexpect""" | ||||
|  | ||||
| import traceback | ||||
| import sys | ||||
|  | ||||
class ExceptionPexpect(Exception):
    '''Root of the Pexpect exception hierarchy.
    '''

    def __init__(self, value):
        super(ExceptionPexpect, self).__init__(value)
        self.value = value

    def __str__(self):
        return str(self.value)

    def get_trace(self):
        '''Return an abbreviated stack trace containing only frames that
        belong to the caller — frames from inside the Pexpect module itself
        are filtered out. '''

        frames = traceback.extract_tb(sys.exc_info()[2])
        relevant = [frame for frame in frames
                    if ('pexpect/__init__' not in frame[0])
                    and ('pexpect/expect' not in frame[0])]
        return ''.join(traceback.format_list(relevant))
|  | ||||
|  | ||||
class EOF(ExceptionPexpect):
    '''Raised when EOF is read from a child.
    This usually means the child has exited and no more output will come.'''
|  | ||||
|  | ||||
class TIMEOUT(ExceptionPexpect):
    '''Raised when a read waits longer than the timeout for the expected
    output to appear. '''
							
								
								
									
										371
									
								
								plugins/git_clone/pexpect/expect.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										371
									
								
								plugins/git_clone/pexpect/expect.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,371 @@ | ||||
| import time | ||||
|  | ||||
| from .exceptions import EOF, TIMEOUT | ||||
|  | ||||
class Expecter(object):
    # Drives a single expect() call: repeatedly reads from `spawn`, hands
    # the data to a searcher (searcher_string / searcher_re) and maintains
    # the spawn's before/after/match bookkeeping.
    #
    # Two buffers on the spawn are involved:
    #   spawn._before - everything read since the last match
    #   spawn._buffer - only the trailing window that still needs searching
    def __init__(self, spawn, searcher, searchwindowsize=-1):
        self.spawn = spawn
        self.searcher = searcher
        # A value of -1 means to use the figure from spawn, which should
        # be None or a positive number.
        if searchwindowsize == -1:
            searchwindowsize = spawn.searchwindowsize
        self.searchwindowsize = searchwindowsize
        # For plain-string searchers the longest needle bounds how many
        # trailing bytes of old data could still take part in a match.
        self.lookback = None
        if hasattr(searcher, 'longest_string'):
            self.lookback = searcher.longest_string

    def do_search(self, window, freshlen):
        # Search `window` (whose last `freshlen` bytes are new data) and,
        # on a match, update the spawn's before/after/match state.
        # Returns the matched pattern index, or None when nothing matched.
        spawn = self.spawn
        searcher = self.searcher
        if freshlen > len(window):
            freshlen = len(window)
        index = searcher.search(window, freshlen, self.searchwindowsize)
        if index >= 0:
            # Reset both buffers to hold only the data after the match.
            spawn._buffer = spawn.buffer_type()
            spawn._buffer.write(window[searcher.end:])
            spawn.before = spawn._before.getvalue()[
                0:-(len(window) - searcher.start)]
            spawn._before = spawn.buffer_type()
            spawn._before.write(window[searcher.end:])
            spawn.after = window[searcher.start:searcher.end]
            spawn.match = searcher.match
            spawn.match_index = index
            # Found a match
            return index
        elif self.searchwindowsize or self.lookback:
            # No match: trim the search buffer so it never grows beyond
            # what the next search could possibly need.
            maintain = self.searchwindowsize or self.lookback
            if spawn._buffer.tell() > maintain:
                spawn._buffer = spawn.buffer_type()
                spawn._buffer.write(window[-maintain:])

    def existing_data(self):
        # First call from a new call to expect_loop or expect_async.
        # self.searchwindowsize may have changed.
        # Treat all data as fresh.
        spawn = self.spawn
        before_len = spawn._before.tell()
        buf_len = spawn._buffer.tell()
        freshlen = before_len
        if before_len > buf_len:
            # _buffer is stale relative to _before; rebuild the window
            # from _before according to the current searchwindowsize.
            if not self.searchwindowsize:
                spawn._buffer = spawn.buffer_type()
                window = spawn._before.getvalue()
                spawn._buffer.write(window)
            elif buf_len < self.searchwindowsize:
                spawn._buffer = spawn.buffer_type()
                spawn._before.seek(
                    max(0, before_len - self.searchwindowsize))
                window = spawn._before.read()
                spawn._buffer.write(window)
            else:
                spawn._buffer.seek(max(0, buf_len - self.searchwindowsize))
                window = spawn._buffer.read()
        else:
            if self.searchwindowsize:
                spawn._buffer.seek(max(0, buf_len - self.searchwindowsize))
                window = spawn._buffer.read()
            else:
                window = spawn._buffer.getvalue()
        return self.do_search(window, freshlen)

    def new_data(self, data):
        # A subsequent call, after a call to existing_data.
        spawn = self.spawn
        freshlen = len(data)
        spawn._before.write(data)
        if not self.searchwindowsize:
            if self.lookback:
                # search lookback + new data.
                old_len = spawn._buffer.tell()
                spawn._buffer.write(data)
                spawn._buffer.seek(max(0, old_len - self.lookback))
                window = spawn._buffer.read()
            else:
                # copy the whole buffer (really slow for large datasets).
                spawn._buffer.write(data)
                window = spawn.buffer
        else:
            if len(data) >= self.searchwindowsize or not spawn._buffer.tell():
                # New data alone fills (or is) the window; old buffered
                # data can be discarded entirely.
                window = data[-self.searchwindowsize:]
                spawn._buffer = spawn.buffer_type()
                spawn._buffer.write(window[-self.searchwindowsize:])
            else:
                spawn._buffer.write(data)
                new_len = spawn._buffer.tell()
                spawn._buffer.seek(max(0, new_len - self.searchwindowsize))
                window = spawn._buffer.read()
        return self.do_search(window, freshlen)

    def eof(self, err=None):
        # Handle EOF from the child: either return the caller's EOF
        # pattern index, or re-raise as an EOF exception with context.
        spawn = self.spawn

        spawn.before = spawn._before.getvalue()
        spawn._buffer = spawn.buffer_type()
        spawn._before = spawn.buffer_type()
        spawn.after = EOF
        index = self.searcher.eof_index
        if index >= 0:
            spawn.match = EOF
            spawn.match_index = index
            return index
        else:
            spawn.match = None
            spawn.match_index = None
            msg = str(spawn)
            msg += '\nsearcher: %s' % self.searcher
            if err is not None:
                msg = str(err) + '\n' + msg

            exc = EOF(msg)
            exc.__cause__ = None # in Python 3.x we can use "raise exc from None"
            raise exc

    def timeout(self, err=None):
        # Handle a read timeout: either return the caller's TIMEOUT
        # pattern index, or re-raise as a TIMEOUT exception with context.
        spawn = self.spawn

        spawn.before = spawn._before.getvalue()
        spawn.after = TIMEOUT
        index = self.searcher.timeout_index
        if index >= 0:
            spawn.match = TIMEOUT
            spawn.match_index = index
            return index
        else:
            spawn.match = None
            spawn.match_index = None
            msg = str(spawn)
            msg += '\nsearcher: %s' % self.searcher
            if err is not None:
                msg = str(err) + '\n' + msg

            exc = TIMEOUT(msg)
            exc.__cause__ = None    # in Python 3.x we can use "raise exc from None"
            raise exc

    def errored(self):
        # An unexpected exception occurred; expose what was read so far
        # and clear the match state before the exception propagates.
        spawn = self.spawn
        spawn.before = spawn._before.getvalue()
        spawn.after = None
        spawn.match = None
        spawn.match_index = None

    def expect_loop(self, timeout=-1):
        """Blocking expect"""
        spawn = self.spawn

        if timeout is not None:
            end_time = time.time() + timeout

        try:
            idx = self.existing_data()
            if idx is not None:
                return idx
            while True:
                # No match at this point
                if (timeout is not None) and (timeout < 0):
                    return self.timeout()
                # Still have time left, so read more data
                incoming = spawn.read_nonblocking(spawn.maxread, timeout)
                if self.spawn.delayafterread is not None:
                    time.sleep(self.spawn.delayafterread)
                idx = self.new_data(incoming)
                # Keep reading until exception or return.
                if idx is not None:
                    return idx
                if timeout is not None:
                    timeout = end_time - time.time()
        except EOF as e:
            return self.eof(e)
        except TIMEOUT as e:
            return self.timeout(e)
        except:
            self.errored()
            raise
|  | ||||
|  | ||||
class searcher_string(object):
    '''This is a plain string search helper for the spawn.expect_any() method.
    This helper class is for speed. For more powerful regex patterns
    see the helper class, searcher_re.

    Attributes:

        eof_index     - index of EOF, or -1
        timeout_index - index of TIMEOUT, or -1

    After a successful match by the search() method the following attributes
    are available:

        start - index into the buffer, first byte of match
        end   - index into the buffer, first byte after match
        match - the matching string itself

    '''

    def __init__(self, strings):
        '''This creates an instance of searcher_string. This argument 'strings'
        may be a list; a sequence of strings; or the EOF or TIMEOUT types. '''

        self.eof_index = -1
        self.timeout_index = -1
        # _strings holds (original_index, needle) pairs; EOF/TIMEOUT are
        # recorded by position only, not searched for in the buffer.
        self._strings = []
        # longest_string bounds how much old data can matter to a match
        # (used as the Expecter's lookback).
        self.longest_string = 0
        for n, s in enumerate(strings):
            if s is EOF:
                self.eof_index = n
                continue
            if s is TIMEOUT:
                self.timeout_index = n
                continue
            self._strings.append((n, s))
            if len(s) > self.longest_string:
                self.longest_string = len(s)

    def __str__(self):
        '''This returns a human-readable string that represents the state of
        the object.'''

        ss = [(ns[0], '    %d: %r' % ns) for ns in self._strings]
        ss.append((-1, 'searcher_string:'))
        if self.eof_index >= 0:
            ss.append((self.eof_index, '    %d: EOF' % self.eof_index))
        if self.timeout_index >= 0:
            ss.append((self.timeout_index,
                '    %d: TIMEOUT' % self.timeout_index))
        # Sort by original pattern index so the header (-1) comes first.
        ss.sort()
        ss = list(zip(*ss))[1]
        return '\n'.join(ss)

    def search(self, buffer, freshlen, searchwindowsize=None):
        '''This searches 'buffer' for the first occurrence of one of the search
        strings.  'freshlen' must indicate the number of bytes at the end of
        'buffer' which have not been searched before. It helps to avoid
        searching the same, possibly big, buffer over and over again.

        See class spawn for the 'searchwindowsize' argument.

        If there is a match this returns the index of that string, and sets
        'start', 'end' and 'match'. Otherwise, this returns -1. '''

        first_match = None

        # 'freshlen' helps a lot here. Further optimizations could
        # possibly include:
        #
        # using something like the Boyer-Moore Fast String Searching
        # Algorithm; pre-compiling the search through a list of
        # strings into something that can scan the input once to
        # search for all N strings; realize that if we search for
        # ['bar', 'baz'] and the input is '...foo' we need not bother
        # rescanning until we've read three more bytes.
        #
        # Sadly, I don't know enough about this interesting topic. /grahn

        for index, s in self._strings:
            if searchwindowsize is None:
                # the match, if any, can only be in the fresh data,
                # or at the very end of the old data
                offset = -(freshlen + len(s))
            else:
                # better obey searchwindowsize
                offset = -searchwindowsize
            # A negative offset makes str.find start that many bytes from
            # the end of the buffer.
            n = buffer.find(s, offset)
            if n >= 0 and (first_match is None or n < first_match):
                first_match = n
                best_index, best_match = index, s
        if first_match is None:
            return -1
        self.match = best_match
        self.start = first_match
        self.end = self.start + len(self.match)
        return best_index
|  | ||||
|  | ||||
class searcher_re(object):
    '''This is regular expression string search helper for the
    spawn.expect_any() method. This helper class is for powerful
    pattern matching. For speed, see the helper class, searcher_string.

    Attributes:

        eof_index     - index of EOF, or -1
        timeout_index - index of TIMEOUT, or -1

    After a successful match by the search() method the following attributes
    are available:

        start - index into the buffer, first byte of match
        end   - index into the buffer, first byte after match
        match - the re.match object returned by a successful re.search

    '''

    def __init__(self, patterns):
        '''This creates an instance that searches for 'patterns' Where
        'patterns' may be a list or other sequence of compiled regular
        expressions, or the EOF or TIMEOUT types.'''

        self.eof_index = -1
        self.timeout_index = -1
        # _searches holds (original_index, compiled_pattern) pairs;
        # EOF/TIMEOUT are recorded by position only.
        self._searches = []
        for n, s in enumerate(patterns):
            if s is EOF:
                self.eof_index = n
                continue
            if s is TIMEOUT:
                self.timeout_index = n
                continue
            self._searches.append((n, s))

    def __str__(self):
        '''This returns a human-readable string that represents the state of
        the object.'''

        ss = list()
        for n, s in self._searches:
            ss.append((n, '    %d: re.compile(%r)' % (n, s.pattern)))
        ss.append((-1, 'searcher_re:'))
        if self.eof_index >= 0:
            ss.append((self.eof_index, '    %d: EOF' % self.eof_index))
        if self.timeout_index >= 0:
            ss.append((self.timeout_index, '    %d: TIMEOUT' %
                self.timeout_index))
        # Sort by original pattern index so the header (-1) comes first.
        ss.sort()
        ss = list(zip(*ss))[1]
        return '\n'.join(ss)

    def search(self, buffer, freshlen, searchwindowsize=None):
        '''This searches 'buffer' for the first occurrence of one of the regular
        expressions. 'freshlen' must indicate the number of bytes at the end of
        'buffer' which have not been searched before.

        See class spawn for the 'searchwindowsize' argument.

        If there is a match this returns the index of that string, and sets
        'start', 'end' and 'match'. Otherwise, returns -1.'''

        first_match = None
        # 'freshlen' doesn't help here -- we cannot predict the
        # length of a match, and the re module provides no help.
        if searchwindowsize is None:
            searchstart = 0
        else:
            searchstart = max(0, len(buffer) - searchwindowsize)
        for index, s in self._searches:
            match = s.search(buffer, searchstart)
            if match is None:
                continue
            n = match.start()
            if first_match is None or n < first_match:
                first_match = n
                the_match = match
                best_index = index
        if first_match is None:
            return -1
        self.start = first_match
        self.match = the_match
        self.end = self.match.end()
        return best_index
							
								
								
									
										148
									
								
								plugins/git_clone/pexpect/fdpexpect.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										148
									
								
								plugins/git_clone/pexpect/fdpexpect.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,148 @@ | ||||
| '''This is like pexpect, but it will work with any file descriptor that you | ||||
| pass it. You are responsible for opening and close the file descriptor. | ||||
| This allows you to use Pexpect with sockets and named pipes (FIFOs). | ||||
|  | ||||
| PEXPECT LICENSE | ||||
|  | ||||
|     This license is approved by the OSI and FSF as GPL-compatible. | ||||
|         http://opensource.org/licenses/isc-license.txt | ||||
|  | ||||
|     Copyright (c) 2012, Noah Spurrier <noah@noah.org> | ||||
|     PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY | ||||
|     PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE | ||||
|     COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. | ||||
|     THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||||
|     WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||||
|     MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||||
|     ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
|     WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
|     ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||||
|     OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
|  | ||||
| ''' | ||||
|  | ||||
| from .spawnbase import SpawnBase | ||||
| from .exceptions import ExceptionPexpect, TIMEOUT | ||||
| from .utils import select_ignore_interrupts, poll_ignore_interrupts | ||||
| import os | ||||
|  | ||||
| __all__ = ['fdspawn'] | ||||
|  | ||||
class fdspawn(SpawnBase):
    '''This is like pexpect.spawn but allows you to supply your own open file
    descriptor. For example, you could use it to read through a file looking
    for patterns, or to control a modem or serial device. '''

    def __init__(self, fd, args=None, timeout=30, maxread=2000, searchwindowsize=None,
                 logfile=None, encoding=None, codec_errors='strict', use_poll=False):
        '''This takes a file descriptor (an int) or an object that support the
        fileno() method (returning an int). All Python file-like objects
        support fileno(). '''

        if type(fd) != type(0) and hasattr(fd, 'fileno'):
            fd = fd.fileno()

        if type(fd) != type(0):
            raise ExceptionPexpect('The fd argument is not an int. If this is a command string then maybe you want to use pexpect.spawn.')

        try:  # make sure fd is a valid file descriptor
            os.fstat(fd)
        except OSError:
            raise ExceptionPexpect('The fd argument is not a valid file descriptor.')

        self.args = None
        self.command = None
        SpawnBase.__init__(self, timeout, maxread, searchwindowsize, logfile,
                           encoding=encoding, codec_errors=codec_errors)
        self.child_fd = fd
        # The fd was supplied by the caller, so we never own (close) it
        # implicitly; close() must be called explicitly.
        self.own_fd = False
        self.closed = False
        self.name = '<file descriptor %d>' % fd
        self.use_poll = use_poll

    def close(self):
        """Close the file descriptor.

        Calling this method a second time does nothing, but if the file
        descriptor was closed elsewhere, :class:`OSError` will be raised.
        """
        if self.child_fd == -1:
            return

        self.flush()
        os.close(self.child_fd)
        self.child_fd = -1
        self.closed = True

    def isalive(self):
        '''This checks if the file descriptor is still valid. If :func:`os.fstat`
        does not raise an exception then we assume it is alive. '''

        if self.child_fd == -1:
            return False
        try:
            os.fstat(self.child_fd)
            return True
        except OSError:
            # fstat raises OSError (EBADF) for a closed/invalid descriptor.
            # Catch only that, so KeyboardInterrupt/SystemExit still propagate.
            return False

    def terminate(self, force=False):  # pragma: no cover
        '''Deprecated and invalid. Just raises an exception.'''
        raise ExceptionPexpect('This method is not valid for file descriptors.')

    # These four methods are left around for backwards compatibility, but not
    # documented as part of fdpexpect. You're encouraged to use os.write
    # directly.
    def send(self, s):
        "Write to fd, return number of bytes written"
        s = self._coerce_send_string(s)
        self._log(s, 'send')

        b = self._encoder.encode(s, final=False)
        return os.write(self.child_fd, b)

    def sendline(self, s):
        "Write to fd with trailing newline, return number of bytes written"
        s = self._coerce_send_string(s)
        return self.send(s + self.linesep)

    def write(self, s):
        "Write to fd, return None"
        self.send(s)

    def writelines(self, sequence):
        "Call self.write() for each item in sequence"
        for s in sequence:
            self.write(s)

    def read_nonblocking(self, size=1, timeout=-1):
        """
        Read from the file descriptor and return the result as a string.

        The read_nonblocking method of :class:`SpawnBase` assumes that a call
        to os.read will not block (timeout parameter is ignored). This is not
        the case for POSIX file-like objects such as sockets and serial ports.

        Use :func:`select.select`, timeout is implemented conditionally for
        POSIX systems.

        :param int size: Read at most *size* bytes.
        :param int timeout: Wait timeout seconds for file descriptor to be
            ready to read. When -1 (default), use self.timeout. When 0, poll.
        :return: String containing the bytes read
        """
        if os.name == 'posix':
            if timeout == -1:
                timeout = self.timeout
            rlist = [self.child_fd]
            wlist = []
            xlist = []
            if self.use_poll:
                rlist = poll_ignore_interrupts(rlist, timeout)
            else:
                rlist, wlist, xlist = select_ignore_interrupts(
                    rlist, wlist, xlist, timeout
                )
            # Descriptor not ready within the window -> timed out.
            if self.child_fd not in rlist:
                raise TIMEOUT('Timeout exceeded.')
        return super(fdspawn, self).read_nonblocking(size)
							
								
								
									
										188
									
								
								plugins/git_clone/pexpect/popen_spawn.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										188
									
								
								plugins/git_clone/pexpect/popen_spawn.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,188 @@ | ||||
| """Provides an interface like pexpect.spawn interface using subprocess.Popen | ||||
| """ | ||||
| import os | ||||
| import threading | ||||
| import subprocess | ||||
| import sys | ||||
| import time | ||||
| import signal | ||||
| import shlex | ||||
|  | ||||
| try: | ||||
|     from queue import Queue, Empty  # Python 3 | ||||
| except ImportError: | ||||
|     from Queue import Queue, Empty  # Python 2 | ||||
|  | ||||
| from .spawnbase import SpawnBase, PY3 | ||||
| from .exceptions import EOF | ||||
| from .utils import string_types | ||||
|  | ||||
class PopenSpawn(SpawnBase):
    """A pexpect.spawn-like interface built on subprocess.Popen pipes.

    Output is drained from the child's stdout/stderr by a background
    daemon thread into a queue, which read_nonblocking consumes.
    """

    def __init__(self, cmd, timeout=30, maxread=2000, searchwindowsize=None,
                 logfile=None, cwd=None, env=None, encoding=None,
                 codec_errors='strict', preexec_fn=None):
        super(PopenSpawn, self).__init__(timeout=timeout, maxread=maxread,
                searchwindowsize=searchwindowsize, logfile=logfile,
                encoding=encoding, codec_errors=codec_errors)

        # Note that `SpawnBase` initializes `self.crlf` to `\r\n`
        # because the default behaviour for a PTY is to convert
        # incoming LF to `\r\n` (see the `onlcr` flag and
        # https://stackoverflow.com/a/35887657/5397009). Here we set
        # it to `os.linesep` because that is what the spawned
        # application outputs by default and `popen` doesn't translate
        # anything.
        if encoding is None:
            self.crlf = os.linesep.encode("ascii")
        else:
            self.crlf = self.string_type(os.linesep)

        kwargs = dict(bufsize=0, stdin=subprocess.PIPE,
                      stderr=subprocess.STDOUT, stdout=subprocess.PIPE,
                      cwd=cwd, preexec_fn=preexec_fn, env=env)

        if sys.platform == 'win32':
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            kwargs['startupinfo'] = startupinfo
            kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP

        if isinstance(cmd, string_types) and sys.platform != 'win32':
            cmd = shlex.split(cmd, posix=os.name == 'posix')

        self.proc = subprocess.Popen(cmd, **kwargs)
        self.pid = self.proc.pid
        self.closed = False
        self._buf = self.string_type()

        self._read_queue = Queue()
        self._read_thread = threading.Thread(target=self._read_incoming)
        # Thread.setDaemon() is deprecated since Python 3.10; assign the
        # attribute directly instead.
        self._read_thread.daemon = True
        self._read_thread.start()

    # Set once the reader thread has put its EOF sentinel on the queue
    # and we have consumed it.
    _read_reached_eof = False

    def read_nonblocking(self, size, timeout):
        """Return up to *size* chars of buffered/queued output.

        Raises EOF once the child's output is exhausted. A timeout of -1
        means self.timeout; None means wait (effectively) forever.
        """
        buf = self._buf
        if self._read_reached_eof:
            # We have already finished reading. Use up any buffered data,
            # then raise EOF
            if buf:
                self._buf = buf[size:]
                return buf[:size]
            else:
                self.flag_eof = True
                raise EOF('End Of File (EOF).')

        if timeout == -1:
            timeout = self.timeout
        elif timeout is None:
            timeout = 1e6

        t0 = time.time()
        while (time.time() - t0) < timeout and size and len(buf) < size:
            try:
                incoming = self._read_queue.get_nowait()
            except Empty:
                break
            else:
                if incoming is None:
                    # None is the reader thread's EOF sentinel.
                    self._read_reached_eof = True
                    break

                buf += self._decoder.decode(incoming, final=False)

        r, self._buf = buf[:size], buf[size:]

        self._log(r, 'read')
        return r

    def _read_incoming(self):
        """Run in a thread to move output from a pipe to a queue."""
        fileno = self.proc.stdout.fileno()
        while 1:
            buf = b''
            try:
                buf = os.read(fileno, 1024)
            except OSError as e:
                self._log(e, 'read')

            if not buf:
                # This indicates we have reached EOF
                self._read_queue.put(None)
                return

            self._read_queue.put(buf)

    def write(self, s):
        '''This is similar to send() except that there is no return value.
        '''
        self.send(s)

    def writelines(self, sequence):
        '''This calls write() for each element in the sequence.

        The sequence can be any iterable object producing strings, typically a
        list of strings. This does not add line separators. There is no return
        value.
        '''
        for s in sequence:
            self.send(s)

    def send(self, s):
        '''Send data to the subprocess' stdin.

        Returns the number of bytes written.
        '''
        s = self._coerce_send_string(s)
        self._log(s, 'send')

        b = self._encoder.encode(s, final=False)
        if PY3:
            return self.proc.stdin.write(b)
        else:
            # On Python 2, .write() returns None, so we return the length of
            # bytes written ourselves. This assumes they all got written.
            self.proc.stdin.write(b)
            return len(b)

    def sendline(self, s=''):
        '''Wraps send(), sending string ``s`` to child process, with os.linesep
        automatically appended. Returns number of bytes written. '''

        n = self.send(s)
        return n + self.send(self.linesep)

    def wait(self):
        '''Wait for the subprocess to finish.

        Returns the exit code.
        '''
        status = self.proc.wait()
        if status >= 0:
            self.exitstatus = status
            self.signalstatus = None
        else:
            # Negative Popen return codes encode the terminating signal.
            self.exitstatus = None
            self.signalstatus = -status
        self.terminated = True
        return status

    def kill(self, sig):
        '''Sends a Unix signal to the subprocess.

        Use constants from the :mod:`signal` module to specify which signal.
        '''
        if sys.platform == 'win32':
            # Windows only supports the two console control events; map
            # everything else to SIGTERM.
            if sig in [signal.SIGINT, signal.CTRL_C_EVENT]:
                sig = signal.CTRL_C_EVENT
            elif sig in [signal.SIGBREAK, signal.CTRL_BREAK_EVENT]:
                sig = signal.CTRL_BREAK_EVENT
            else:
                sig = signal.SIGTERM

        os.kill(self.proc.pid, sig)

    def sendeof(self):
        '''Closes the stdin pipe from the writing end.'''
        self.proc.stdin.close()
							
								
								
									
										860
									
								
								plugins/git_clone/pexpect/pty_spawn.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										860
									
								
								plugins/git_clone/pexpect/pty_spawn.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,860 @@ | ||||
| import os | ||||
| import sys | ||||
| import time | ||||
| import pty | ||||
| import tty | ||||
| import errno | ||||
| import signal | ||||
| from contextlib import contextmanager | ||||
|  | ||||
| from .ptyprocess import ptyprocess | ||||
| from .ptyprocess.ptyprocess import use_native_pty_fork | ||||
|  | ||||
| from .exceptions import ExceptionPexpect, EOF, TIMEOUT | ||||
| from .spawnbase import SpawnBase | ||||
| from .utils import ( | ||||
|     which, split_command_line, select_ignore_interrupts, poll_ignore_interrupts | ||||
| ) | ||||
|  | ||||
| @contextmanager | ||||
| def _wrap_ptyprocess_err(): | ||||
|     """Turn ptyprocess errors into our own ExceptionPexpect errors""" | ||||
|     try: | ||||
|         yield | ||||
|     except ptyprocess.PtyProcessError as e: | ||||
|         raise ExceptionPexpect(*e.args) | ||||
|  | ||||
| PY3 = (sys.version_info[0] >= 3) | ||||
|  | ||||
| class spawn(SpawnBase): | ||||
|     '''This is the main class interface for Pexpect. Use this class to start | ||||
|     and control child applications. ''' | ||||
|  | ||||
|     # This is purely informational now - changing it has no effect | ||||
|     use_native_pty_fork = use_native_pty_fork | ||||
|  | ||||
    def __init__(self, command, args=[], timeout=30, maxread=2000,
                 searchwindowsize=None, logfile=None, cwd=None, env=None,
                 ignore_sighup=False, echo=True, preexec_fn=None,
                 encoding=None, codec_errors='strict', dimensions=None,
                 use_poll=False):
        '''This is the constructor. The command parameter may be a string that
        includes a command and any arguments to the command. For example::

            child = pexpect.spawn('/usr/bin/ftp')
            child = pexpect.spawn('/usr/bin/ssh user@example.com')
            child = pexpect.spawn('ls -latr /tmp')

        You may also construct it with a list of arguments like so::

            child = pexpect.spawn('/usr/bin/ftp', [])
            child = pexpect.spawn('/usr/bin/ssh', ['user@example.com'])
            child = pexpect.spawn('ls', ['-latr', '/tmp'])

        After this the child application will be created and will be ready to
        talk to. For normal use, see expect() and send() and sendline().

        Remember that Pexpect does NOT interpret shell meta characters such as
        redirect, pipe, or wild cards (``>``, ``|``, or ``*``). This is a
        common mistake.  If you want to run a command and pipe it through
        another command then you must also start a shell. For example::

            child = pexpect.spawn('/bin/bash -c "ls -l | grep LOG > logs.txt"')
            child.expect(pexpect.EOF)

        The second form of spawn (where you pass a list of arguments) is useful
        in situations where you wish to spawn a command and pass it its own
        argument list. This can make syntax more clear. For example, the
        following is equivalent to the previous example::

            shell_cmd = 'ls -l | grep LOG > logs.txt'
            child = pexpect.spawn('/bin/bash', ['-c', shell_cmd])
            child.expect(pexpect.EOF)

        The maxread attribute sets the read buffer size. This is maximum number
        of bytes that Pexpect will try to read from a TTY at one time. Setting
        the maxread size to 1 will turn off buffering. Setting the maxread
        value higher may help performance in cases where large amounts of
        output are read back from the child. This feature is useful in
        conjunction with searchwindowsize.

        When the keyword argument *searchwindowsize* is None (default), the
        full buffer is searched at each iteration of receiving incoming data.
        The default number of bytes scanned at each iteration is very large
        and may be reduced to collaterally reduce search cost.  After
        :meth:`~.expect` returns, the full buffer attribute remains up to
        size *maxread* irrespective of *searchwindowsize* value.

        When the keyword argument ``timeout`` is specified as a number,
        (default: *30*), then :class:`TIMEOUT` will be raised after the value
        specified has elapsed, in seconds, for any of the :meth:`~.expect`
        family of method calls.  When None, TIMEOUT will not be raised, and
        :meth:`~.expect` may block indefinitely until match.


        The logfile member turns on or off logging. All input and output will
        be copied to the given file object. Set logfile to None to stop
        logging. This is the default. Set logfile to sys.stdout to echo
        everything to standard output. The logfile is flushed after each write.

        Example log input and output to a file::

            child = pexpect.spawn('some_command')
            fout = open('mylog.txt','wb')
            child.logfile = fout

        Example log to stdout::

            # In Python 2:
            child = pexpect.spawn('some_command')
            child.logfile = sys.stdout

            # In Python 3, we'll use the ``encoding`` argument to decode data
            # from the subprocess and handle it as unicode:
            child = pexpect.spawn('some_command', encoding='utf-8')
            child.logfile = sys.stdout

        The logfile_read and logfile_send members can be used to separately log
        the input from the child and output sent to the child. Sometimes you
        don't want to see everything you write to the child. You only want to
        log what the child sends back. For example::

            child = pexpect.spawn('some_command')
            child.logfile_read = sys.stdout

        You will need to pass an encoding to spawn in the above code if you are
        using Python 3.

        To separately log output sent to the child use logfile_send::

            child.logfile_send = fout

        If ``ignore_sighup`` is True, the child process will ignore SIGHUP
        signals. The default is False from Pexpect 4.0, meaning that SIGHUP
        will be handled normally by the child.

        The delaybeforesend helps overcome a weird behavior that many users
        were experiencing. The typical problem was that a user would expect() a
        "Password:" prompt and then immediately call sendline() to send the
        password. The user would then see that their password was echoed back
        to them. Passwords don't normally echo. The problem is caused by the
        fact that most applications print out the "Password" prompt and then
        turn off stdin echo, but if you send your password before the
        application turned off echo, then you get your password echoed.
        Normally this wouldn't be a problem when interacting with a human at a
        real keyboard. If you introduce a slight delay just before writing then
        this seems to clear up the problem. This was such a common problem for
        many users that I decided that the default pexpect behavior should be
        to sleep just before writing to the child application. 1/20th of a
        second (50 ms) seems to be enough to clear up the problem. You can set
        delaybeforesend to None to return to the old behavior.

        Note that spawn is clever about finding commands on your path.
        It uses the same logic that "which" uses to find executables.

        If you wish to get the exit status of the child you must call the
        close() method. The exit or signal status of the child will be stored
        in self.exitstatus or self.signalstatus. If the child exited normally
        then exitstatus will store the exit return code and signalstatus will
        be None. If the child was terminated abnormally with a signal then
        signalstatus will store the signal value and exitstatus will be None::

            child = pexpect.spawn('some_command')
            child.close()
            print(child.exitstatus, child.signalstatus)

        If you need more detail you can also read the self.status member which
        stores the status returned by os.waitpid. You can interpret this using
        os.WIFEXITED/os.WEXITSTATUS or os.WIFSIGNALED/os.TERMSIG.

        The echo attribute may be set to False to disable echoing of input.
        As a pseudo-terminal, all input echoed by the "keyboard" (send()
        or sendline()) will be repeated to output.  For many cases, it is
        not desirable to have echo enabled, and it may be later disabled
        using setecho(False) followed by waitnoecho().  However, for some
        platforms such as Solaris, this is not possible, and should be
        disabled immediately on spawn.

        If preexec_fn is given, it will be called in the child process before
        launching the given command. This is useful to e.g. reset inherited
        signal handlers.

        The dimensions attribute specifies the size of the pseudo-terminal as
        seen by the subprocess, and is specified as a two-entry tuple (rows,
        columns). If this is unspecified, the defaults in ptyprocess will apply.

        The use_poll attribute enables using select.poll() over select.select()
        for socket handling. This is handy if your system could have > 1024 fds
        '''
        # NOTE(review): the mutable default ``args=[]`` is benign here because
        # _spawn() copies it (``args[:]``) and never mutates the original.
        super(spawn, self).__init__(timeout=timeout, maxread=maxread, searchwindowsize=searchwindowsize,
                                    logfile=logfile, encoding=encoding, codec_errors=codec_errors)
        # Standard stream descriptor numbers, re-exported from the pty module.
        self.STDIN_FILENO = pty.STDIN_FILENO
        self.STDOUT_FILENO = pty.STDOUT_FILENO
        self.STDERR_FILENO = pty.STDERR_FILENO
        # How many trailing characters of buffer/before to show in __str__().
        self.str_last_chars = 100
        self.cwd = cwd
        self.env = env
        self.echo = echo
        self.ignore_sighup = ignore_sighup
        # Irix needs a minimum timeout in read_nonblocking(); detect it once.
        self.__irix_hack = sys.platform.lower().startswith('irix')
        if command is None:
            # Factory mode: attributes are filled in later by the caller.
            self.command = None
            self.args = None
            self.name = '<pexpect factory incomplete>'
        else:
            self._spawn(command, args, preexec_fn, dimensions)
        self.use_poll = use_poll
|  | ||||
|     def __str__(self): | ||||
|         '''This returns a human-readable string that represents the state of | ||||
|         the object. ''' | ||||
|  | ||||
|         s = [] | ||||
|         s.append(repr(self)) | ||||
|         s.append('command: ' + str(self.command)) | ||||
|         s.append('args: %r' % (self.args,)) | ||||
|         s.append('buffer (last %s chars): %r' % (self.str_last_chars,self.buffer[-self.str_last_chars:])) | ||||
|         s.append('before (last %s chars): %r' % (self.str_last_chars,self.before[-self.str_last_chars:] if self.before else '')) | ||||
|         s.append('after: %r' % (self.after,)) | ||||
|         s.append('match: %r' % (self.match,)) | ||||
|         s.append('match_index: ' + str(self.match_index)) | ||||
|         s.append('exitstatus: ' + str(self.exitstatus)) | ||||
|         if hasattr(self, 'ptyproc'): | ||||
|             s.append('flag_eof: ' + str(self.flag_eof)) | ||||
|         s.append('pid: ' + str(self.pid)) | ||||
|         s.append('child_fd: ' + str(self.child_fd)) | ||||
|         s.append('closed: ' + str(self.closed)) | ||||
|         s.append('timeout: ' + str(self.timeout)) | ||||
|         s.append('delimiter: ' + str(self.delimiter)) | ||||
|         s.append('logfile: ' + str(self.logfile)) | ||||
|         s.append('logfile_read: ' + str(self.logfile_read)) | ||||
|         s.append('logfile_send: ' + str(self.logfile_send)) | ||||
|         s.append('maxread: ' + str(self.maxread)) | ||||
|         s.append('ignorecase: ' + str(self.ignorecase)) | ||||
|         s.append('searchwindowsize: ' + str(self.searchwindowsize)) | ||||
|         s.append('delaybeforesend: ' + str(self.delaybeforesend)) | ||||
|         s.append('delayafterclose: ' + str(self.delayafterclose)) | ||||
|         s.append('delayafterterminate: ' + str(self.delayafterterminate)) | ||||
|         return '\n'.join(s) | ||||
|  | ||||
    def _spawn(self, command, args=[], preexec_fn=None, dimensions=None):
        '''This starts the given command in a child process. This does all the
        fork/exec type of stuff for a pty. This is called by __init__. If args
        is empty then command will be parsed (split on spaces) and args will be
        set to parsed arguments. '''

        # The pid and child_fd of this object get set by this method.
        # Note that it is difficult for this method to fail.
        # You cannot detect if the child process cannot start.
        # So the only way you can tell if the child process started
        # or not is to try to read from the file descriptor. If you get
        # EOF immediately then it means that the child is already dead.
        # That may not necessarily be bad because you may have spawned a child
        # that performs some task; creates no stdout output; and then dies.

        # If command is an int type then it may represent a file descriptor.
        if isinstance(command, type(0)):
            raise ExceptionPexpect('Command is an int type. ' +
                    'If this is a file descriptor then maybe you want to ' +
                    'use fdpexpect.fdspawn which takes an existing ' +
                    'file descriptor instead of a command string.')

        if not isinstance(args, type([])):
            raise TypeError('The argument, args, must be a list.')

        if args == []:
            # No explicit argv given: split `command` shell-style into argv.
            self.args = split_command_line(command)
            self.command = self.args[0]
        else:
            # Make a shallow copy of the args list.
            self.args = args[:]
            self.args.insert(0, command)
            self.command = command

        # Resolve the executable on PATH, same logic as which(1).
        command_with_path = which(self.command, env=self.env)
        if command_with_path is None:
            raise ExceptionPexpect('The command was not found or was not ' +
                    'executable: %s.' % self.command)
        self.command = command_with_path
        self.args[0] = self.command

        self.name = '<' + ' '.join(self.args) + '>'

        assert self.pid is None, 'The pid member must be None.'
        assert self.command is not None, 'The command member must not be None.'

        kwargs = {'echo': self.echo, 'preexec_fn': preexec_fn}
        if self.ignore_sighup:
            def preexec_wrapper():
                "Set SIGHUP to be ignored, then call the real preexec_fn"
                signal.signal(signal.SIGHUP, signal.SIG_IGN)
                if preexec_fn is not None:
                    preexec_fn()
            kwargs['preexec_fn'] = preexec_wrapper

        if dimensions is not None:
            kwargs['dimensions'] = dimensions

        if self.encoding is not None:
            # Encode command line using the specified encoding
            self.args = [a if isinstance(a, bytes) else a.encode(self.encoding)
                         for a in self.args]

        # The actual fork/exec happens inside ptyprocess (see _spawnpty).
        self.ptyproc = self._spawnpty(self.args, env=self.env,
                                     cwd=self.cwd, **kwargs)

        self.pid = self.ptyproc.pid
        self.child_fd = self.ptyproc.fd


        self.terminated = False
        self.closed = False
|  | ||||
|     def _spawnpty(self, args, **kwargs): | ||||
|         '''Spawn a pty and return an instance of PtyProcess.''' | ||||
|         return ptyprocess.PtyProcess.spawn(args, **kwargs) | ||||
|  | ||||
    def close(self, force=True):
        '''This closes the connection with the child application. Note that
        calling close() more than once is valid. This emulates standard Python
        behavior with files. Set force to True if you want to make sure that
        the child is terminated (SIGKILL is sent if the child ignores SIGHUP
        and SIGINT). '''

        # Flush pending data before tearing down the pty.
        self.flush()
        with _wrap_ptyprocess_err():
            # PtyProcessError may be raised if it is not possible to terminate
            # the child.
            self.ptyproc.close(force=force)
        self.isalive()  # Update exit status from ptyproc
        # Mirror file semantics: invalid fd, closed flag set.
        self.child_fd = -1
        self.closed = True
|  | ||||
|     def isatty(self): | ||||
|         '''This returns True if the file descriptor is open and connected to a | ||||
|         tty(-like) device, else False. | ||||
|  | ||||
|         On SVR4-style platforms implementing streams, such as SunOS and HP-UX, | ||||
|         the child pty may not appear as a terminal device.  This means | ||||
|         methods such as setecho(), setwinsize(), getwinsize() may raise an | ||||
|         IOError. ''' | ||||
|  | ||||
|         return os.isatty(self.child_fd) | ||||
|  | ||||
|     def waitnoecho(self, timeout=-1): | ||||
|         '''This waits until the terminal ECHO flag is set False. This returns | ||||
|         True if the echo mode is off. This returns False if the ECHO flag was | ||||
|         not set False before the timeout. This can be used to detect when the | ||||
|         child is waiting for a password. Usually a child application will turn | ||||
|         off echo mode when it is waiting for the user to enter a password. For | ||||
|         example, instead of expecting the "password:" prompt you can wait for | ||||
|         the child to set ECHO off:: | ||||
|  | ||||
|             p = pexpect.spawn('ssh user@example.com') | ||||
|             p.waitnoecho() | ||||
|             p.sendline(mypassword) | ||||
|  | ||||
|         If timeout==-1 then this method will use the value in self.timeout. | ||||
|         If timeout==None then this method to block until ECHO flag is False. | ||||
|         ''' | ||||
|  | ||||
|         if timeout == -1: | ||||
|             timeout = self.timeout | ||||
|         if timeout is not None: | ||||
|             end_time = time.time() + timeout | ||||
|         while True: | ||||
|             if not self.getecho(): | ||||
|                 return True | ||||
|             if timeout < 0 and timeout is not None: | ||||
|                 return False | ||||
|             if timeout is not None: | ||||
|                 timeout = end_time - time.time() | ||||
|             time.sleep(0.1) | ||||
|  | ||||
|     def getecho(self): | ||||
|         '''This returns the terminal echo mode. This returns True if echo is | ||||
|         on or False if echo is off. Child applications that are expecting you | ||||
|         to enter a password often set ECHO False. See waitnoecho(). | ||||
|  | ||||
|         Not supported on platforms where ``isatty()`` returns False.  ''' | ||||
|         return self.ptyproc.getecho() | ||||
|  | ||||
|     def setecho(self, state): | ||||
|         '''This sets the terminal echo mode on or off. Note that anything the | ||||
|         child sent before the echo will be lost, so you should be sure that | ||||
|         your input buffer is empty before you call setecho(). For example, the | ||||
|         following will work as expected:: | ||||
|  | ||||
|             p = pexpect.spawn('cat') # Echo is on by default. | ||||
|             p.sendline('1234') # We expect see this twice from the child... | ||||
|             p.expect(['1234']) # ... once from the tty echo... | ||||
|             p.expect(['1234']) # ... and again from cat itself. | ||||
|             p.setecho(False) # Turn off tty echo | ||||
|             p.sendline('abcd') # We will set this only once (echoed by cat). | ||||
|             p.sendline('wxyz') # We will set this only once (echoed by cat) | ||||
|             p.expect(['abcd']) | ||||
|             p.expect(['wxyz']) | ||||
|  | ||||
|         The following WILL NOT WORK because the lines sent before the setecho | ||||
|         will be lost:: | ||||
|  | ||||
|             p = pexpect.spawn('cat') | ||||
|             p.sendline('1234') | ||||
|             p.setecho(False) # Turn off tty echo | ||||
|             p.sendline('abcd') # We will set this only once (echoed by cat). | ||||
|             p.sendline('wxyz') # We will set this only once (echoed by cat) | ||||
|             p.expect(['1234']) | ||||
|             p.expect(['1234']) | ||||
|             p.expect(['abcd']) | ||||
|             p.expect(['wxyz']) | ||||
|  | ||||
|  | ||||
|         Not supported on platforms where ``isatty()`` returns False. | ||||
|         ''' | ||||
|         return self.ptyproc.setecho(state) | ||||
|  | ||||
    def read_nonblocking(self, size=1, timeout=-1):
        '''This reads at most size characters from the child application. It
        includes a timeout. If the read does not complete within the timeout
        period then a TIMEOUT exception is raised. If the end of file is read
        then an EOF exception will be raised.  If a logfile is specified, a
        copy is written to that log.

        If timeout is None then the read may block indefinitely.
        If timeout is -1 then the self.timeout value is used. If timeout is 0
        then the child is polled and if there is no data immediately ready
        then this will raise a TIMEOUT exception.

        The timeout refers only to the amount of time to read at least one
        character. This is not affected by the 'size' parameter, so if you call
        read_nonblocking(size=100, timeout=30) and only one character is
        available right away then one character will be returned immediately.
        It will not wait for 30 seconds for another 99 characters to come in.

        On the other hand, if there are bytes available to read immediately,
        all those bytes will be read (up to the buffer size). So, if the
        buffer size is 1 megabyte and there is 1 megabyte of data available
        to read, the buffer will be filled, regardless of timeout.

        This is a wrapper around os.read(). It uses select.select() or
        select.poll() to implement the timeout. '''

        if self.closed:
            raise ValueError('I/O operation on closed file.')

        # Bind a uniform select(timeout) helper over either poll() or
        # select(), so the rest of the method is backend-agnostic.
        if self.use_poll:
            def select(timeout):
                return poll_ignore_interrupts([self.child_fd], timeout)
        else:
            def select(timeout):
                return select_ignore_interrupts([self.child_fd], [], [], timeout)[0]

        # If there is data available to read right now, read as much as
        # we can. We do this to increase performance if there are a lot
        # of bytes to be read. This also avoids calling isalive() too
        # often. See also:
        # * https://github.com/pexpect/pexpect/pull/304
        # * http://trac.sagemath.org/ticket/10295
        if select(0):
            try:
                incoming = super(spawn, self).read_nonblocking(size)
            except EOF:
                # Maybe the child is dead: update some attributes in that case
                self.isalive()
                raise
            while len(incoming) < size and select(0):
                try:
                    incoming += super(spawn, self).read_nonblocking(size - len(incoming))
                except EOF:
                    # Maybe the child is dead: update some attributes in that case
                    self.isalive()
                    # Don't raise EOF, just return what we read so far.
                    return incoming
            return incoming

        # -1 is the sentinel for "use the instance-wide default timeout".
        if timeout == -1:
            timeout = self.timeout

        if not self.isalive():
            # The process is dead, but there may or may not be data
            # available to read. Note that some systems such as Solaris
            # do not give an EOF when the child dies. In fact, you can
            # still try to read from the child_fd -- it will block
            # forever or until TIMEOUT. For that reason, it's important
            # to do this check before calling select() with timeout.
            if select(0):
                return super(spawn, self).read_nonblocking(size)
            self.flag_eof = True
            raise EOF('End Of File (EOF). Braindead platform.')
        elif self.__irix_hack:
            # Irix takes a long time before it realizes a child was terminated.
            # Make sure that the timeout is at least 2 seconds.
            # FIXME So does this mean Irix systems are forced to always have
            # FIXME a 2 second delay when calling read_nonblocking? That sucks.
            if timeout is not None and timeout < 2:
                timeout = 2

        # Because of the select(0) check above, we know that no data
        # is available right now. But if a non-zero timeout is given
        # (possibly timeout=None), we call select() with a timeout.
        if (timeout != 0) and select(timeout):
            return super(spawn, self).read_nonblocking(size)

        if not self.isalive():
            # Some platforms, such as Irix, will claim that their
            # processes are alive; timeout on the select; and
            # then finally admit that they are not alive.
            self.flag_eof = True
            raise EOF('End of File (EOF). Very slow platform.')
        else:
            raise TIMEOUT('Timeout exceeded.')
|  | ||||
|     def write(self, s): | ||||
|         '''This is similar to send() except that there is no return value. | ||||
|         ''' | ||||
|  | ||||
|         self.send(s) | ||||
|  | ||||
|     def writelines(self, sequence): | ||||
|         '''This calls write() for each element in the sequence. The sequence | ||||
|         can be any iterable object producing strings, typically a list of | ||||
|         strings. This does not add line separators. There is no return value. | ||||
|         ''' | ||||
|  | ||||
|         for s in sequence: | ||||
|             self.write(s) | ||||
|  | ||||
    def send(self, s):
        '''Sends string ``s`` to the child process, returning the number of
        bytes written. If a logfile is specified, a copy is written to that
        log.

        The default terminal input mode is canonical processing unless set
        otherwise by the child process. This allows backspace and other line
        processing to be performed prior to transmitting to the receiving
        program. As this is buffered, there is a limited size of such buffer.

        On Linux systems, this is 4096 (defined by N_TTY_BUF_SIZE). All
        other systems honor the POSIX.1 definition PC_MAX_CANON -- 1024
        on OSX, 256 on OpenSolaris, and 1920 on FreeBSD.

        This value may be discovered using fpathconf(3)::

            >>> from os import fpathconf
            >>> print(fpathconf(0, 'PC_MAX_CANON'))
            256

        On such a system, only 256 bytes may be received per line. Any
        subsequent bytes received will be discarded. BEL (``'\a'``) is then
        sent to output if IMAXBEL (termios.h) is set by the tty driver.
        This is usually enabled by default.  Linux does not honor this as
        an option -- it behaves as though it is always set on.

        Canonical input processing may be disabled altogether by executing
        a shell, then stty(1), before executing the final program::

            >>> bash = pexpect.spawn('/bin/bash', echo=False)
            >>> bash.sendline('stty -icanon')
            >>> bash.sendline('base64')
            >>> bash.sendline('x' * 5000)
        '''

        # Optional pause before writing; see the delaybeforesend discussion
        # in the class constructor's docstring.
        if self.delaybeforesend is not None:
            time.sleep(self.delaybeforesend)

        s = self._coerce_send_string(s)
        self._log(s, 'send')

        # Encode then write directly to the pty master descriptor.
        b = self._encoder.encode(s, final=False)
        return os.write(self.child_fd, b)
|  | ||||
|     def sendline(self, s=''): | ||||
|         '''Wraps send(), sending string ``s`` to child process, with | ||||
|         ``os.linesep`` automatically appended. Returns number of bytes | ||||
|         written.  Only a limited number of bytes may be sent for each | ||||
|         line in the default terminal mode, see docstring of :meth:`send`. | ||||
|         ''' | ||||
|         s = self._coerce_send_string(s) | ||||
|         return self.send(s + self.linesep) | ||||
|  | ||||
|     def _log_control(self, s): | ||||
|         """Write control characters to the appropriate log files""" | ||||
|         if self.encoding is not None: | ||||
|             s = s.decode(self.encoding, 'replace') | ||||
|         self._log(s, 'send') | ||||
|  | ||||
|     def sendcontrol(self, char): | ||||
|         '''Helper method that wraps send() with mnemonic access for sending control | ||||
|         character to the child (such as Ctrl-C or Ctrl-D).  For example, to send | ||||
|         Ctrl-G (ASCII 7, bell, '\a'):: | ||||
|  | ||||
|             child.sendcontrol('g') | ||||
|  | ||||
|         See also, sendintr() and sendeof(). | ||||
|         ''' | ||||
|         n, byte = self.ptyproc.sendcontrol(char) | ||||
|         self._log_control(byte) | ||||
|         return n | ||||
|  | ||||
|     def sendeof(self): | ||||
|         '''This sends an EOF to the child. This sends a character which causes | ||||
|         the pending parent output buffer to be sent to the waiting child | ||||
|         program without waiting for end-of-line. If it is the first character | ||||
|         of the line, the read() in the user program returns 0, which signifies | ||||
|         end-of-file. This means to work as expected a sendeof() has to be | ||||
|         called at the beginning of a line. This method does not send a newline. | ||||
|         It is the responsibility of the caller to ensure the eof is sent at the | ||||
|         beginning of a line. ''' | ||||
|  | ||||
|         n, byte = self.ptyproc.sendeof() | ||||
|         self._log_control(byte) | ||||
|  | ||||
|     def sendintr(self): | ||||
|         '''This sends a SIGINT to the child. It does not require | ||||
|         the SIGINT to be the first character on a line. ''' | ||||
|  | ||||
|         n, byte = self.ptyproc.sendintr() | ||||
|         self._log_control(byte) | ||||
|  | ||||
    @property
    def flag_eof(self):
        # The EOF flag lives on the underlying ptyprocess object so that
        # the pexpect layer and the pty layer always agree on EOF state.
        return self.ptyproc.flag_eof

    @flag_eof.setter
    def flag_eof(self, value):
        # Keep ptyproc as the single source of truth for the flag.
        self.ptyproc.flag_eof = value
|  | ||||
    def eof(self):
        '''This returns True if the EOF exception was ever raised.
        '''
        # Delegates to the flag_eof property, which mirrors ptyproc state.
        return self.flag_eof
|  | ||||
|     def terminate(self, force=False): | ||||
|         '''This forces a child process to terminate. It starts nicely with | ||||
|         SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This | ||||
|         returns True if the child was terminated. This returns False if the | ||||
|         child could not be terminated. ''' | ||||
|  | ||||
|         if not self.isalive(): | ||||
|             return True | ||||
|         try: | ||||
|             self.kill(signal.SIGHUP) | ||||
|             time.sleep(self.delayafterterminate) | ||||
|             if not self.isalive(): | ||||
|                 return True | ||||
|             self.kill(signal.SIGCONT) | ||||
|             time.sleep(self.delayafterterminate) | ||||
|             if not self.isalive(): | ||||
|                 return True | ||||
|             self.kill(signal.SIGINT) | ||||
|             time.sleep(self.delayafterterminate) | ||||
|             if not self.isalive(): | ||||
|                 return True | ||||
|             if force: | ||||
|                 self.kill(signal.SIGKILL) | ||||
|                 time.sleep(self.delayafterterminate) | ||||
|                 if not self.isalive(): | ||||
|                     return True | ||||
|                 else: | ||||
|                     return False | ||||
|             return False | ||||
|         except OSError: | ||||
|             # I think there are kernel timing issues that sometimes cause | ||||
|             # this to happen. I think isalive() reports True, but the | ||||
|             # process is dead to the kernel. | ||||
|             # Make one last attempt to see if the kernel is up to date. | ||||
|             time.sleep(self.delayafterterminate) | ||||
|             if not self.isalive(): | ||||
|                 return True | ||||
|             else: | ||||
|                 return False | ||||
|  | ||||
|     def wait(self): | ||||
|         '''This waits until the child exits. This is a blocking call. This will | ||||
|         not read any data from the child, so this will block forever if the | ||||
|         child has unread output and has terminated. In other words, the child | ||||
|         may have printed output then called exit(), but, the child is | ||||
|         technically still alive until its output is read by the parent. | ||||
|  | ||||
|         This method is non-blocking if :meth:`wait` has already been called | ||||
|         previously or :meth:`isalive` method returns False.  It simply returns | ||||
|         the previously determined exit status. | ||||
|         ''' | ||||
|  | ||||
|         ptyproc = self.ptyproc | ||||
|         with _wrap_ptyprocess_err(): | ||||
|             # exception may occur if "Is some other process attempting | ||||
|             # "job control with our child pid?" | ||||
|             exitstatus = ptyproc.wait() | ||||
|         self.status = ptyproc.status | ||||
|         self.exitstatus = ptyproc.exitstatus | ||||
|         self.signalstatus = ptyproc.signalstatus | ||||
|         self.terminated = True | ||||
|  | ||||
|         return exitstatus | ||||
|  | ||||
|     def isalive(self): | ||||
|         '''This tests if the child process is running or not. This is | ||||
|         non-blocking. If the child was terminated then this will read the | ||||
|         exitstatus or signalstatus of the child. This returns True if the child | ||||
|         process appears to be running or False if not. It can take literally | ||||
|         SECONDS for Solaris to return the right status. ''' | ||||
|  | ||||
|         ptyproc = self.ptyproc | ||||
|         with _wrap_ptyprocess_err(): | ||||
|             alive = ptyproc.isalive() | ||||
|  | ||||
|         if not alive: | ||||
|             self.status = ptyproc.status | ||||
|             self.exitstatus = ptyproc.exitstatus | ||||
|             self.signalstatus = ptyproc.signalstatus | ||||
|             self.terminated = True | ||||
|  | ||||
|         return alive | ||||
|  | ||||
|     def kill(self, sig): | ||||
|  | ||||
|         '''This sends the given signal to the child application. In keeping | ||||
|         with UNIX tradition it has a misleading name. It does not necessarily | ||||
|         kill the child unless you send the right signal. ''' | ||||
|  | ||||
|         # Same as os.kill, but the pid is given for you. | ||||
|         if self.isalive(): | ||||
|             os.kill(self.pid, sig) | ||||
|  | ||||
    def getwinsize(self):
        '''This returns the terminal window size of the child tty. The return
        value is a tuple of (rows, cols). '''
        # Pure delegation to the underlying ptyprocess object.
        return self.ptyproc.getwinsize()
|  | ||||
    def setwinsize(self, rows, cols):
        '''This sets the terminal window size of the child tty. This will cause
        a SIGWINCH signal to be sent to the child. This does not change the
        physical window size. It changes the size reported to TTY-aware
        applications like vi or curses -- applications that respond to the
        SIGWINCH signal. '''
        # Pure delegation to the underlying ptyprocess object.
        return self.ptyproc.setwinsize(rows, cols)
|  | ||||
|  | ||||
    def interact(self, escape_character=chr(29),
            input_filter=None, output_filter=None):

        '''This gives control of the child process to the interactive user (the
        human at the keyboard). Keystrokes are sent to the child process, and
        the stdout and stderr output of the child process is printed. This
        simply echos the child stdout and child stderr to the real stdout and
        it echos the real stdin to the child stdin. When the user types the
        escape_character this method will return None. The escape_character
        will not be transmitted.  The default for escape_character is
        entered as ``Ctrl - ]``, the very same as BSD telnet. To prevent
        escaping, escape_character may be set to None.

        If a logfile is specified, then the data sent and received from the
        child process in interact mode is duplicated to the given log.

        You may pass in optional input and output filter functions. These
        functions should take bytes array and return bytes array too. Even
        with ``encoding='utf-8'`` support, meth:`interact` will always pass
        input_filter and output_filter bytes. You may need to wrap your
        function to decode and encode back to UTF-8.

        The output_filter will be passed all the output from the child process.
        The input_filter will be passed all the keyboard input from the user.
        The input_filter is run BEFORE the check for the escape_character.

        Note that if you change the window size of the parent the SIGWINCH
        signal will not be passed through to the child. If you want the child
        window size to change when the parent's window size changes then do
        something like the following example::

            import pexpect, struct, fcntl, termios, signal, sys
            def sigwinch_passthrough (sig, data):
                s = struct.pack("HHHH", 0, 0, 0, 0)
                a = struct.unpack('hhhh', fcntl.ioctl(sys.stdout.fileno(),
                    termios.TIOCGWINSZ , s))
                if not p.closed:
                    p.setwinsize(a[0],a[1])

            # Note this 'p' is global and used in sigwinch_passthrough.
            p = pexpect.spawn('/bin/bash')
            signal.signal(signal.SIGWINCH, sigwinch_passthrough)
            p.interact()
        '''

        # Flush any already-buffered child output to the real stdout before
        # handing over control, and reset the internal buffer.
        self.write_to_stdout(self.buffer)
        self.stdout.flush()
        self._buffer = self.buffer_type()
        # Remember the current stdin tty mode so it can be restored, then
        # switch to raw mode so keystrokes pass through unmodified.
        mode = tty.tcgetattr(self.STDIN_FILENO)
        tty.setraw(self.STDIN_FILENO)
        if escape_character is not None and PY3:
            # The copy loop compares raw bytes, so encode the escape char.
            escape_character = escape_character.encode('latin-1')
        try:
            self.__interact_copy(escape_character, input_filter, output_filter)
        finally:
            # Always restore the terminal mode, even if the loop raised.
            tty.tcsetattr(self.STDIN_FILENO, tty.TCSAFLUSH, mode)
|  | ||||
|     def __interact_writen(self, fd, data): | ||||
|         '''This is used by the interact() method. | ||||
|         ''' | ||||
|  | ||||
|         while data != b'' and self.isalive(): | ||||
|             n = os.write(fd, data) | ||||
|             data = data[n:] | ||||
|  | ||||
    def __interact_read(self, fd):
        '''This is used by the interact() method.

        Reads and returns up to 1000 bytes from ``fd`` in one call.
        '''
        return os.read(fd, 1000)
|  | ||||
    def __interact_copy(
        self, escape_character=None, input_filter=None, output_filter=None
    ):

        '''This is used by the interact() method.

        Pumps data between the child pty and the real stdin/stdout until
        the child exits, EOF is seen, or the escape character is typed.
        '''

        while self.isalive():
            # Wait until the child or the user's keyboard has data ready.
            if self.use_poll:
                r = poll_ignore_interrupts([self.child_fd, self.STDIN_FILENO])
            else:
                r, w, e = select_ignore_interrupts(
                    [self.child_fd, self.STDIN_FILENO], [], []
                )
            if self.child_fd in r:
                try:
                    data = self.__interact_read(self.child_fd)
                except OSError as err:
                    if err.args[0] == errno.EIO:
                        # Linux-style EOF
                        break
                    raise
                if data == b'':
                    # BSD-style EOF
                    break
                if output_filter:
                    data = output_filter(data)
                self._log(data, 'read')
                # Echo the child's output on the real stdout.
                os.write(self.STDOUT_FILENO, data)
            if self.STDIN_FILENO in r:
                data = self.__interact_read(self.STDIN_FILENO)
                # Per the interact() contract, the input filter runs BEFORE
                # the escape-character check.
                if input_filter:
                    data = input_filter(data)
                i = -1
                if escape_character is not None:
                    i = data.rfind(escape_character)
                if i != -1:
                    # Forward anything typed before the escape character
                    # (which itself is not transmitted), then stop.
                    data = data[:i]
                    if data:
                        self._log(data, 'send')
                    self.__interact_writen(self.child_fd, data)
                    break
                self._log(data, 'send')
                self.__interact_writen(self.child_fd, data)
|  | ||||
|  | ||||
def spawnu(*args, **kwargs):
    """Deprecated: pass encoding to spawn() instead."""
    # Preserve an explicitly supplied encoding; otherwise default to UTF-8.
    if 'encoding' not in kwargs:
        kwargs['encoding'] = 'utf-8'
    return spawn(*args, **kwargs)
							
								
								
									
										19
									
								
								plugins/git_clone/pexpect/ptyprocess/LICENSE
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								plugins/git_clone/pexpect/ptyprocess/LICENSE
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,19 @@ | ||||
| ISC LICENSE | ||||
|  | ||||
|     This license is approved by the OSI and FSF as GPL-compatible. | ||||
|         http://opensource.org/licenses/isc-license.txt | ||||
|  | ||||
|     Copyright (c) 2013-2014, Pexpect development team | ||||
|     Copyright (c) 2012, Noah Spurrier <noah@noah.org> | ||||
|  | ||||
|     Permission to use, copy, modify, and/or distribute this software for any | ||||
|     purpose with or without fee is hereby granted, provided that the above | ||||
|     copyright notice and this permission notice appear in all copies. | ||||
|      | ||||
|     THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||||
|     WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||||
|     MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||||
|     ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
|     WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
|     ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||||
|     OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
							
								
								
									
										4
									
								
								plugins/git_clone/pexpect/ptyprocess/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										4
									
								
								plugins/git_clone/pexpect/ptyprocess/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,4 @@ | ||||
"""Run a subprocess in a pseudo terminal"""
# Re-export the package's public API at the top level.
from .ptyprocess import PtyProcess, PtyProcessUnicode, PtyProcessError

__version__ = '0.7.0'
							
								
								
									
										78
									
								
								plugins/git_clone/pexpect/ptyprocess/_fork_pty.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										78
									
								
								plugins/git_clone/pexpect/ptyprocess/_fork_pty.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,78 @@ | ||||
| """Substitute for the forkpty system call, to support Solaris. | ||||
| """ | ||||
| import os | ||||
| import errno | ||||
|  | ||||
| from pty import (STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO, CHILD) | ||||
| from .util import PtyProcessError | ||||
|  | ||||
def fork_pty():
    '''This implements a substitute for the forkpty system call. This
    should be more portable than the pty.fork() function. Specifically,
    this should work on Solaris.

    Modified 10.06.05 by Geoff Marshall: Implemented __fork_pty() method to
    resolve the issue with Python's pty.fork() not supporting Solaris,
    particularly ssh. Based on patch to posixmodule.c authored by Noah
    Spurrier::

        http://mail.python.org/pipermail/python-dev/2003-May/035281.html

    Returns ``(pid, parent_fd)``: pid is 0 (CHILD) in the child process and
    the child's pid in the parent; parent_fd is the pty master fd.
    '''

    parent_fd, child_fd = os.openpty()
    # NOTE(review): os.openpty() raises OSError on failure rather than
    # returning negative fds, so this guard is belt-and-braces.
    if parent_fd < 0 or child_fd < 0:
        raise OSError("os.openpty() failed")

    pid = os.fork()
    if pid == CHILD:
        # Child: drop the master end, acquire the slave as our controlling
        # tty, and wire it up as stdin/stdout/stderr.
        os.close(parent_fd)
        pty_make_controlling_tty(child_fd)

        os.dup2(child_fd, STDIN_FILENO)
        os.dup2(child_fd, STDOUT_FILENO)
        os.dup2(child_fd, STDERR_FILENO)

    else:
        # Parent: only the master end is needed here.
        os.close(child_fd)

    return pid, parent_fd
|  | ||||
def pty_make_controlling_tty(tty_fd):
    '''This makes the pseudo-terminal the controlling tty. This should be
    more portable than the pty.fork() function. Specifically, this should
    work on Solaris. '''

    child_name = os.ttyname(tty_fd)

    # Disconnect from controlling tty, if any.  Raises OSError of ENXIO
    # if there was no controlling tty to begin with, such as when
    # executed by a cron(1) job.
    try:
        fd = os.open("/dev/tty", os.O_RDWR | os.O_NOCTTY)
        os.close(fd)
    except OSError as err:
        if err.errno != errno.ENXIO:
            raise

    # Become session leader, so the next tty we open can become our
    # controlling terminal.
    os.setsid()

    # Verify we are disconnected from controlling tty by attempting to open
    # it again.  We expect that OSError of ENXIO should always be raised.
    try:
        fd = os.open("/dev/tty", os.O_RDWR | os.O_NOCTTY)
        os.close(fd)
        raise PtyProcessError("OSError of errno.ENXIO should be raised.")
    except OSError as err:
        if err.errno != errno.ENXIO:
            raise

    # Verify we can open child pty.  On SVR4-style systems this open is
    # presumably what acquires it as the controlling tty -- TODO confirm.
    fd = os.open(child_name, os.O_RDWR)
    os.close(fd)

    # Verify we now have a controlling tty.
    fd = os.open("/dev/tty", os.O_WRONLY)
    os.close(fd)
							
								
								
									
										855
									
								
								plugins/git_clone/pexpect/ptyprocess/ptyprocess.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										855
									
								
								plugins/git_clone/pexpect/ptyprocess/ptyprocess.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,855 @@ | ||||
| import codecs | ||||
| import errno | ||||
| import fcntl | ||||
| import io | ||||
| import os | ||||
| import pty | ||||
| import resource | ||||
| import signal | ||||
| import struct | ||||
| import sys | ||||
| import termios | ||||
| import time | ||||
|  | ||||
| try: | ||||
|     import builtins  # Python 3 | ||||
| except ImportError: | ||||
|     import __builtin__ as builtins  # Python 2 | ||||
|  | ||||
| # Constants | ||||
| from pty import (STDIN_FILENO, CHILD) | ||||
|  | ||||
| from .util import which, PtyProcessError | ||||
|  | ||||
# Normalized platform string used for the Solaris check below.
_platform = sys.platform.lower()

# Solaris uses internal __fork_pty(). All others use pty.fork().
_is_solaris = (
    _platform.startswith('solaris') or
    _platform.startswith('sunos'))

if _is_solaris:
    use_native_pty_fork = False
    from . import _fork_pty
else:
    use_native_pty_fork = True

PY3 = sys.version_info[0] >= 3

if PY3:
    def _byte(i):
        # Single byte from an int, e.g. _byte(10) == b'\n'.
        return bytes([i])
else:
    def _byte(i):
        # On Python 2, chr() already yields a one-byte str.
        return chr(i)

    # Python 2 lacks these OSError subclasses; define shims so the rest
    # of the module can raise/catch them uniformly across versions.
    class FileNotFoundError(OSError): pass
    class TimeoutError(OSError): pass

# Terminal EOF and interrupt control bytes; computed lazily by
# _make_eof_intr() because probing the terminal can be costly.
_EOF, _INTR = None, None
|  | ||||
def _make_eof_intr():
    """Set constants _EOF and _INTR.

    This avoids doing potentially costly operations on module load.
    """
    global _EOF, _INTR
    if (_EOF is not None) and (_INTR is not None):
        return  # already computed once

    # inherit EOF and INTR definitions from controlling process.
    try:
        from termios import VEOF, VINTR
        fd = None
        # Probe stdin then stdout for a usable fd; note the loop does not
        # break on the first hit, so the last usable stream's fd wins.
        for name in 'stdin', 'stdout':
            stream = getattr(sys, '__%s__' % name, None)
            if stream is None or not hasattr(stream, 'fileno'):
                continue
            try:
                fd = stream.fileno()
            except ValueError:
                continue  # stream is closed or detached
        if fd is None:
            # no fd, raise ValueError to fallback on CEOF, CINTR
            raise ValueError("No stream has a fileno")
        intr = ord(termios.tcgetattr(fd)[6][VINTR])
        eof = ord(termios.tcgetattr(fd)[6][VEOF])
    except (ImportError, OSError, IOError, ValueError, termios.error):
        # The controlling process may itself not be a terminal (e.g. under
        # cron(1)), or stdin and stdout may both be closed. Fall back to
        # the CEOF/CINTR constants, or to the conventional ASCII codes.
        try:
            from termios import CEOF, CINTR
            (intr, eof) = (CINTR, CEOF)
        except ImportError:
            #                         ^C, ^D
            (intr, eof) = (3, 4)

    _INTR = _byte(intr)
    _EOF = _byte(eof)
|  | ||||
| # setecho and setwinsize are pulled out here because on some platforms, we need | ||||
| # to do this from the child before we exec() | ||||
|      | ||||
| def _setecho(fd, state): | ||||
|     errmsg = 'setecho() may not be called on this platform (it may still be possible to enable/disable echo when spawning the child process)' | ||||
|  | ||||
|     try: | ||||
|         attr = termios.tcgetattr(fd) | ||||
|     except termios.error as err: | ||||
|         if err.args[0] == errno.EINVAL: | ||||
|             raise IOError(err.args[0], '%s: %s.' % (err.args[1], errmsg)) | ||||
|         raise | ||||
|  | ||||
|     if state: | ||||
|         attr[3] = attr[3] | termios.ECHO | ||||
|     else: | ||||
|         attr[3] = attr[3] & ~termios.ECHO | ||||
|  | ||||
|     try: | ||||
|         # I tried TCSADRAIN and TCSAFLUSH, but these were inconsistent and | ||||
|         # blocked on some platforms. TCSADRAIN would probably be ideal. | ||||
|         termios.tcsetattr(fd, termios.TCSANOW, attr) | ||||
|     except IOError as err: | ||||
|         if err.args[0] == errno.EINVAL: | ||||
|             raise IOError(err.args[0], '%s: %s.' % (err.args[1], errmsg)) | ||||
|         raise | ||||
|  | ||||
| def _setwinsize(fd, rows, cols): | ||||
|     # Some very old platforms have a bug that causes the value for | ||||
|     # termios.TIOCSWINSZ to be truncated. There was a hack here to work | ||||
|     # around this, but it caused problems with newer platforms so has been | ||||
|     # removed. For details see https://github.com/pexpect/pexpect/issues/39 | ||||
|     TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', -2146929561) | ||||
|     # Note, assume ws_xpixel and ws_ypixel are zero. | ||||
|     s = struct.pack('HHHH', rows, cols, 0, 0) | ||||
|     fcntl.ioctl(fd, TIOCSWINSZ, s) | ||||
|  | ||||
class PtyProcess(object):
    '''This class represents a process running in a pseudoterminal.

    The main constructor is the :meth:`spawn` classmethod.
    '''
    # This layer handles child I/O as raw bytes; PtyProcessUnicode overrides.
    string_type = bytes
    if PY3:
        # Byte-oriented line terminators for the Python 3 code paths.
        linesep = os.linesep.encode('ascii')
        crlf = '\r\n'.encode('ascii')

        @staticmethod
        def write_to_stdout(b):
            # Write raw bytes to stdout's underlying binary buffer.
            try:
                return sys.stdout.buffer.write(b)
            except AttributeError:
                # If stdout has been replaced, it may not have .buffer
                return sys.stdout.write(b.decode('ascii', 'replace'))
    else:
        # Python 2: str is already bytes, so no encoding dance is needed.
        linesep = os.linesep
        crlf = '\r\n'
        write_to_stdout = sys.stdout.write

    # None means bytes mode; PtyProcessUnicode sets a codec name.
    encoding = None

    # Recorded by spawn(): command argv, environment, working directory.
    argv = None
    env = None
    launch_dir = None
|  | ||||
|     def __init__(self, pid, fd): | ||||
|         _make_eof_intr()  # Ensure _EOF and _INTR are calculated | ||||
|         self.pid = pid | ||||
|         self.fd = fd | ||||
|         readf = io.open(fd, 'rb', buffering=0) | ||||
|         writef = io.open(fd, 'wb', buffering=0, closefd=False) | ||||
|         self.fileobj = io.BufferedRWPair(readf, writef) | ||||
|  | ||||
|         self.terminated = False | ||||
|         self.closed = False | ||||
|         self.exitstatus = None | ||||
|         self.signalstatus = None | ||||
|         # status returned by os.waitpid | ||||
|         self.status = None | ||||
|         self.flag_eof = False | ||||
|         # Used by close() to give kernel time to update process status. | ||||
|         # Time in seconds. | ||||
|         self.delayafterclose = 0.1 | ||||
|         # Used by terminate() to give kernel time to update process status. | ||||
|         # Time in seconds. | ||||
|         self.delayafterterminate = 0.1 | ||||
|  | ||||
|     @classmethod | ||||
|     def spawn( | ||||
|             cls, argv, cwd=None, env=None, echo=True, preexec_fn=None, | ||||
|             dimensions=(24, 80), pass_fds=()): | ||||
|         '''Start the given command in a child process in a pseudo terminal. | ||||
|  | ||||
|         This does all the fork/exec type of stuff for a pty, and returns an | ||||
|         instance of PtyProcess. | ||||
|  | ||||
|         If preexec_fn is supplied, it will be called with no arguments in the | ||||
|         child process before exec-ing the specified command. | ||||
|         It may, for instance, set signal handlers to SIG_DFL or SIG_IGN. | ||||
|  | ||||
|         Dimensions of the psuedoterminal used for the subprocess can be | ||||
|         specified as a tuple (rows, cols), or the default (24, 80) will be used. | ||||
|  | ||||
|         By default, all file descriptors except 0, 1 and 2 are closed. This | ||||
|         behavior can be overridden with pass_fds, a list of file descriptors to | ||||
|         keep open between the parent and the child. | ||||
|         ''' | ||||
|         # Note that it is difficult for this method to fail. | ||||
|         # You cannot detect if the child process cannot start. | ||||
|         # So the only way you can tell if the child process started | ||||
|         # or not is to try to read from the file descriptor. If you get | ||||
|         # EOF immediately then it means that the child is already dead. | ||||
|         # That may not necessarily be bad because you may have spawned a child | ||||
|         # that performs some task; creates no stdout output; and then dies. | ||||
|  | ||||
|         if not isinstance(argv, (list, tuple)): | ||||
|             raise TypeError("Expected a list or tuple for argv, got %r" % argv) | ||||
|  | ||||
|         # Shallow copy of argv so we can modify it | ||||
|         argv = argv[:] | ||||
|         command = argv[0] | ||||
|  | ||||
|         command_with_path = which(command) | ||||
|         if command_with_path is None: | ||||
|             raise FileNotFoundError('The command was not found or was not ' + | ||||
|                                     'executable: %s.' % command) | ||||
|         command = command_with_path | ||||
|         argv[0] = command | ||||
|  | ||||
|         # [issue #119] To prevent the case where exec fails and the user is | ||||
|         # stuck interacting with a python child process instead of whatever | ||||
|         # was expected, we implement the solution from | ||||
|         # http://stackoverflow.com/a/3703179 to pass the exception to the | ||||
|         # parent process | ||||
|  | ||||
|         # [issue #119] 1. Before forking, open a pipe in the parent process. | ||||
|         exec_err_pipe_read, exec_err_pipe_write = os.pipe() | ||||
|  | ||||
|         if use_native_pty_fork: | ||||
|             pid, fd = pty.fork() | ||||
|         else: | ||||
|             # Use internal fork_pty, for Solaris | ||||
|             pid, fd = _fork_pty.fork_pty() | ||||
|  | ||||
|         # Some platforms must call setwinsize() and setecho() from the | ||||
|         # child process, and others from the master process. We do both, | ||||
|         # allowing IOError for either. | ||||
|  | ||||
|         if pid == CHILD: | ||||
|             # set window size | ||||
|             try: | ||||
|                 _setwinsize(STDIN_FILENO, *dimensions) | ||||
|             except IOError as err: | ||||
|                 if err.args[0] not in (errno.EINVAL, errno.ENOTTY): | ||||
|                     raise | ||||
|  | ||||
|             # disable echo if spawn argument echo was unset | ||||
|             if not echo: | ||||
|                 try: | ||||
|                     _setecho(STDIN_FILENO, False) | ||||
|                 except (IOError, termios.error) as err: | ||||
|                     if err.args[0] not in (errno.EINVAL, errno.ENOTTY): | ||||
|                         raise | ||||
|  | ||||
|             # [issue #119] 3. The child closes the reading end and sets the | ||||
|             # close-on-exec flag for the writing end. | ||||
|             os.close(exec_err_pipe_read) | ||||
|             fcntl.fcntl(exec_err_pipe_write, fcntl.F_SETFD, fcntl.FD_CLOEXEC) | ||||
|  | ||||
|             # Do not allow child to inherit open file descriptors from parent, | ||||
|             # with the exception of the exec_err_pipe_write of the pipe | ||||
|             # and pass_fds. | ||||
|             # Impose ceiling on max_fd: AIX bugfix for users with unlimited | ||||
|             # nofiles where resource.RLIMIT_NOFILE is 2^63-1 and os.closerange() | ||||
|             # occasionally raises out of range error | ||||
|             max_fd = min(1048576, resource.getrlimit(resource.RLIMIT_NOFILE)[0]) | ||||
|             spass_fds = sorted(set(pass_fds) | {exec_err_pipe_write}) | ||||
|             for pair in zip([2] + spass_fds, spass_fds + [max_fd]): | ||||
|                 os.closerange(pair[0]+1, pair[1]) | ||||
|  | ||||
|             if cwd is not None: | ||||
|                 os.chdir(cwd) | ||||
|  | ||||
|             if preexec_fn is not None: | ||||
|                 try: | ||||
|                     preexec_fn() | ||||
|                 except Exception as e: | ||||
|                     ename = type(e).__name__ | ||||
|                     tosend = '{}:0:{}'.format(ename, str(e)) | ||||
|                     if PY3: | ||||
|                         tosend = tosend.encode('utf-8') | ||||
|  | ||||
|                     os.write(exec_err_pipe_write, tosend) | ||||
|                     os.close(exec_err_pipe_write) | ||||
|                     os._exit(1) | ||||
|  | ||||
|             try: | ||||
|                 if env is None: | ||||
|                     os.execv(command, argv) | ||||
|                 else: | ||||
|                     os.execvpe(command, argv, env) | ||||
|             except OSError as err: | ||||
|                 # [issue #119] 5. If exec fails, the child writes the error | ||||
|                 # code back to the parent using the pipe, then exits. | ||||
|                 tosend = 'OSError:{}:{}'.format(err.errno, str(err)) | ||||
|                 if PY3: | ||||
|                     tosend = tosend.encode('utf-8') | ||||
|                 os.write(exec_err_pipe_write, tosend) | ||||
|                 os.close(exec_err_pipe_write) | ||||
|                 os._exit(os.EX_OSERR) | ||||
|  | ||||
|         # Parent | ||||
|         inst = cls(pid, fd) | ||||
|          | ||||
|         # Set some informational attributes | ||||
|         inst.argv = argv | ||||
|         if env is not None: | ||||
|             inst.env = env | ||||
|         if cwd is not None: | ||||
|             inst.launch_dir = cwd | ||||
|  | ||||
|         # [issue #119] 2. After forking, the parent closes the writing end | ||||
|         # of the pipe and reads from the reading end. | ||||
|         os.close(exec_err_pipe_write) | ||||
|         exec_err_data = os.read(exec_err_pipe_read, 4096) | ||||
|         os.close(exec_err_pipe_read) | ||||
|  | ||||
|         # [issue #119] 6. The parent reads eof (a zero-length read) if the | ||||
|         # child successfully performed exec, since close-on-exec made | ||||
|         # successful exec close the writing end of the pipe. Or, if exec | ||||
|         # failed, the parent reads the error code and can proceed | ||||
|         # accordingly. Either way, the parent blocks until the child calls | ||||
|         # exec. | ||||
|         if len(exec_err_data) != 0: | ||||
|             try: | ||||
|                 errclass, errno_s, errmsg = exec_err_data.split(b':', 2) | ||||
|                 exctype = getattr(builtins, errclass.decode('ascii'), Exception) | ||||
|  | ||||
|                 exception = exctype(errmsg.decode('utf-8', 'replace')) | ||||
|                 if exctype is OSError: | ||||
|                     exception.errno = int(errno_s) | ||||
|             except: | ||||
|                 raise Exception('Subprocess failed, got bad error data: %r' | ||||
|                                     % exec_err_data) | ||||
|             else: | ||||
|                 raise exception | ||||
|  | ||||
|         try: | ||||
|             inst.setwinsize(*dimensions) | ||||
|         except IOError as err: | ||||
|             if err.args[0] not in (errno.EINVAL, errno.ENOTTY, errno.ENXIO): | ||||
|                 raise | ||||
|  | ||||
|         return inst | ||||
|  | ||||
|     def __repr__(self): | ||||
|         clsname = type(self).__name__ | ||||
|         if self.argv is not None: | ||||
|             args = [repr(self.argv)] | ||||
|             if self.env is not None: | ||||
|                 args.append("env=%r" % self.env) | ||||
|             if self.launch_dir is not None: | ||||
|                 args.append("cwd=%r" % self.launch_dir) | ||||
|              | ||||
|             return "{}.spawn({})".format(clsname, ", ".join(args)) | ||||
|          | ||||
|         else: | ||||
|             return "{}(pid={}, fd={})".format(clsname, self.pid, self.fd) | ||||
|  | ||||
|     @staticmethod | ||||
|     def _coerce_send_string(s): | ||||
|         if not isinstance(s, bytes): | ||||
|             return s.encode('utf-8') | ||||
|         return s | ||||
|  | ||||
|     @staticmethod | ||||
|     def _coerce_read_string(s): | ||||
|         return s | ||||
|  | ||||
|     def __del__(self): | ||||
|         '''This makes sure that no system resources are left open. Python only | ||||
|         garbage collects Python objects. OS file descriptors are not Python | ||||
|         objects, so they must be handled explicitly. If the child file | ||||
|         descriptor was opened outside of this class (passed to the constructor) | ||||
|         then this does not close it. ''' | ||||
|  | ||||
|         if not self.closed: | ||||
|             # It is possible for __del__ methods to execute during the | ||||
|             # teardown of the Python VM itself. Thus self.close() may | ||||
|             # trigger an exception because os.close may be None. | ||||
|             try: | ||||
|                 self.close() | ||||
|             # which exception, shouldn't we catch explicitly .. ? | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|  | ||||
|     def fileno(self): | ||||
|         '''This returns the file descriptor of the pty for the child. | ||||
|         ''' | ||||
|         return self.fd | ||||
|  | ||||
    def close(self, force=True):
        '''This closes the connection with the child application. Note that
        calling close() more than once is valid. This emulates standard Python
        behavior with files. Set force to True if you want to make sure that
        the child is terminated (SIGKILL is sent if the child ignores SIGHUP
        and SIGINT). '''
        if not self.closed:
            self.flush()
            self.fileobj.close() # Closes the file descriptor
            # Give kernel time to update process status.
            time.sleep(self.delayafterclose)
            if self.isalive():
                # The child survived losing its pty; escalate through
                # terminate(), which walks SIGHUP -> SIGCONT -> SIGINT
                # (and SIGKILL when force is True).
                if not self.terminate(force):
                    raise PtyProcessError('Could not terminate the child.')
            # Invalidate the descriptor and mark closed so repeated
            # close() calls are no-ops.
            self.fd = -1
            self.closed = True
            #self.pid = None
|  | ||||
|     def flush(self): | ||||
|         '''This does nothing. It is here to support the interface for a | ||||
|         File-like object. ''' | ||||
|  | ||||
|         pass | ||||
|  | ||||
|     def isatty(self): | ||||
|         '''This returns True if the file descriptor is open and connected to a | ||||
|         tty(-like) device, else False. | ||||
|  | ||||
|         On SVR4-style platforms implementing streams, such as SunOS and HP-UX, | ||||
|         the child pty may not appear as a terminal device.  This means | ||||
|         methods such as setecho(), setwinsize(), getwinsize() may raise an | ||||
|         IOError. ''' | ||||
|  | ||||
|         return os.isatty(self.fd) | ||||
|  | ||||
|     def waitnoecho(self, timeout=None): | ||||
|         '''Wait until the terminal ECHO flag is set False. | ||||
|  | ||||
|         This returns True if the echo mode is off, or False if echo was not | ||||
|         disabled before the timeout. This can be used to detect when the | ||||
|         child is waiting for a password. Usually a child application will turn | ||||
|         off echo mode when it is waiting for the user to enter a password. For | ||||
|         example, instead of expecting the "password:" prompt you can wait for | ||||
|         the child to turn echo off:: | ||||
|  | ||||
|             p = pexpect.spawn('ssh user@example.com') | ||||
|             p.waitnoecho() | ||||
|             p.sendline(mypassword) | ||||
|  | ||||
|         If ``timeout=None`` then this method to block until ECHO flag is False. | ||||
|         ''' | ||||
|  | ||||
|         if timeout is not None: | ||||
|             end_time = time.time() + timeout | ||||
|         while True: | ||||
|             if not self.getecho(): | ||||
|                 return True | ||||
|             if timeout < 0 and timeout is not None: | ||||
|                 return False | ||||
|             if timeout is not None: | ||||
|                 timeout = end_time - time.time() | ||||
|             time.sleep(0.1) | ||||
|  | ||||
    def getecho(self):
        '''Returns True if terminal echo is on, or False if echo is off.

        Child applications that are expecting you to enter a password often
        disable echo. See also :meth:`waitnoecho`.

        Not supported on platforms where ``isatty()`` returns False.
        '''

        # tcgetattr can fail with EINVAL where the pty is not a real
        # terminal device (see isatty()); surface that case as an IOError
        # with a more helpful message.
        try:
            attr = termios.tcgetattr(self.fd)
        except termios.error as err:
            errmsg = 'getecho() may not be called on this platform'
            if err.args[0] == errno.EINVAL:
                raise IOError(err.args[0], '%s: %s.' % (err.args[1], errmsg))
            raise

        # ECHO is a local-mode flag; index 3 of the tcgetattr result is lflag.
        # Cache the result on self.echo before returning it.
        self.echo = bool(attr[3] & termios.ECHO)
        return self.echo
|  | ||||
    def setecho(self, state):
        '''Enable or disable terminal echo.

        Anything the child sent before the echo will be lost, so you should be
        sure that your input buffer is empty before you call setecho().
        For example, the following will work as expected::

            p = pexpect.spawn('cat') # Echo is on by default.
            p.sendline('1234') # We expect see this twice from the child...
            p.expect(['1234']) # ... once from the tty echo...
            p.expect(['1234']) # ... and again from cat itself.
            p.setecho(False) # Turn off tty echo
            p.sendline('abcd') # We will set this only once (echoed by cat).
            p.sendline('wxyz') # We will set this only once (echoed by cat)
            p.expect(['abcd'])
            p.expect(['wxyz'])

        The following WILL NOT WORK because the lines sent before the setecho
        will be lost::

            p = pexpect.spawn('cat')
            p.sendline('1234')
            p.setecho(False) # Turn off tty echo
            p.sendline('abcd') # We will set this only once (echoed by cat).
            p.sendline('wxyz') # We will set this only once (echoed by cat)
            p.expect(['1234'])
            p.expect(['1234'])
            p.expect(['abcd'])
            p.expect(['wxyz'])


        Not supported on platforms where ``isatty()`` returns False.
        '''
        # Delegate the termios work to the module-level _setecho() helper,
        # then cache the requested state on self.echo.
        _setecho(self.fd, state)

        self.echo = state
|  | ||||
|     def read(self, size=1024): | ||||
|         """Read and return at most ``size`` bytes from the pty. | ||||
|  | ||||
|         Can block if there is nothing to read. Raises :exc:`EOFError` if the | ||||
|         terminal was closed. | ||||
|          | ||||
|         Unlike Pexpect's ``read_nonblocking`` method, this doesn't try to deal | ||||
|         with the vagaries of EOF on platforms that do strange things, like IRIX | ||||
|         or older Solaris systems. It handles the errno=EIO pattern used on | ||||
|         Linux, and the empty-string return used on BSD platforms and (seemingly) | ||||
|         on recent Solaris. | ||||
|         """ | ||||
|         try: | ||||
|             s = self.fileobj.read1(size) | ||||
|         except (OSError, IOError) as err: | ||||
|             if err.args[0] == errno.EIO: | ||||
|                 # Linux-style EOF | ||||
|                 self.flag_eof = True | ||||
|                 raise EOFError('End Of File (EOF). Exception style platform.') | ||||
|             raise | ||||
|         if s == b'': | ||||
|             # BSD-style EOF (also appears to work on recent Solaris (OpenIndiana)) | ||||
|             self.flag_eof = True | ||||
|             raise EOFError('End Of File (EOF). Empty string style platform.') | ||||
|  | ||||
|         return s | ||||
|  | ||||
|     def readline(self): | ||||
|         """Read one line from the pseudoterminal, and return it as unicode. | ||||
|  | ||||
|         Can block if there is nothing to read. Raises :exc:`EOFError` if the | ||||
|         terminal was closed. | ||||
|         """ | ||||
|         try: | ||||
|             s = self.fileobj.readline() | ||||
|         except (OSError, IOError) as err: | ||||
|             if err.args[0] == errno.EIO: | ||||
|                 # Linux-style EOF | ||||
|                 self.flag_eof = True | ||||
|                 raise EOFError('End Of File (EOF). Exception style platform.') | ||||
|             raise | ||||
|         if s == b'': | ||||
|             # BSD-style EOF (also appears to work on recent Solaris (OpenIndiana)) | ||||
|             self.flag_eof = True | ||||
|             raise EOFError('End Of File (EOF). Empty string style platform.') | ||||
|  | ||||
|         return s | ||||
|  | ||||
|     def _writeb(self, b, flush=True): | ||||
|         n = self.fileobj.write(b) | ||||
|         if flush: | ||||
|             self.fileobj.flush() | ||||
|         return n | ||||
|  | ||||
|     def write(self, s, flush=True): | ||||
|         """Write bytes to the pseudoterminal. | ||||
|          | ||||
|         Returns the number of bytes written. | ||||
|         """ | ||||
|         return self._writeb(s, flush=flush) | ||||
|  | ||||
|     def sendcontrol(self, char): | ||||
|         '''Helper method for sending control characters to the terminal. | ||||
|  | ||||
|         For example, to send Ctrl-G (ASCII 7, bell, ``'\\a'``):: | ||||
|  | ||||
|             child.sendcontrol('g') | ||||
|  | ||||
|         See also, :meth:`sendintr` and :meth:`sendeof`. | ||||
|         ''' | ||||
|         char = char.lower() | ||||
|         a = ord(char) | ||||
|         if 97 <= a <= 122: | ||||
|             a = a - ord('a') + 1 | ||||
|             byte = _byte(a) | ||||
|             return self._writeb(byte), byte | ||||
|         d = {'@': 0, '`': 0, | ||||
|             '[': 27, '{': 27, | ||||
|             '\\': 28, '|': 28, | ||||
|             ']': 29, '}': 29, | ||||
|             '^': 30, '~': 30, | ||||
|             '_': 31, | ||||
|             '?': 127} | ||||
|         if char not in d: | ||||
|             return 0, b'' | ||||
|  | ||||
|         byte = _byte(d[char]) | ||||
|         return self._writeb(byte), byte | ||||
|  | ||||
    def sendeof(self):
        '''Sends an EOF (typically Ctrl-D) through the terminal.

        This sends a character which causes
        the pending parent output buffer to be sent to the waiting child
        program without waiting for end-of-line. If it is the first character
        of the line, the read() in the user program returns 0, which signifies
        end-of-file. This means to work as expected a sendeof() has to be
        called at the beginning of a line. This method does not send a newline.
        It is the responsibility of the caller to ensure the eof is sent at the
        beginning of a line.
        '''
        # _EOF is the module-level EOF control byte (typically Ctrl-D).
        # Returns (bytes written, the byte sent).
        return self._writeb(_EOF), _EOF
|  | ||||
    def sendintr(self):
        '''Send an interrupt character (typically Ctrl-C) through the terminal.

        This will normally trigger the kernel to send SIGINT to the current
        foreground process group. Processes can turn off this translation, in
        which case they can read the raw data sent, e.g. ``b'\\x03'`` for Ctrl-C.

        See also the :meth:`kill` method, which sends a signal directly to the
        immediate child process in the terminal (which is not necessarily the
        foreground process).
        '''
        # _INTR is the module-level interrupt control byte (typically Ctrl-C).
        # Returns (bytes written, the byte sent).
        return self._writeb(_INTR), _INTR
|  | ||||
|     def eof(self): | ||||
|         '''This returns True if the EOF exception was ever raised. | ||||
|         ''' | ||||
|  | ||||
|         return self.flag_eof | ||||
|  | ||||
    def terminate(self, force=False):
        '''This forces a child process to terminate. It starts nicely with
        SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This
        returns True if the child was terminated. This returns False if the
        child could not be terminated. '''

        if not self.isalive():
            return True
        try:
            # Escalation ladder: SIGHUP, then SIGCONT (to wake a stopped
            # child so it can honor later signals), then SIGINT, sleeping
            # delayafterterminate between attempts so isalive() can
            # observe the exit.
            self.kill(signal.SIGHUP)
            time.sleep(self.delayafterterminate)
            if not self.isalive():
                return True
            self.kill(signal.SIGCONT)
            time.sleep(self.delayafterterminate)
            if not self.isalive():
                return True
            self.kill(signal.SIGINT)
            time.sleep(self.delayafterterminate)
            if not self.isalive():
                return True
            if force:
                # Last resort: SIGKILL cannot be caught or ignored.
                self.kill(signal.SIGKILL)
                time.sleep(self.delayafterterminate)
                if not self.isalive():
                    return True
                else:
                    return False
            return False
        except OSError:
            # I think there are kernel timing issues that sometimes cause
            # this to happen. I think isalive() reports True, but the
            # process is dead to the kernel.
            # Make one last attempt to see if the kernel is up to date.
            time.sleep(self.delayafterterminate)
            if not self.isalive():
                return True
            else:
                return False
|  | ||||
|     def wait(self): | ||||
|         '''This waits until the child exits. This is a blocking call. This will | ||||
|         not read any data from the child, so this will block forever if the | ||||
|         child has unread output and has terminated. In other words, the child | ||||
|         may have printed output then called exit(), but, the child is | ||||
|         technically still alive until its output is read by the parent. ''' | ||||
|  | ||||
|         if self.isalive(): | ||||
|             pid, status = os.waitpid(self.pid, 0) | ||||
|         else: | ||||
|             return self.exitstatus | ||||
|         self.exitstatus = os.WEXITSTATUS(status) | ||||
|         if os.WIFEXITED(status): | ||||
|             self.status = status | ||||
|             self.exitstatus = os.WEXITSTATUS(status) | ||||
|             self.signalstatus = None | ||||
|             self.terminated = True | ||||
|         elif os.WIFSIGNALED(status): | ||||
|             self.status = status | ||||
|             self.exitstatus = None | ||||
|             self.signalstatus = os.WTERMSIG(status) | ||||
|             self.terminated = True | ||||
|         elif os.WIFSTOPPED(status):  # pragma: no cover | ||||
|             # You can't call wait() on a child process in the stopped state. | ||||
|             raise PtyProcessError('Called wait() on a stopped child ' + | ||||
|                     'process. This is not supported. Is some other ' + | ||||
|                     'process attempting job control with our child pid?') | ||||
|         return self.exitstatus | ||||
|  | ||||
    def isalive(self):
        '''This tests if the child process is running or not. This is
        non-blocking. If the child was terminated then this will read the
        exitstatus or signalstatus of the child. This returns True if the child
        process appears to be running or False if not. It can take literally
        SECONDS for Solaris to return the right status. '''

        if self.terminated:
            return False

        if self.flag_eof:
            # This is for Linux, which requires the blocking form
            # of waitpid to get the status of a defunct process.
            # This is super-lame. The flag_eof would have been set
            # in read_nonblocking(), so this should be safe.
            waitpid_options = 0
        else:
            waitpid_options = os.WNOHANG

        try:
            pid, status = os.waitpid(self.pid, waitpid_options)
        except OSError as e:
            # No child processes
            if e.errno == errno.ECHILD:
                raise PtyProcessError('isalive() encountered condition ' +
                        'where "terminated" is 0, but there was no child ' +
                        'process. Did someone else call waitpid() ' +
                        'on our process?')
            else:
                raise

        # I have to do this twice for Solaris.
        # I can't even believe that I figured this out...
        # If waitpid() returns 0 it means that no child process
        # wishes to report, and the value of status is undefined.
        if pid == 0:
            try:
                ### os.WNOHANG) # Solaris!
                pid, status = os.waitpid(self.pid, waitpid_options)
            except OSError as e:  # pragma: no cover
                # This should never happen...
                if e.errno == errno.ECHILD:
                    raise PtyProcessError('isalive() encountered condition ' +
                            'that should never happen. There was no child ' +
                            'process. Did someone else call waitpid() ' +
                            'on our process?')
                else:
                    raise

            # If pid is still 0 after two calls to waitpid() then the process
            # really is alive. This seems to work on all platforms, except for
            # Irix which seems to require a blocking call on waitpid or select,
            # so I let read_nonblocking take care of this situation
            # (unfortunately, this requires waiting through the timeout).
            if pid == 0:
                return True

        # Defensive repeat of the check above: every path reaching here
        # already has a non-zero pid, so this should never trigger.
        if pid == 0:
            return True

        # The child was reaped; decode its exit/signal status and mark it
        # terminated so later calls return False immediately.
        if os.WIFEXITED(status):
            self.status = status
            self.exitstatus = os.WEXITSTATUS(status)
            self.signalstatus = None
            self.terminated = True
        elif os.WIFSIGNALED(status):
            self.status = status
            self.exitstatus = None
            self.signalstatus = os.WTERMSIG(status)
            self.terminated = True
        elif os.WIFSTOPPED(status):
            raise PtyProcessError('isalive() encountered condition ' +
                    'where child process is stopped. This is not ' +
                    'supported. Is some other process attempting ' +
                    'job control with our child pid?')
        return False
|  | ||||
|     def kill(self, sig): | ||||
|         """Send the given signal to the child application. | ||||
|  | ||||
|         In keeping with UNIX tradition it has a misleading name. It does not | ||||
|         necessarily kill the child unless you send the right signal. See the | ||||
|         :mod:`signal` module for constants representing signal numbers. | ||||
|         """ | ||||
|  | ||||
|         # Same as os.kill, but the pid is given for you. | ||||
|         if self.isalive(): | ||||
|             os.kill(self.pid, sig) | ||||
|  | ||||
|     def getwinsize(self): | ||||
|         """Return the window size of the pseudoterminal as a tuple (rows, cols). | ||||
|         """ | ||||
|         TIOCGWINSZ = getattr(termios, 'TIOCGWINSZ', 1074295912) | ||||
|         s = struct.pack('HHHH', 0, 0, 0, 0) | ||||
|         x = fcntl.ioctl(self.fd, TIOCGWINSZ, s) | ||||
|         return struct.unpack('HHHH', x)[0:2] | ||||
|  | ||||
    def setwinsize(self, rows, cols):
        """Set the terminal window size of the child tty.

        This will cause a SIGWINCH signal to be sent to the child. This does not
        change the physical window size. It changes the size reported to
        TTY-aware applications like vi or curses -- applications that respond to
        the SIGWINCH signal.
        """
        # Delegates to the module-level _setwinsize() helper.
        return _setwinsize(self.fd, rows, cols)
|  | ||||
|  | ||||
class PtyProcessUnicode(PtyProcess):
    """Unicode wrapper around a process running in a pseudoterminal.

    This class exposes a similar interface to :class:`PtyProcess`, but its read
    methods return unicode, and its :meth:`write` accepts unicode.
    """
    if PY3:
        string_type = str
    else:
        string_type = unicode   # analysis:ignore

    def __init__(self, pid, fd, encoding='utf-8', codec_errors='strict'):
        super(PtyProcessUnicode, self).__init__(pid, fd)
        self.encoding = encoding
        self.codec_errors = codec_errors
        # An incremental decoder keeps state across reads, so multi-byte
        # sequences split over chunk boundaries decode correctly.
        self.decoder = codecs.getincrementaldecoder(encoding)(errors=codec_errors)

    def _decode(self, data):
        # Shared decode step for read() and readline(); final=False keeps
        # any trailing partial multi-byte sequence buffered in the decoder.
        return self.decoder.decode(data, final=False)

    def read(self, size=1024):
        """Read at most ``size`` bytes from the pty, return them as unicode.

        Can block if there is nothing to read. Raises :exc:`EOFError` if the
        terminal was closed.

        The size argument still refers to bytes, not unicode code points.
        """
        return self._decode(super(PtyProcessUnicode, self).read(size))

    def readline(self):
        """Read one line from the pseudoterminal, and return it as unicode.

        Can block if there is nothing to read. Raises :exc:`EOFError` if the
        terminal was closed.
        """
        return self._decode(super(PtyProcessUnicode, self).readline())

    def write(self, s):
        """Write the unicode string ``s`` to the pseudoterminal.

        Returns the number of bytes written.
        """
        return super(PtyProcessUnicode, self).write(s.encode(self.encoding))
| @@ -1,30 +1,18 @@ | ||||
| import sys | ||||
| 
 | ||||
| PY3 = sys.version_info[0] >= 3 | ||||
| 
 | ||||
| if PY3: | ||||
|     def u(s): | ||||
|         return s | ||||
| else: | ||||
|     # Unicode-like literals | ||||
|     def u(s): | ||||
|         return s.decode('utf-8') | ||||
| 
 | ||||
| try: | ||||
|     # which() is available from Python 3.3 | ||||
|     from shutil import which | ||||
|     from shutil import which  # Python >= 3.3 | ||||
| except ImportError: | ||||
|     import os | ||||
|     # This is a copy of which() from Python 3.3 | ||||
|     import os, sys | ||||
|      | ||||
|     # This is copied from Python 3.4.1 | ||||
|     def which(cmd, mode=os.F_OK | os.X_OK, path=None): | ||||
|         """Given a command, mode, and a PATH string, return the path which | ||||
|         conforms to the given mode on the PATH, or None if there is no such | ||||
|         file. | ||||
| 
 | ||||
|      | ||||
|         `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result | ||||
|         of os.environ.get("PATH"), or can be overridden with a custom search | ||||
|         path. | ||||
| 
 | ||||
|      | ||||
|         """ | ||||
|         # Check that a given file can be accessed with the correct mode. | ||||
|         # Additionally check that `file` is not a directory, as on Windows | ||||
| @@ -32,7 +20,7 @@ except ImportError: | ||||
|         def _access_check(fn, mode): | ||||
|             return (os.path.exists(fn) and os.access(fn, mode) | ||||
|                     and not os.path.isdir(fn)) | ||||
| 
 | ||||
|      | ||||
|         # If we're given a path with a directory part, look it up directly rather | ||||
|         # than referring to PATH directories. This includes checking relative to the | ||||
|         # current directory, e.g. ./script | ||||
| @@ -40,14 +28,18 @@ except ImportError: | ||||
|             if _access_check(cmd, mode): | ||||
|                 return cmd | ||||
|             return None | ||||
| 
 | ||||
|         path = (path or os.environ.get("PATH", os.defpath)).split(os.pathsep) | ||||
| 
 | ||||
|      | ||||
|         if path is None: | ||||
|             path = os.environ.get("PATH", os.defpath) | ||||
|         if not path: | ||||
|             return None | ||||
|         path = path.split(os.pathsep) | ||||
|      | ||||
|         if sys.platform == "win32": | ||||
|             # The current directory takes precedence on Windows. | ||||
|             if not os.curdir in path: | ||||
|                 path.insert(0, os.curdir) | ||||
| 
 | ||||
|      | ||||
|             # PATHEXT is necessary to check on Windows. | ||||
|             pathext = os.environ.get("PATHEXT", "").split(os.pathsep) | ||||
|             # See if the given file matches any of the expected path extensions. | ||||
| @@ -62,7 +54,7 @@ except ImportError: | ||||
|             # On other platforms you don't have things like PATHEXT to tell you | ||||
|             # what file suffixes are executable, so just pass on cmd as-is. | ||||
|             files = [cmd] | ||||
| 
 | ||||
|      | ||||
|         seen = set() | ||||
|         for dir in path: | ||||
|             normdir = os.path.normcase(dir) | ||||
| @@ -73,3 +65,7 @@ except ImportError: | ||||
|                     if _access_check(name, mode): | ||||
|                         return name | ||||
|         return None | ||||
| 
 | ||||
| 
 | ||||
| class PtyProcessError(Exception): | ||||
|     """Generic error class for this package.""" | ||||
							
								
								
									
										537
									
								
								plugins/git_clone/pexpect/pxssh.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										537
									
								
								plugins/git_clone/pexpect/pxssh.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,537 @@ | ||||
| '''This class extends pexpect.spawn to specialize setting up SSH connections. | ||||
| This adds methods for login, logout, and expecting the shell prompt. | ||||
|  | ||||
| PEXPECT LICENSE | ||||
|  | ||||
|     This license is approved by the OSI and FSF as GPL-compatible. | ||||
|         http://opensource.org/licenses/isc-license.txt | ||||
|  | ||||
|     Copyright (c) 2012, Noah Spurrier <noah@noah.org> | ||||
|     PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY | ||||
|     PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE | ||||
|     COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. | ||||
|     THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||||
|     WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||||
|     MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||||
|     ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
|     WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
|     ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||||
|     OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
|  | ||||
| ''' | ||||
|  | ||||
| from pexpect import ExceptionPexpect, TIMEOUT, EOF, spawn | ||||
| import time | ||||
| import os | ||||
| import sys | ||||
| import re | ||||
|  | ||||
| __all__ = ['ExceptionPxssh', 'pxssh'] | ||||
|  | ||||
| # Exception classes used by this module. | ||||
| class ExceptionPxssh(ExceptionPexpect): | ||||
|     '''Raised for pxssh exceptions. | ||||
|     ''' | ||||
|  | ||||
| if sys.version_info > (3, 0): | ||||
|     from shlex import quote | ||||
| else: | ||||
|     _find_unsafe = re.compile(r'[^\w@%+=:,./-]').search | ||||
|  | ||||
|     def quote(s): | ||||
|         """Return a shell-escaped version of the string *s*.""" | ||||
|         if not s: | ||||
|             return "''" | ||||
|         if _find_unsafe(s) is None: | ||||
|             return s | ||||
|  | ||||
|         # use single quotes, and put single quotes into double quotes | ||||
|         # the string $'b is then quoted as '$'"'"'b' | ||||
|         return "'" + s.replace("'", "'\"'\"'") + "'" | ||||
|  | ||||
| class pxssh (spawn): | ||||
|     '''This class extends pexpect.spawn to specialize setting up SSH | ||||
|     connections. This adds methods for login, logout, and expecting the shell | ||||
|     prompt. It does various tricky things to handle many situations in the SSH | ||||
|     login process. For example, if the session is your first login, then pxssh | ||||
|     automatically accepts the remote certificate; or if you have public key | ||||
|     authentication setup then pxssh won't wait for the password prompt. | ||||
|  | ||||
|     pxssh uses the shell prompt to synchronize output from the remote host. In | ||||
|     order to make this more robust it sets the shell prompt to something more | ||||
|     unique than just $ or #. This should work on most Borne/Bash or Csh style | ||||
|     shells. | ||||
|  | ||||
|     Example that runs a few commands on a remote server and prints the result:: | ||||
|  | ||||
|         from pexpect import pxssh | ||||
|         import getpass | ||||
|         try: | ||||
|             s = pxssh.pxssh() | ||||
|             hostname = raw_input('hostname: ') | ||||
|             username = raw_input('username: ') | ||||
|             password = getpass.getpass('password: ') | ||||
|             s.login(hostname, username, password) | ||||
|             s.sendline('uptime')   # run a command | ||||
|             s.prompt()             # match the prompt | ||||
|             print(s.before)        # print everything before the prompt. | ||||
|             s.sendline('ls -l') | ||||
|             s.prompt() | ||||
|             print(s.before) | ||||
|             s.sendline('df') | ||||
|             s.prompt() | ||||
|             print(s.before) | ||||
|             s.logout() | ||||
|         except pxssh.ExceptionPxssh as e: | ||||
|             print("pxssh failed on login.") | ||||
|             print(e) | ||||
|  | ||||
|     Example showing how to specify SSH options:: | ||||
|  | ||||
|         from pexpect import pxssh | ||||
|         s = pxssh.pxssh(options={ | ||||
|                             "StrictHostKeyChecking": "no", | ||||
|                             "UserKnownHostsFile": "/dev/null"}) | ||||
|         ... | ||||
|  | ||||
|     Note that if you have ssh-agent running while doing development with pxssh | ||||
|     then this can lead to a lot of confusion. Many X display managers (xdm, | ||||
|     gdm, kdm, etc.) will automatically start a GUI agent. You may see a GUI | ||||
|     dialog box popup asking for a password during development. You should turn | ||||
|     off any key agents during testing. The 'force_password' attribute will turn | ||||
|     off public key authentication. This will only work if the remote SSH server | ||||
|     is configured to allow password logins. Example of using 'force_password' | ||||
|     attribute:: | ||||
|  | ||||
|             s = pxssh.pxssh() | ||||
|             s.force_password = True | ||||
|             hostname = raw_input('hostname: ') | ||||
|             username = raw_input('username: ') | ||||
|             password = getpass.getpass('password: ') | ||||
|             s.login (hostname, username, password) | ||||
|  | ||||
|     `debug_command_string` is only for the test suite to confirm that the string | ||||
|     generated for SSH is correct, using this will not allow you to do | ||||
|     anything other than get a string back from `pxssh.pxssh.login()`. | ||||
|     ''' | ||||
|  | ||||
|     def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None, | ||||
|                     logfile=None, cwd=None, env=None, ignore_sighup=True, echo=True, | ||||
|                     options={}, encoding=None, codec_errors='strict', | ||||
|                     debug_command_string=False, use_poll=False): | ||||
|  | ||||
|         spawn.__init__(self, None, timeout=timeout, maxread=maxread, | ||||
|                        searchwindowsize=searchwindowsize, logfile=logfile, | ||||
|                        cwd=cwd, env=env, ignore_sighup=ignore_sighup, echo=echo, | ||||
|                        encoding=encoding, codec_errors=codec_errors, use_poll=use_poll) | ||||
|  | ||||
|         self.name = '<pxssh>' | ||||
|  | ||||
|         #SUBTLE HACK ALERT! Note that the command that SETS the prompt uses a | ||||
|         #slightly different string than the regular expression to match it. This | ||||
|         #is because when you set the prompt the command will echo back, but we | ||||
|         #don't want to match the echoed command. So if we make the set command | ||||
|         #slightly different than the regex we eliminate the problem. To make the | ||||
|         #set command different we add a backslash in front of $. The $ doesn't | ||||
|         #need to be escaped, but it doesn't hurt and serves to make the set | ||||
|         #prompt command different than the regex. | ||||
|  | ||||
|         # used to match the command-line prompt | ||||
|         self.UNIQUE_PROMPT = r"\[PEXPECT\][\$\#] " | ||||
|         self.PROMPT = self.UNIQUE_PROMPT | ||||
|  | ||||
|         # used to set shell command-line prompt to UNIQUE_PROMPT. | ||||
|         self.PROMPT_SET_SH = r"PS1='[PEXPECT]\$ '" | ||||
|         self.PROMPT_SET_CSH = r"set prompt='[PEXPECT]\$ '" | ||||
|         self.SSH_OPTS = ("-o'RSAAuthentication=no'" | ||||
|                 + " -o 'PubkeyAuthentication=no'") | ||||
| # Disabling host key checking, makes you vulnerable to MITM attacks. | ||||
| #                + " -o 'StrictHostKeyChecking=no'" | ||||
| #                + " -o 'UserKnownHostsFile /dev/null' ") | ||||
|         # Disabling X11 forwarding gets rid of the annoying SSH_ASKPASS from | ||||
|         # displaying a GUI password dialog. I have not figured out how to | ||||
|         # disable only SSH_ASKPASS without also disabling X11 forwarding. | ||||
|         # Unsetting SSH_ASKPASS on the remote side doesn't disable it! Annoying! | ||||
|         #self.SSH_OPTS = "-x -o'RSAAuthentication=no' -o 'PubkeyAuthentication=no'" | ||||
|         self.force_password = False | ||||
|  | ||||
|         self.debug_command_string = debug_command_string | ||||
|  | ||||
|         # User defined SSH options, eg, | ||||
|         # ssh.otions = dict(StrictHostKeyChecking="no",UserKnownHostsFile="/dev/null") | ||||
|         self.options = options | ||||
|  | ||||
|     def levenshtein_distance(self, a, b): | ||||
|         '''This calculates the Levenshtein distance between a and b. | ||||
|         ''' | ||||
|  | ||||
|         n, m = len(a), len(b) | ||||
|         if n > m: | ||||
|             a,b = b,a | ||||
|             n,m = m,n | ||||
|         current = range(n+1) | ||||
|         for i in range(1,m+1): | ||||
|             previous, current = current, [i]+[0]*n | ||||
|             for j in range(1,n+1): | ||||
|                 add, delete = previous[j]+1, current[j-1]+1 | ||||
|                 change = previous[j-1] | ||||
|                 if a[j-1] != b[i-1]: | ||||
|                     change = change + 1 | ||||
|                 current[j] = min(add, delete, change) | ||||
|         return current[n] | ||||
|  | ||||
|     def try_read_prompt(self, timeout_multiplier): | ||||
|         '''This facilitates using communication timeouts to perform | ||||
|         synchronization as quickly as possible, while supporting high latency | ||||
|         connections with a tunable worst case performance. Fast connections | ||||
|         should be read almost immediately. Worst case performance for this | ||||
|         method is timeout_multiplier * 3 seconds. | ||||
|         ''' | ||||
|  | ||||
|         # maximum time allowed to read the first response | ||||
|         first_char_timeout = timeout_multiplier * 0.5 | ||||
|  | ||||
|         # maximum time allowed between subsequent characters | ||||
|         inter_char_timeout = timeout_multiplier * 0.1 | ||||
|  | ||||
|         # maximum time for reading the entire prompt | ||||
|         total_timeout = timeout_multiplier * 3.0 | ||||
|  | ||||
|         prompt = self.string_type() | ||||
|         begin = time.time() | ||||
|         expired = 0.0 | ||||
|         timeout = first_char_timeout | ||||
|  | ||||
|         while expired < total_timeout: | ||||
|             try: | ||||
|                 prompt += self.read_nonblocking(size=1, timeout=timeout) | ||||
|                 expired = time.time() - begin # updated total time expired | ||||
|                 timeout = inter_char_timeout | ||||
|             except TIMEOUT: | ||||
|                 break | ||||
|  | ||||
|         return prompt | ||||
|  | ||||
|     def sync_original_prompt (self, sync_multiplier=1.0): | ||||
|         '''This attempts to find the prompt. Basically, press enter and record | ||||
|         the response; press enter again and record the response; if the two | ||||
|         responses are similar then assume we are at the original prompt. | ||||
|         This can be a slow function. Worst case with the default sync_multiplier | ||||
|         can take 12 seconds. Low latency connections are more likely to fail | ||||
|         with a low sync_multiplier. Best case sync time gets worse with a | ||||
|         high sync multiplier (500 ms with default). ''' | ||||
|  | ||||
|         # All of these timing pace values are magic. | ||||
|         # I came up with these based on what seemed reliable for | ||||
|         # connecting to a heavily loaded machine I have. | ||||
|         self.sendline() | ||||
|         time.sleep(0.1) | ||||
|  | ||||
|         try: | ||||
|             # Clear the buffer before getting the prompt. | ||||
|             self.try_read_prompt(sync_multiplier) | ||||
|         except TIMEOUT: | ||||
|             pass | ||||
|  | ||||
|         self.sendline() | ||||
|         x = self.try_read_prompt(sync_multiplier) | ||||
|  | ||||
|         self.sendline() | ||||
|         a = self.try_read_prompt(sync_multiplier) | ||||
|  | ||||
|         self.sendline() | ||||
|         b = self.try_read_prompt(sync_multiplier) | ||||
|  | ||||
|         ld = self.levenshtein_distance(a,b) | ||||
|         len_a = len(a) | ||||
|         if len_a == 0: | ||||
|             return False | ||||
|         if float(ld)/len_a < 0.4: | ||||
|             return True | ||||
|         return False | ||||
|  | ||||
|     ### TODO: This is getting messy and I'm pretty sure this isn't perfect. | ||||
|     ### TODO: I need to draw a flow chart for this. | ||||
|     ### TODO: Unit tests for SSH tunnels, remote SSH command exec, disabling original prompt sync | ||||
|     def login (self, server, username=None, password='', terminal_type='ansi', | ||||
|                 original_prompt=r"[#$]", login_timeout=10, port=None, | ||||
|                 auto_prompt_reset=True, ssh_key=None, quiet=True, | ||||
|                 sync_multiplier=1, check_local_ip=True, | ||||
|                 password_regex=r'(?i)(?:password:)|(?:passphrase for key)', | ||||
|                 ssh_tunnels={}, spawn_local_ssh=True, | ||||
|                 sync_original_prompt=True, ssh_config=None, cmd='ssh'): | ||||
|         '''This logs the user into the given server. | ||||
|  | ||||
|         It uses 'original_prompt' to try to find the prompt right after login. | ||||
|         When it finds the prompt it immediately tries to reset the prompt to | ||||
|         something more easily matched. The default 'original_prompt' is very | ||||
|         optimistic and is easily fooled. It's more reliable to try to match the original | ||||
|         prompt as exactly as possible to prevent false matches by server | ||||
|         strings such as the "Message Of The Day". On many systems you can | ||||
|         disable the MOTD on the remote server by creating a zero-length file | ||||
|         called :file:`~/.hushlogin` on the remote server. If a prompt cannot be found | ||||
|         then this will not necessarily cause the login to fail. In the case of | ||||
|         a timeout when looking for the prompt we assume that the original | ||||
|         prompt was so weird that we could not match it, so we use a few tricks | ||||
|         to guess when we have reached the prompt. Then we hope for the best and | ||||
|         blindly try to reset the prompt to something more unique. If that fails | ||||
|         then login() raises an :class:`ExceptionPxssh` exception. | ||||
|  | ||||
|         In some situations it is not possible or desirable to reset the | ||||
|         original prompt. In this case, pass ``auto_prompt_reset=False`` to | ||||
|         inhibit setting the prompt to the UNIQUE_PROMPT. Remember that pxssh | ||||
|         uses a unique prompt in the :meth:`prompt` method. If the original prompt is | ||||
|         not reset then this will disable the :meth:`prompt` method unless you | ||||
|         manually set the :attr:`PROMPT` attribute. | ||||
|  | ||||
|         Set ``password_regex`` if there is a MOTD message with `password` in it. | ||||
|         Changing this is like playing in traffic, don't (p)expect it to match straight | ||||
|         away. | ||||
|  | ||||
|         If you require to connect to another SSH server from the your original SSH | ||||
|         connection set ``spawn_local_ssh`` to `False` and this will use your current | ||||
|         session to do so. Setting this option to `False` and not having an active session | ||||
|         will trigger an error. | ||||
|  | ||||
|         Set ``ssh_key`` to a file path to an SSH private key to use that SSH key | ||||
|         for the session authentication. | ||||
|         Set ``ssh_key`` to `True` to force passing the current SSH authentication socket | ||||
|         to the desired ``hostname``. | ||||
|  | ||||
|         Set ``ssh_config`` to a file path string of an SSH client config file to pass that | ||||
|         file to the client to handle itself. You may set any options you wish in here, however | ||||
|         doing so will require you to post extra information that you may not want to if you | ||||
|         run into issues. | ||||
|  | ||||
|         Alter the ``cmd`` to change the ssh client used, or to prepend it with network | ||||
|         namespaces. For example ```cmd="ip netns exec vlan2 ssh"``` to execute the ssh in | ||||
|         network namespace named ```vlan```. | ||||
|         ''' | ||||
|  | ||||
|         session_regex_array = ["(?i)are you sure you want to continue connecting", original_prompt, password_regex, "(?i)permission denied", "(?i)terminal type", TIMEOUT] | ||||
|         session_init_regex_array = [] | ||||
|         session_init_regex_array.extend(session_regex_array) | ||||
|         session_init_regex_array.extend(["(?i)connection closed by remote host", EOF]) | ||||
|  | ||||
|         ssh_options = ''.join([" -o '%s=%s'" % (o, v) for (o, v) in self.options.items()]) | ||||
|         if quiet: | ||||
|             ssh_options = ssh_options + ' -q' | ||||
|         if not check_local_ip: | ||||
|             ssh_options = ssh_options + " -o'NoHostAuthenticationForLocalhost=yes'" | ||||
|         if self.force_password: | ||||
|             ssh_options = ssh_options + ' ' + self.SSH_OPTS | ||||
|         if ssh_config is not None: | ||||
|             if spawn_local_ssh and not os.path.isfile(ssh_config): | ||||
|                 raise ExceptionPxssh('SSH config does not exist or is not a file.') | ||||
|             ssh_options = ssh_options + ' -F ' + ssh_config | ||||
|         if port is not None: | ||||
|             ssh_options = ssh_options + ' -p %s'%(str(port)) | ||||
|         if ssh_key is not None: | ||||
|             # Allow forwarding our SSH key to the current session | ||||
|             if ssh_key==True: | ||||
|                 ssh_options = ssh_options + ' -A' | ||||
|             else: | ||||
|                 if spawn_local_ssh and not os.path.isfile(ssh_key): | ||||
|                     raise ExceptionPxssh('private ssh key does not exist or is not a file.') | ||||
|                 ssh_options = ssh_options + ' -i %s' % (ssh_key) | ||||
|  | ||||
|         # SSH tunnels, make sure you know what you're putting into the lists | ||||
|         # under each heading. Do not expect these to open 100% of the time, | ||||
|         # The port you're requesting might be bound. | ||||
|         # | ||||
|         # The structure should be like this: | ||||
|         # { 'local': ['2424:localhost:22'],  # Local SSH tunnels | ||||
|         # 'remote': ['2525:localhost:22'],   # Remote SSH tunnels | ||||
|         # 'dynamic': [8888] } # Dynamic/SOCKS tunnels | ||||
|         if ssh_tunnels!={} and isinstance({},type(ssh_tunnels)): | ||||
|             tunnel_types = { | ||||
|                 'local':'L', | ||||
|                 'remote':'R', | ||||
|                 'dynamic':'D' | ||||
|             } | ||||
|             for tunnel_type in tunnel_types: | ||||
|                 cmd_type = tunnel_types[tunnel_type] | ||||
|                 if tunnel_type in ssh_tunnels: | ||||
|                     tunnels = ssh_tunnels[tunnel_type] | ||||
|                     for tunnel in tunnels: | ||||
|                         if spawn_local_ssh==False: | ||||
|                             tunnel = quote(str(tunnel)) | ||||
|                         ssh_options = ssh_options + ' -' + cmd_type + ' ' + str(tunnel) | ||||
|  | ||||
|         if username is not None: | ||||
|             ssh_options = ssh_options + ' -l ' + username | ||||
|         elif ssh_config is None: | ||||
|             raise TypeError('login() needs either a username or an ssh_config') | ||||
|         else:  # make sure ssh_config has an entry for the server with a username | ||||
|             with open(ssh_config, 'rt') as f: | ||||
|                 lines = [l.strip() for l in f.readlines()] | ||||
|  | ||||
|             server_regex = r'^Host\s+%s\s*$' % server | ||||
|             user_regex = r'^User\s+\w+\s*$' | ||||
|             config_has_server = False | ||||
|             server_has_username = False | ||||
|             for line in lines: | ||||
|                 if not config_has_server and re.match(server_regex, line, re.IGNORECASE): | ||||
|                     config_has_server = True | ||||
|                 elif config_has_server and 'hostname' in line.lower(): | ||||
|                     pass | ||||
|                 elif config_has_server and 'host' in line.lower(): | ||||
|                     server_has_username = False  # insurance | ||||
|                     break  # we have left the relevant section | ||||
|                 elif config_has_server and re.match(user_regex, line, re.IGNORECASE): | ||||
|                     server_has_username = True | ||||
|                     break | ||||
|  | ||||
|             if lines: | ||||
|                 del line | ||||
|  | ||||
|             del lines | ||||
|  | ||||
|             if not config_has_server: | ||||
|                 raise TypeError('login() ssh_config has no Host entry for %s' % server) | ||||
|             elif not server_has_username: | ||||
|                 raise TypeError('login() ssh_config has no user entry for %s' % server) | ||||
|  | ||||
|         cmd += " %s %s" % (ssh_options, server) | ||||
|         if self.debug_command_string: | ||||
|             return(cmd) | ||||
|  | ||||
|         # Are we asking for a local ssh command or to spawn one in another session? | ||||
|         if spawn_local_ssh: | ||||
|             spawn._spawn(self, cmd) | ||||
|         else: | ||||
|             self.sendline(cmd) | ||||
|  | ||||
|         # This does not distinguish between a remote server 'password' prompt | ||||
|         # and a local ssh 'passphrase' prompt (for unlocking a private key). | ||||
|         i = self.expect(session_init_regex_array, timeout=login_timeout) | ||||
|  | ||||
|         # First phase | ||||
|         if i==0: | ||||
|             # New certificate -- always accept it. | ||||
|             # This is what you get if SSH does not have the remote host's | ||||
|             # public key stored in the 'known_hosts' cache. | ||||
|             self.sendline("yes") | ||||
|             i = self.expect(session_regex_array) | ||||
|         if i==2: # password or passphrase | ||||
|             self.sendline(password) | ||||
|             i = self.expect(session_regex_array) | ||||
|         if i==4: | ||||
|             self.sendline(terminal_type) | ||||
|             i = self.expect(session_regex_array) | ||||
|         if i==7: | ||||
|             self.close() | ||||
|             raise ExceptionPxssh('Could not establish connection to host') | ||||
|  | ||||
|         # Second phase | ||||
|         if i==0: | ||||
|             # This is weird. This should not happen twice in a row. | ||||
|             self.close() | ||||
|             raise ExceptionPxssh('Weird error. Got "are you sure" prompt twice.') | ||||
|         elif i==1: # can occur if you have a public key pair set to authenticate. | ||||
|             ### TODO: May NOT be OK if expect() got tricked and matched a false prompt. | ||||
|             pass | ||||
|         elif i==2: # password prompt again | ||||
|             # For incorrect passwords, some ssh servers will | ||||
|             # ask for the password again, others return 'denied' right away. | ||||
|             # If we get the password prompt again then this means | ||||
|             # we didn't get the password right the first time. | ||||
|             self.close() | ||||
|             raise ExceptionPxssh('password refused') | ||||
|         elif i==3: # permission denied -- password was bad. | ||||
|             self.close() | ||||
|             raise ExceptionPxssh('permission denied') | ||||
|         elif i==4: # terminal type again? WTF? | ||||
|             self.close() | ||||
|             raise ExceptionPxssh('Weird error. Got "terminal type" prompt twice.') | ||||
|         elif i==5: # Timeout | ||||
|             #This is tricky... I presume that we are at the command-line prompt. | ||||
|             #It may be that the shell prompt was so weird that we couldn't match | ||||
|             #it. Or it may be that we couldn't log in for some other reason. I | ||||
|             #can't be sure, but it's safe to guess that we did login because if | ||||
|             #I presume wrong and we are not logged in then this should be caught | ||||
|             #later when I try to set the shell prompt. | ||||
|             pass | ||||
|         elif i==6: # Connection closed by remote host | ||||
|             self.close() | ||||
|             raise ExceptionPxssh('connection closed') | ||||
|         else: # Unexpected | ||||
|             self.close() | ||||
|             raise ExceptionPxssh('unexpected login response') | ||||
|         if sync_original_prompt: | ||||
|             if not self.sync_original_prompt(sync_multiplier): | ||||
|                 self.close() | ||||
|                 raise ExceptionPxssh('could not synchronize with original prompt') | ||||
|         # We appear to be in. | ||||
|         # set shell prompt to something unique. | ||||
|         if auto_prompt_reset: | ||||
|             if not self.set_unique_prompt(): | ||||
|                 self.close() | ||||
|                 raise ExceptionPxssh('could not set shell prompt ' | ||||
|                                      '(received: %r, expected: %r).' % ( | ||||
|                                          self.before, self.PROMPT,)) | ||||
|         return True | ||||
|  | ||||
|     def logout (self): | ||||
|         '''Sends exit to the remote shell. | ||||
|  | ||||
|         If there are stopped jobs then this automatically sends exit twice. | ||||
|         ''' | ||||
|         self.sendline("exit") | ||||
|         index = self.expect([EOF, "(?i)there are stopped jobs"]) | ||||
|         if index==1: | ||||
|             self.sendline("exit") | ||||
|             self.expect(EOF) | ||||
|         self.close() | ||||
|  | ||||
|     def prompt(self, timeout=-1): | ||||
|         '''Match the next shell prompt. | ||||
|  | ||||
|         This is little more than a short-cut to the :meth:`~pexpect.spawn.expect` | ||||
|         method. Note that if you called :meth:`login` with | ||||
|         ``auto_prompt_reset=False``, then before calling :meth:`prompt` you must | ||||
|         set the :attr:`PROMPT` attribute to a regex that it will use for | ||||
|         matching the prompt. | ||||
|  | ||||
|         Calling :meth:`prompt` will erase the contents of the :attr:`before` | ||||
|         attribute even if no prompt is ever matched. If timeout is not given or | ||||
|         it is set to -1 then self.timeout is used. | ||||
|  | ||||
|         :return: True if the shell prompt was matched, False if the timeout was | ||||
|                  reached. | ||||
|         ''' | ||||
|  | ||||
|         if timeout == -1: | ||||
|             timeout = self.timeout | ||||
|         i = self.expect([self.PROMPT, TIMEOUT], timeout=timeout) | ||||
|         if i==1: | ||||
|             return False | ||||
|         return True | ||||
|  | ||||
|     def set_unique_prompt(self): | ||||
|         '''This sets the remote prompt to something more unique than ``#`` or ``$``. | ||||
|         This makes it easier for the :meth:`prompt` method to match the shell prompt | ||||
|         unambiguously. This method is called automatically by the :meth:`login` | ||||
|         method, but you may want to call it manually if you somehow reset the | ||||
|         shell prompt. For example, if you 'su' to a different user then you | ||||
|         will need to manually reset the prompt. This sends shell commands to | ||||
|         the remote host to set the prompt, so this assumes the remote host is | ||||
|         ready to receive commands. | ||||
|  | ||||
|         Alternatively, you may use your own prompt pattern. In this case you | ||||
|         should call :meth:`login` with ``auto_prompt_reset=False``; then set the | ||||
|         :attr:`PROMPT` attribute to a regular expression. After that, the | ||||
|         :meth:`prompt` method will try to match your prompt pattern. | ||||
|         ''' | ||||
|  | ||||
|         self.sendline("unset PROMPT_COMMAND") | ||||
|         self.sendline(self.PROMPT_SET_SH) # sh-style | ||||
|         i = self.expect ([TIMEOUT, self.PROMPT], timeout=10) | ||||
|         if i == 0: # csh-style | ||||
|             self.sendline(self.PROMPT_SET_CSH) | ||||
|             i = self.expect([TIMEOUT, self.PROMPT], timeout=10) | ||||
|             if i == 0: | ||||
|                 return False | ||||
|         return True | ||||
|  | ||||
| # vi:ts=4:sw=4:expandtab:ft=python: | ||||
							
								
								
									
										130
									
								
								plugins/git_clone/pexpect/replwrap.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										130
									
								
								plugins/git_clone/pexpect/replwrap.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,130 @@ | ||||
| """Generic wrapper for read-eval-print-loops, a.k.a. interactive shells | ||||
| """ | ||||
| import os.path | ||||
| import signal | ||||
| import sys | ||||
|  | ||||
| import pexpect | ||||
|  | ||||
| PY3 = (sys.version_info[0] >= 3) | ||||
|  | ||||
| if PY3: | ||||
|     basestring = str | ||||
|  | ||||
| PEXPECT_PROMPT = u'[PEXPECT_PROMPT>' | ||||
| PEXPECT_CONTINUATION_PROMPT = u'[PEXPECT_PROMPT+' | ||||
|  | ||||
class REPLWrapper(object):
    """Wrapper for a REPL.

    :param cmd_or_spawn: This can either be an instance of :class:`pexpect.spawn`
      in which a REPL has already been started, or a str command to start a new
      REPL process.
    :param str orig_prompt: The prompt to expect at first.
    :param str prompt_change: A command to change the prompt to something more
      unique. If this is ``None``, the prompt will not be changed. This will
      be formatted with the new and continuation prompts as positional
      parameters, so you can use ``{}`` style formatting to insert them into
      the command.
    :param str new_prompt: The more unique prompt to expect after the change.
    :param str extra_init_cmd: Commands to do extra initialisation, such as
      disabling pagers.
    """
    def __init__(self, cmd_or_spawn, orig_prompt, prompt_change,
                 new_prompt=PEXPECT_PROMPT,
                 continuation_prompt=PEXPECT_CONTINUATION_PROMPT,
                 extra_init_cmd=None):
        # Accept either a command string (spawn it ourselves) or an
        # already-running pexpect.spawn instance.
        if isinstance(cmd_or_spawn, basestring):
            self.child = pexpect.spawn(cmd_or_spawn, echo=False, encoding='utf-8')
        else:
            self.child = cmd_or_spawn
        if self.child.echo:
            # Existing spawn instance has echo enabled, disable it
            # to prevent our input from being repeated to output.
            self.child.setecho(False)
            self.child.waitnoecho()

        if prompt_change is None:
            self.prompt = orig_prompt
        else:
            # Send the prompt-change command while still matching the REPL's
            # original prompt, then expect the unique prompt from here on.
            self.set_prompt(orig_prompt,
                        prompt_change.format(new_prompt, continuation_prompt))
            self.prompt = new_prompt
        self.continuation_prompt = continuation_prompt

        # Consume output up to the first (possibly newly-set) prompt.
        self._expect_prompt()

        if extra_init_cmd is not None:
            self.run_command(extra_init_cmd)

    def set_prompt(self, orig_prompt, prompt_change):
        """Wait for *orig_prompt*, then send *prompt_change* to the child."""
        self.child.expect(orig_prompt)
        self.child.sendline(prompt_change)

    def _expect_prompt(self, timeout=-1, async_=False):
        """Wait for either prompt. Returns the matched index: 0 for the main
        prompt, 1 for the continuation prompt."""
        return self.child.expect_exact([self.prompt, self.continuation_prompt],
                                       timeout=timeout, async_=async_)

    def run_command(self, command, timeout=-1, async_=False):
        """Send a command to the REPL, wait for and return output.

        :param str command: The command to send. Trailing newlines are not needed.
          This should be a complete block of input that will trigger execution;
          if a continuation prompt is found after sending input, :exc:`ValueError`
          will be raised.
        :param int timeout: How long to wait for the next prompt. -1 means the
          default from the :class:`pexpect.spawn` object (default 30 seconds).
          None means to wait indefinitely.
        :param bool async_: On Python 3.4, or Python 3.3 with asyncio
          installed, passing ``async_=True`` will make this return an
          :mod:`asyncio` Future, which you can yield from to get the same
          result that this method would normally give directly.
        """
        # Split up multiline commands and feed them in bit-by-bit
        cmdlines = command.splitlines()
        # splitlines ignores trailing newlines - add it back in manually
        if command.endswith('\n'):
            cmdlines.append('')
        if not cmdlines:
            raise ValueError("No command was given")

        if async_:
            from ._async import repl_run_command_async
            return repl_run_command_async(self, cmdlines, timeout)

        res = []
        self.child.sendline(cmdlines[0])
        for line in cmdlines[1:]:
            # Each intermediate line should produce a continuation prompt;
            # collect whatever output preceded it.
            self._expect_prompt(timeout=timeout)
            res.append(self.child.before)
            self.child.sendline(line)

        # Command was fully submitted, now wait for the next prompt
        if self._expect_prompt(timeout=timeout) == 1:
            # We got the continuation prompt - command was incomplete
            self.child.kill(signal.SIGINT)
            self._expect_prompt(timeout=1)
            raise ValueError("Continuation prompt found - input was incomplete:\n"
                             + command)
        return u''.join(res + [self.child.before])
|  | ||||
def python(command="python"):
    """Spawn a Python interpreter and return a :class:`REPLWrapper` for it."""
    # Rewrite sys.ps1/ps2 inside the child so output can be split on the
    # unique pexpect prompt markers.
    prompt_change = u"import sys; sys.ps1={0!r}; sys.ps2={1!r}"
    return REPLWrapper(command, u">>> ", prompt_change)
|  | ||||
def bash(command="bash"):
    """Launch bash with our rcfile and wrap it in a :class:`REPLWrapper`."""
    rcfile = os.path.join(os.path.dirname(__file__), 'bashrc.sh')
    child = pexpect.spawn(command, ['--rcfile', rcfile], echo=False,
                          encoding='utf-8')

    # If the user runs 'env', the value of PS1 appears in the output. To keep
    # that from matching as the next prompt, embed bash's "invisible
    # characters" marker (\[ \]) in the middle of each prompt string: it shows
    # up when the variable is inspected, but bash strips it when actually
    # displaying the prompt.
    def _with_marker(prompt):
        return prompt[:5] + u'\\[\\]' + prompt[5:]

    primary = _with_marker(PEXPECT_PROMPT)
    secondary = _with_marker(PEXPECT_CONTINUATION_PROMPT)
    prompt_change = u"PS1='{0}' PS2='{1}' PROMPT_COMMAND=''".format(
        primary, secondary)

    return REPLWrapper(child, u'\\$', prompt_change,
                       extra_init_cmd="export PAGER=cat")
							
								
								
									
										157
									
								
								plugins/git_clone/pexpect/run.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										157
									
								
								plugins/git_clone/pexpect/run.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,157 @@ | ||||
| import sys | ||||
| import types | ||||
|  | ||||
| from .exceptions import EOF, TIMEOUT | ||||
| from .pty_spawn import spawn | ||||
|  | ||||
def run(command, timeout=30, withexitstatus=False, events=None,
        extra_args=None, logfile=None, cwd=None, env=None, **kwargs):

    '''
    This function runs the given command; waits for it to finish; then
    returns all output as a string. STDERR is included in output. If the full
    path to the command is not given then the path is searched.

    Note that lines are terminated by CR/LF (\\r\\n) combination even on
    UNIX-like systems because this is the standard for pseudottys. If you set
    'withexitstatus' to true, then run will return a tuple of (command_output,
    exitstatus). If 'withexitstatus' is false then this returns just
    command_output.

    The run() function can often be used instead of creating a spawn instance.
    For example, the following code uses spawn::

        from pexpect import *
        child = spawn('scp foo user@example.com:.')
        child.expect('(?i)password')
        child.sendline(mypassword)

    The previous code can be replaced with the following::

        from pexpect import *
        run('scp foo user@example.com:.', events={'(?i)password': mypassword})

    **Examples**

    Start the apache daemon on the local machine::

        from pexpect import *
        run("/usr/local/apache/bin/apachectl start")

    Check in a file using SVN::

        from pexpect import *
        run("svn ci -m 'automatic commit' my_file.py")

    Run a command and capture exit status::

        from pexpect import *
        (command_output, exitstatus) = run('ls -l /bin', withexitstatus=1)

    The following will run SSH and execute 'ls -l' on the remote machine. The
    password 'secret' will be sent if the '(?i)password' pattern is ever seen::

        run("ssh username@machine.example.com 'ls -l'",
            events={'(?i)password':'secret\\n'})

    This will start mencoder to rip a video from DVD. This will also display
    progress ticks every 5 seconds as it runs. For example::

        from pexpect import *
        def print_ticks(d):
            print(d['event_count'])
        run("mencoder dvd://1 -o video.avi -oac copy -ovc copy",
            events={TIMEOUT:print_ticks}, timeout=5)

    The 'events' argument should be either a dictionary or a tuple list that
    contains patterns and responses. Whenever one of the patterns is seen
    in the command output, run() will send the associated response string.
    So, run() in the above example can also be written as:

        run("mencoder dvd://1 -o video.avi -oac copy -ovc copy",
            events=[(TIMEOUT,print_ticks)], timeout=5)

    Use a tuple list for events if the command output requires a delicate
    control over what pattern should be matched, since the tuple list is passed
    to pexpect() as its pattern list, with the order of patterns preserved.

    Note that you should put newlines in your string if Enter is necessary.

    Like the example above, the responses may also contain a callback, either
    a function or method.  It should accept a dictionary value as an argument.
    The dictionary contains all the locals from the run() function, so you can
    access the child spawn object or any other variable defined in run()
    (event_count, child, and extra_args are the most useful). A callback may
    return True to stop the current run process.  Otherwise run() continues
    until the next event. A callback may also return a string which will be
    sent to the child. 'extra_args' is not used directly by run(). It provides
    a way to pass data to a callback function through run() through the locals
    dictionary passed to a callback.

    Like :class:`spawn`, passing *encoding* will make it work with unicode
    instead of bytes. You can pass *codec_errors* to control how errors in
    encoding and decoding are handled.
    '''
    # timeout == -1 means "use the spawn class's own default" rather than
    # this function's default of 30 seconds.
    if timeout == -1:
        child = spawn(command, maxread=2000, logfile=logfile, cwd=cwd, env=env,
                        **kwargs)
    else:
        child = spawn(command, timeout=timeout, maxread=2000, logfile=logfile,
                cwd=cwd, env=env, **kwargs)
    # Normalise 'events' into parallel pattern/response lists; a tuple list
    # preserves the caller's matching order, a dict does not guarantee it.
    if isinstance(events, list):
        patterns= [x for x,y in events]
        responses = [y for x,y in events]
    elif isinstance(events, dict):
        patterns = list(events.keys())
        responses = list(events.values())
    else:
        # This assumes EOF or TIMEOUT will eventually cause run to terminate.
        patterns = None
        responses = None
    child_result_list = []
    event_count = 0
    while True:
        try:
            index = child.expect(patterns)
            if isinstance(child.after, child.allowed_string_types):
                child_result_list.append(child.before + child.after)
            else:
                # child.after may have been a TIMEOUT or EOF,
                # which we don't want appended to the list.
                child_result_list.append(child.before)
            if isinstance(responses[index], child.allowed_string_types):
                child.send(responses[index])
            elif (isinstance(responses[index], types.FunctionType) or
                  isinstance(responses[index], types.MethodType)):
                # Callback gets run()'s locals; a returned string is sent to
                # the child, any other truthy value stops the loop.
                callback_result = responses[index](locals())
                sys.stdout.flush()
                if isinstance(callback_result, child.allowed_string_types):
                    child.send(callback_result)
                elif callback_result:
                    break
            else:
                raise TypeError("parameter `event' at index {index} must be "
                                "a string, method, or function: {value!r}"
                                .format(index=index, value=responses[index]))
            event_count = event_count + 1
        except TIMEOUT:
            child_result_list.append(child.before)
            break
        except EOF:
            child_result_list.append(child.before)
            break
    # Join with the spawn's native string type (bytes or str, depending on
    # whether an encoding was configured).
    child_result = child.string_type().join(child_result_list)
    if withexitstatus:
        child.close()
        return (child_result, child.exitstatus)
    else:
        return child_result
|  | ||||
def runu(command, timeout=30, withexitstatus=False, events=None,
        extra_args=None, logfile=None, cwd=None, env=None, **kwargs):
    """Deprecated unicode variant of :func:`run`.

    Equivalent to ``run(..., encoding='utf-8')`` unless the caller supplies
    an explicit *encoding*; pass *encoding* to :func:`run` directly instead.
    """
    if 'encoding' not in kwargs:
        kwargs['encoding'] = 'utf-8'
    return run(command,
               timeout=timeout,
               withexitstatus=withexitstatus,
               events=events,
               extra_args=extra_args,
               logfile=logfile,
               cwd=cwd,
               env=env,
               **kwargs)
							
								
								
									
										431
									
								
								plugins/git_clone/pexpect/screen.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										431
									
								
								plugins/git_clone/pexpect/screen.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,431 @@ | ||||
| '''This implements a virtual screen. This is used to support ANSI terminal | ||||
| emulation. The screen representation and state is implemented in this class. | ||||
| Most of the methods are inspired by ANSI screen control codes. The | ||||
| :class:`~pexpect.ANSI.ANSI` class extends this class to add parsing of ANSI | ||||
| escape codes. | ||||
|  | ||||
| PEXPECT LICENSE | ||||
|  | ||||
|     This license is approved by the OSI and FSF as GPL-compatible. | ||||
|         http://opensource.org/licenses/isc-license.txt | ||||
|  | ||||
|     Copyright (c) 2012, Noah Spurrier <noah@noah.org> | ||||
|     PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY | ||||
|     PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE | ||||
|     COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. | ||||
|     THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||||
|     WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||||
|     MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||||
|     ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||||
|     WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||||
|     ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||||
|     OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||||
|  | ||||
| ''' | ||||
|  | ||||
| import codecs | ||||
| import copy | ||||
| import sys | ||||
|  | ||||
| import warnings | ||||
|  | ||||
| warnings.warn(("pexpect.screen and pexpect.ANSI are deprecated. " | ||||
|                "We recommend using pyte to emulate a terminal screen: " | ||||
|                "https://pypi.python.org/pypi/pyte"), | ||||
|                stacklevel=2) | ||||
|  | ||||
# ASCII control-code values recognised by the terminal-emulation layer.
NUL = 0    # Fill character; ignored on input.
ENQ = 5    # Transmit answerback message.
BEL = 7    # Ring the bell.
BS  = 8    # Move cursor left.
HT  = 9    # Move cursor to next tab stop.
LF = 10    # Line feed.
VT = 11    # Same as LF.
FF = 12    # Same as LF.
CR = 13    # Move cursor to left margin or newline.
SO = 14    # Invoke G1 character set.
SI = 15    # Invoke G0 character set.
XON = 17   # Resume transmission.
XOFF = 19  # Halt transmission.
CAN = 24   # Cancel escape sequence.
SUB = 26   # Same as CAN.
ESC = 27   # Introduce a control sequence.
DEL = 127  # Fill character; ignored on input.
SPACE = u' ' # Space or blank character.

# Python 2/3 compatibility: screen contents are stored as unicode internally.
PY3 = (sys.version_info[0] >= 3)
if PY3:
    unicode = str
|  | ||||
def constrain (n, min, max):

    '''Clamp *n* into the inclusive range [min, max].

    The lower bound is checked first, so for a degenerate range where
    min > max, a value below min yields min.
    '''

    if n < min:
        return min
    return max if n > max else n
|  | ||||
class screen:
    '''This object maintains the state of a virtual text screen as a
    rectangular array. This maintains a virtual cursor position and handles
    scrolling as characters are added. This supports most of the methods needed
    by an ANSI text screen. Row and column indexes are 1-based (not zero-based,
    like arrays).

    Characters are represented internally using unicode. Methods that accept
    input characters, when passed 'bytes' (which in Python 2 is equivalent to
    'str'), convert them from the encoding specified in the 'encoding'
    parameter to the constructor. Methods that return screen contents return
    unicode strings, with the exception of __str__() under Python 2. Passing
    ``encoding=None`` limits the API to only accept unicode input, so passing
    bytes in will raise :exc:`TypeError`.
    '''
    def __init__(self, r=24, c=80, encoding='latin-1', encoding_errors='replace'):
        '''This initializes a blank screen of the given dimensions.'''

        self.rows = r
        self.cols = c
        self.encoding = encoding
        self.encoding_errors = encoding_errors
        if encoding is not None:
            # Incremental decoder so multi-byte sequences split across
            # separate calls still decode correctly.
            self.decoder = codecs.getincrementaldecoder(encoding)(encoding_errors)
        else:
            self.decoder = None
        self.cur_r = 1
        self.cur_c = 1
        self.cur_saved_r = 1
        self.cur_saved_c = 1
        self.scroll_row_start = 1
        self.scroll_row_end = self.rows
        # Screen contents: rows x cols array of single-character strings.
        self.w = [ [SPACE] * self.cols for _ in range(self.rows)]

    def _decode(self, s):
        '''This converts from the external coding system (as passed to
        the constructor) to the internal one (unicode). '''
        if self.decoder is not None:
            return self.decoder.decode(s)
        else:
            raise TypeError("This screen was constructed with encoding=None, "
                            "so it does not handle bytes.")

    def _unicode(self):
        '''This returns a printable representation of the screen as a unicode
        string (which, under Python 3.x, is the same as 'str'). The end of each
        screen line is terminated by a newline.'''

        return u'\n'.join ([ u''.join(c) for c in self.w ])

    if PY3:
        __str__ = _unicode
    else:
        __unicode__ = _unicode

        def __str__(self):
            '''This returns a printable representation of the screen. The end of
            each screen line is terminated by a newline. '''
            encoding = self.encoding or 'ascii'
            return self._unicode().encode(encoding, 'replace')

    def dump (self):
        '''This returns a copy of the screen as a unicode string. This is similar to
        __str__/__unicode__ except that lines are not terminated with line
        feeds.'''

        return u''.join ([ u''.join(c) for c in self.w ])

    def pretty (self):
        '''This returns a copy of the screen as a unicode string with an ASCII
        text box around the screen border. This is similar to
        __str__/__unicode__ except that it adds a box.'''

        top_bot = u'+' + u'-'*self.cols + u'+\n'
        return top_bot + u'\n'.join([u'|'+line+u'|' for line in unicode(self).split(u'\n')]) + u'\n' + top_bot

    def fill (self, ch=SPACE):
        '''Fill the whole screen with character *ch*.'''

        if isinstance(ch, bytes):
            ch = self._decode(ch)

        self.fill_region (1,1,self.rows,self.cols, ch)

    def fill_region (self, rs,cs, re,ce, ch=SPACE):
        '''Fill the rectangle (rs,cs)-(re,ce), inclusive, with *ch*.
        Coordinates are clamped to the screen and may be given in either
        order.'''

        if isinstance(ch, bytes):
            ch = self._decode(ch)

        rs = constrain (rs, 1, self.rows)
        re = constrain (re, 1, self.rows)
        cs = constrain (cs, 1, self.cols)
        ce = constrain (ce, 1, self.cols)
        if rs > re:
            rs, re = re, rs
        if cs > ce:
            cs, ce = ce, cs
        for r in range (rs, re+1):
            for c in range (cs, ce + 1):
                self.put_abs (r,c,ch)

    def cr (self):
        '''This moves the cursor to the beginning (col 1) of the current row.
        '''

        self.cursor_home (self.cur_r, 1)

    def lf (self):
        '''This moves the cursor down with scrolling.
        '''

        old_r = self.cur_r
        self.cursor_down()
        if old_r == self.cur_r:
            # Cursor was already on the bottom row: scroll instead of moving.
            self.scroll_up ()
            self.erase_line()

    def crlf (self):
        '''This advances the cursor with CRLF properties.
        The cursor will line wrap and the screen may scroll.
        '''

        self.cr ()
        self.lf ()

    def newline (self):
        '''This is an alias for crlf().
        '''

        self.crlf()

    def put_abs (self, r, c, ch):
        '''Put the first character of *ch* at (r,c). Screen array starts at
        1 index; coordinates are clamped to the screen.'''

        r = constrain (r, 1, self.rows)
        c = constrain (c, 1, self.cols)
        if isinstance(ch, bytes):
            ch = self._decode(ch)[0]
        else:
            ch = ch[0]
        self.w[r-1][c-1] = ch

    def put (self, ch):
        '''This puts a characters at the current cursor position.
        '''

        if isinstance(ch, bytes):
            ch = self._decode(ch)

        self.put_abs (self.cur_r, self.cur_c, ch)

    def insert_abs (self, r, c, ch):
        '''This inserts a character at (r,c). Everything under
        and to the right is shifted right one character.
        The last character of the line is lost.
        '''

        if isinstance(ch, bytes):
            ch = self._decode(ch)

        r = constrain (r, 1, self.rows)
        c = constrain (c, 1, self.cols)
        for ci in range (self.cols, c, -1):
            self.put_abs (r,ci, self.get_abs(r,ci-1))
        self.put_abs (r,c,ch)

    def insert (self, ch):
        '''Insert *ch* at the current cursor position (see insert_abs).'''

        if isinstance(ch, bytes):
            ch = self._decode(ch)

        self.insert_abs (self.cur_r, self.cur_c, ch)

    def get_abs (self, r, c):
        '''Return the character at (r,c); coordinates are clamped.'''

        r = constrain (r, 1, self.rows)
        c = constrain (c, 1, self.cols)
        return self.w[r-1][c-1]

    def get (self):
        '''Return the character at the current cursor position.'''

        # Fixed: the original called get_abs() but never returned its value,
        # so get() always yielded None.
        return self.get_abs (self.cur_r, self.cur_c)

    def get_region (self, rs,cs, re,ce):
        '''This returns a list of lines representing the region.
        '''

        rs = constrain (rs, 1, self.rows)
        re = constrain (re, 1, self.rows)
        cs = constrain (cs, 1, self.cols)
        ce = constrain (ce, 1, self.cols)
        if rs > re:
            rs, re = re, rs
        if cs > ce:
            cs, ce = ce, cs
        sc = []
        for r in range (rs, re+1):
            line = u''
            for c in range (cs, ce + 1):
                ch = self.get_abs (r,c)
                line = line + ch
            sc.append (line)
        return sc

    def cursor_constrain (self):
        '''This keeps the cursor within the screen area.
        '''

        self.cur_r = constrain (self.cur_r, 1, self.rows)
        self.cur_c = constrain (self.cur_c, 1, self.cols)

    def cursor_home (self, r=1, c=1): # <ESC>[{ROW};{COLUMN}H
        '''Move the cursor to (r,c), clamped to the screen.'''

        self.cur_r = r
        self.cur_c = c
        self.cursor_constrain ()

    def cursor_back (self,count=1): # <ESC>[{COUNT}D (not confused with down)

        self.cur_c = self.cur_c - count
        self.cursor_constrain ()

    def cursor_down (self,count=1): # <ESC>[{COUNT}B (not confused with back)

        self.cur_r = self.cur_r + count
        self.cursor_constrain ()

    def cursor_forward (self,count=1): # <ESC>[{COUNT}C

        self.cur_c = self.cur_c + count
        self.cursor_constrain ()

    def cursor_up (self,count=1): # <ESC>[{COUNT}A

        self.cur_r = self.cur_r - count
        self.cursor_constrain ()

    def cursor_up_reverse (self): # <ESC> M   (called RI -- Reverse Index)

        old_r = self.cur_r
        self.cursor_up()
        if old_r == self.cur_r:
            # Already on the top row: scroll the region instead.
            self.scroll_up()

    def cursor_force_position (self, r, c): # <ESC>[{ROW};{COLUMN}f
        '''Identical to Cursor Home.'''

        self.cursor_home (r, c)

    def cursor_save (self): # <ESC>[s
        '''Save current cursor position.'''

        self.cursor_save_attrs()

    def cursor_unsave (self): # <ESC>[u
        '''Restores cursor position after a Save Cursor.'''

        self.cursor_restore_attrs()

    def cursor_save_attrs (self): # <ESC>7
        '''Save current cursor position.'''

        self.cur_saved_r = self.cur_r
        self.cur_saved_c = self.cur_c

    def cursor_restore_attrs (self): # <ESC>8
        '''Restores cursor position after a Save Cursor.'''

        self.cursor_home (self.cur_saved_r, self.cur_saved_c)

    def scroll_constrain (self):
        '''This keeps the scroll region within the screen region.'''

        if self.scroll_row_start <= 0:
            self.scroll_row_start = 1
        if self.scroll_row_end > self.rows:
            self.scroll_row_end = self.rows

    def scroll_screen (self): # <ESC>[r
        '''Enable scrolling for entire display.'''

        self.scroll_row_start = 1
        self.scroll_row_end = self.rows

    def scroll_screen_rows (self, rs, re): # <ESC>[{start};{end}r
        '''Enable scrolling from row {start} to row {end}.'''

        self.scroll_row_start = rs
        self.scroll_row_end = re
        self.scroll_constrain()

    def scroll_down (self): # <ESC>D
        '''Scroll display down one line.'''

        # Screen is indexed from 1, but arrays are indexed from 0.
        s = self.scroll_row_start - 1
        e = self.scroll_row_end - 1
        self.w[s+1:e+1] = copy.deepcopy(self.w[s:e])

    def scroll_up (self): # <ESC>M
        '''Scroll display up one line.'''

        # Screen is indexed from 1, but arrays are indexed from 0.
        s = self.scroll_row_start - 1
        e = self.scroll_row_end - 1
        self.w[s:e] = copy.deepcopy(self.w[s+1:e+1])

    def erase_end_of_line (self): # <ESC>[0K -or- <ESC>[K
        '''Erases from the current cursor position to the end of the current
        line.'''

        self.fill_region (self.cur_r, self.cur_c, self.cur_r, self.cols)

    def erase_start_of_line (self): # <ESC>[1K
        '''Erases from the current cursor position to the start of the current
        line.'''

        self.fill_region (self.cur_r, 1, self.cur_r, self.cur_c)

    def erase_line (self): # <ESC>[2K
        '''Erases the entire current line.'''

        self.fill_region (self.cur_r, 1, self.cur_r, self.cols)

    def erase_down (self): # <ESC>[0J -or- <ESC>[J
        '''Erases the screen from the current line down to the bottom of the
        screen.'''

        self.erase_end_of_line ()
        if self.cur_r < self.rows:
            # Guard fixes a boundary bug: with the cursor on the last row,
            # fill_region would clamp cur_r+1 back onto the current row and
            # wrongly erase the part of the line before the cursor too.
            self.fill_region (self.cur_r + 1, 1, self.rows, self.cols)

    def erase_up (self): # <ESC>[1J
        '''Erases the screen from the current line up to the top of the
        screen.'''

        self.erase_start_of_line ()
        if self.cur_r > 1:
            # Same boundary guard as erase_down(), for the top row.
            self.fill_region (self.cur_r-1, 1, 1, self.cols)

    def erase_screen (self): # <ESC>[2J
        '''Erases the screen with the background color.'''

        self.fill ()

    def set_tab (self): # <ESC>H
        '''Sets a tab at the current position.'''

        pass

    def clear_tab (self): # <ESC>[g
        '''Clears tab at the current position.'''

        pass

    def clear_all_tabs (self): # <ESC>[3g
        '''Clears all tabs.'''

        pass
|  | ||||
| #        Insert line             Esc [ Pn L | ||||
| #        Delete line             Esc [ Pn M | ||||
| #        Delete character        Esc [ Pn P | ||||
| #        Scrolling region        Esc [ Pn(top);Pn(bot) r | ||||
|  | ||||
							
								
								
									
										525
									
								
								plugins/git_clone/pexpect/spawnbase.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										525
									
								
								plugins/git_clone/pexpect/spawnbase.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,525 @@ | ||||
| from io import StringIO, BytesIO | ||||
| import codecs | ||||
| import os | ||||
| import sys | ||||
| import re | ||||
| import errno | ||||
| from .exceptions import ExceptionPexpect, EOF, TIMEOUT | ||||
| from .expect import Expecter, searcher_string, searcher_re | ||||
|  | ||||
PY3 = sys.version_info[0] >= 3

# The native text type: ``str`` on Python 3, ``unicode`` on Python 2.
if PY3:
    text_type = str
else:
    text_type = unicode
|  | ||||
| class _NullCoder(object): | ||||
|     """Pass bytes through unchanged.""" | ||||
|     @staticmethod | ||||
|     def encode(b, final=False): | ||||
|         return b | ||||
|  | ||||
|     @staticmethod | ||||
|     def decode(b, final=False): | ||||
|         return b | ||||
|  | ||||
| class SpawnBase(object): | ||||
|     """A base class providing the backwards-compatible spawn API for Pexpect. | ||||
|  | ||||
|     This should not be instantiated directly: use :class:`pexpect.spawn` or | ||||
|     :class:`pexpect.fdpexpect.fdspawn`. | ||||
|     """ | ||||
|     encoding = None | ||||
|     pid = None | ||||
|     flag_eof = False | ||||
|  | ||||
    def __init__(self, timeout=30, maxread=2000, searchwindowsize=None,
                 logfile=None, encoding=None, codec_errors='strict'):
        """Initialize stream, logging, timing, and codec state.

        Args:
            timeout: default seconds expect() waits before raising
                TIMEOUT; None means it may block indefinitely.
            maxread: max bytes to read at one time into the buffer.
            searchwindowsize: bytes at the end of the buffer that are
                searched (None searches everything).
            logfile: file-like object receiving all traffic (see also
                logfile_read / logfile_send below).
            encoding: None selects bytes mode; a codec name selects
                unicode mode with incremental encoder/decoder.
            codec_errors: error policy handed to the incremental codecs
                (unicode mode only).
        """
        self.stdin = sys.stdin
        self.stdout = sys.stdout
        self.stderr = sys.stderr

        self.searcher = None
        self.ignorecase = False
        self.before = None
        self.after = None
        self.match = None
        self.match_index = None
        self.terminated = True
        self.exitstatus = None
        self.signalstatus = None
        # status returned by os.waitpid
        self.status = None
        # the child file descriptor is initially closed
        self.child_fd = -1
        self.timeout = timeout
        self.delimiter = EOF
        self.logfile = logfile
        # input from child (read_nonblocking)
        self.logfile_read = None
        # output to send (send, sendline)
        self.logfile_send = None
        # max bytes to read at one time into buffer
        self.maxread = maxread
        # Data before searchwindowsize point is preserved, but not searched.
        self.searchwindowsize = searchwindowsize
        # Delay used before sending data to child. Time in seconds.
        # Set this to None to skip the time.sleep() call completely.
        self.delaybeforesend = 0.05
        # Used by close() to give kernel time to update process status.
        # Time in seconds.
        self.delayafterclose = 0.1
        # Used by terminate() to give kernel time to update process status.
        # Time in seconds.
        self.delayafterterminate = 0.1
        # Delay in seconds to sleep after each call to read_nonblocking().
        # Set this to None to skip the time.sleep() call completely: that
        # would restore the behavior from pexpect-2.0 (for performance
        # reasons or because you don't want to release Python's global
        # interpreter lock).
        self.delayafterread = 0.0001
        self.softspace = False
        self.name = '<' + repr(self) + '>'
        self.closed = True

        # Unicode interface
        self.encoding = encoding
        self.codec_errors = codec_errors
        if encoding is None:
            # bytes mode (accepts some unicode for backwards compatibility)
            self._encoder = self._decoder = _NullCoder()
            self.string_type = bytes
            self.buffer_type = BytesIO
            self.crlf = b'\r\n'
            if PY3:
                self.allowed_string_types = (bytes, str)
                self.linesep = os.linesep.encode('ascii')
                def write_to_stdout(b):
                    try:
                        return sys.stdout.buffer.write(b)
                    except AttributeError:
                        # If stdout has been replaced, it may not have .buffer
                        return sys.stdout.write(b.decode('ascii', 'replace'))
                self.write_to_stdout = write_to_stdout
            else:
                self.allowed_string_types = (basestring,)  # analysis:ignore
                self.linesep = os.linesep
                self.write_to_stdout = sys.stdout.write
        else:
            # unicode mode
            self._encoder = codecs.getincrementalencoder(encoding)(codec_errors)
            self._decoder = codecs.getincrementaldecoder(encoding)(codec_errors)
            self.string_type = text_type
            self.buffer_type = StringIO
            self.crlf = u'\r\n'
            self.allowed_string_types = (text_type, )
            if PY3:
                self.linesep = os.linesep
            else:
                self.linesep = os.linesep.decode('ascii')
            # This can handle unicode in both Python 2 and 3
            self.write_to_stdout = sys.stdout.write
        # storage for async transport
        self.async_pw_transport = None
        # This is the read buffer. See maxread.
        self._buffer = self.buffer_type()
        # The buffer may be trimmed for efficiency reasons.  This is the
        # untrimmed buffer, used to create the before attribute.
        self._before = self.buffer_type()
|  | ||||
|     def _log(self, s, direction): | ||||
|         if self.logfile is not None: | ||||
|             self.logfile.write(s) | ||||
|             self.logfile.flush() | ||||
|         second_log = self.logfile_send if (direction=='send') else self.logfile_read | ||||
|         if second_log is not None: | ||||
|             second_log.write(s) | ||||
|             second_log.flush() | ||||
|  | ||||
|     # For backwards compatibility, in bytes mode (when encoding is None) | ||||
|     # unicode is accepted for send and expect. Unicode mode is strictly unicode | ||||
|     # only. | ||||
|     def _coerce_expect_string(self, s): | ||||
|         if self.encoding is None and not isinstance(s, bytes): | ||||
|             return s.encode('ascii') | ||||
|         return s | ||||
|  | ||||
|     def _coerce_send_string(self, s): | ||||
|         if self.encoding is None and not isinstance(s, bytes): | ||||
|             return s.encode('utf-8') | ||||
|         return s | ||||
|  | ||||
|     def _get_buffer(self): | ||||
|         return self._buffer.getvalue() | ||||
|  | ||||
|     def _set_buffer(self, value): | ||||
|         self._buffer = self.buffer_type() | ||||
|         self._buffer.write(value) | ||||
|  | ||||
|     # This property is provided for backwards compatability (self.buffer used | ||||
|     # to be a string/bytes object) | ||||
|     buffer = property(_get_buffer, _set_buffer) | ||||
|  | ||||
|     def read_nonblocking(self, size=1, timeout=None): | ||||
|         """This reads data from the file descriptor. | ||||
|  | ||||
|         This is a simple implementation suitable for a regular file. Subclasses using ptys or pipes should override it. | ||||
|  | ||||
|         The timeout parameter is ignored. | ||||
|         """ | ||||
|  | ||||
|         try: | ||||
|             s = os.read(self.child_fd, size) | ||||
|         except OSError as err: | ||||
|             if err.args[0] == errno.EIO: | ||||
|                 # Linux-style EOF | ||||
|                 self.flag_eof = True | ||||
|                 raise EOF('End Of File (EOF). Exception style platform.') | ||||
|             raise | ||||
|         if s == b'': | ||||
|             # BSD-style EOF | ||||
|             self.flag_eof = True | ||||
|             raise EOF('End Of File (EOF). Empty string style platform.') | ||||
|  | ||||
|         s = self._decoder.decode(s, final=False) | ||||
|         self._log(s, 'read') | ||||
|         return s | ||||
|  | ||||
|     def _pattern_type_err(self, pattern): | ||||
|         raise TypeError('got {badtype} ({badobj!r}) as pattern, must be one' | ||||
|                         ' of: {goodtypes}, pexpect.EOF, pexpect.TIMEOUT'\ | ||||
|                         .format(badtype=type(pattern), | ||||
|                                 badobj=pattern, | ||||
|                                 goodtypes=', '.join([str(ast)\ | ||||
|                                     for ast in self.allowed_string_types]) | ||||
|                                 ) | ||||
|                         ) | ||||
|  | ||||
|     def compile_pattern_list(self, patterns): | ||||
|         '''This compiles a pattern-string or a list of pattern-strings. | ||||
|         Patterns must be a StringType, EOF, TIMEOUT, SRE_Pattern, or a list of | ||||
|         those. Patterns may also be None which results in an empty list (you | ||||
|         might do this if waiting for an EOF or TIMEOUT condition without | ||||
|         expecting any pattern). | ||||
|  | ||||
|         This is used by expect() when calling expect_list(). Thus expect() is | ||||
|         nothing more than:: | ||||
|  | ||||
|              cpl = self.compile_pattern_list(pl) | ||||
|              return self.expect_list(cpl, timeout) | ||||
|  | ||||
|         If you are using expect() within a loop it may be more | ||||
|         efficient to compile the patterns first and then call expect_list(). | ||||
|         This avoid calls in a loop to compile_pattern_list():: | ||||
|  | ||||
|              cpl = self.compile_pattern_list(my_pattern) | ||||
|              while some_condition: | ||||
|                 ... | ||||
|                 i = self.expect_list(cpl, timeout) | ||||
|                 ... | ||||
|         ''' | ||||
|  | ||||
|         if patterns is None: | ||||
|             return [] | ||||
|         if not isinstance(patterns, list): | ||||
|             patterns = [patterns] | ||||
|  | ||||
|         # Allow dot to match \n | ||||
|         compile_flags = re.DOTALL | ||||
|         if self.ignorecase: | ||||
|             compile_flags = compile_flags | re.IGNORECASE | ||||
|         compiled_pattern_list = [] | ||||
|         for idx, p in enumerate(patterns): | ||||
|             if isinstance(p, self.allowed_string_types): | ||||
|                 p = self._coerce_expect_string(p) | ||||
|                 compiled_pattern_list.append(re.compile(p, compile_flags)) | ||||
|             elif p is EOF: | ||||
|                 compiled_pattern_list.append(EOF) | ||||
|             elif p is TIMEOUT: | ||||
|                 compiled_pattern_list.append(TIMEOUT) | ||||
|             elif isinstance(p, type(re.compile(''))): | ||||
|                 compiled_pattern_list.append(p) | ||||
|             else: | ||||
|                 self._pattern_type_err(p) | ||||
|         return compiled_pattern_list | ||||
|  | ||||
|     def expect(self, pattern, timeout=-1, searchwindowsize=-1, async_=False, **kw): | ||||
|         '''This seeks through the stream until a pattern is matched. The | ||||
|         pattern is overloaded and may take several types. The pattern can be a | ||||
|         StringType, EOF, a compiled re, or a list of any of those types. | ||||
|         Strings will be compiled to re types. This returns the index into the | ||||
|         pattern list. If the pattern was not a list this returns index 0 on a | ||||
|         successful match. This may raise exceptions for EOF or TIMEOUT. To | ||||
|         avoid the EOF or TIMEOUT exceptions add EOF or TIMEOUT to the pattern | ||||
|         list. That will cause expect to match an EOF or TIMEOUT condition | ||||
|         instead of raising an exception. | ||||
|  | ||||
|         If you pass a list of patterns and more than one matches, the first | ||||
|         match in the stream is chosen. If more than one pattern matches at that | ||||
|         point, the leftmost in the pattern list is chosen. For example:: | ||||
|  | ||||
|             # the input is 'foobar' | ||||
|             index = p.expect(['bar', 'foo', 'foobar']) | ||||
|             # returns 1('foo') even though 'foobar' is a "better" match | ||||
|  | ||||
|         Please note, however, that buffering can affect this behavior, since | ||||
|         input arrives in unpredictable chunks. For example:: | ||||
|  | ||||
|             # the input is 'foobar' | ||||
|             index = p.expect(['foobar', 'foo']) | ||||
|             # returns 0('foobar') if all input is available at once, | ||||
|             # but returns 1('foo') if parts of the final 'bar' arrive late | ||||
|  | ||||
|         When a match is found for the given pattern, the class instance | ||||
|         attribute *match* becomes an re.MatchObject result.  Should an EOF | ||||
|         or TIMEOUT pattern match, then the match attribute will be an instance | ||||
|         of that exception class.  The pairing before and after class | ||||
|         instance attributes are views of the data preceding and following | ||||
|         the matching pattern.  On general exception, class attribute | ||||
|         *before* is all data received up to the exception, while *match* and | ||||
|         *after* attributes are value None. | ||||
|  | ||||
|         When the keyword argument timeout is -1 (default), then TIMEOUT will | ||||
|         raise after the default value specified by the class timeout | ||||
|         attribute. When None, TIMEOUT will not be raised and may block | ||||
|         indefinitely until match. | ||||
|  | ||||
|         When the keyword argument searchwindowsize is -1 (default), then the | ||||
|         value specified by the class maxread attribute is used. | ||||
|  | ||||
|         A list entry may be EOF or TIMEOUT instead of a string. This will | ||||
|         catch these exceptions and return the index of the list entry instead | ||||
|         of raising the exception. The attribute 'after' will be set to the | ||||
|         exception type. The attribute 'match' will be None. This allows you to | ||||
|         write code like this:: | ||||
|  | ||||
|                 index = p.expect(['good', 'bad', pexpect.EOF, pexpect.TIMEOUT]) | ||||
|                 if index == 0: | ||||
|                     do_something() | ||||
|                 elif index == 1: | ||||
|                     do_something_else() | ||||
|                 elif index == 2: | ||||
|                     do_some_other_thing() | ||||
|                 elif index == 3: | ||||
|                     do_something_completely_different() | ||||
|  | ||||
|         instead of code like this:: | ||||
|  | ||||
|                 try: | ||||
|                     index = p.expect(['good', 'bad']) | ||||
|                     if index == 0: | ||||
|                         do_something() | ||||
|                     elif index == 1: | ||||
|                         do_something_else() | ||||
|                 except EOF: | ||||
|                     do_some_other_thing() | ||||
|                 except TIMEOUT: | ||||
|                     do_something_completely_different() | ||||
|  | ||||
|         These two forms are equivalent. It all depends on what you want. You | ||||
|         can also just expect the EOF if you are waiting for all output of a | ||||
|         child to finish. For example:: | ||||
|  | ||||
|                 p = pexpect.spawn('/bin/ls') | ||||
|                 p.expect(pexpect.EOF) | ||||
|                 print p.before | ||||
|  | ||||
|         If you are trying to optimize for speed then see expect_list(). | ||||
|  | ||||
|         On Python 3.4, or Python 3.3 with asyncio installed, passing | ||||
|         ``async_=True``  will make this return an :mod:`asyncio` coroutine, | ||||
|         which you can yield from to get the same result that this method would | ||||
|         normally give directly. So, inside a coroutine, you can replace this code:: | ||||
|  | ||||
|             index = p.expect(patterns) | ||||
|  | ||||
|         With this non-blocking form:: | ||||
|  | ||||
|             index = yield from p.expect(patterns, async_=True) | ||||
|         ''' | ||||
|         if 'async' in kw: | ||||
|             async_ = kw.pop('async') | ||||
|         if kw: | ||||
|             raise TypeError("Unknown keyword arguments: {}".format(kw)) | ||||
|  | ||||
|         compiled_pattern_list = self.compile_pattern_list(pattern) | ||||
|         return self.expect_list(compiled_pattern_list, | ||||
|                 timeout, searchwindowsize, async_) | ||||
|  | ||||
|     def expect_list(self, pattern_list, timeout=-1, searchwindowsize=-1, | ||||
|                     async_=False, **kw): | ||||
|         '''This takes a list of compiled regular expressions and returns the | ||||
|         index into the pattern_list that matched the child output. The list may | ||||
|         also contain EOF or TIMEOUT(which are not compiled regular | ||||
|         expressions). This method is similar to the expect() method except that | ||||
|         expect_list() does not recompile the pattern list on every call. This | ||||
|         may help if you are trying to optimize for speed, otherwise just use | ||||
|         the expect() method.  This is called by expect(). | ||||
|  | ||||
|  | ||||
|         Like :meth:`expect`, passing ``async_=True`` will make this return an | ||||
|         asyncio coroutine. | ||||
|         ''' | ||||
|         if timeout == -1: | ||||
|             timeout = self.timeout | ||||
|         if 'async' in kw: | ||||
|             async_ = kw.pop('async') | ||||
|         if kw: | ||||
|             raise TypeError("Unknown keyword arguments: {}".format(kw)) | ||||
|  | ||||
|         exp = Expecter(self, searcher_re(pattern_list), searchwindowsize) | ||||
|         if async_: | ||||
|             from ._async import expect_async | ||||
|             return expect_async(exp, timeout) | ||||
|         else: | ||||
|             return exp.expect_loop(timeout) | ||||
|  | ||||
|     def expect_exact(self, pattern_list, timeout=-1, searchwindowsize=-1, | ||||
|                      async_=False, **kw): | ||||
|  | ||||
|         '''This is similar to expect(), but uses plain string matching instead | ||||
|         of compiled regular expressions in 'pattern_list'. The 'pattern_list' | ||||
|         may be a string; a list or other sequence of strings; or TIMEOUT and | ||||
|         EOF. | ||||
|  | ||||
|         This call might be faster than expect() for two reasons: string | ||||
|         searching is faster than RE matching and it is possible to limit the | ||||
|         search to just the end of the input buffer. | ||||
|  | ||||
|         This method is also useful when you don't want to have to worry about | ||||
|         escaping regular expression characters that you want to match. | ||||
|  | ||||
|         Like :meth:`expect`, passing ``async_=True`` will make this return an | ||||
|         asyncio coroutine. | ||||
|         ''' | ||||
|         if timeout == -1: | ||||
|             timeout = self.timeout | ||||
|         if 'async' in kw: | ||||
|             async_ = kw.pop('async') | ||||
|         if kw: | ||||
|             raise TypeError("Unknown keyword arguments: {}".format(kw)) | ||||
|  | ||||
|         if (isinstance(pattern_list, self.allowed_string_types) or | ||||
|                 pattern_list in (TIMEOUT, EOF)): | ||||
|             pattern_list = [pattern_list] | ||||
|  | ||||
|         def prepare_pattern(pattern): | ||||
|             if pattern in (TIMEOUT, EOF): | ||||
|                 return pattern | ||||
|             if isinstance(pattern, self.allowed_string_types): | ||||
|                 return self._coerce_expect_string(pattern) | ||||
|             self._pattern_type_err(pattern) | ||||
|  | ||||
|         try: | ||||
|             pattern_list = iter(pattern_list) | ||||
|         except TypeError: | ||||
|             self._pattern_type_err(pattern_list) | ||||
|         pattern_list = [prepare_pattern(p) for p in pattern_list] | ||||
|  | ||||
|         exp = Expecter(self, searcher_string(pattern_list), searchwindowsize) | ||||
|         if async_: | ||||
|             from ._async import expect_async | ||||
|             return expect_async(exp, timeout) | ||||
|         else: | ||||
|             return exp.expect_loop(timeout) | ||||
|  | ||||
|     def expect_loop(self, searcher, timeout=-1, searchwindowsize=-1): | ||||
|         '''This is the common loop used inside expect. The 'searcher' should be | ||||
|         an instance of searcher_re or searcher_string, which describes how and | ||||
|         what to search for in the input. | ||||
|  | ||||
|         See expect() for other arguments, return value and exceptions. ''' | ||||
|  | ||||
|         exp = Expecter(self, searcher, searchwindowsize) | ||||
|         return exp.expect_loop(timeout) | ||||
|  | ||||
|     def read(self, size=-1): | ||||
|         '''This reads at most "size" bytes from the file (less if the read hits | ||||
|         EOF before obtaining size bytes). If the size argument is negative or | ||||
|         omitted, read all data until EOF is reached. The bytes are returned as | ||||
|         a string object. An empty string is returned when EOF is encountered | ||||
|         immediately. ''' | ||||
|  | ||||
|         if size == 0: | ||||
|             return self.string_type() | ||||
|         if size < 0: | ||||
|             # delimiter default is EOF | ||||
|             self.expect(self.delimiter) | ||||
|             return self.before | ||||
|  | ||||
|         # I could have done this more directly by not using expect(), but | ||||
|         # I deliberately decided to couple read() to expect() so that | ||||
|         # I would catch any bugs early and ensure consistent behavior. | ||||
|         # It's a little less efficient, but there is less for me to | ||||
|         # worry about if I have to later modify read() or expect(). | ||||
|         # Note, it's OK if size==-1 in the regex. That just means it | ||||
|         # will never match anything in which case we stop only on EOF. | ||||
|         cre = re.compile(self._coerce_expect_string('.{%d}' % size), re.DOTALL) | ||||
|         # delimiter default is EOF | ||||
|         index = self.expect([cre, self.delimiter]) | ||||
|         if index == 0: | ||||
|             ### FIXME self.before should be ''. Should I assert this? | ||||
|             return self.after | ||||
|         return self.before | ||||
|  | ||||
|     def readline(self, size=-1): | ||||
|         '''This reads and returns one entire line. The newline at the end of | ||||
|         line is returned as part of the string, unless the file ends without a | ||||
|         newline. An empty string is returned if EOF is encountered immediately. | ||||
|         This looks for a newline as a CR/LF pair (\\r\\n) even on UNIX because | ||||
|         this is what the pseudotty device returns. So contrary to what you may | ||||
|         expect you will receive newlines as \\r\\n. | ||||
|  | ||||
|         If the size argument is 0 then an empty string is returned. In all | ||||
|         other cases the size argument is ignored, which is not standard | ||||
|         behavior for a file-like object. ''' | ||||
|  | ||||
|         if size == 0: | ||||
|             return self.string_type() | ||||
|         # delimiter default is EOF | ||||
|         index = self.expect([self.crlf, self.delimiter]) | ||||
|         if index == 0: | ||||
|             return self.before + self.crlf | ||||
|         else: | ||||
|             return self.before | ||||
|  | ||||
|     def __iter__(self): | ||||
|         '''This is to support iterators over a file-like object. | ||||
|         ''' | ||||
|         return iter(self.readline, self.string_type()) | ||||
|  | ||||
|     def readlines(self, sizehint=-1): | ||||
|         '''This reads until EOF using readline() and returns a list containing | ||||
|         the lines thus read. The optional 'sizehint' argument is ignored. | ||||
|         Remember, because this reads until EOF that means the child | ||||
|         process should have closed its stdout. If you run this method on | ||||
|         a child that is still running with its stdout open then this | ||||
|         method will block until it timesout.''' | ||||
|  | ||||
|         lines = [] | ||||
|         while True: | ||||
|             line = self.readline() | ||||
|             if not line: | ||||
|                 break | ||||
|             lines.append(line) | ||||
|         return lines | ||||
|  | ||||
|     def fileno(self): | ||||
|         '''Expose file descriptor for a file-like interface | ||||
|         ''' | ||||
|         return self.child_fd | ||||
|  | ||||
|     def flush(self): | ||||
|         '''This does nothing. It is here to support the interface for a | ||||
|         File-like object. ''' | ||||
|         pass | ||||
|  | ||||
|     def isatty(self): | ||||
|         """Overridden in subclass using tty""" | ||||
|         return False | ||||
|  | ||||
|     # For 'with spawn(...) as child:' | ||||
|     def __enter__(self): | ||||
|         return self | ||||
|  | ||||
|     def __exit__(self, etype, evalue, tb): | ||||
|         # We rely on subclasses to implement close(). If they don't, it's not | ||||
|         # clear what a context manager should do. | ||||
|         self.close() | ||||
							
								
								
									
										187
									
								
								plugins/git_clone/pexpect/utils.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										187
									
								
								plugins/git_clone/pexpect/utils.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,187 @@ | ||||
| import os | ||||
| import sys | ||||
| import stat | ||||
| import select | ||||
| import time | ||||
| import errno | ||||
|  | ||||
try:
    InterruptedError
except NameError:
    # Python 2 has no InterruptedError builtin; alias the closest thing.
    InterruptedError = select.error

if sys.version_info[0] < 3:
    string_types = (unicode, str)
else:
    string_types = (str,)
|  | ||||
|  | ||||
def is_executable_file(path):
    """Return True if *path* is an executable regular file.

    Symlinks are followed.  Roughly ``os.path.isfile(path) and
    os.access(path, os.X_OK)``, with a workaround for root on Solaris.
    """
    # Resolve symlinks so the checks apply to the real target.
    real = os.path.realpath(path)

    if not os.path.isfile(real):
        # Directories, fifos, devices, missing paths: never executable files.
        return False

    mode = os.stat(real).st_mode

    if sys.platform.startswith('sunos') and os.getuid() == 0:
        # Root on Solaris passes os.X_OK for every file regardless of its
        # mode bits (possibly true on other "Unix98" systems such as
        # HP-UX and AIX too), so inspect the mode directly instead: any
        # execute bit -- user, group, or other -- counts.
        return bool(mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))

    return os.access(real, os.X_OK)
|  | ||||
|  | ||||
def which(filename, env=None):
    '''Locate *filename* on the PATH and verify it is executable.

    Returns the full path when found and executable, otherwise None.
    *env* defaults to os.environ; an empty or missing PATH falls back to
    os.defpath.
    '''
    # A filename with an explicit directory component is checked directly.
    if os.path.dirname(filename) != '' and is_executable_file(filename):
        return filename

    if env is None:
        env = os.environ
    search_path = env.get('PATH') or os.defpath
    for directory in search_path.split(os.pathsep):
        candidate = os.path.join(directory, filename)
        if is_executable_file(candidate):
            return candidate
    return None
|  | ||||
|  | ||||
| def split_command_line(command_line): | ||||
|  | ||||
|     '''This splits a command line into a list of arguments. It splits arguments | ||||
|     on spaces, but handles embedded quotes, doublequotes, and escaped | ||||
|     characters. It's impossible to do this with a regular expression, so I | ||||
|     wrote a little state machine to parse the command line. ''' | ||||
|  | ||||
|     arg_list = [] | ||||
|     arg = '' | ||||
|  | ||||
|     # Constants to name the states we can be in. | ||||
|     state_basic = 0 | ||||
|     state_esc = 1 | ||||
|     state_singlequote = 2 | ||||
|     state_doublequote = 3 | ||||
|     # The state when consuming whitespace between commands. | ||||
|     state_whitespace = 4 | ||||
|     state = state_basic | ||||
|  | ||||
|     for c in command_line: | ||||
|         if state == state_basic or state == state_whitespace: | ||||
|             if c == '\\': | ||||
|                 # Escape the next character | ||||
|                 state = state_esc | ||||
|             elif c == r"'": | ||||
|                 # Handle single quote | ||||
|                 state = state_singlequote | ||||
|             elif c == r'"': | ||||
|                 # Handle double quote | ||||
|                 state = state_doublequote | ||||
|             elif c.isspace(): | ||||
|                 # Add arg to arg_list if we aren't in the middle of whitespace. | ||||
|                 if state == state_whitespace: | ||||
|                     # Do nothing. | ||||
|                     None | ||||
|                 else: | ||||
|                     arg_list.append(arg) | ||||
|                     arg = '' | ||||
|                     state = state_whitespace | ||||
|             else: | ||||
|                 arg = arg + c | ||||
|                 state = state_basic | ||||
|         elif state == state_esc: | ||||
|             arg = arg + c | ||||
|             state = state_basic | ||||
|         elif state == state_singlequote: | ||||
|             if c == r"'": | ||||
|                 state = state_basic | ||||
|             else: | ||||
|                 arg = arg + c | ||||
|         elif state == state_doublequote: | ||||
|             if c == r'"': | ||||
|                 state = state_basic | ||||
|             else: | ||||
|                 arg = arg + c | ||||
|  | ||||
|     if arg != '': | ||||
|         arg_list.append(arg) | ||||
|     return arg_list | ||||
|  | ||||
|  | ||||
def select_ignore_interrupts(iwtd, owtd, ewtd, timeout=None):
    '''Wrapper around select.select() that retries on EINTR.

    A select.error/InterruptedError whose errno is EINTR (mainly caused
    by SIGWINCH on terminal resize) restarts the select with whatever
    time remains; any other error propagates.  Returns the usual
    (read, write, error) lists; an expired timeout returns three empty
    lists.
    '''
    if timeout is not None:
        deadline = time.time() + timeout

    while True:
        try:
            return select.select(iwtd, owtd, ewtd, timeout)
        except InterruptedError:
            err = sys.exc_info()[1]
            if err.args[0] != errno.EINTR:
                # Something other than a signal interrupted the call:
                # this is a genuine error.
                raise
            # Retry, subtracting the time already spent waiting.
            if timeout is not None:
                timeout = deadline - time.time()
                if timeout < 0:
                    return ([], [], [])
|  | ||||
|  | ||||
def poll_ignore_interrupts(fds, timeout=None):
    '''Simple wrapper around poll that registers the given file
    descriptors and ignores signal interruptions (EINTR), retrying with
    the leftover timeout. Returns the list of fds with pending events.'''

    deadline = None if timeout is None else time.time() + timeout

    event_mask = select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR
    poller     = select.poll()
    for fd in fds:
        poller.register(fd, event_mask)

    while True:
        try:
            # poll() takes milliseconds; None means block indefinitely.
            wait_ms = None if timeout is None else timeout * 1000
            return [ready_fd for ready_fd, _ in poller.poll(wait_ms)]
        except InterruptedError:
            exc = sys.exc_info()[1]
            if exc.args[0] != errno.EINTR:
                # Something other than a signal caused the error; re-raise.
                raise
            if deadline is not None:
                timeout = deadline - time.time()
                if timeout < 0:
                    # Exhausted the overall timeout across interruptions.
                    return []
							
								
								
									
										84
									
								
								plugins/git_clone/plugin.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										84
									
								
								plugins/git_clone/plugin.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,84 @@ | ||||
| # Python imports | ||||
| import os | ||||
| import threading | ||||
| import subprocess | ||||
| import time | ||||
|  | ||||
| # Lib imports | ||||
| from . import pexpect | ||||
| import gi | ||||
| gi.require_version('Gtk', '3.0') | ||||
| from gi.repository import Gtk | ||||
|  | ||||
| # Application imports | ||||
| from plugins.plugin_base import PluginBase | ||||
|  | ||||
|  | ||||
| # NOTE: Threads WILL NOT die with parent's destruction. | ||||
def threaded(fn):
    """Decorator: run the wrapped callable on a new non-daemon thread.

    Fire-and-forget — the wrapper returns None, not the thread.
    """
    def wrapper(*args, **kwargs):
        worker = threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False)
        worker.start()
    return wrapper
|  | ||||
|  | ||||
|  | ||||
class GitClonePluginException(Exception):
    """Raised when the Git Clone plugin cannot complete a clone request,
    e.g. when the user cancels the credentials dialog."""
|  | ||||
|  | ||||
|  | ||||
class Plugin(PluginBase):
    """Plugin that clones the git repository URL currently on the clipboard
    into the active tab's directory, prompting for credentials via the
    file manager's user/pass dialog and feeding them to git through pexpect."""

    def __init__(self):
        super().__init__()

        self.name              = "Git Clone"  # NOTE: Need to remove after establishing private bidirectional 1-1 message bus
                                              #       where self.name should not be needed for message comms
        self.path              = os.path.dirname(os.path.realpath(__file__))

    def generate_reference_ui_element(self):
        """Return a Gtk.Button that triggers the clone on button release."""
        button = Gtk.Button(label=self.name)
        button.connect("button-release-event", self._do_download)
        return button

    def run(self):
        ...


    def _do_download(self, widget=None, eve=None):
        """Gather current state and credentials, then start the threaded clone.

        Raises GitClonePluginException (from get_user_and_pass) if the user
        cancels the credentials dialog.
        """
        self._event_system.emit("get_current_state")

        self.get_user_and_pass()
        # Renamed from 'dir' to avoid shadowing the builtin.
        target_dir = self._fm_state.tab.get_current_directory()
        events     = {
                    '(?i)Username': self.get_user(),
                    '(?i)Password': self.get_pass()
                }

        self._download(target_dir, events)

    @threaded
    def _download(self, target_dir, _events):
        """Run 'git clone <url>' inside target_dir on a worker thread.

        The URL comes from the clipboard (untrusted input), so it is quoted
        before interpolation to prevent extra words/options from being
        injected into the command line.
        """
        import shlex

        git_clone_link = self.get_clipboard_data()
        pexpect.run(f"git clone {shlex.quote(git_clone_link)}", cwd = target_dir, events = _events)


    def get_user_and_pass(self):
        """Show the user/pass dialog; raise if the user cancels or closes it."""
        response = self._fm_state.user_pass_dialog.run()
        # -4 == Gtk.ResponseType.DELETE_EVENT, -6 == Gtk.ResponseType.CANCEL
        if response in (-4, -6):
            raise GitClonePluginException("User canceled request...")


    def get_user(self):
        """Return the entered username, newline-terminated for pexpect."""
        user   = self._fm_state.user_pass_dialog.user_input.get_text()
        return f"{user}\n"

    def get_pass(self):
        """Return the entered password, newline-terminated for pexpect."""
        passwd = self._fm_state.user_pass_dialog.pass_input.get_text()
        return f"{passwd}\n"


    def get_clipboard_data(self, encoding="utf-8") -> str:
        """Return the clipboard contents via xclip, decoded and stripped.

        Uses communicate() rather than wait()+read(): waiting first can
        deadlock if the child fills the stdout pipe buffer before exiting.
        """
        proc    = subprocess.Popen(['xclip','-selection', 'clipboard', '-o'], stdout=subprocess.PIPE)
        data, _ = proc.communicate()
        return data.decode(encoding).strip()
| @@ -2,7 +2,6 @@ | ||||
| import os | ||||
| import threading | ||||
| import subprocess | ||||
| import inspect | ||||
| import requests | ||||
| import shutil | ||||
|  | ||||
| @@ -38,9 +37,9 @@ class Plugin(PluginBase): | ||||
|     def __init__(self): | ||||
|         super().__init__() | ||||
|  | ||||
|         self.path                   = os.path.dirname(os.path.realpath(__file__)) | ||||
|         self.name                   = "Movie/TV Info"   # NOTE: Need to remove after establishing private bidirectional 1-1 message bus | ||||
|                                                         #       where self.name should not be needed for message comms | ||||
|         self.path                   = os.path.dirname(os.path.realpath(__file__)) | ||||
|         self._GLADE_FILE            = f"{self.path}/movie_tv_info.glade" | ||||
|  | ||||
|         self._dialog                = None | ||||
| @@ -53,20 +52,9 @@ class Plugin(PluginBase): | ||||
|  | ||||
|  | ||||
|     def run(self): | ||||
|         self._builder           = Gtk.Builder() | ||||
|         self._builder = Gtk.Builder() | ||||
|         self._builder.add_from_file(self._GLADE_FILE) | ||||
|  | ||||
|         classes  = [self] | ||||
|         handlers = {} | ||||
|         for c in classes: | ||||
|             methods = None | ||||
|             try: | ||||
|                 methods = inspect.getmembers(c, predicate=inspect.ismethod) | ||||
|                 handlers.update(methods) | ||||
|             except Exception as e: | ||||
|                 print(repr(e)) | ||||
|  | ||||
|         self._builder.connect_signals(handlers) | ||||
|         self._connect_builder_signals(self, self._builder) | ||||
|  | ||||
|         self._thumbnailer_dialog    = self._builder.get_object("info_dialog") | ||||
|         self._overview              = self._builder.get_object("textbuffer") | ||||
| @@ -95,7 +83,7 @@ class Plugin(PluginBase): | ||||
|     def _process_changes(self, state): | ||||
|         self._fm_state = None | ||||
|  | ||||
|         if len(state.selected_files) == 1: | ||||
|         if state.uris and len(state.uris) == 1: | ||||
|             self._fm_state = state | ||||
|             self._set_ui_data() | ||||
|             response   = self._thumbnailer_dialog.run() | ||||
| @@ -115,7 +103,7 @@ class Plugin(PluginBase): | ||||
|         print(video_data["videos"]) if not keys in ("", None) and "videos" in keys else ... | ||||
|  | ||||
|     def get_video_data(self): | ||||
|         uri            = self._fm_state.selected_files[0] | ||||
|         uri            = self._fm_state.uris[0] | ||||
|         path           = self._fm_state.tab.get_current_directory() | ||||
|         parts          = uri.split("/") | ||||
|         _title         = parts[ len(parts) - 1 ] | ||||
|   | ||||
							
								
								
									
										3
									
								
								plugins/py_run/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								plugins/py_run/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| """ | ||||
    Plugin Module
| """ | ||||
							
								
								
									
										3
									
								
								plugins/py_run/__main__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								plugins/py_run/__main__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| """ | ||||
    Plugin Package
| """ | ||||
							
								
								
									
										13
									
								
								plugins/py_run/manifest.json
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								plugins/py_run/manifest.json
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | ||||
| { | ||||
|     "manifest": { | ||||
|         "name": "PyRun", | ||||
|         "author": "ITDominator", | ||||
|         "version": "0.0.1", | ||||
|         "support": "", | ||||
|         "requests": { | ||||
|             "ui_target": "plugin_control_list", | ||||
|             "pass_fm_events": "true", | ||||
|             "bind_keys": ["PyRun||send_message:<Shift><Control>r"] | ||||
|         } | ||||
|     } | ||||
| } | ||||
							
								
								
									
										71
									
								
								plugins/py_run/plugin.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										71
									
								
								plugins/py_run/plugin.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,71 @@ | ||||
| # Python imports | ||||
| import os | ||||
| import threading | ||||
| import subprocess | ||||
|  | ||||
| # Lib imports | ||||
| import gi | ||||
| gi.require_version('Gtk', '3.0') | ||||
| from gi.repository import Gtk | ||||
|  | ||||
| # Application imports | ||||
| from plugins.plugin_base import PluginBase | ||||
|  | ||||
|  | ||||
| # NOTE: Threads WILL NOT die with parent's destruction. | ||||
def threaded(fn):
    """Run the wrapped callable on a fresh non-daemon thread (fire-and-forget)."""
    def wrapper(*args, **kwargs):
        threading.Thread(
            target=fn, args=args, kwargs=kwargs, daemon=False
        ).start()
    return wrapper
|  | ||||
| # NOTE: Threads WILL die with parent's destruction. | ||||
def daemon_threaded(fn):
    """Run the wrapped callable on a daemon thread that dies with the parent."""
    def wrapper(*args, **kwargs):
        worker = threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True)
        worker.start()
    return wrapper
|  | ||||
|  | ||||
|  | ||||
|  | ||||
class Plugin(PluginBase):
    """Plugin that runs the selected Python file — or the current directory's
    __main__.py — inside a new terminator window."""

    def __init__(self):
        super().__init__()

        self.name        = "PyRun"  # NOTE: Need to remove after establishing private bidirectional 1-1 message bus
                                    #       where self.name should not be needed for message comms


    def run(self):
        ...

    def generate_reference_ui_element(self):
        """Return a play-button widget wired to start_run."""
        button = Gtk.Button(label=self.name)
        button.connect("button-release-event", self.start_run)
        button.set_image( Gtk.Image(stock=Gtk.STOCK_MEDIA_PLAY ) )
        button.set_always_show_image(True)
        return button

    def start_run(self, widget=None, eve=None):
        """Decide what to execute and launch it in a terminal.

        No selection: run 'python .' when the directory contains __main__.py.
        Exactly one selected *.py file: run that file instead.
        """
        self._event_system.emit("get_current_state")
        state       = self._fm_state
        current_dir = state.tab.get_current_directory()
        is_valid    = False
        command     = ["terminator", "-x", "bash -c 'python . && bash'"]

        if not state.uris:
            # No selection: only valid when the directory is a runnable package.
            for entry in os.listdir(current_dir):
                path = os.path.join(current_dir, entry)
                if os.path.isfile(path) and entry == "__main__.py":
                    is_valid = True
                    break
        elif len(state.uris) == 1:
            target = state.uris[0]
            if os.path.isfile(target) and target.endswith(".py"):
                # NOTE(review): the path is interpolated unquoted into the
                # bash command string, so paths with spaces/quotes will
                # break — confirm whether such paths can reach here.
                command  = ["terminator", "-x", f"bash -c 'python {target} && bash'"]
                is_valid = True

        if is_valid:
            self.launch(current_dir, command)

    def launch(self, current_dir = "/", command = None):
        """Spawn 'command' detached in 'current_dir'.

        'command' defaults to None instead of a shared mutable list (the
        mutable-default-argument pitfall); an empty list is substituted
        per call.
        """
        if command is None:
            command = []
        subprocess.Popen(command, cwd=current_dir, shell=False, start_new_session=True, stdout=None, stderr=None, close_fds=True)
| @@ -1,4 +1,5 @@ | ||||
| # Python imports | ||||
| import time | ||||
| import threading | ||||
| import subprocess | ||||
| import signal | ||||
| @@ -33,11 +34,38 @@ def daemon_threaded(fn): | ||||
|  | ||||
| class FileSearchMixin: | ||||
|     def _run_find_file_query(self, widget=None, eve=None): | ||||
|         self._handle_find_file_query(query=widget) | ||||
|         self._queue_search = True | ||||
|  | ||||
|         if not self._search_watcher_running: | ||||
|             self._search_watcher_running = True | ||||
|  | ||||
|             self._stop_fsearch_query() | ||||
|             self.reset_file_list_box() | ||||
|             self.run_fsearch_watcher(query=widget) | ||||
|  | ||||
|     # TODO: Merge this logic with nearly the exact same thing in grep_search_mixin | ||||
|     @daemon_threaded | ||||
|     def _handle_find_file_query(self, widget=None, eve=None, query=None): | ||||
|     def run_fsearch_watcher(self, query): | ||||
|         while True: | ||||
|             if self._queue_search: | ||||
|                 self._queue_search = False | ||||
|                 time.sleep(1) | ||||
|  | ||||
|                 # NOTE: Hold call to translate if we're still typing/updating... | ||||
|                 if self._queue_search: | ||||
|                     continue | ||||
|  | ||||
|                 # NOTE: If query create new process and do all new loop. | ||||
|                 if query: | ||||
|                     self.pause_fifo_update = False | ||||
|                     GLib.idle_add(self._exec_find_file_query, query) | ||||
|  | ||||
|                 self._search_watcher_running = False | ||||
|  | ||||
|             break | ||||
|  | ||||
|     def _stop_fsearch_query(self, widget=None, eve=None): | ||||
|         self._spinner.stop() | ||||
|  | ||||
|         # NOTE: Freeze IPC consumption | ||||
|         self.pause_fifo_update  = True | ||||
|         self.search_query       = "" | ||||
| @@ -53,23 +81,20 @@ class FileSearchMixin: | ||||
|  | ||||
|             self._list_proc = None | ||||
|  | ||||
|         # NOTE: Clear children from ui and make sure ui thread redraws | ||||
|         GLib.idle_add(self.reset_file_list_box) | ||||
|  | ||||
|         # NOTE: If query create new process and do all new loop. | ||||
|         if query: | ||||
|             self.pause_fifo_update = False | ||||
|             GLib.idle_add(self._exec_find_file_query, query) | ||||
|  | ||||
|     def _exec_find_file_query(self, widget=None, eve=None): | ||||
|         query = widget.get_text() | ||||
|  | ||||
|         if not query in ("", None): | ||||
|             self.search_query = query | ||||
|             target_dir = shlex.quote( self._fm_state.tab.get_current_directory() ) | ||||
|             command = ["python", f"{self.path}/utils/search.py", "-t", "file_search", "-d", f"{target_dir}", "-q", f"{query}"] | ||||
|             target_dir        = shlex.quote( self._fm_state.tab.get_current_directory() ) | ||||
|             command           = ["python", f"{self.path}/utils/search.py", "-t", "file_search", "-d", f"{target_dir}", "-q", f"{query}"] | ||||
|  | ||||
|             self._spinner.start() | ||||
|  | ||||
|             self._list_proc = subprocess.Popen(command, cwd=self.path, stdin=None, stdout=None, stderr=None) | ||||
|  | ||||
|  | ||||
|     def _load_file_ui(self, data): | ||||
|         Gtk.main_iteration() | ||||
|  | ||||
|   | ||||
| @@ -1,12 +1,11 @@ | ||||
| # Python imports | ||||
| import ctypes | ||||
| import time | ||||
| import threading | ||||
| import subprocess | ||||
| import signal | ||||
| import json | ||||
| import shlex | ||||
| from datetime import datetime | ||||
| libgcc_s = ctypes.CDLL('libgcc_s.so.1') | ||||
|  | ||||
| # Lib imports | ||||
| import gi | ||||
| @@ -35,11 +34,38 @@ def daemon_threaded(fn): | ||||
|  | ||||
| class GrepSearchMixin: | ||||
|     def _run_grep_query(self, widget=None, eve=None): | ||||
|         self._handle_grep_query(query=widget) | ||||
|         self._queue_grep = True | ||||
|  | ||||
|         if not self._grep_watcher_running: | ||||
|             self._grep_watcher_running = True | ||||
|  | ||||
|             self._stop_grep_query() | ||||
|             self.reset_grep_box() | ||||
|             self.run_grep_watcher(query=widget) | ||||
|  | ||||
|     # TODO: Merge this logic with nearly the exact same thing in file_search_mixin | ||||
|     @daemon_threaded | ||||
|     def _handle_grep_query(self, widget=None, eve=None, query=None): | ||||
|     def run_grep_watcher(self, query): | ||||
|         while True: | ||||
|             if self._queue_grep: | ||||
|                 self._queue_grep = False | ||||
|                 time.sleep(1) | ||||
|  | ||||
|                 # NOTE: Hold call to translate if we're still typing/updating... | ||||
|                 if self._queue_grep: | ||||
|                     continue | ||||
|  | ||||
|                 # NOTE: If query create new process and do all new loop. | ||||
|                 if query: | ||||
|                     self.pause_fifo_update = False | ||||
|                     GLib.idle_add(self._exec_grep_query, query) | ||||
|  | ||||
|                 self._grep_watcher_running = False | ||||
|  | ||||
|             break | ||||
|  | ||||
|     def _stop_grep_query(self, widget=None, eve=None): | ||||
|         self._spinner.stop() | ||||
|  | ||||
|         # NOTE: Freeze IPC consumption | ||||
|         self.pause_fifo_update = True | ||||
|         self.grep_query        = "" | ||||
| @@ -55,23 +81,20 @@ class GrepSearchMixin: | ||||
|  | ||||
|             self._grep_proc = None | ||||
|  | ||||
|         # NOTE: Clear children from ui and make sure ui thread redraws | ||||
|         GLib.idle_add(self.reset_grep_box) | ||||
|  | ||||
|         # NOTE: If query create new process and do all new loop. | ||||
|         if query: | ||||
|             self.pause_fifo_update = False | ||||
|             GLib.idle_add(self._exec_grep_query, query) | ||||
|  | ||||
|     def _exec_grep_query(self, widget=None, eve=None): | ||||
|         query = widget.get_text() | ||||
|  | ||||
|         if not query.strip() in ("", None): | ||||
|             self.grep_query = query | ||||
|             target_dir      = shlex.quote( self._fm_state.tab.get_current_directory() ) | ||||
|             command         = ["python", f"{self.path}/utils/search.py", "-t", "grep_search", "-d", f"{target_dir}", "-q", f"{query}"] | ||||
|  | ||||
|             self._spinner.start() | ||||
|  | ||||
|             target_dir = shlex.quote( self._fm_state.tab.get_current_directory() ) | ||||
|             command = ["python", f"{self.path}/utils/search.py", "-t", "grep_search", "-d", f"{target_dir}", "-q", f"{query}"] | ||||
|             self._grep_proc = subprocess.Popen(command, cwd=self.path, stdin=None, stdout=None, stderr=None) | ||||
|  | ||||
|  | ||||
|     def _load_grep_ui(self, data): | ||||
|         Gtk.main_iteration() | ||||
|  | ||||
|   | ||||
| @@ -1,7 +1,5 @@ | ||||
| # Python imports | ||||
| import os | ||||
| import threading | ||||
| import inspect | ||||
| import time | ||||
|  | ||||
| # Lib imports | ||||
| @@ -18,32 +16,19 @@ from .utils.ipc_server import IPCServer | ||||
|  | ||||
|  | ||||
|  | ||||
| # NOTE: Threads WILL NOT die with parent's destruction. | ||||
| def threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start() | ||||
|     return wrapper | ||||
|  | ||||
| # NOTE: Threads WILL die with parent's destruction. | ||||
| def daemon_threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start() | ||||
|     return wrapper | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| class Plugin(IPCServer, FileSearchMixin, GrepSearchMixin, PluginBase): | ||||
|     def __init__(self): | ||||
|         super().__init__() | ||||
|  | ||||
|         self.path               = os.path.dirname(os.path.realpath(__file__)) | ||||
|         self.name               = "Search"  # NOTE: Need to remove after establishing private bidirectional 1-1 message bus | ||||
|                                             #       where self.name should not be needed for message comms | ||||
|         self.path               = os.path.dirname(os.path.realpath(__file__)) | ||||
|         self._GLADE_FILE        = f"{self.path}/search_dialog.glade" | ||||
|  | ||||
|         self.update_list_ui_buffer = () | ||||
|         self._search_dialog     = None | ||||
|         self._spinner           = None | ||||
|         self._active_path       = None | ||||
|         self.file_list_parent   = None | ||||
|         self.grep_list_parent   = None | ||||
| @@ -52,30 +37,26 @@ class Plugin(IPCServer, FileSearchMixin, GrepSearchMixin, PluginBase): | ||||
|         self._grep_proc         = None | ||||
|         self._list_proc         = None | ||||
|         self.pause_fifo_update  = False | ||||
|         self.grep_time_stamp    = None | ||||
|         self.fsearch_time_stamp = None | ||||
|         self.grep_query         = "" | ||||
|         self.search_query       = "" | ||||
|  | ||||
|         self.grep_query              = "" | ||||
|         self.grep_time_stamp         = None | ||||
|         self._queue_grep             = False | ||||
|         self._grep_watcher_running   = False | ||||
|  | ||||
|         self.search_query            = "" | ||||
|         self.fsearch_time_stamp      = None | ||||
|         self._queue_search           = False | ||||
|         self._search_watcher_running = False | ||||
|  | ||||
|  | ||||
|     def run(self): | ||||
|         self._builder          = Gtk.Builder() | ||||
|         self._builder = Gtk.Builder() | ||||
|         self._builder.add_from_file(self._GLADE_FILE) | ||||
|  | ||||
|         classes  = [self] | ||||
|         handlers = {} | ||||
|         for c in classes: | ||||
|             methods = None | ||||
|             try: | ||||
|                 methods = inspect.getmembers(c, predicate=inspect.ismethod) | ||||
|                 handlers.update(methods) | ||||
|             except Exception as e: | ||||
|                 print(repr(e)) | ||||
|  | ||||
|         self._builder.connect_signals(handlers) | ||||
|         self._connect_builder_signals(self, self._builder) | ||||
|  | ||||
|         self._search_dialog = self._builder.get_object("search_dialog") | ||||
|         self.fsearch        = self._builder.get_object("fsearch") | ||||
|         self._spinner       = self._builder.get_object("spinner") | ||||
|  | ||||
|         self.grep_list_parent = self._builder.get_object("grep_list_parent") | ||||
|         self.file_list_parent = self._builder.get_object("file_list_parent") | ||||
| @@ -84,7 +65,6 @@ class Plugin(IPCServer, FileSearchMixin, GrepSearchMixin, PluginBase): | ||||
|         self._event_system.subscribe("update-grep-ui", self._load_grep_ui) | ||||
|         self._event_system.subscribe("show_search_page", self._show_page) | ||||
|  | ||||
|  | ||||
|         self.create_ipc_listener() | ||||
|  | ||||
|     def generate_reference_ui_element(self): | ||||
| @@ -94,6 +74,10 @@ class Plugin(IPCServer, FileSearchMixin, GrepSearchMixin, PluginBase): | ||||
|         item.set_always_show_image(True) | ||||
|         return item | ||||
|  | ||||
|     def stop_spinner(self, ret_code): | ||||
|         print(f"Return Code: {ret_code}") | ||||
|         self._spinner.stop() | ||||
|  | ||||
|  | ||||
|     def _show_page(self, widget=None, eve=None): | ||||
|         self._event_system.emit("get_current_state") | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| <?xml version="1.0" encoding="UTF-8"?> | ||||
| <!-- Generated with glade 3.38.2 --> | ||||
| <!-- Generated with glade 3.40.0 --> | ||||
| <interface> | ||||
|   <requires lib="gtk+" version="3.16"/> | ||||
|   <object class="GtkDialog" id="search_dialog"> | ||||
| @@ -25,6 +25,17 @@ | ||||
|             <property name="visible">True</property> | ||||
|             <property name="can-focus">False</property> | ||||
|             <property name="layout-style">end</property> | ||||
|             <child> | ||||
|               <object class="GtkSpinner" id="spinner"> | ||||
|                 <property name="visible">True</property> | ||||
|                 <property name="can-focus">False</property> | ||||
|               </object> | ||||
|               <packing> | ||||
|                 <property name="expand">True</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">0</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|             <child> | ||||
|               <object class="GtkButton" id="cancel_button"> | ||||
|                 <property name="label">gtk-cancel</property> | ||||
| @@ -37,7 +48,7 @@ | ||||
|               <packing> | ||||
|                 <property name="expand">True</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">0</property> | ||||
|                 <property name="position">1</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|             <child> | ||||
| @@ -52,7 +63,7 @@ | ||||
|               <packing> | ||||
|                 <property name="expand">True</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">1</property> | ||||
|                 <property name="position">2</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|           </object> | ||||
| @@ -101,7 +112,7 @@ | ||||
|                         <property name="can-focus">True</property> | ||||
|                         <property name="receives-default">True</property> | ||||
|                         <property name="use-stock">True</property> | ||||
|                         <signal name="released" handler="_handle_find_file_query" swapped="no"/> | ||||
|                         <signal name="released" handler="_stop_fsearch_query" swapped="no"/> | ||||
|                       </object> | ||||
|                       <packing> | ||||
|                         <property name="expand">False</property> | ||||
| @@ -193,7 +204,7 @@ | ||||
|                         <property name="can-focus">True</property> | ||||
|                         <property name="receives-default">True</property> | ||||
|                         <property name="use-stock">True</property> | ||||
|                         <signal name="released" handler="_handle_grep_query" swapped="no"/> | ||||
|                         <signal name="released" handler="_stop_grep_query" swapped="no"/> | ||||
|                       </object> | ||||
|                       <packing> | ||||
|                         <property name="expand">False</property> | ||||
| @@ -260,6 +271,12 @@ | ||||
|             <child type="tab"> | ||||
|               <placeholder/> | ||||
|             </child> | ||||
|             <child> | ||||
|               <placeholder/> | ||||
|             </child> | ||||
|             <child type="tab"> | ||||
|               <placeholder/> | ||||
|             </child> | ||||
|           </object> | ||||
|           <packing> | ||||
|             <property name="expand">True</property> | ||||
|   | ||||
| @@ -59,23 +59,24 @@ class IPCServer: | ||||
|         while True: | ||||
|             msg  = conn.recv() | ||||
|  | ||||
|             if "SEARCH|" in msg: | ||||
|                 ts, file = msg.split("SEARCH|")[1].strip().split("|", 1) | ||||
|                 try: | ||||
|             try: | ||||
|                 if "SEARCH|" in msg: | ||||
|                     ts, file = msg.split("SEARCH|")[1].strip().split("|", 1) | ||||
|                     timestamp = float(ts) | ||||
|                     if timestamp > self.fsearch_time_stamp and file: | ||||
|                         GLib.idle_add(self._load_file_ui, file, priority=GLib.PRIORITY_LOW) | ||||
|                 except Exception as e: | ||||
|                     ... | ||||
|                     if file and (timestamp > self.fsearch_time_stamp): | ||||
|                         GLib.idle_add(self._load_file_ui, file, priority=GLib.PRIORITY_HIGH_IDLE) | ||||
|  | ||||
|             if "GREP|" in msg: | ||||
|                 ts, data = msg.split("GREP|")[1].strip().split("|", 1) | ||||
|                 try: | ||||
|                 if "GREP|" in msg: | ||||
|                     ts, data = msg.split("GREP|")[1].strip().split("|", 1) | ||||
|                     timestamp = float(ts) | ||||
|                     if timestamp > self.grep_time_stamp and data: | ||||
|                         GLib.idle_add(self._load_grep_ui, data, priority=GLib.PRIORITY_LOW) | ||||
|                 except Exception as e: | ||||
|                     ... | ||||
|                     if data and (timestamp > self.grep_time_stamp): | ||||
|                         GLib.idle_add(self._load_grep_ui, data, priority=GLib.PRIORITY_HIGH_IDLE) | ||||
|  | ||||
|                 if "SEARCH_DONE|" in msg: | ||||
|                     ts, ret_code = msg.split("SEARCH_DONE|")[1].strip().split("|", 1) | ||||
|                     GLib.idle_add(self.stop_spinner, (ret_code,), priority=GLib.PRIORITY_HIGH_IDLE) | ||||
|             except Exception as e: | ||||
|                 print( repr(e) ) | ||||
|  | ||||
|  | ||||
|             conn.close() | ||||
|   | ||||
| @@ -32,8 +32,12 @@ dt = datetime.now() | ||||
| ts = datetime.timestamp(dt) | ||||
|  | ||||
|  | ||||
| def _log(message: str = "No message passed in...") -> None: | ||||
|     print(message) | ||||
|  | ||||
|  | ||||
| def send_ipc_message(message) -> None: | ||||
|     conn = Client(address=_ipc_address, family="AF_UNIX", authkey=_ipc_authkey) | ||||
|     conn = Client(address = _ipc_address, family = "AF_UNIX", authkey = _ipc_authkey) | ||||
|     conn.send(message) | ||||
|     conn.close() | ||||
|  | ||||
| @@ -41,9 +45,11 @@ def send_ipc_message(message) -> None: | ||||
|     time.sleep(0.05) | ||||
|  | ||||
|  | ||||
| def file_search(path, query): | ||||
| def file_search(path: str = None, query: str = None) -> None: | ||||
|     if not path or not query: return | ||||
|  | ||||
|     try: | ||||
|         for _path, _dir, _files in os.walk(path, topdown = True): | ||||
|         for _path, _dir, _files in os.walk(path, topdown = True, onerror = _log, followlinks = True): | ||||
|              for file in _files: | ||||
|                  if query in file.lower(): | ||||
|                      target = os.path.join(_path, file) | ||||
| @@ -54,44 +60,53 @@ def file_search(path, query): | ||||
|         traceback.print_exc() | ||||
|  | ||||
|  | ||||
| def grep_search(target=None, query=None): | ||||
|     if not query or not target: | ||||
|         return | ||||
| def grep_search(target: str = None, query: str = None): | ||||
|     if not target or not query: return | ||||
|  | ||||
|     # NOTE: -n = provide line numbers, -R = Search recursive in given target | ||||
|     #       -i = insensitive, -F = don't do regex parsing. (Treat as raw string) | ||||
|     command    = ["grep", "-n", "-R", "-i", "-F", query, target] | ||||
|     proc       = subprocess.Popen(command, stdout=subprocess.PIPE, encoding="utf-8") | ||||
|     proc       = subprocess.Popen(command, stdout = subprocess.PIPE, encoding = "utf-8") | ||||
|     raw_data   = proc.communicate()[0].strip() | ||||
|     proc_data  = raw_data.split("\n")   # NOTE: Will return data AFTER completion (if any) | ||||
|     collection = {} | ||||
|  | ||||
|     for line in proc_data: | ||||
|         file, line_no, data = line.split(":", 2) | ||||
|         b64_file = base64.urlsafe_b64encode(file.encode('utf-8')).decode('utf-8') | ||||
|         b64_data = base64.urlsafe_b64encode(data.encode('utf-8')).decode('utf-8') | ||||
|         try: | ||||
|             parts    = line.split(":", 2) | ||||
|             if not len(parts) == 3: | ||||
|                 continue | ||||
|  | ||||
|         if b64_file in collection.keys(): | ||||
|             collection[f"{b64_file}"][f"{line_no}"] = b64_data | ||||
|         else: | ||||
|             collection[f"{b64_file}"] = {} | ||||
|             collection[f"{b64_file}"] = { f"{line_no}": b64_data} | ||||
|             file, line_no, data = parts | ||||
|             b64_file = base64.urlsafe_b64encode(file.encode('utf-8')).decode('utf-8') | ||||
|             b64_data = base64.urlsafe_b64encode(data.encode('utf-8')).decode('utf-8') | ||||
|  | ||||
|     try: | ||||
|         data = f"GREP|{ts}|{json.dumps(collection, separators=(',', ':'), indent=4)}" | ||||
|         send_ipc_message(data) | ||||
|     except Exception as e: | ||||
|         ... | ||||
|             if b64_file in collection.keys(): | ||||
|                 collection[f"{b64_file}"][f"{line_no}"] = b64_data | ||||
|             else: | ||||
|                 collection[f"{b64_file}"] = {} | ||||
|                 collection[f"{b64_file}"] = { f"{line_no}": b64_data} | ||||
|  | ||||
|         except Exception as e: | ||||
|             traceback.print_exc() | ||||
|  | ||||
|     proc.terminate() | ||||
|     data = f"GREP|{ts}|{json.dumps(collection, separators=(',', ':'), indent=4)}" | ||||
|     send_ipc_message(data) | ||||
|     collection = {} | ||||
|  | ||||
|  | ||||
| def search(args): | ||||
|     path = args.dir | ||||
|     if (path[0] == "'" and path[-1] == "'") or \ | ||||
|         path[0] == '"' and path[-1] == '"': | ||||
|             path = path[1:-1] | ||||
|  | ||||
|     if args.type == "file_search": | ||||
|         file_search(args.dir, args.query.lower()) | ||||
|         file_search(path, args.query.lower()) | ||||
|  | ||||
|     if args.type == "grep_search": | ||||
|         grep_search(args.dir, args.query.encode("utf-8")) | ||||
|         grep_search(path, args.query.encode("utf-8")) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
| @@ -107,5 +122,11 @@ if __name__ == "__main__": | ||||
|         # Read arguments (If any...) | ||||
|         args = parser.parse_args() | ||||
|         search(args) | ||||
|  | ||||
|         data = f"SEARCH_DONE|{ts}|0" | ||||
|         send_ipc_message(data) | ||||
|     except Exception as e: | ||||
|         traceback.print_exc() | ||||
|  | ||||
|         data = f"SEARCH_DONE|{ts}|1" | ||||
|         send_ipc_message(data) | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
| import os | ||||
| import threading | ||||
| import subprocess | ||||
| import ime | ||||
|  | ||||
| # Lib imports | ||||
| import gi | ||||
| @@ -32,18 +31,23 @@ class Plugin(PluginBase): | ||||
|     def __init__(self): | ||||
|         super().__init__() | ||||
|  | ||||
|         self.name               = "Example Plugin"  # NOTE: Need to remove after establishing private bidirectional 1-1 message bus | ||||
|                                                     #       where self.name should not be needed for message comms | ||||
|         self.name        = "Example Plugin"  # NOTE: Need to remove after establishing private bidirectional 1-1 message bus | ||||
|                                              #       where self.name should not be needed for message comms | ||||
|         # self.path        = os.path.dirname(os.path.realpath(__file__)) | ||||
|         # self._GLADE_FILE = f"{self.path}/glade_file.glade" | ||||
|  | ||||
|  | ||||
|     def run(self): | ||||
|         # self._builder = Gtk.Builder() | ||||
|         # self._builder.add_from_file(self._GLADE_FILE) | ||||
|         # self._connect_builder_signals(self, self._builder) | ||||
|         ... | ||||
|  | ||||
|     def generate_reference_ui_element(self): | ||||
|         button = Gtk.Button(label=self.name) | ||||
|         button.connect("button-release-event", self.send_message) | ||||
|         return button | ||||
|  | ||||
|     def run(self): | ||||
|         ... | ||||
|  | ||||
|     def send_message(self, widget=None, eve=None): | ||||
|         message = "Hello, World!" | ||||
|         event_system.emit("display_message", ("warning", message, None)) | ||||
|   | ||||
							
								
								
									
										3
									
								
								plugins/translate/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								plugins/translate/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| """ | ||||
|     Pligin Module | ||||
| """ | ||||
							
								
								
									
										3
									
								
								plugins/translate/__main__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								plugins/translate/__main__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| """ | ||||
|     Pligin Package | ||||
| """ | ||||
							
								
								
									
										6
									
								
								plugins/translate/brotli/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								plugins/translate/brotli/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| # flake8: noqa | ||||
| from .brotli import ( | ||||
|     decompress, Decompressor, compress, BrotliEncoderMode, DEFAULT_MODE, | ||||
|     Compressor, MODE_GENERIC, MODE_TEXT, MODE_FONT, error, Error | ||||
| ) | ||||
							
								
								
									
										466
									
								
								plugins/translate/brotli/brotli.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										466
									
								
								plugins/translate/brotli/brotli.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,466 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import math | ||||
| import enum | ||||
|  | ||||
| from ._brotli import ffi, lib | ||||
|  | ||||
|  | ||||
| class Error(Exception): | ||||
|     """ | ||||
|     Raised whenever an error is encountered with compressing or decompressing | ||||
|     data using brotlipy. | ||||
|  | ||||
|     .. versionadded:: 0.5.1 | ||||
|     """ | ||||
|     pass | ||||
|  | ||||
|  | ||||
| #: An alias of :class:`Error <brotli.Error>` that exists for compatibility with | ||||
| #: the original C brotli module. | ||||
| #: | ||||
| #: .. versionadded: 0.5.1 | ||||
| error = Error | ||||
|  | ||||
|  | ||||
| class BrotliEncoderMode(enum.IntEnum): | ||||
|     """ | ||||
|     Compression modes for the Brotli encoder. | ||||
|  | ||||
|     .. versionadded:: 0.5.0 | ||||
|     """ | ||||
|     #: Default compression mode. The compressor does not know anything in | ||||
|     #: advance about the properties of the input. | ||||
|     GENERIC = lib.BROTLI_MODE_GENERIC | ||||
|  | ||||
|     #: Compression mode for UTF-8 format text input. | ||||
|     TEXT = lib.BROTLI_MODE_TEXT | ||||
|  | ||||
|     #: Compression mode used in WOFF 2.0 | ||||
|     FONT = lib.BROTLI_MODE_FONT | ||||
|  | ||||
|  | ||||
| # Define some names for compatibility with the C module. | ||||
|  | ||||
| #: The default compression mode for brotli. | ||||
| DEFAULT_MODE = BrotliEncoderMode(lib.BROTLI_DEFAULT_MODE) | ||||
|  | ||||
|  | ||||
| #: A compression mode where the compressor does not know anything in advance | ||||
| #: about the properties of the input. | ||||
| #: | ||||
| #: .. note:: This name is defined for compatibility with the Brotli C | ||||
| #:           extension. If you're not interested in that compatibility, it is | ||||
| #:           recommended that you use :class:`BrotliEncoderMode | ||||
| #:           <brotli.BrotliEncoderMode>` instead. | ||||
| #: | ||||
| #: .. versionadded:: 0.5.0 | ||||
| MODE_GENERIC = BrotliEncoderMode.GENERIC | ||||
|  | ||||
|  | ||||
| #: A compression mode for UTF-8 format text input. | ||||
| #: | ||||
| #: .. note:: This name is defined for compatibility with the Brotli C | ||||
| #:           extension. If you're not interested in that compatibility, it is | ||||
| #:           recommended that you use :class:`BrotliEncoderMode | ||||
| #:           <brotli.BrotliEncoderMode>` instead. | ||||
| #: | ||||
| #: .. versionadded:: 0.5.0 | ||||
| MODE_TEXT = BrotliEncoderMode.TEXT | ||||
|  | ||||
|  | ||||
| #: The compression mode used in WOFF 2.0. | ||||
| #: | ||||
| #: .. note:: This name is defined for compatibility with the Brotli C | ||||
| #:           extension. If you're not interested in that compatibility, it is | ||||
| #:           recommended that you use :class:`BrotliEncoderMode | ||||
| #:           <brotli.BrotliEncoderMode>` instead. | ||||
| #: | ||||
| #: .. versionadded:: 0.5.0 | ||||
| MODE_FONT = BrotliEncoderMode.FONT | ||||
|  | ||||
|  | ||||
| def decompress(data): | ||||
|     """ | ||||
|     Decompress a complete Brotli-compressed string. | ||||
|  | ||||
|     :param data: A bytestring containing Brotli-compressed data. | ||||
|     """ | ||||
|     d = Decompressor() | ||||
|     data = d.decompress(data) | ||||
|     d.finish() | ||||
|     return data | ||||
|  | ||||
|  | ||||
| def compress(data, | ||||
|              mode=DEFAULT_MODE, | ||||
|              quality=lib.BROTLI_DEFAULT_QUALITY, | ||||
|              lgwin=lib.BROTLI_DEFAULT_WINDOW, | ||||
|              lgblock=0, | ||||
|              dictionary=b''): | ||||
|     """ | ||||
|     Compress a string using Brotli. | ||||
|  | ||||
|     .. versionchanged:: 0.5.0 | ||||
|        Added ``mode``, ``quality``, `lgwin``, ``lgblock``, and ``dictionary`` | ||||
|        parameters. | ||||
|  | ||||
|     :param data: A bytestring containing the data to compress. | ||||
|     :type data: ``bytes`` | ||||
|  | ||||
|     :param mode: The encoder mode. | ||||
|     :type mode: :class:`BrotliEncoderMode` or ``int`` | ||||
|  | ||||
|     :param quality: Controls the compression-speed vs compression-density | ||||
|         tradeoffs. The higher the quality, the slower the compression. The | ||||
|         range of this value is 0 to 11. | ||||
|     :type quality: ``int`` | ||||
|  | ||||
|     :param lgwin: The base-2 logarithm of the sliding window size. The range of | ||||
|         this value is 10 to 24. | ||||
|     :type lgwin: ``int`` | ||||
|  | ||||
|     :param lgblock: The base-2 logarithm of the maximum input block size. The | ||||
|         range of this value is 16 to 24. If set to 0, the value will be set | ||||
|         based on ``quality``. | ||||
|     :type lgblock: ``int`` | ||||
|  | ||||
|     :param dictionary: A pre-set dictionary for LZ77. Please use this with | ||||
|         caution: if a dictionary is used for compression, the same dictionary | ||||
|         **must** be used for decompression! | ||||
|     :type dictionary: ``bytes`` | ||||
|  | ||||
|     :returns: The compressed bytestring. | ||||
|     :rtype: ``bytes`` | ||||
|     """ | ||||
|     # This method uses private variables on the Compressor object, and | ||||
|     # generally does a whole lot of stuff that's not supported by the public | ||||
|     # API. The goal here is to minimise the number of allocations and copies | ||||
|     # we have to do. Users should prefer this method over the Compressor if | ||||
|     # they know they have single-shot data. | ||||
|     compressor = Compressor( | ||||
|         mode=mode, | ||||
|         quality=quality, | ||||
|         lgwin=lgwin, | ||||
|         lgblock=lgblock, | ||||
|         dictionary=dictionary | ||||
|     ) | ||||
|     compressed_data = compressor._compress(data, lib.BROTLI_OPERATION_FINISH) | ||||
|     assert lib.BrotliEncoderIsFinished(compressor._encoder) == lib.BROTLI_TRUE | ||||
|     assert ( | ||||
|         lib.BrotliEncoderHasMoreOutput(compressor._encoder) == lib.BROTLI_FALSE | ||||
|     ) | ||||
|     return compressed_data | ||||
|  | ||||
|  | ||||
| def _validate_mode(val): | ||||
|     """ | ||||
|     Validate that the mode is valid. | ||||
|     """ | ||||
|     try: | ||||
|         val = BrotliEncoderMode(val) | ||||
|     except ValueError: | ||||
|         raise Error("%s is not a valid encoder mode" % val) | ||||
|  | ||||
|  | ||||
| def _validate_quality(val): | ||||
|     """ | ||||
|     Validate that the quality setting is valid. | ||||
|     """ | ||||
|     if not (0 <= val <= 11): | ||||
|         raise Error( | ||||
|             "%d is not a valid quality, must be between 0 and 11" % val | ||||
|         ) | ||||
|  | ||||
|  | ||||
| def _validate_lgwin(val): | ||||
|     """ | ||||
|     Validate that the lgwin setting is valid. | ||||
|     """ | ||||
|     if not (10 <= val <= 24): | ||||
|         raise Error("%d is not a valid lgwin, must be between 10 and 24" % val) | ||||
|  | ||||
|  | ||||
| def _validate_lgblock(val): | ||||
|     """ | ||||
|     Validate that the lgblock setting is valid. | ||||
|     """ | ||||
|     if (val != 0) and not (16 <= val <= 24): | ||||
|         raise Error( | ||||
|             "%d is not a valid lgblock, must be either 0 or between 16 and 24" | ||||
|             % val | ||||
|         ) | ||||
|  | ||||
|  | ||||
| def _set_parameter(encoder, parameter, parameter_name, val): | ||||
|     """ | ||||
|     This helper function sets a specific Brotli encoder parameter, checking | ||||
|     the return code and raising :class:`Error <brotli.Error>` if it is | ||||
|     invalid. | ||||
|     """ | ||||
|     rc = lib.BrotliEncoderSetParameter(encoder, parameter, val) | ||||
|  | ||||
|     if parameter == lib.BROTLI_PARAM_MODE: | ||||
|         _validate_mode(val) | ||||
|     elif parameter == lib.BROTLI_PARAM_QUALITY: | ||||
|         _validate_quality(val) | ||||
|     elif parameter == lib.BROTLI_PARAM_LGWIN: | ||||
|         _validate_lgwin(val) | ||||
|     elif parameter == lib.BROTLI_PARAM_LGBLOCK: | ||||
|         _validate_lgblock(val) | ||||
|     else:  # pragma: no cover | ||||
|         raise RuntimeError("Unexpected parameter!") | ||||
|  | ||||
|     # This block is defensive: I see no way to hit it, but as long as the | ||||
|     # function returns a value we can live in hope that the brotli folks will | ||||
|     # enforce their own constraints. | ||||
|     if rc != lib.BROTLI_TRUE:  # pragma: no cover | ||||
|         raise Error( | ||||
|             "Error setting parameter %s: %d" % (parameter_name, val) | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class Compressor(object): | ||||
|     """ | ||||
|     An object that allows for streaming compression of data using the Brotli | ||||
|     compression algorithm. | ||||
|  | ||||
|     .. versionadded:: 0.5.0 | ||||
|  | ||||
|     :param mode: The encoder mode. | ||||
|     :type mode: :class:`BrotliEncoderMode` or ``int`` | ||||
|  | ||||
|     :param quality: Controls the compression-speed vs compression-density | ||||
|         tradeoffs. The higher the quality, the slower the compression. The | ||||
|         range of this value is 0 to 11. | ||||
|     :type quality: ``int`` | ||||
|  | ||||
|     :param lgwin: The base-2 logarithm of the sliding window size. The range of | ||||
|         this value is 10 to 24. | ||||
|     :type lgwin: ``int`` | ||||
|  | ||||
|     :param lgblock: The base-2 logarithm of the maximum input block size. The | ||||
|         range of this value is 16 to 24. If set to 0, the value will be set | ||||
|         based on ``quality``. | ||||
|     :type lgblock: ``int`` | ||||
|  | ||||
|     :param dictionary: A pre-set dictionary for LZ77. Please use this with | ||||
|         caution: if a dictionary is used for compression, the same dictionary | ||||
|         **must** be used for decompression! | ||||
|     :type dictionary: ``bytes`` | ||||
|     """ | ||||
|     _dictionary = None | ||||
|     _dictionary_size = None | ||||
|  | ||||
|     def __init__(self, | ||||
|                  mode=DEFAULT_MODE, | ||||
|                  quality=lib.BROTLI_DEFAULT_QUALITY, | ||||
|                  lgwin=lib.BROTLI_DEFAULT_WINDOW, | ||||
|                  lgblock=0, | ||||
|                  dictionary=b''): | ||||
|         enc = lib.BrotliEncoderCreateInstance( | ||||
|             ffi.NULL, ffi.NULL, ffi.NULL | ||||
|         ) | ||||
|         if not enc:  # pragma: no cover | ||||
|             raise RuntimeError("Unable to allocate Brotli encoder!") | ||||
|  | ||||
|         enc = ffi.gc(enc, lib.BrotliEncoderDestroyInstance) | ||||
|  | ||||
|         # Configure the encoder appropriately. | ||||
|         _set_parameter(enc, lib.BROTLI_PARAM_MODE, "mode", mode) | ||||
|         _set_parameter(enc, lib.BROTLI_PARAM_QUALITY, "quality", quality) | ||||
|         _set_parameter(enc, lib.BROTLI_PARAM_LGWIN, "lgwin", lgwin) | ||||
|         _set_parameter(enc, lib.BROTLI_PARAM_LGBLOCK, "lgblock", lgblock) | ||||
|  | ||||
|         if dictionary: | ||||
|             self._dictionary = ffi.new("uint8_t []", dictionary) | ||||
|             self._dictionary_size = len(dictionary) | ||||
|             lib.BrotliEncoderSetCustomDictionary( | ||||
|                 enc, self._dictionary_size, self._dictionary | ||||
|             ) | ||||
|  | ||||
|         self._encoder = enc | ||||
|  | ||||
|     def _compress(self, data, operation): | ||||
|         """ | ||||
|         This private method compresses some data in a given mode. This is used | ||||
|         because almost all of the code uses the exact same setup. It wouldn't | ||||
|         have to, but it doesn't hurt at all. | ||||
|         """ | ||||
|         # The 'algorithm' for working out how big to make this buffer is from | ||||
|         # the Brotli source code, brotlimodule.cc. | ||||
|         original_output_size = int( | ||||
|             math.ceil(len(data) + (len(data) >> 2) + 10240) | ||||
|         ) | ||||
|         available_out = ffi.new("size_t *") | ||||
|         available_out[0] = original_output_size | ||||
|         output_buffer = ffi.new("uint8_t []", available_out[0]) | ||||
|         ptr_to_output_buffer = ffi.new("uint8_t **", output_buffer) | ||||
|         input_size = ffi.new("size_t *", len(data)) | ||||
|         input_buffer = ffi.new("uint8_t []", data) | ||||
|         ptr_to_input_buffer = ffi.new("uint8_t **", input_buffer) | ||||
|  | ||||
|         rc = lib.BrotliEncoderCompressStream( | ||||
|             self._encoder, | ||||
|             operation, | ||||
|             input_size, | ||||
|             ptr_to_input_buffer, | ||||
|             available_out, | ||||
|             ptr_to_output_buffer, | ||||
|             ffi.NULL | ||||
|         ) | ||||
|         if rc != lib.BROTLI_TRUE:  # pragma: no cover | ||||
|             raise Error("Error encountered compressing data.") | ||||
|  | ||||
|         assert not input_size[0] | ||||
|  | ||||
|         size_of_output = original_output_size - available_out[0] | ||||
|         return ffi.buffer(output_buffer, size_of_output)[:] | ||||
|  | ||||
|     def compress(self, data): | ||||
|         """ | ||||
|         Incrementally compress more data. | ||||
|  | ||||
|         :param data: A bytestring containing data to compress. | ||||
|         :returns: A bytestring containing some compressed data. May return the | ||||
|             empty bytestring if not enough data has been inserted into the | ||||
|             compressor to create the output yet. | ||||
|         """ | ||||
|         return self._compress(data, lib.BROTLI_OPERATION_PROCESS) | ||||
|  | ||||
|     def flush(self): | ||||
|         """ | ||||
|         Flush the compressor. This will emit the remaining output data, but | ||||
|         will not destroy the compressor. It can be used, for example, to ensure | ||||
|         that given chunks of content will decompress immediately. | ||||
|         """ | ||||
|         chunks = [] | ||||
|         chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH)) | ||||
|  | ||||
|         while lib.BrotliEncoderHasMoreOutput(self._encoder) == lib.BROTLI_TRUE: | ||||
|             chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH)) | ||||
|  | ||||
|         return b''.join(chunks) | ||||
|  | ||||
|     def finish(self): | ||||
|         """ | ||||
|         Finish the compressor. This will emit the remaining output data and | ||||
|         transition the compressor to a completed state. The compressor cannot | ||||
|         be used again after this point, and must be replaced. | ||||
|         """ | ||||
|         chunks = [] | ||||
|         while lib.BrotliEncoderIsFinished(self._encoder) == lib.BROTLI_FALSE: | ||||
|             chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FINISH)) | ||||
|  | ||||
|         return b''.join(chunks) | ||||
|  | ||||
|  | ||||
| class Decompressor(object): | ||||
|     """ | ||||
|     An object that allows for streaming decompression of Brotli-compressed | ||||
|     data. | ||||
|  | ||||
|     .. versionchanged:: 0.5.0 | ||||
|        Added ``dictionary`` parameter. | ||||
|  | ||||
|     :param dictionary: A pre-set dictionary for LZ77. Please use this with | ||||
|         caution: if a dictionary is used for compression, the same dictionary | ||||
|         **must** be used for decompression! | ||||
|     :type dictionary: ``bytes`` | ||||
|     """ | ||||
|     _dictionary = None | ||||
|     _dictionary_size = None | ||||
|  | ||||
|     def __init__(self, dictionary=b''): | ||||
|         dec = lib.BrotliDecoderCreateInstance(ffi.NULL, ffi.NULL, ffi.NULL) | ||||
|         self._decoder = ffi.gc(dec, lib.BrotliDecoderDestroyInstance) | ||||
|  | ||||
|         if dictionary: | ||||
|             self._dictionary = ffi.new("uint8_t []", dictionary) | ||||
|             self._dictionary_size = len(dictionary) | ||||
|             lib.BrotliDecoderSetCustomDictionary( | ||||
|                 self._decoder, | ||||
|                 self._dictionary_size, | ||||
|                 self._dictionary | ||||
|             ) | ||||
|  | ||||
|     def decompress(self, data): | ||||
|         """ | ||||
|         Decompress part of a complete Brotli-compressed string. | ||||
|  | ||||
|         :param data: A bytestring containing Brotli-compressed data. | ||||
|         :returns: A bytestring containing the decompressed data. | ||||
|         """ | ||||
|         chunks = [] | ||||
|  | ||||
|         available_in = ffi.new("size_t *", len(data)) | ||||
|         in_buffer = ffi.new("uint8_t[]", data) | ||||
|         next_in = ffi.new("uint8_t **", in_buffer) | ||||
|  | ||||
|         while True: | ||||
|             # Allocate a buffer that's hopefully overlarge, but if it's not we | ||||
|             # don't mind: we'll spin around again. | ||||
|             buffer_size = 5 * len(data) | ||||
|             available_out = ffi.new("size_t *", buffer_size) | ||||
|             out_buffer = ffi.new("uint8_t[]", buffer_size) | ||||
|             next_out = ffi.new("uint8_t **", out_buffer) | ||||
|  | ||||
|             rc = lib.BrotliDecoderDecompressStream(self._decoder, | ||||
|                                                    available_in, | ||||
|                                                    next_in, | ||||
|                                                    available_out, | ||||
|                                                    next_out, | ||||
|                                                    ffi.NULL) | ||||
|  | ||||
|             # First, check for errors. | ||||
|             if rc == lib.BROTLI_DECODER_RESULT_ERROR: | ||||
|                 error_code = lib.BrotliDecoderGetErrorCode(self._decoder) | ||||
|                 error_message = lib.BrotliDecoderErrorString(error_code) | ||||
|                 raise Error( | ||||
|                     "Decompression error: %s" % ffi.string(error_message) | ||||
|                 ) | ||||
|  | ||||
|             # Next, copy the result out. | ||||
|             chunk = ffi.buffer(out_buffer, buffer_size - available_out[0])[:] | ||||
|             chunks.append(chunk) | ||||
|  | ||||
|             if rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: | ||||
|                 assert available_in[0] == 0 | ||||
|                 break | ||||
|             elif rc == lib.BROTLI_DECODER_RESULT_SUCCESS: | ||||
|                 break | ||||
|             else: | ||||
|                 # It's cool if we need more output, we just loop again. | ||||
|                 assert rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT | ||||
|  | ||||
|         return b''.join(chunks) | ||||
|  | ||||
|     def flush(self): | ||||
|         """ | ||||
|         Complete the decompression, return whatever data is remaining to be | ||||
|         decompressed. | ||||
|  | ||||
|         .. deprecated:: 0.4.0 | ||||
|  | ||||
|             This method is no longer required, as decompress() will now | ||||
|             decompress eagerly. | ||||
|  | ||||
|         :returns: A bytestring containing the remaining decompressed data. | ||||
|         """ | ||||
|         return b'' | ||||
|  | ||||
|     def finish(self): | ||||
|         """ | ||||
|         Finish the decompressor. As the decompressor decompresses eagerly, this | ||||
|         will never actually emit any data. However, it will potentially throw | ||||
|         errors if a truncated or damaged data stream has been used. | ||||
|  | ||||
|         Note that, once this method is called, the decompressor is no longer | ||||
|         safe for further use and must be thrown away. | ||||
|         """ | ||||
|         assert ( | ||||
|             lib.BrotliDecoderHasMoreOutput(self._decoder) == lib.BROTLI_FALSE | ||||
|         ) | ||||
|         if lib.BrotliDecoderIsFinished(self._decoder) == lib.BROTLI_FALSE: | ||||
|             raise Error("Decompression error: incomplete compressed stream.") | ||||
|  | ||||
|         return b'' | ||||
							
								
								
									
										224
									
								
								plugins/translate/brotli/build.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										224
									
								
								plugins/translate/brotli/build.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,224 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
|  | ||||
| from cffi import FFI | ||||
| ffi = FFI() | ||||
|  | ||||
| libraries = ['libbrotli'] | ||||
| if 'win32' not in str(sys.platform).lower(): | ||||
|     libraries.append('stdc++') | ||||
|  | ||||
|  | ||||
| ffi.set_source( | ||||
|     "_brotli", | ||||
|     """#include <brotli/decode.h> | ||||
|        #include <brotli/encode.h> | ||||
|     """, | ||||
|     libraries=libraries, | ||||
|     include_dirs=["libbrotli", "libbrotli/include"] | ||||
| ) | ||||
|  | ||||
| ffi.cdef(""" | ||||
|     /* common/types.h */ | ||||
|     typedef bool BROTLI_BOOL; | ||||
|     #define BROTLI_TRUE ... | ||||
|     #define BROTLI_FALSE ... | ||||
|  | ||||
|     /* dec/state.h */ | ||||
|     /* Allocating function pointer. Function MUST return 0 in the case of | ||||
|        failure. Otherwise it MUST return a valid pointer to a memory region of | ||||
|        at least size length. Neither items nor size are allowed to be 0. | ||||
|        opaque argument is a pointer provided by client and could be used to | ||||
|        bind function to specific object (memory pool). */ | ||||
|     typedef void* (*brotli_alloc_func)(void* opaque, size_t size); | ||||
|  | ||||
|     /* Deallocating function pointer. Function SHOULD be no-op in the case the | ||||
|        address is 0. */ | ||||
|     typedef void (*brotli_free_func)(void* opaque, void* address); | ||||
|  | ||||
|     /* dec/decode.h */ | ||||
|  | ||||
|     typedef enum { | ||||
|       /* Decoding error, e.g. corrupt input or memory allocation problem */ | ||||
|       BROTLI_DECODER_RESULT_ERROR = 0, | ||||
|       /* Decoding successfully completed */ | ||||
|       BROTLI_DECODER_RESULT_SUCCESS = 1, | ||||
|       /* Partially done; should be called again with more input */ | ||||
|       BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT = 2, | ||||
|       /* Partially done; should be called again with more output */ | ||||
|       BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT = 3 | ||||
|     } BrotliDecoderResult; | ||||
|  | ||||
|     typedef enum {...} BrotliDecoderErrorCode; | ||||
|     typedef ... BrotliDecoderState; | ||||
|  | ||||
|     /* Creates the instance of BrotliDecoderState and initializes it. | ||||
|        |alloc_func| and |free_func| MUST be both zero or both non-zero. In the | ||||
|        case they are both zero, default memory allocators are used. |opaque| is | ||||
|        passed to |alloc_func| and |free_func| when they are called. */ | ||||
|     BrotliDecoderState* BrotliDecoderCreateInstance(brotli_alloc_func, | ||||
|                                                     brotli_free_func, | ||||
|                                                     void *); | ||||
|  | ||||
|     /* Deinitializes and frees BrotliDecoderState instance. */ | ||||
|     void BrotliDecoderDestroyInstance(BrotliDecoderState* state); | ||||
|  | ||||
|     /* Decompresses the data. Supports partial input and output. | ||||
|  | ||||
|        Must be called with an allocated input buffer in |*next_in| and an | ||||
|        allocated output buffer in |*next_out|. The values |*available_in| and | ||||
|        |*available_out| must specify the allocated size in |*next_in| and | ||||
|        |*next_out| respectively. | ||||
|  | ||||
|        After each call, |*available_in| will be decremented by the amount of | ||||
|        input bytes consumed, and the |*next_in| pointer will be incremented by | ||||
|        that amount. Similarly, |*available_out| will be decremented by the | ||||
|        amount of output bytes written, and the |*next_out| pointer will be | ||||
|        incremented by that amount. |total_out|, if it is not a null-pointer, | ||||
|        will be set to the number of bytes decompressed since the last state | ||||
|        initialization. | ||||
|  | ||||
|        Input is never overconsumed, so |next_in| and |available_in| could be | ||||
|        passed to the next consumer after decoding is complete. */ | ||||
|     BrotliDecoderResult BrotliDecoderDecompressStream(BrotliDecoderState* s, | ||||
|                                                       size_t* available_in, | ||||
|                                                       const uint8_t** next_in, | ||||
|                                                       size_t* available_out, | ||||
|                                                       uint8_t** next_out, | ||||
|                                                       size_t* total_out); | ||||
|  | ||||
|     /* Fills the new state with a dictionary for LZ77, warming up the | ||||
|        ringbuffer, e.g. for custom static dictionaries for data formats. | ||||
|        Not to be confused with the built-in transformable dictionary of Brotli. | ||||
|        |size| should be less or equal to 2^24 (16MiB), otherwise the dictionary | ||||
|        will be ignored. The dictionary must exist in memory until decoding is | ||||
|        done and is owned by the caller. To use: | ||||
|         1) Allocate and initialize state with BrotliCreateInstance | ||||
|         2) Use BrotliSetCustomDictionary | ||||
|         3) Use BrotliDecompressStream | ||||
|         4) Clean up and free state with BrotliDestroyState | ||||
|     */ | ||||
|     void BrotliDecoderSetCustomDictionary( | ||||
|         BrotliDecoderState* s, size_t size, const uint8_t* dict); | ||||
|  | ||||
|     /* Returns true, if decoder has some unconsumed output. | ||||
|        Otherwise returns false. */ | ||||
|     BROTLI_BOOL BrotliDecoderHasMoreOutput(const BrotliDecoderState* s); | ||||
|  | ||||
|     /* Returns true, if decoder has already received some input bytes. | ||||
|        Otherwise returns false. */ | ||||
|     BROTLI_BOOL BrotliDecoderIsUsed(const BrotliDecoderState* s); | ||||
|  | ||||
|     /* Returns true, if decoder is in a state where we reached the end of the | ||||
|        input and produced all of the output; returns false otherwise. */ | ||||
|     BROTLI_BOOL BrotliDecoderIsFinished(const BrotliDecoderState* s); | ||||
|  | ||||
|     /* Returns detailed error code after BrotliDecompressStream returns | ||||
|        BROTLI_DECODER_RESULT_ERROR. */ | ||||
|     BrotliDecoderErrorCode BrotliDecoderGetErrorCode( | ||||
|                                                   const BrotliDecoderState* s); | ||||
|  | ||||
|     const char* BrotliDecoderErrorString(BrotliDecoderErrorCode c); | ||||
|  | ||||
|     /* enc/encode.h */ | ||||
|     typedef ... BrotliEncoderState; | ||||
|  | ||||
|     typedef enum BrotliEncoderParameter { | ||||
|       BROTLI_PARAM_MODE = 0, | ||||
|       /* Controls the compression-speed vs compression-density tradeoffs. The | ||||
|          higher the quality, the slower the compression. Range is 0 to 11. */ | ||||
|       BROTLI_PARAM_QUALITY = 1, | ||||
|       /* Base 2 logarithm of the sliding window size. Range is 10 to 24. */ | ||||
|       BROTLI_PARAM_LGWIN = 2, | ||||
|       /* Base 2 logarithm of the maximum input block size. Range is 16 to 24. | ||||
|          If set to 0, the value will be set based on the quality. */ | ||||
|       BROTLI_PARAM_LGBLOCK = 3 | ||||
|     } BrotliEncoderParameter; | ||||
|  | ||||
|     typedef enum BrotliEncoderMode { | ||||
|       /* Default compression mode. The compressor does not know anything in | ||||
|          advance about the properties of the input. */ | ||||
|       BROTLI_MODE_GENERIC = 0, | ||||
|       /* Compression mode for UTF-8 format text input. */ | ||||
|       BROTLI_MODE_TEXT = 1, | ||||
|       /* Compression mode used in WOFF 2.0. */ | ||||
|       BROTLI_MODE_FONT = 2 | ||||
|     } BrotliEncoderMode; | ||||
|  | ||||
|     int BROTLI_DEFAULT_QUALITY = 11; | ||||
|     int BROTLI_DEFAULT_WINDOW = 22; | ||||
|     #define BROTLI_DEFAULT_MODE ... | ||||
|  | ||||
|     typedef enum BrotliEncoderOperation { | ||||
|       BROTLI_OPERATION_PROCESS = 0, | ||||
|       /* Request output stream to flush. Performed when input stream is | ||||
|          depleted and there is enough space in output stream. */ | ||||
|       BROTLI_OPERATION_FLUSH = 1, | ||||
|       /* Request output stream to finish. Performed when input stream is | ||||
|          depleted and there is enough space in output stream. */ | ||||
|       BROTLI_OPERATION_FINISH = 2 | ||||
|     } BrotliEncoderOperation; | ||||
|  | ||||
|     /* Creates the instance of BrotliEncoderState and initializes it. | ||||
|        |alloc_func| and |free_func| MUST be both zero or both non-zero. In the | ||||
|        case they are both zero, default memory allocators are used. |opaque| is | ||||
|        passed to |alloc_func| and |free_func| when they are called. */ | ||||
|     BrotliEncoderState* BrotliEncoderCreateInstance(brotli_alloc_func, | ||||
|                                                     brotli_free_func, | ||||
|                                                     void *); | ||||
|  | ||||
|     /* Deinitializes and frees BrotliEncoderState instance. */ | ||||
|     void BrotliEncoderDestroyInstance(BrotliEncoderState* state); | ||||
|  | ||||
|     /* Compresses the data in |input_buffer| into |encoded_buffer|, and sets | ||||
|        |*encoded_size| to the compressed length. | ||||
|        BROTLI_DEFAULT_QUALITY, BROTLI_DEFAULT_WINDOW and BROTLI_DEFAULT_MODE | ||||
|        should be used as |quality|, |lgwin| and |mode| if there are no specific | ||||
|        requirements to encoder speed and compression ratio. | ||||
|        If compression fails, |*encoded_size| is set to 0. | ||||
|        If BrotliEncoderMaxCompressedSize(|input_size|) is not zero, then | ||||
|        |*encoded_size| is never set to the bigger value. | ||||
|        Returns false if there was an error and true otherwise. */ | ||||
|     BROTLI_BOOL BrotliEncoderCompress(int quality, | ||||
|                                       int lgwin, | ||||
|                                       BrotliEncoderMode mode, | ||||
|                                       size_t input_size, | ||||
|                                       const uint8_t* input_buffer, | ||||
|                                       size_t* encoded_size, | ||||
|                                       uint8_t* encoded_buffer); | ||||
|  | ||||
|     BROTLI_BOOL BrotliEncoderCompressStream(BrotliEncoderState* s, | ||||
|                                             BrotliEncoderOperation op, | ||||
|                                             size_t* available_in, | ||||
|                                             const uint8_t** next_in, | ||||
|                                             size_t* available_out, | ||||
|                                             uint8_t** next_out, | ||||
|                                             size_t* total_out); | ||||
|  | ||||
|     BROTLI_BOOL BrotliEncoderSetParameter(BrotliEncoderState* state, | ||||
|                                           BrotliEncoderParameter p, | ||||
|                                           uint32_t value); | ||||
|  | ||||
|     /* Fills the new state with a dictionary for LZ77, warming up the | ||||
|        ringbuffer, e.g. for custom static dictionaries for data formats. | ||||
|        Not to be confused with the built-in transformable dictionary of Brotli. | ||||
|        To decode, use BrotliSetCustomDictionary() of the decoder with the same | ||||
|        dictionary. */ | ||||
|     void BrotliEncoderSetCustomDictionary(BrotliEncoderState* state, | ||||
|                                           size_t size, | ||||
|                                           const uint8_t* dict); | ||||
|  | ||||
|     /* Check if encoder is in "finished" state, i.e. no more input is | ||||
|        acceptable and no more output will be produced. | ||||
|        Works only with BrotliEncoderCompressStream workflow. | ||||
|        Returns 1 if stream is finished and 0 otherwise. */ | ||||
|     BROTLI_BOOL BrotliEncoderIsFinished(BrotliEncoderState* s); | ||||
|  | ||||
|     /* Check if encoder has more output bytes in internal buffer. | ||||
|        Works only with BrotliEncoderCompressStream workflow. | ||||
|        Returns 1 if has more output (in internal buffer) and 0 otherwise. */ | ||||
|     BROTLI_BOOL BrotliEncoderHasMoreOutput(BrotliEncoderState* s); | ||||
| """) | ||||
|  | ||||
# Build the CFFI extension module (declared above via ffi.cdef) when this
# file is executed directly as a build script.
if __name__ == '__main__':
    ffi.compile()
							
								
								
									
										12
									
								
								plugins/translate/manifest.json
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								plugins/translate/manifest.json
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | ||||
| { | ||||
|     "manifest": { | ||||
|         "name": "Translate", | ||||
|         "author": "ITDominator", | ||||
|         "version": "0.0.1", | ||||
|         "support": "", | ||||
|         "requests": { | ||||
|             "ui_target": "plugin_control_list", | ||||
|             "pass_fm_events": "true" | ||||
|         } | ||||
|     } | ||||
| } | ||||
							
								
								
									
										194
									
								
								plugins/translate/plugin.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										194
									
								
								plugins/translate/plugin.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,194 @@ | ||||
# Python imports
import os
import json
import time
import functools
import threading

import requests

from . import brotli
|  | ||||
| # Lib imports | ||||
| import gi | ||||
| gi.require_version('Gtk', '3.0') | ||||
| from gi.repository import Gtk | ||||
| from gi.repository import GLib | ||||
|  | ||||
| # Application imports | ||||
| from plugins.plugin_base import PluginBase | ||||
|  | ||||
|  | ||||
| # NOTE: Threads WILL die with parent's destruction. | ||||
| def daemon_threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start() | ||||
|     return wrapper | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
class Plugin(PluginBase):
    """Translate plugin for SolarFM.

    Shows a modal dialog that sends typed text to DuckDuckGo's translation
    endpoint and displays the result.  A one-shot watcher thread debounces
    requests while the user is still typing, and a scraped 'vqd' session
    token authorizes each translation request.
    """

    def __init__(self):
        super().__init__()

        self.path        = os.path.dirname(os.path.realpath(__file__))
        self.name        = "Translate"  # NOTE: Need to remove after establishing private bidirectional 1-1 message bus
                                        #       where self.name should not be needed for message comms
        self._GLADE_FILE = f"{self.path}/translate.glade"

        # Base endpoint; vqd token and language pair are appended per request.
        self._link       = "https://duckduckgo.com/translation.js?"
        self._headers    = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:106.0) Gecko/20100101 Firefox/106.0',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'gzip, deflate, br',
            'Referer': 'https://duckduckgo.com/',
            'Content-Type': 'text/plain',
            'X-Requested-With': 'XMLHttpRequest',
            'Origin': 'https://duckduckgo.com',
            'DNT': '1',
            'Connection': 'keep-alive',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-origin',
            'Pragma': 'no-cache',
            'Cache-Control': 'no-cache'
        }

        # Search page scraped for a fresh 'vqd' session token (see get_vqd).
        self.vqd_link    = "https://duckduckgo.com/?hps=1&q=translate&ia=web"
        self.vqd_data    = {"q": "translate", "ia":"web"}
        self.vqd_headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:106.0) Gecko/20100101 Firefox/106.0',
            "Referer": "https://duckduckgo.com/"
        }

        self._queue_translate = False  # set by _pre_translate; debounce flag
        self._watcher_running = False  # guards against spawning multiple watchers
        self._vqd_attrib      = None   # DuckDuckGo session token
        self.from_trans       = "ja"   # source language code
        self.to_trans         = "en"   # target language code
        self.translate_tries  = 0      # consecutive retry counter for _translate

    def generate_reference_ui_element(self):
        """Return the button that the host places in the plugin control list."""
        button = Gtk.Button(label=self.name)
        button.connect("button-release-event", self._show_translate_page)
        return button

    def run(self):
        """Load the glade UI, resolve widget references, and fetch a vqd token."""
        self._builder = Gtk.Builder()
        self._builder.add_from_file(self._GLADE_FILE)
        self._connect_builder_signals(self, self._builder)

        self._translate_dialog      = self._builder.get_object("translate_dialog")
        self._translate_from        = self._builder.get_object("translate_from")
        self._translate_to          = self._builder.get_object("translate_to")
        self._translate_from_buffer = self._builder.get_object("translate_from_buffer")
        self._translate_to_buffer   = self._builder.get_object("translate_to_buffer")
        self._detected_language_lbl = self._builder.get_object("detected_language_lbl")

        self._detected_language_lbl.set_label(f"Selected Language: {self.from_trans}")
        self.get_vqd()

    # FIX: was decorated with @threaded, which is neither defined nor imported
    # in this module (NameError at import time); use the local daemon decorator.
    @daemon_threaded
    def _show_translate_page(self, widget=None, eve=None):
        """Button handler: snapshot FM state off-thread, then open the dialog."""
        event_system.emit("get_current_state")

        state               = self._fm_state
        self._event_message = None

        # Gtk work must happen on the main loop, not this worker thread.
        GLib.idle_add(self._show_ui, (state))

    def _show_ui(self, state):
        """Seed the 'from' buffer with the selected file name and run the dialog."""
        if state.uris and len(state.uris) == 1:
            file_name = state.uris[0].split("/")[-1]
            self._translate_from_buffer.set_text(file_name)

        # Modal run; hide unconditionally afterward (was redundantly hidden twice).
        self._translate_dialog.run()
        self._translate_dialog.hide()

    def _pre_translate(self, widget=None, eve=None):
        """'changed' handler for the from-buffer; queue a debounced translate."""
        self._queue_translate = True

        if not self._watcher_running:
            self._watcher_running = True
            self.run_translate_watcher()

    @daemon_threaded
    def run_translate_watcher(self):
        """Debounce loop: wait out a 1s quiet period, then translate once."""
        while True:
            if self._queue_translate:
                self._queue_translate = False
                time.sleep(1)

                # NOTE: Hold call to translate if we're still typing/updating...
                if self._queue_translate:
                    continue

                GLib.idle_add(self._translate)
                self._watcher_running = False

            break

    def _translate(self):
        """POST the from-buffer text to DuckDuckGo and display the translation."""
        start_itr, end_itr = self._translate_from_buffer.get_bounds()
        text               = self._translate_from_buffer.get_text(start_itr, end_itr, True)

        # FIX: the empty check previously compared the utf-8 *bytes* against
        # ""/None, which can never match; test the str before encoding.
        if not text or self._queue_translate:
            return

        from_translate = text.encode('utf-8')

        self.translate_tries += 1
        tlink    = f"{self._link}vqd={self._vqd_attrib}&query=translate&from={self.from_trans}&to={self.to_trans}"
        response = requests.post(tlink, headers=self._headers, data=from_translate)

        if response.status_code == 200:
            data = self.get_data(response)
            self.translate_tries = 0

            # FIX: get_data may return None on parse failure; guard before indexing.
            if not data:
                self._translate_to_buffer.set_text("Could not parse translation response...")
                return

            self._translate_to_buffer.set_text(data["translated"])
            if data["detected_language"]:
                self._detected_language_lbl.set_label(f"Detected Language: {data['detected_language']}")
            else:
                self._detected_language_lbl.set_label(f"Selected Language: {self.from_trans}")
        # FIX: was '>= 400 or ... < 500', which is true for EVERY status code and
        # made the final else unreachable; 4xx means the vqd token went stale.
        elif 400 <= response.status_code < 500:
            self.get_vqd()
            if not self.translate_tries > 2:
                self._translate()
        else:
            msg = f"Could not translate... Response Code: {response.status_code}"
            self._translate_to_buffer.set_text(msg)

    def get_data(self, response):
        """Best-effort decode of the translation response body.

        Tries, in order: requests' own JSON decoding, json.loads of the text,
        then brotli decompression followed by json.loads (the endpoint may
        answer brotli-coded).  Returns the parsed object, or None if every
        strategy failed.
        """
        try:
            return response.json()
        except Exception:
            ...

        try:
            return json.loads(response.text)
        except Exception:
            ...

        try:
            decompress_str = brotli.decompress(response.content).decode("utf-8")
            return json.loads(decompress_str)
        except Exception:
            ...

        return None

    # NOTE: https://github.com/deedy5/duckduckgo_search/blob/72acb900a346be576f0917dd3d6c0fbd618a71bf/duckduckgo_search/utils.py
    def get_vqd(self):
        """Scrape a fresh 'vqd' request token from the DuckDuckGo search page."""
        response = requests.post(self.vqd_link, headers=self.vqd_headers, data=self.vqd_data, timeout=2)
        if response.status_code == 200:
            data             = response.content
            vqd_start_index  = data.index(b"vqd='") + 5
            vqd_end_index    = data.index(b"'", vqd_start_index)
            self._vqd_attrib = data[vqd_start_index:vqd_end_index].decode("utf-8")

            print(f"Translation VQD: {self._vqd_attrib}")
        else:
            # FIX: message previously said 'VQS'; the token is named 'vqd'.
            msg = f"Could not get VQD attribute... Response Code: {response.status_code}"
            self._translate_to_buffer.set_text(msg)
							
								
								
									
										210
									
								
								plugins/translate/translate.glade
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										210
									
								
								plugins/translate/translate.glade
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,210 @@ | ||||
| <?xml version="1.0" encoding="UTF-8"?> | ||||
| <!-- Generated with glade 3.40.0 --> | ||||
| <interface> | ||||
|   <requires lib="gtk+" version="3.22"/> | ||||
|   <object class="GtkTextBuffer" id="translate_from_buffer"> | ||||
|     <signal name="changed" handler="_pre_translate" swapped="no"/> | ||||
|   </object> | ||||
|   <object class="GtkTextBuffer" id="translate_to_buffer"/> | ||||
|   <object class="GtkDialog" id="translate_dialog"> | ||||
|     <property name="can-focus">False</property> | ||||
|     <property name="border-width">6</property> | ||||
|     <property name="title" translatable="yes">Translate</property> | ||||
|     <property name="resizable">False</property> | ||||
|     <property name="modal">True</property> | ||||
|     <property name="window-position">center-on-parent</property> | ||||
|     <property name="default-width">620</property> | ||||
|     <property name="default-height">320</property> | ||||
|     <property name="destroy-with-parent">True</property> | ||||
|     <property name="type-hint">dialog</property> | ||||
|     <property name="skip-taskbar-hint">True</property> | ||||
|     <property name="skip-pager-hint">True</property> | ||||
|     <property name="deletable">False</property> | ||||
|     <property name="gravity">center</property> | ||||
|     <child internal-child="vbox"> | ||||
|       <object class="GtkBox" id="dialog_vbox"> | ||||
|         <property name="visible">True</property> | ||||
|         <property name="can-focus">False</property> | ||||
|         <property name="spacing">12</property> | ||||
|         <child internal-child="action_area"> | ||||
|           <object class="GtkButtonBox" id="dialog_action_area"> | ||||
|             <property name="visible">True</property> | ||||
|             <property name="can-focus">False</property> | ||||
|             <property name="layout-style">end</property> | ||||
|             <child> | ||||
|               <object class="GtkButton" id="cancel_button"> | ||||
|                 <property name="label">gtk-cancel</property> | ||||
|                 <property name="visible">True</property> | ||||
|                 <property name="can-focus">True</property> | ||||
|                 <property name="can-default">True</property> | ||||
|                 <property name="receives-default">False</property> | ||||
|                 <property name="use-stock">True</property> | ||||
|               </object> | ||||
|               <packing> | ||||
|                 <property name="expand">True</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">0</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|             <child> | ||||
|               <object class="GtkButton" id="ok_button"> | ||||
|                 <property name="label">gtk-close</property> | ||||
|                 <property name="visible">True</property> | ||||
|                 <property name="can-focus">True</property> | ||||
|                 <property name="can-default">True</property> | ||||
|                 <property name="receives-default">False</property> | ||||
|                 <property name="use-stock">True</property> | ||||
|               </object> | ||||
|               <packing> | ||||
|                 <property name="expand">True</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">1</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|           </object> | ||||
|           <packing> | ||||
|             <property name="expand">False</property> | ||||
|             <property name="fill">False</property> | ||||
|             <property name="pack-type">end</property> | ||||
|             <property name="position">0</property> | ||||
|           </packing> | ||||
|         </child> | ||||
|         <child> | ||||
|           <object class="GtkBox"> | ||||
|             <property name="visible">True</property> | ||||
|             <property name="can-focus">False</property> | ||||
|             <property name="orientation">vertical</property> | ||||
|             <child> | ||||
|               <object class="GtkLabel" id="detected_language_lbl"> | ||||
|                 <property name="visible">True</property> | ||||
|                 <property name="can-focus">False</property> | ||||
|                 <property name="label" translatable="yes">Detected Language:</property> | ||||
|               </object> | ||||
|               <packing> | ||||
|                 <property name="expand">False</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">0</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|             <child> | ||||
|               <object class="GtkBox"> | ||||
|                 <property name="visible">True</property> | ||||
|                 <property name="can-focus">False</property> | ||||
|                 <property name="spacing">15</property> | ||||
|                 <property name="homogeneous">True</property> | ||||
|                 <child> | ||||
|                   <object class="GtkBox"> | ||||
|                     <property name="visible">True</property> | ||||
|                     <property name="can-focus">False</property> | ||||
|                     <property name="orientation">vertical</property> | ||||
|                     <child> | ||||
|                       <object class="GtkLabel"> | ||||
|                         <property name="visible">True</property> | ||||
|                         <property name="can-focus">False</property> | ||||
|                         <property name="label" translatable="yes">From:</property> | ||||
|                       </object> | ||||
|                       <packing> | ||||
|                         <property name="expand">False</property> | ||||
|                         <property name="fill">True</property> | ||||
|                         <property name="position">0</property> | ||||
|                       </packing> | ||||
|                     </child> | ||||
|                     <child> | ||||
|                       <object class="GtkScrolledWindow"> | ||||
|                         <property name="visible">True</property> | ||||
|                         <property name="can-focus">True</property> | ||||
|                         <property name="hscrollbar-policy">never</property> | ||||
|                         <property name="shadow-type">in</property> | ||||
|                         <child> | ||||
|                           <object class="GtkTextView" id="translate_from"> | ||||
|                             <property name="visible">True</property> | ||||
|                             <property name="can-focus">True</property> | ||||
|                             <property name="wrap-mode">word-char</property> | ||||
|                             <property name="buffer">translate_from_buffer</property> | ||||
|                             <property name="monospace">True</property> | ||||
|                           </object> | ||||
|                         </child> | ||||
|                       </object> | ||||
|                       <packing> | ||||
|                         <property name="expand">True</property> | ||||
|                         <property name="fill">True</property> | ||||
|                         <property name="position">1</property> | ||||
|                       </packing> | ||||
|                     </child> | ||||
|                   </object> | ||||
|                   <packing> | ||||
|                     <property name="expand">True</property> | ||||
|                     <property name="fill">True</property> | ||||
|                     <property name="position">0</property> | ||||
|                   </packing> | ||||
|                 </child> | ||||
|                 <child> | ||||
|                   <object class="GtkBox"> | ||||
|                     <property name="visible">True</property> | ||||
|                     <property name="can-focus">False</property> | ||||
|                     <property name="orientation">vertical</property> | ||||
|                     <child> | ||||
|                       <object class="GtkLabel"> | ||||
|                         <property name="visible">True</property> | ||||
|                         <property name="can-focus">False</property> | ||||
|                         <property name="label" translatable="yes">To:</property> | ||||
|                       </object> | ||||
|                       <packing> | ||||
|                         <property name="expand">False</property> | ||||
|                         <property name="fill">True</property> | ||||
|                         <property name="position">0</property> | ||||
|                       </packing> | ||||
|                     </child> | ||||
|                     <child> | ||||
|                       <object class="GtkScrolledWindow"> | ||||
|                         <property name="visible">True</property> | ||||
|                         <property name="can-focus">True</property> | ||||
|                         <property name="hscrollbar-policy">never</property> | ||||
|                         <property name="shadow-type">in</property> | ||||
|                         <child> | ||||
|                           <object class="GtkTextView" id="translate_to"> | ||||
|                             <property name="visible">True</property> | ||||
|                             <property name="can-focus">True</property> | ||||
|                             <property name="editable">False</property> | ||||
|                             <property name="wrap-mode">word-char</property> | ||||
|                             <property name="cursor-visible">False</property> | ||||
|                             <property name="buffer">translate_to_buffer</property> | ||||
|                             <property name="monospace">True</property> | ||||
|                           </object> | ||||
|                         </child> | ||||
|                       </object> | ||||
|                       <packing> | ||||
|                         <property name="expand">True</property> | ||||
|                         <property name="fill">True</property> | ||||
|                         <property name="position">1</property> | ||||
|                       </packing> | ||||
|                     </child> | ||||
|                   </object> | ||||
|                   <packing> | ||||
|                     <property name="expand">True</property> | ||||
|                     <property name="fill">True</property> | ||||
|                     <property name="position">1</property> | ||||
|                   </packing> | ||||
|                 </child> | ||||
|               </object> | ||||
|               <packing> | ||||
|                 <property name="expand">True</property> | ||||
|                 <property name="fill">True</property> | ||||
|                 <property name="position">1</property> | ||||
|               </packing> | ||||
|             </child> | ||||
|           </object> | ||||
|           <packing> | ||||
|             <property name="expand">True</property> | ||||
|             <property name="fill">True</property> | ||||
|             <property name="position">1</property> | ||||
|           </packing> | ||||
|         </child> | ||||
|       </object> | ||||
|     </child> | ||||
|     <action-widgets> | ||||
|       <action-widget response="-6">cancel_button</action-widget> | ||||
|       <action-widget response="-7">ok_button</action-widget> | ||||
|     </action-widgets> | ||||
|   </object> | ||||
| </interface> | ||||
| @@ -1,8 +1,5 @@ | ||||
| # Python imports | ||||
| import os | ||||
| import threading | ||||
| import subprocess | ||||
| import inspect | ||||
|  | ||||
| # Lib imports | ||||
| import gi | ||||
| @@ -16,19 +13,6 @@ from plugins.plugin_base import PluginBase | ||||
| from .xdgtrash import XDGTrash | ||||
|  | ||||
|  | ||||
| # NOTE: Threads WILL NOT die with parent's destruction. | ||||
| def threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start() | ||||
|     return wrapper | ||||
|  | ||||
| # NOTE: Threads WILL die with parent's destruction. | ||||
| def daemon_threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start() | ||||
|     return wrapper | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| class Plugin(PluginBase): | ||||
| @@ -55,19 +39,21 @@ class Plugin(PluginBase): | ||||
|         trash_a = Gtk.MenuItem("Trash Actions") | ||||
|         trash_menu = Gtk.Menu() | ||||
|  | ||||
|         self.restore = Gtk.MenuItem("Restore From Trash") | ||||
|         self.restore = Gtk.ImageMenuItem("Restore From Trash") | ||||
|         self.restore.set_image( Gtk.Image.new_from_icon_name("gtk-undelete", 3) ) | ||||
|         self.restore.connect("activate", self.restore_trash_files) | ||||
|  | ||||
|         self.empty = Gtk.MenuItem("Empty Trash") | ||||
|         self.empty = Gtk.ImageMenuItem("Empty Trash") | ||||
|         self.empty.set_image( Gtk.Image.new_from_icon_name("gtk-delete", 3) ) | ||||
|         self.empty.connect("activate", self.empty_trash) | ||||
|  | ||||
|         trash = Gtk.ImageMenuItem("Trash") | ||||
|         trash.set_image( Gtk.Image.new_from_icon_name("user-trash", 16) ) | ||||
|         trash.set_image( Gtk.Image.new_from_icon_name("user-trash", 3) ) | ||||
|         trash.connect("activate", self.trash_files) | ||||
|         trash.set_always_show_image(True) | ||||
|  | ||||
|         go_to = Gtk.ImageMenuItem("Go To Trash") | ||||
|         go_to.set_image( Gtk.Image.new_from_icon_name("user-trash", 16) ) | ||||
|         go_to.set_image( Gtk.Image.new_from_icon_name("gtk-go-forward", 3) ) | ||||
|         go_to.connect("activate", self.go_to_trash) | ||||
|         go_to.set_always_show_image(True) | ||||
|  | ||||
| @@ -99,16 +85,16 @@ class Plugin(PluginBase): | ||||
|     def delete_files(self, widget = None, eve = None): | ||||
|         self._event_system.emit("get_current_state") | ||||
|         state    = self._fm_state | ||||
|         uris     = state.selected_files | ||||
|         uris     = state.uris | ||||
|         response = None | ||||
|  | ||||
|         state.warning_alert.format_secondary_text(f"Do you really want to delete the {len(uris)} file(s)?") | ||||
|         state.message_dialog.format_secondary_text(f"Do you really want to delete the {len(uris)} file(s)?") | ||||
|         for uri in uris: | ||||
|             file = Gio.File.new_for_path(uri) | ||||
|  | ||||
|             if not response: | ||||
|                 response = state.warning_alert.run() | ||||
|                 state.warning_alert.hide() | ||||
|                 response = state.message_dialog.run() | ||||
|                 state.message_dialog.hide() | ||||
|             if response == Gtk.ResponseType.YES: | ||||
|                 type = file.query_file_type(flags=Gio.FileQueryInfoFlags.NONE) | ||||
|  | ||||
| @@ -122,14 +108,14 @@ class Plugin(PluginBase): | ||||
|     def trash_files(self, widget = None, eve = None, verbocity = False): | ||||
|         self._event_system.emit("get_current_state") | ||||
|         state = self._fm_state | ||||
|         for uri in state.selected_files: | ||||
|         for uri in state.uris: | ||||
|             self.trashman.trash(uri, verbocity) | ||||
|  | ||||
|     def restore_trash_files(self, widget = None, eve = None, verbocity = False): | ||||
|         self._event_system.emit("get_current_state") | ||||
|         state = self._fm_state | ||||
|         for uri in state.selected_files: | ||||
|             self.trashman.restore(filename=uri.split("/")[-1], verbose = verbocity) | ||||
|         for uri in state.uris: | ||||
|             self.trashman.restore(filename = uri.split("/")[-1], verbose = verbocity) | ||||
|  | ||||
|     def empty_trash(self, widget = None, eve = None, verbocity = False): | ||||
|         self.trashman.empty(verbose = verbocity) | ||||
|   | ||||
| @@ -3,7 +3,6 @@ import os | ||||
| import threading | ||||
| import subprocess | ||||
| import time | ||||
| import inspect | ||||
| import hashlib | ||||
| from datetime import datetime | ||||
|  | ||||
| @@ -26,12 +25,6 @@ def threaded(fn): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start() | ||||
|     return wrapper | ||||
|  | ||||
| # NOTE: Threads WILL die with parent's destruction. | ||||
| def daemon_threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start() | ||||
|     return wrapper | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -53,20 +46,9 @@ class Plugin(PluginBase): | ||||
|  | ||||
|  | ||||
|     def run(self): | ||||
|         self._builder           = Gtk.Builder() | ||||
|         self._builder = Gtk.Builder() | ||||
|         self._builder.add_from_file(self._GLADE_FILE) | ||||
|  | ||||
|         classes  = [self] | ||||
|         handlers = {} | ||||
|         for c in classes: | ||||
|             methods = None | ||||
|             try: | ||||
|                 methods = inspect.getmembers(c, predicate=inspect.ismethod) | ||||
|                 handlers.update(methods) | ||||
|             except Exception as e: | ||||
|                 print(repr(e)) | ||||
|  | ||||
|         self._builder.connect_signals(handlers) | ||||
|         self._connect_builder_signals(self, self._builder) | ||||
|  | ||||
|         self._thumbnailer_dialog    = self._builder.get_object("thumbnailer_dialog") | ||||
|         self._scrub_step            = self._builder.get_object("scrub_step") | ||||
| @@ -76,7 +58,7 @@ class Plugin(PluginBase): | ||||
|         self._file_hash             = self._builder.get_object("file_hash") | ||||
|  | ||||
|     def generate_reference_ui_element(self): | ||||
|         pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(f"{self.path}/../../icons/video.png", 16, 16, True) | ||||
|         pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(f"/usr/share/solarfm/icons/video.png", 16, 16, True) | ||||
|         icon   = Gtk.Image.new_from_pixbuf(pixbuf) | ||||
|         item   = Gtk.ImageMenuItem(self.name) | ||||
|  | ||||
| @@ -98,8 +80,8 @@ class Plugin(PluginBase): | ||||
|     def _process_changes(self, state): | ||||
|         self._fm_state = None | ||||
|  | ||||
|         if len(state.selected_files) == 1: | ||||
|             if state.selected_files[0].lower().endswith(state.tab.fvideos): | ||||
|         if len(state.uris) == 1: | ||||
|             if state.uris[0].lower().endswith(state.tab.fvideos): | ||||
|                 self._fm_state = state | ||||
|                 self._set_ui_data() | ||||
|                 response   = self._thumbnailer_dialog.run() | ||||
| @@ -115,9 +97,7 @@ class Plugin(PluginBase): | ||||
|         hash_img_pth  = f"{self._fm_state.tab.ABS_THUMBS_PTH}/{file_hash}.jpg" | ||||
|  | ||||
|         try: | ||||
|             os.remove(hash_img_pth) if os.path.isfile(hash_img_pth) else ... | ||||
|  | ||||
|             self._fm_state.tab.create_thumbnail(dir, file, f"{scrub_percent}%") | ||||
|             self._fm_state.tab.create_video_thumbnail(f"{dir}/{file}", f"{scrub_percent}%", True) | ||||
|             preview_pixbuf = GdkPixbuf.Pixbuf.new_from_file(hash_img_pth) | ||||
|             self._thumbnail_preview_img.set_from_pixbuf(preview_pixbuf) | ||||
|  | ||||
| @@ -132,11 +112,10 @@ class Plugin(PluginBase): | ||||
|  | ||||
|  | ||||
|     def _set_ui_data(self): | ||||
|         uri            = self._fm_state.selected_files[0] | ||||
|         uri            = self._fm_state.uris[0] | ||||
|         path           = self._fm_state.tab.get_current_directory() | ||||
|         parts          = uri.split("/") | ||||
|  | ||||
|         file_hash      = hashlib.sha256(str.encode(uri)).hexdigest() | ||||
|         file_hash      = self._fm_state.tab.fast_hash(uri) | ||||
|         hash_img_pth   = f"{self._fm_state.tab.ABS_THUMBS_PTH}/{file_hash}.jpg" | ||||
|         preview_pixbuf = GdkPixbuf.Pixbuf.new_from_file(hash_img_pth) | ||||
|  | ||||
|   | ||||
| @@ -13,7 +13,7 @@ function main() { | ||||
|     LINK=`xclip -selection clipboard -o` | ||||
|  | ||||
|     python "${HOME}/.config/solarfm/plugins/youtube_download/yt_dlp/__main__.py" \ | ||||
|             --cookies-from-browser firefox --write-sub --embed-sub --sub-langs en \ | ||||
|             --write-sub --embed-sub --sub-langs en \ | ||||
|             -o "${1}/%(title)s.%(ext)s" "${LINK}" | ||||
| } | ||||
| main "$@"; | ||||
|   | ||||
| @@ -1,5 +1,8 @@ | ||||
| # Python imports | ||||
| import os, threading, subprocess, time | ||||
| import os | ||||
| import threading | ||||
| import subprocess | ||||
| import time | ||||
|  | ||||
| # Lib imports | ||||
| import gi | ||||
| @@ -16,12 +19,6 @@ def threaded(fn): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start() | ||||
|     return wrapper | ||||
|  | ||||
| # NOTE: Threads WILL die with parent's destruction. | ||||
| def daemon_threaded(fn): | ||||
|     def wrapper(*args, **kwargs): | ||||
|         threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start() | ||||
|     return wrapper | ||||
|  | ||||
|  | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1 +0,0 @@ | ||||
| pip | ||||
| @@ -1,24 +0,0 @@ | ||||
| This is free and unencumbered software released into the public domain. | ||||
|  | ||||
| Anyone is free to copy, modify, publish, use, compile, sell, or | ||||
| distribute this software, either in source code form or as a compiled | ||||
| binary, for any purpose, commercial or non-commercial, and by any | ||||
| means. | ||||
|  | ||||
| In jurisdictions that recognize copyright laws, the author or authors | ||||
| of this software dedicate any and all copyright interest in the | ||||
| software to the public domain. We make this dedication for the benefit | ||||
| of the public at large and to the detriment of our heirs and | ||||
| successors. We intend this dedication to be an overt act of | ||||
| relinquishment in perpetuity of all present and future rights to this | ||||
| software under copyright law. | ||||
|  | ||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | ||||
| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | ||||
| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. | ||||
| IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR | ||||
| OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, | ||||
| ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | ||||
| OTHER DEALINGS IN THE SOFTWARE. | ||||
|  | ||||
| For more information, please refer to <http://unlicense.org/> | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,6 +0,0 @@ | ||||
| Wheel-Version: 1.0 | ||||
| Generator: bdist_wheel (0.37.1) | ||||
| Root-Is-Purelib: true | ||||
| Tag: py2-none-any | ||||
| Tag: py3-none-any | ||||
|  | ||||
| @@ -1,3 +0,0 @@ | ||||
| [console_scripts] | ||||
| yt-dlp = yt_dlp:main | ||||
|  | ||||
| @@ -1 +0,0 @@ | ||||
| yt_dlp | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,13 +1,11 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| # Execute with | ||||
| # $ python yt_dlp/__main__.py (2.6+) | ||||
| # $ python -m yt_dlp          (2.7+) | ||||
| # $ python -m yt_dlp | ||||
|  | ||||
| import sys | ||||
|  | ||||
| if __package__ is None and not hasattr(sys, 'frozen'): | ||||
| if __package__ is None and not getattr(sys, 'frozen', False): | ||||
|     # direct call of __main__.py | ||||
|     import os.path | ||||
|     path = os.path.realpath(os.path.abspath(__file__)) | ||||
|   | ||||
| @@ -0,0 +1,5 @@ | ||||
| import os | ||||
|  | ||||
|  | ||||
| def get_hook_dirs(): | ||||
|     return [os.path.dirname(__file__)] | ||||
							
								
								
									
										32
									
								
								plugins/youtube_download/yt_dlp/__pyinstaller/hook-yt_dlp.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								plugins/youtube_download/yt_dlp/__pyinstaller/hook-yt_dlp.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | ||||
| import sys | ||||
|  | ||||
| from PyInstaller.utils.hooks import collect_submodules | ||||
|  | ||||
|  | ||||
| def pycryptodome_module(): | ||||
|     try: | ||||
|         import Cryptodome  # noqa: F401 | ||||
|     except ImportError: | ||||
|         try: | ||||
|             import Crypto  # noqa: F401 | ||||
|             print('WARNING: Using Crypto since Cryptodome is not available. ' | ||||
|                   'Install with: pip install pycryptodomex', file=sys.stderr) | ||||
|             return 'Crypto' | ||||
|         except ImportError: | ||||
|             pass | ||||
|     return 'Cryptodome' | ||||
|  | ||||
|  | ||||
| def get_hidden_imports(): | ||||
|     yield from ('yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated') | ||||
|     yield from ('yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated') | ||||
|     yield pycryptodome_module() | ||||
|     yield from collect_submodules('websockets') | ||||
|     # These are auto-detected, but explicitly add them just in case | ||||
|     yield from ('mutagen', 'brotli', 'certifi') | ||||
|  | ||||
|  | ||||
| hiddenimports = list(get_hidden_imports()) | ||||
| print(f'Adding imports: {hiddenimports}') | ||||
|  | ||||
| excludedimports = ['youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts'] | ||||
| @@ -1,26 +1,18 @@ | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| import base64 | ||||
| from math import ceil | ||||
|  | ||||
| from .compat import ( | ||||
|     compat_b64decode, | ||||
|     compat_ord, | ||||
|     compat_pycrypto_AES, | ||||
| ) | ||||
| from .utils import ( | ||||
|     bytes_to_intlist, | ||||
|     intlist_to_bytes, | ||||
| ) | ||||
| from .compat import compat_ord | ||||
| from .dependencies import Cryptodome | ||||
| from .utils import bytes_to_intlist, intlist_to_bytes | ||||
|  | ||||
|  | ||||
| if compat_pycrypto_AES: | ||||
| if Cryptodome.AES: | ||||
|     def aes_cbc_decrypt_bytes(data, key, iv): | ||||
|         """ Decrypt bytes with AES-CBC using pycryptodome """ | ||||
|         return compat_pycrypto_AES.new(key, compat_pycrypto_AES.MODE_CBC, iv).decrypt(data) | ||||
|         return Cryptodome.AES.new(key, Cryptodome.AES.MODE_CBC, iv).decrypt(data) | ||||
|  | ||||
|     def aes_gcm_decrypt_and_verify_bytes(data, key, tag, nonce): | ||||
|         """ Decrypt bytes with AES-GCM using pycryptodome """ | ||||
|         return compat_pycrypto_AES.new(key, compat_pycrypto_AES.MODE_GCM, nonce).decrypt_and_verify(data, tag) | ||||
|         return Cryptodome.AES.new(key, Cryptodome.AES.MODE_GCM, nonce).decrypt_and_verify(data, tag) | ||||
|  | ||||
| else: | ||||
|     def aes_cbc_decrypt_bytes(data, key, iv): | ||||
| @@ -32,16 +24,59 @@ else: | ||||
|         return intlist_to_bytes(aes_gcm_decrypt_and_verify(*map(bytes_to_intlist, (data, key, tag, nonce)))) | ||||
|  | ||||
|  | ||||
| def unpad_pkcs7(data): | ||||
|     return data[:-compat_ord(data[-1])] | ||||
| def aes_cbc_encrypt_bytes(data, key, iv, **kwargs): | ||||
|     return intlist_to_bytes(aes_cbc_encrypt(*map(bytes_to_intlist, (data, key, iv)), **kwargs)) | ||||
|  | ||||
|  | ||||
| BLOCK_SIZE_BYTES = 16 | ||||
|  | ||||
|  | ||||
| def unpad_pkcs7(data): | ||||
|     return data[:-compat_ord(data[-1])] | ||||
|  | ||||
|  | ||||
| def pkcs7_padding(data): | ||||
|     """ | ||||
|     PKCS#7 padding | ||||
|  | ||||
|     @param {int[]} data        cleartext | ||||
|     @returns {int[]}           padding data | ||||
|     """ | ||||
|  | ||||
|     remaining_length = BLOCK_SIZE_BYTES - len(data) % BLOCK_SIZE_BYTES | ||||
|     return data + [remaining_length] * remaining_length | ||||
|  | ||||
|  | ||||
| def pad_block(block, padding_mode): | ||||
|     """ | ||||
|     Pad a block with the given padding mode | ||||
|     @param {int[]} block        block to pad | ||||
|     @param padding_mode         padding mode | ||||
|     """ | ||||
|     padding_size = BLOCK_SIZE_BYTES - len(block) | ||||
|  | ||||
|     PADDING_BYTE = { | ||||
|         'pkcs7': padding_size, | ||||
|         'iso7816': 0x0, | ||||
|         'whitespace': 0x20, | ||||
|         'zero': 0x0, | ||||
|     } | ||||
|  | ||||
|     if padding_size < 0: | ||||
|         raise ValueError('Block size exceeded') | ||||
|     elif padding_mode not in PADDING_BYTE: | ||||
|         raise NotImplementedError(f'Padding mode {padding_mode} is not implemented') | ||||
|  | ||||
|     if padding_mode == 'iso7816' and padding_size: | ||||
|         block = block + [0x80]  # NB: += mutates list | ||||
|         padding_size -= 1 | ||||
|  | ||||
|     return block + [PADDING_BYTE[padding_mode]] * padding_size | ||||
|  | ||||
|  | ||||
| def aes_ecb_encrypt(data, key, iv=None): | ||||
|     """ | ||||
|     Encrypt with aes in ECB mode | ||||
|     Encrypt with aes in ECB mode. Using PKCS#7 padding | ||||
|  | ||||
|     @param {int[]} data        cleartext | ||||
|     @param {int[]} key         16/24/32-Byte cipher key | ||||
| @@ -54,8 +89,7 @@ def aes_ecb_encrypt(data, key, iv=None): | ||||
|     encrypted_data = [] | ||||
|     for i in range(block_count): | ||||
|         block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES] | ||||
|         encrypted_data += aes_encrypt(block, expanded_key) | ||||
|     encrypted_data = encrypted_data[:len(data)] | ||||
|         encrypted_data += aes_encrypt(pkcs7_padding(block), expanded_key) | ||||
|  | ||||
|     return encrypted_data | ||||
|  | ||||
| @@ -145,13 +179,14 @@ def aes_cbc_decrypt(data, key, iv): | ||||
|     return decrypted_data | ||||
|  | ||||
|  | ||||
| def aes_cbc_encrypt(data, key, iv): | ||||
| def aes_cbc_encrypt(data, key, iv, *, padding_mode='pkcs7'): | ||||
|     """ | ||||
|     Encrypt with aes in CBC mode. Using PKCS#7 padding | ||||
|     Encrypt with aes in CBC mode | ||||
|  | ||||
|     @param {int[]} data        cleartext | ||||
|     @param {int[]} key         16/24/32-Byte cipher key | ||||
|     @param {int[]} iv          16-Byte IV | ||||
|     @param padding_mode        Padding mode to use | ||||
|     @returns {int[]}           encrypted data | ||||
|     """ | ||||
|     expanded_key = key_expansion(key) | ||||
| @@ -161,8 +196,8 @@ def aes_cbc_encrypt(data, key, iv): | ||||
|     previous_cipher_block = iv | ||||
|     for i in range(block_count): | ||||
|         block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES] | ||||
|         remaining_length = BLOCK_SIZE_BYTES - len(block) | ||||
|         block += [remaining_length] * remaining_length | ||||
|         block = pad_block(block, padding_mode) | ||||
|  | ||||
|         mixed_block = xor(block, previous_cipher_block) | ||||
|  | ||||
|         encrypted_block = aes_encrypt(mixed_block, expanded_key) | ||||
| @@ -273,8 +308,8 @@ def aes_decrypt_text(data, password, key_size_bytes): | ||||
|     """ | ||||
|     NONCE_LENGTH_BYTES = 8 | ||||
|  | ||||
|     data = bytes_to_intlist(compat_b64decode(data)) | ||||
|     password = bytes_to_intlist(password.encode('utf-8')) | ||||
|     data = bytes_to_intlist(base64.b64decode(data)) | ||||
|     password = bytes_to_intlist(password.encode()) | ||||
|  | ||||
|     key = password[:key_size_bytes] + [0] * (key_size_bytes - len(password)) | ||||
|     key = aes_encrypt(key[:BLOCK_SIZE_BYTES], key_expansion(key)) * (key_size_bytes // BLOCK_SIZE_BYTES) | ||||
| @@ -503,20 +538,30 @@ def ghash(subkey, data): | ||||
|  | ||||
|     last_y = [0] * BLOCK_SIZE_BYTES | ||||
|     for i in range(0, len(data), BLOCK_SIZE_BYTES): | ||||
|         block = data[i : i + BLOCK_SIZE_BYTES]  # noqa: E203 | ||||
|         block = data[i: i + BLOCK_SIZE_BYTES] | ||||
|         last_y = block_product(xor(last_y, block), subkey) | ||||
|  | ||||
|     return last_y | ||||
|  | ||||
|  | ||||
| __all__ = [ | ||||
|     'aes_ctr_decrypt', | ||||
|     'aes_cbc_decrypt', | ||||
|     'aes_cbc_decrypt_bytes', | ||||
|     'aes_ctr_decrypt', | ||||
|     'aes_decrypt_text', | ||||
|     'aes_encrypt', | ||||
|     'aes_decrypt', | ||||
|     'aes_ecb_decrypt', | ||||
|     'aes_gcm_decrypt_and_verify', | ||||
|     'aes_gcm_decrypt_and_verify_bytes', | ||||
|  | ||||
|     'aes_cbc_encrypt', | ||||
|     'aes_cbc_encrypt_bytes', | ||||
|     'aes_ctr_encrypt', | ||||
|     'aes_ecb_encrypt', | ||||
|     'aes_encrypt', | ||||
|  | ||||
|     'key_expansion', | ||||
|     'pad_block', | ||||
|     'pkcs7_padding', | ||||
|     'unpad_pkcs7', | ||||
| ] | ||||
|   | ||||
| @@ -1,37 +1,30 @@ | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| import errno | ||||
| import io | ||||
| import contextlib | ||||
| import json | ||||
| import os | ||||
| import re | ||||
| import shutil | ||||
| import traceback | ||||
| import urllib.parse | ||||
|  | ||||
| from .compat import compat_getenv | ||||
| from .utils import ( | ||||
|     expand_path, | ||||
|     write_json_file, | ||||
| ) | ||||
| from .utils import expand_path, traverse_obj, version_tuple, write_json_file | ||||
| from .version import __version__ | ||||
|  | ||||
|  | ||||
| class Cache(object): | ||||
| class Cache: | ||||
|     def __init__(self, ydl): | ||||
|         self._ydl = ydl | ||||
|  | ||||
|     def _get_root_dir(self): | ||||
|         res = self._ydl.params.get('cachedir') | ||||
|         if res is None: | ||||
|             cache_root = compat_getenv('XDG_CACHE_HOME', '~/.cache') | ||||
|             cache_root = os.getenv('XDG_CACHE_HOME', '~/.cache') | ||||
|             res = os.path.join(cache_root, 'yt-dlp') | ||||
|         return expand_path(res) | ||||
|  | ||||
|     def _get_cache_fn(self, section, key, dtype): | ||||
|         assert re.match(r'^[a-zA-Z0-9_.-]+$', section), \ | ||||
|             'invalid section %r' % section | ||||
|         assert re.match(r'^[a-zA-Z0-9_.-]+$', key), 'invalid key %r' % key | ||||
|         return os.path.join( | ||||
|             self._get_root_dir(), section, '%s.%s' % (key, dtype)) | ||||
|         assert re.match(r'^[\w.-]+$', section), f'invalid section {section!r}' | ||||
|         key = urllib.parse.quote(key, safe='').replace('%', ',')  # encode non-ascii characters | ||||
|         return os.path.join(self._get_root_dir(), section, f'{key}.{dtype}') | ||||
|  | ||||
|     @property | ||||
|     def enabled(self): | ||||
| @@ -45,39 +38,39 @@ class Cache(object): | ||||
|  | ||||
|         fn = self._get_cache_fn(section, key, dtype) | ||||
|         try: | ||||
|             try: | ||||
|                 os.makedirs(os.path.dirname(fn)) | ||||
|             except OSError as ose: | ||||
|                 if ose.errno != errno.EEXIST: | ||||
|                     raise | ||||
|             os.makedirs(os.path.dirname(fn), exist_ok=True) | ||||
|             self._ydl.write_debug(f'Saving {section}.{key} to cache') | ||||
|             write_json_file(data, fn) | ||||
|             write_json_file({'yt-dlp_version': __version__, 'data': data}, fn) | ||||
|         except Exception: | ||||
|             tb = traceback.format_exc() | ||||
|             self._ydl.report_warning( | ||||
|                 'Writing cache to %r failed: %s' % (fn, tb)) | ||||
|             self._ydl.report_warning(f'Writing cache to {fn!r} failed: {tb}') | ||||
|  | ||||
|     def load(self, section, key, dtype='json', default=None): | ||||
|     def _validate(self, data, min_ver): | ||||
|         version = traverse_obj(data, 'yt-dlp_version') | ||||
|         if not version:  # Backward compatibility | ||||
|             data, version = {'data': data}, '2022.08.19' | ||||
|         if not min_ver or version_tuple(version) >= version_tuple(min_ver): | ||||
|             return data['data'] | ||||
|         self._ydl.write_debug(f'Discarding old cache from version {version} (needs {min_ver})') | ||||
|  | ||||
|     def load(self, section, key, dtype='json', default=None, *, min_ver=None): | ||||
|         assert dtype in ('json',) | ||||
|  | ||||
|         if not self.enabled: | ||||
|             return default | ||||
|  | ||||
|         cache_fn = self._get_cache_fn(section, key, dtype) | ||||
|         try: | ||||
|         with contextlib.suppress(OSError): | ||||
|             try: | ||||
|                 with io.open(cache_fn, 'r', encoding='utf-8') as cachef: | ||||
|                 with open(cache_fn, encoding='utf-8') as cachef: | ||||
|                     self._ydl.write_debug(f'Loading {section}.{key} from cache') | ||||
|                     return json.load(cachef) | ||||
|             except ValueError: | ||||
|                     return self._validate(json.load(cachef), min_ver) | ||||
|             except (ValueError, KeyError): | ||||
|                 try: | ||||
|                     file_size = os.path.getsize(cache_fn) | ||||
|                 except (OSError, IOError) as oe: | ||||
|                 except OSError as oe: | ||||
|                     file_size = str(oe) | ||||
|                 self._ydl.report_warning( | ||||
|                     'Cache retrieval from %s failed (%s)' % (cache_fn, file_size)) | ||||
|         except IOError: | ||||
|             pass  # No cache available | ||||
|                 self._ydl.report_warning(f'Cache retrieval from {cache_fn} failed ({file_size})') | ||||
|  | ||||
|         return default | ||||
|  | ||||
|   | ||||
							
								
								
									
										5
									
								
								plugins/youtube_download/yt_dlp/casefold.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								plugins/youtube_download/yt_dlp/casefold.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | ||||
| import warnings | ||||
|  | ||||
| warnings.warn(DeprecationWarning(f'{__name__} is deprecated')) | ||||
|  | ||||
| casefold = str.casefold | ||||
| @@ -1,311 +0,0 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| import asyncio | ||||
| import base64 | ||||
| import collections | ||||
| import ctypes | ||||
| import getpass | ||||
| import html | ||||
| import html.parser | ||||
| import http | ||||
| import http.client | ||||
| import http.cookiejar | ||||
| import http.cookies | ||||
| import http.server | ||||
| import itertools | ||||
| import optparse | ||||
| import os | ||||
| import re | ||||
| import shlex | ||||
| import shutil | ||||
| import socket | ||||
| import struct | ||||
| import subprocess | ||||
| import sys | ||||
| import tokenize | ||||
| import urllib | ||||
| import xml.etree.ElementTree as etree | ||||
| from subprocess import DEVNULL | ||||
|  | ||||
|  | ||||
| # HTMLParseError has been deprecated in Python 3.3 and removed in | ||||
| # Python 3.5. Introducing dummy exception for Python >3.5 for compatible | ||||
| # and uniform cross-version exception handling | ||||
| class compat_HTMLParseError(Exception): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| # compat_ctypes_WINFUNCTYPE = ctypes.WINFUNCTYPE | ||||
| # will not work since ctypes.WINFUNCTYPE does not exist in UNIX machines | ||||
| def compat_ctypes_WINFUNCTYPE(*args, **kwargs): | ||||
|     return ctypes.WINFUNCTYPE(*args, **kwargs) | ||||
|  | ||||
|  | ||||
| class _TreeBuilder(etree.TreeBuilder): | ||||
|     def doctype(self, name, pubid, system): | ||||
|         pass | ||||
|  | ||||
|  | ||||
| def compat_etree_fromstring(text): | ||||
|     return etree.XML(text, parser=etree.XMLParser(target=_TreeBuilder())) | ||||
|  | ||||
|  | ||||
| compat_os_name = os._name if os.name == 'java' else os.name | ||||
|  | ||||
|  | ||||
| if compat_os_name == 'nt': | ||||
|     def compat_shlex_quote(s): | ||||
|         return s if re.match(r'^[-_\w./]+$', s) else '"%s"' % s.replace('"', '\\"') | ||||
| else: | ||||
|     from shlex import quote as compat_shlex_quote | ||||
|  | ||||
|  | ||||
| def compat_ord(c): | ||||
|     if type(c) is int: | ||||
|         return c | ||||
|     else: | ||||
|         return ord(c) | ||||
|  | ||||
|  | ||||
| def compat_setenv(key, value, env=os.environ): | ||||
|     env[key] = value | ||||
|  | ||||
|  | ||||
| if compat_os_name == 'nt' and sys.version_info < (3, 8): | ||||
|     # os.path.realpath on Windows does not follow symbolic links | ||||
|     # prior to Python 3.8 (see https://bugs.python.org/issue9949) | ||||
|     def compat_realpath(path): | ||||
|         while os.path.islink(path): | ||||
|             path = os.path.abspath(os.readlink(path)) | ||||
|         return path | ||||
| else: | ||||
|     compat_realpath = os.path.realpath | ||||
|  | ||||
|  | ||||
| def compat_print(s): | ||||
|     assert isinstance(s, compat_str) | ||||
|     print(s) | ||||
|  | ||||
|  | ||||
| # Fix https://github.com/ytdl-org/youtube-dl/issues/4223 | ||||
| # See http://bugs.python.org/issue9161 for what is broken | ||||
| def workaround_optparse_bug9161(): | ||||
|     op = optparse.OptionParser() | ||||
|     og = optparse.OptionGroup(op, 'foo') | ||||
|     try: | ||||
|         og.add_option('-t') | ||||
|     except TypeError: | ||||
|         real_add_option = optparse.OptionGroup.add_option | ||||
|  | ||||
|         def _compat_add_option(self, *args, **kwargs): | ||||
|             enc = lambda v: ( | ||||
|                 v.encode('ascii', 'replace') if isinstance(v, compat_str) | ||||
|                 else v) | ||||
|             bargs = [enc(a) for a in args] | ||||
|             bkwargs = dict( | ||||
|                 (k, enc(v)) for k, v in kwargs.items()) | ||||
|             return real_add_option(self, *bargs, **bkwargs) | ||||
|         optparse.OptionGroup.add_option = _compat_add_option | ||||
|  | ||||
|  | ||||
| try: | ||||
|     compat_Pattern = re.Pattern | ||||
| except AttributeError: | ||||
|     compat_Pattern = type(re.compile('')) | ||||
|  | ||||
|  | ||||
| try: | ||||
|     compat_Match = re.Match | ||||
| except AttributeError: | ||||
|     compat_Match = type(re.compile('').match('')) | ||||
|  | ||||
|  | ||||
| try: | ||||
|     compat_asyncio_run = asyncio.run  # >= 3.7 | ||||
| except AttributeError: | ||||
|     def compat_asyncio_run(coro): | ||||
|         try: | ||||
|             loop = asyncio.get_event_loop() | ||||
|         except RuntimeError: | ||||
|             loop = asyncio.new_event_loop() | ||||
|             asyncio.set_event_loop(loop) | ||||
|         loop.run_until_complete(coro) | ||||
|  | ||||
|     asyncio.run = compat_asyncio_run | ||||
|  | ||||
|  | ||||
| # Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl | ||||
| # See https://github.com/yt-dlp/yt-dlp/issues/792 | ||||
| # https://docs.python.org/3/library/os.path.html#os.path.expanduser | ||||
| if compat_os_name in ('nt', 'ce') and 'HOME' in os.environ: | ||||
|     _userhome = os.environ['HOME'] | ||||
|  | ||||
|     def compat_expanduser(path): | ||||
|         if not path.startswith('~'): | ||||
|             return path | ||||
|         i = path.replace('\\', '/', 1).find('/')  # ~user | ||||
|         if i < 0: | ||||
|             i = len(path) | ||||
|         userhome = os.path.join(os.path.dirname(_userhome), path[1:i]) if i > 1 else _userhome | ||||
|         return userhome + path[i:] | ||||
| else: | ||||
|     compat_expanduser = os.path.expanduser | ||||
|  | ||||
|  | ||||
| try: | ||||
|     from Cryptodome.Cipher import AES as compat_pycrypto_AES | ||||
| except ImportError: | ||||
|     try: | ||||
|         from Crypto.Cipher import AES as compat_pycrypto_AES | ||||
|     except ImportError: | ||||
|         compat_pycrypto_AES = None | ||||
|  | ||||
|  | ||||
| WINDOWS_VT_MODE = False if compat_os_name == 'nt' else None | ||||
|  | ||||
|  | ||||
| def windows_enable_vt_mode():  # TODO: Do this the proper way https://bugs.python.org/issue30075 | ||||
|     if compat_os_name != 'nt': | ||||
|         return | ||||
|     global WINDOWS_VT_MODE | ||||
|     startupinfo = subprocess.STARTUPINFO() | ||||
|     startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW | ||||
|     try: | ||||
|         subprocess.Popen('', shell=True, startupinfo=startupinfo) | ||||
|         WINDOWS_VT_MODE = True | ||||
|     except Exception: | ||||
|         pass | ||||
|  | ||||
|  | ||||
| #  Deprecated | ||||
|  | ||||
| compat_basestring = str | ||||
| compat_chr = chr | ||||
| compat_filter = filter | ||||
| compat_input = input | ||||
| compat_integer_types = (int, ) | ||||
| compat_kwargs = lambda kwargs: kwargs | ||||
| compat_map = map | ||||
| compat_numeric_types = (int, float, complex) | ||||
| compat_str = str | ||||
| compat_xpath = lambda xpath: xpath | ||||
| compat_zip = zip | ||||
|  | ||||
| compat_collections_abc = collections.abc | ||||
| compat_HTMLParser = html.parser.HTMLParser | ||||
| compat_HTTPError = urllib.error.HTTPError | ||||
| compat_Struct = struct.Struct | ||||
| compat_b64decode = base64.b64decode | ||||
| compat_cookiejar = http.cookiejar | ||||
| compat_cookiejar_Cookie = compat_cookiejar.Cookie | ||||
| compat_cookies = http.cookies | ||||
| compat_cookies_SimpleCookie = compat_cookies.SimpleCookie | ||||
| compat_etree_Element = etree.Element | ||||
| compat_etree_register_namespace = etree.register_namespace | ||||
| compat_get_terminal_size = shutil.get_terminal_size | ||||
| compat_getenv = os.getenv | ||||
| compat_getpass = getpass.getpass | ||||
| compat_html_entities = html.entities | ||||
| compat_html_entities_html5 = compat_html_entities.html5 | ||||
| compat_http_client = http.client | ||||
| compat_http_server = http.server | ||||
| compat_itertools_count = itertools.count | ||||
| compat_parse_qs = urllib.parse.parse_qs | ||||
| compat_shlex_split = shlex.split | ||||
| compat_socket_create_connection = socket.create_connection | ||||
| compat_struct_pack = struct.pack | ||||
| compat_struct_unpack = struct.unpack | ||||
| compat_subprocess_get_DEVNULL = lambda: DEVNULL | ||||
| compat_tokenize_tokenize = tokenize.tokenize | ||||
| compat_urllib_error = urllib.error | ||||
| compat_urllib_parse = urllib.parse | ||||
| compat_urllib_parse_quote = urllib.parse.quote | ||||
| compat_urllib_parse_quote_plus = urllib.parse.quote_plus | ||||
| compat_urllib_parse_unquote = urllib.parse.unquote | ||||
| compat_urllib_parse_unquote_plus = urllib.parse.unquote_plus | ||||
| compat_urllib_parse_unquote_to_bytes = urllib.parse.unquote_to_bytes | ||||
| compat_urllib_parse_urlencode = urllib.parse.urlencode | ||||
| compat_urllib_parse_urlparse = urllib.parse.urlparse | ||||
| compat_urllib_parse_urlunparse = urllib.parse.urlunparse | ||||
| compat_urllib_request = urllib.request | ||||
| compat_urllib_request_DataHandler = urllib.request.DataHandler | ||||
| compat_urllib_response = urllib.response | ||||
| compat_urlparse = urllib.parse | ||||
| compat_urlretrieve = urllib.request.urlretrieve | ||||
| compat_xml_parse_error = etree.ParseError | ||||
|  | ||||
|  | ||||
| # Set public objects | ||||
|  | ||||
| __all__ = [ | ||||
|     'WINDOWS_VT_MODE', | ||||
|     'compat_HTMLParseError', | ||||
|     'compat_HTMLParser', | ||||
|     'compat_HTTPError', | ||||
|     'compat_Match', | ||||
|     'compat_Pattern', | ||||
|     'compat_Struct', | ||||
|     'compat_asyncio_run', | ||||
|     'compat_b64decode', | ||||
|     'compat_basestring', | ||||
|     'compat_chr', | ||||
|     'compat_collections_abc', | ||||
|     'compat_cookiejar', | ||||
|     'compat_cookiejar_Cookie', | ||||
|     'compat_cookies', | ||||
|     'compat_cookies_SimpleCookie', | ||||
|     'compat_ctypes_WINFUNCTYPE', | ||||
|     'compat_etree_Element', | ||||
|     'compat_etree_fromstring', | ||||
|     'compat_etree_register_namespace', | ||||
|     'compat_expanduser', | ||||
|     'compat_filter', | ||||
|     'compat_get_terminal_size', | ||||
|     'compat_getenv', | ||||
|     'compat_getpass', | ||||
|     'compat_html_entities', | ||||
|     'compat_html_entities_html5', | ||||
|     'compat_http_client', | ||||
|     'compat_http_server', | ||||
|     'compat_input', | ||||
|     'compat_integer_types', | ||||
|     'compat_itertools_count', | ||||
|     'compat_kwargs', | ||||
|     'compat_map', | ||||
|     'compat_numeric_types', | ||||
|     'compat_ord', | ||||
|     'compat_os_name', | ||||
|     'compat_parse_qs', | ||||
|     'compat_print', | ||||
|     'compat_pycrypto_AES', | ||||
|     'compat_realpath', | ||||
|     'compat_setenv', | ||||
|     'compat_shlex_quote', | ||||
|     'compat_shlex_split', | ||||
|     'compat_socket_create_connection', | ||||
|     'compat_str', | ||||
|     'compat_struct_pack', | ||||
|     'compat_struct_unpack', | ||||
|     'compat_subprocess_get_DEVNULL', | ||||
|     'compat_tokenize_tokenize', | ||||
|     'compat_urllib_error', | ||||
|     'compat_urllib_parse', | ||||
|     'compat_urllib_parse_quote', | ||||
|     'compat_urllib_parse_quote_plus', | ||||
|     'compat_urllib_parse_unquote', | ||||
|     'compat_urllib_parse_unquote_plus', | ||||
|     'compat_urllib_parse_unquote_to_bytes', | ||||
|     'compat_urllib_parse_urlencode', | ||||
|     'compat_urllib_parse_urlparse', | ||||
|     'compat_urllib_parse_urlunparse', | ||||
|     'compat_urllib_request', | ||||
|     'compat_urllib_request_DataHandler', | ||||
|     'compat_urllib_response', | ||||
|     'compat_urlparse', | ||||
|     'compat_urlretrieve', | ||||
|     'compat_xml_parse_error', | ||||
|     'compat_xpath', | ||||
|     'compat_zip', | ||||
|     'windows_enable_vt_mode', | ||||
|     'workaround_optparse_bug9161', | ||||
| ] | ||||
							
								
								
									
										79
									
								
								plugins/youtube_download/yt_dlp/compat/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										79
									
								
								plugins/youtube_download/yt_dlp/compat/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,79 @@ | ||||
| import os | ||||
| import sys | ||||
| import xml.etree.ElementTree as etree | ||||
|  | ||||
| from .compat_utils import passthrough_module | ||||
|  | ||||
| passthrough_module(__name__, '._deprecated') | ||||
| del passthrough_module | ||||
|  | ||||
|  | ||||
| # HTMLParseError has been deprecated in Python 3.3 and removed in | ||||
| # Python 3.5. Introducing dummy exception for Python >3.5 for compatible | ||||
| # and uniform cross-version exception handling | ||||
| class compat_HTMLParseError(ValueError): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class _TreeBuilder(etree.TreeBuilder): | ||||
|     def doctype(self, name, pubid, system): | ||||
|         pass | ||||
|  | ||||
|  | ||||
| def compat_etree_fromstring(text): | ||||
|     return etree.XML(text, parser=etree.XMLParser(target=_TreeBuilder())) | ||||
|  | ||||
|  | ||||
| compat_os_name = os._name if os.name == 'java' else os.name | ||||
|  | ||||
|  | ||||
| if compat_os_name == 'nt': | ||||
|     def compat_shlex_quote(s): | ||||
|         import re | ||||
|         return s if re.match(r'^[-_\w./]+$', s) else '"%s"' % s.replace('"', '\\"') | ||||
| else: | ||||
|     from shlex import quote as compat_shlex_quote  # noqa: F401 | ||||
|  | ||||
|  | ||||
| def compat_ord(c): | ||||
|     return c if isinstance(c, int) else ord(c) | ||||
|  | ||||
|  | ||||
| if compat_os_name == 'nt' and sys.version_info < (3, 8): | ||||
|     # os.path.realpath on Windows does not follow symbolic links | ||||
|     # prior to Python 3.8 (see https://bugs.python.org/issue9949) | ||||
|     def compat_realpath(path): | ||||
|         while os.path.islink(path): | ||||
|             path = os.path.abspath(os.readlink(path)) | ||||
|         return os.path.realpath(path) | ||||
| else: | ||||
|     compat_realpath = os.path.realpath | ||||
|  | ||||
|  | ||||
| # Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl | ||||
| # See https://github.com/yt-dlp/yt-dlp/issues/792 | ||||
| # https://docs.python.org/3/library/os.path.html#os.path.expanduser | ||||
| if compat_os_name in ('nt', 'ce'): | ||||
|     def compat_expanduser(path): | ||||
|         HOME = os.environ.get('HOME') | ||||
|         if not HOME: | ||||
|             return os.path.expanduser(path) | ||||
|         elif not path.startswith('~'): | ||||
|             return path | ||||
|         i = path.replace('\\', '/', 1).find('/')  # ~user | ||||
|         if i < 0: | ||||
|             i = len(path) | ||||
|         userhome = os.path.join(os.path.dirname(HOME), path[1:i]) if i > 1 else HOME | ||||
|         return userhome + path[i:] | ||||
| else: | ||||
|     compat_expanduser = os.path.expanduser | ||||
|  | ||||
|  | ||||
| def urllib_req_to_req(urllib_request): | ||||
|     """Convert urllib Request to a networking Request""" | ||||
|     from ..networking import Request | ||||
|     from ..utils.networking import HTTPHeaderDict | ||||
|     return Request( | ||||
|         urllib_request.get_full_url(), data=urllib_request.data, method=urllib_request.get_method(), | ||||
|         headers=HTTPHeaderDict(urllib_request.headers, urllib_request.unredirected_hdrs), | ||||
|         extensions={'timeout': urllib_request.timeout} if hasattr(urllib_request, 'timeout') else None) | ||||
							
								
								
									
										23
									
								
								plugins/youtube_download/yt_dlp/compat/_deprecated.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										23
									
								
								plugins/youtube_download/yt_dlp/compat/_deprecated.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,23 @@ | ||||
| """Deprecated - New code should avoid these""" | ||||
| import warnings | ||||
|  | ||||
| from .compat_utils import passthrough_module | ||||
|  | ||||
| # XXX: Implement this the same way as other DeprecationWarnings without circular import | ||||
| passthrough_module(__name__, '.._legacy', callback=lambda attr: warnings.warn( | ||||
|     DeprecationWarning(f'{__name__}.{attr} is deprecated'), stacklevel=6)) | ||||
| del passthrough_module | ||||
|  | ||||
| import base64 | ||||
| import urllib.error | ||||
| import urllib.parse | ||||
|  | ||||
| compat_str = str | ||||
|  | ||||
| compat_b64decode = base64.b64decode | ||||
|  | ||||
| compat_urlparse = urllib.parse | ||||
| compat_parse_qs = urllib.parse.parse_qs | ||||
| compat_urllib_parse_unquote = urllib.parse.unquote | ||||
| compat_urllib_parse_urlencode = urllib.parse.urlencode | ||||
| compat_urllib_parse_urlparse = urllib.parse.urlparse | ||||
							
								
								
									
										108
									
								
								plugins/youtube_download/yt_dlp/compat/_legacy.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										108
									
								
								plugins/youtube_download/yt_dlp/compat/_legacy.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,108 @@ | ||||
| """ Do not use! """ | ||||
|  | ||||
| import base64 | ||||
| import collections | ||||
| import ctypes | ||||
| import getpass | ||||
| import html.entities | ||||
| import html.parser | ||||
| import http.client | ||||
| import http.cookiejar | ||||
| import http.cookies | ||||
| import http.server | ||||
| import itertools | ||||
| import os | ||||
| import shlex | ||||
| import shutil | ||||
| import socket | ||||
| import struct | ||||
| import subprocess | ||||
| import tokenize | ||||
| import urllib.error | ||||
| import urllib.parse | ||||
| import urllib.request | ||||
| import xml.etree.ElementTree as etree | ||||
|  | ||||
| # isort: split | ||||
| import asyncio  # noqa: F401 | ||||
| import re  # noqa: F401 | ||||
| from asyncio import run as compat_asyncio_run  # noqa: F401 | ||||
| from re import Pattern as compat_Pattern  # noqa: F401 | ||||
| from re import match as compat_Match  # noqa: F401 | ||||
|  | ||||
| from . import compat_expanduser, compat_HTMLParseError, compat_realpath | ||||
| from .compat_utils import passthrough_module | ||||
| from ..dependencies import brotli as compat_brotli  # noqa: F401 | ||||
| from ..dependencies import websockets as compat_websockets  # noqa: F401 | ||||
| from ..dependencies.Cryptodome import AES as compat_pycrypto_AES  # noqa: F401 | ||||
|  | ||||
| passthrough_module(__name__, '...utils', ('WINDOWS_VT_MODE', 'windows_enable_vt_mode')) | ||||
|  | ||||
|  | ||||
| # compat_ctypes_WINFUNCTYPE = ctypes.WINFUNCTYPE | ||||
| # will not work since ctypes.WINFUNCTYPE does not exist in UNIX machines | ||||
| def compat_ctypes_WINFUNCTYPE(*args, **kwargs): | ||||
|     return ctypes.WINFUNCTYPE(*args, **kwargs) | ||||
|  | ||||
|  | ||||
| def compat_setenv(key, value, env=os.environ): | ||||
|     env[key] = value | ||||
|  | ||||
|  | ||||
| compat_base64_b64decode = base64.b64decode | ||||
| compat_basestring = str | ||||
| compat_casefold = str.casefold | ||||
| compat_chr = chr | ||||
| compat_collections_abc = collections.abc | ||||
| compat_cookiejar = compat_http_cookiejar = http.cookiejar | ||||
| compat_cookiejar_Cookie = compat_http_cookiejar_Cookie = http.cookiejar.Cookie | ||||
| compat_cookies = compat_http_cookies = http.cookies | ||||
| compat_cookies_SimpleCookie = compat_http_cookies_SimpleCookie = http.cookies.SimpleCookie | ||||
| compat_etree_Element = compat_xml_etree_ElementTree_Element = etree.Element | ||||
| compat_etree_register_namespace = compat_xml_etree_register_namespace = etree.register_namespace | ||||
| compat_filter = filter | ||||
| compat_get_terminal_size = shutil.get_terminal_size | ||||
| compat_getenv = os.getenv | ||||
| compat_getpass = compat_getpass_getpass = getpass.getpass | ||||
| compat_html_entities = html.entities | ||||
| compat_html_entities_html5 = html.entities.html5 | ||||
| compat_html_parser_HTMLParseError = compat_HTMLParseError | ||||
| compat_HTMLParser = compat_html_parser_HTMLParser = html.parser.HTMLParser | ||||
| compat_http_client = http.client | ||||
| compat_http_server = http.server | ||||
| compat_HTTPError = urllib.error.HTTPError | ||||
| compat_input = input | ||||
| compat_integer_types = (int, ) | ||||
| compat_itertools_count = itertools.count | ||||
| compat_kwargs = lambda kwargs: kwargs | ||||
| compat_map = map | ||||
| compat_numeric_types = (int, float, complex) | ||||
| compat_os_path_expanduser = compat_expanduser | ||||
| compat_os_path_realpath = compat_realpath | ||||
| compat_print = print | ||||
| compat_shlex_split = shlex.split | ||||
| compat_socket_create_connection = socket.create_connection | ||||
| compat_Struct = struct.Struct | ||||
| compat_struct_pack = struct.pack | ||||
| compat_struct_unpack = struct.unpack | ||||
| compat_subprocess_get_DEVNULL = lambda: subprocess.DEVNULL | ||||
| compat_tokenize_tokenize = tokenize.tokenize | ||||
| compat_urllib_error = urllib.error | ||||
| compat_urllib_HTTPError = urllib.error.HTTPError | ||||
| compat_urllib_parse = urllib.parse | ||||
| compat_urllib_parse_parse_qs = urllib.parse.parse_qs | ||||
| compat_urllib_parse_quote = urllib.parse.quote | ||||
| compat_urllib_parse_quote_plus = urllib.parse.quote_plus | ||||
| compat_urllib_parse_unquote_plus = urllib.parse.unquote_plus | ||||
| compat_urllib_parse_unquote_to_bytes = urllib.parse.unquote_to_bytes | ||||
| compat_urllib_parse_urlunparse = urllib.parse.urlunparse | ||||
| compat_urllib_request = urllib.request | ||||
| compat_urllib_request_DataHandler = urllib.request.DataHandler | ||||
| compat_urllib_response = urllib.response | ||||
| compat_urlretrieve = compat_urllib_request_urlretrieve = urllib.request.urlretrieve | ||||
| compat_xml_parse_error = compat_xml_etree_ElementTree_ParseError = etree.ParseError | ||||
| compat_xpath = lambda xpath: xpath | ||||
| compat_zip = zip | ||||
| workaround_optparse_bug9161 = lambda: None | ||||
|  | ||||
| legacy = [] | ||||
							
								
								
									
										83
									
								
								plugins/youtube_download/yt_dlp/compat/compat_utils.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										83
									
								
								plugins/youtube_download/yt_dlp/compat/compat_utils.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,83 @@ | ||||
| import collections | ||||
| import contextlib | ||||
| import functools | ||||
| import importlib | ||||
| import sys | ||||
| import types | ||||
|  | ||||
| _NO_ATTRIBUTE = object() | ||||
|  | ||||
| _Package = collections.namedtuple('Package', ('name', 'version')) | ||||
|  | ||||
|  | ||||
| def get_package_info(module): | ||||
|     return _Package( | ||||
|         name=getattr(module, '_yt_dlp__identifier', module.__name__), | ||||
|         version=str(next(filter(None, ( | ||||
|             getattr(module, attr, None) | ||||
|             for attr in ('__version__', 'version_string', 'version') | ||||
|         )), None))) | ||||
|  | ||||
|  | ||||
| def _is_package(module): | ||||
|     return '__path__' in vars(module) | ||||
|  | ||||
|  | ||||
| def _is_dunder(name): | ||||
|     return name.startswith('__') and name.endswith('__') | ||||
|  | ||||
|  | ||||
| class EnhancedModule(types.ModuleType): | ||||
|     def __bool__(self): | ||||
|         return vars(self).get('__bool__', lambda: True)() | ||||
|  | ||||
|     def __getattribute__(self, attr): | ||||
|         try: | ||||
|             ret = super().__getattribute__(attr) | ||||
|         except AttributeError: | ||||
|             if _is_dunder(attr): | ||||
|                 raise | ||||
|             getter = getattr(self, '__getattr__', None) | ||||
|             if not getter: | ||||
|                 raise | ||||
|             ret = getter(attr) | ||||
|         return ret.fget() if isinstance(ret, property) else ret | ||||
|  | ||||
|  | ||||
| def passthrough_module(parent, child, allowed_attributes=(..., ), *, callback=lambda _: None): | ||||
|     """Passthrough parent module into a child module, creating the parent if necessary""" | ||||
|     def __getattr__(attr): | ||||
|         if _is_package(parent): | ||||
|             with contextlib.suppress(ModuleNotFoundError): | ||||
|                 return importlib.import_module(f'.{attr}', parent.__name__) | ||||
|  | ||||
|         ret = from_child(attr) | ||||
|         if ret is _NO_ATTRIBUTE: | ||||
|             raise AttributeError(f'module {parent.__name__} has no attribute {attr}') | ||||
|         callback(attr) | ||||
|         return ret | ||||
|  | ||||
|     @functools.lru_cache(maxsize=None) | ||||
|     def from_child(attr): | ||||
|         nonlocal child | ||||
|         if attr not in allowed_attributes: | ||||
|             if ... not in allowed_attributes or _is_dunder(attr): | ||||
|                 return _NO_ATTRIBUTE | ||||
|  | ||||
|         if isinstance(child, str): | ||||
|             child = importlib.import_module(child, parent.__name__) | ||||
|  | ||||
|         if _is_package(child): | ||||
|             with contextlib.suppress(ImportError): | ||||
|                 return passthrough_module(f'{parent.__name__}.{attr}', | ||||
|                                           importlib.import_module(f'.{attr}', child.__name__)) | ||||
|  | ||||
|         with contextlib.suppress(AttributeError): | ||||
|             return getattr(child, attr) | ||||
|  | ||||
|         return _NO_ATTRIBUTE | ||||
|  | ||||
|     parent = sys.modules.get(parent, types.ModuleType(parent)) | ||||
|     parent.__class__ = EnhancedModule | ||||
|     parent.__getattr__ = __getattr__ | ||||
|     return parent | ||||
							
								
								
									
										26
									
								
								plugins/youtube_download/yt_dlp/compat/functools.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										26
									
								
								plugins/youtube_download/yt_dlp/compat/functools.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,26 @@ | ||||
| # flake8: noqa: F405 | ||||
| from functools import *  # noqa: F403 | ||||
|  | ||||
| from .compat_utils import passthrough_module | ||||
|  | ||||
| passthrough_module(__name__, 'functools') | ||||
| del passthrough_module | ||||
|  | ||||
| try: | ||||
|     cache  # >= 3.9 | ||||
| except NameError: | ||||
|     cache = lru_cache(maxsize=None) | ||||
|  | ||||
| try: | ||||
|     cached_property  # >= 3.8 | ||||
| except NameError: | ||||
|     class cached_property: | ||||
|         def __init__(self, func): | ||||
|             update_wrapper(self, func) | ||||
|             self.func = func | ||||
|  | ||||
|         def __get__(self, instance, _): | ||||
|             if instance is None: | ||||
|                 return self | ||||
|             setattr(instance, self.func.__name__, self.func(instance)) | ||||
|             return getattr(instance, self.func.__name__) | ||||
							
								
								
									
										16
									
								
								plugins/youtube_download/yt_dlp/compat/imghdr.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										16
									
								
								plugins/youtube_download/yt_dlp/compat/imghdr.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,16 @@ | ||||
| tests = { | ||||
|     'webp': lambda h: h[0:4] == b'RIFF' and h[8:] == b'WEBP', | ||||
|     'png': lambda h: h[:8] == b'\211PNG\r\n\032\n', | ||||
|     'jpeg': lambda h: h[6:10] in (b'JFIF', b'Exif'), | ||||
|     'gif': lambda h: h[:6] in (b'GIF87a', b'GIF89a'), | ||||
| } | ||||
|  | ||||
|  | ||||
| def what(file=None, h=None): | ||||
|     """Detect format of image (Currently supports jpeg, png, webp, gif only) | ||||
|     Ref: https://github.com/python/cpython/blob/3.10/Lib/imghdr.py | ||||
|     """ | ||||
|     if h is None: | ||||
|         with open(file, 'rb') as f: | ||||
|             h = f.read(12) | ||||
|     return next((type_ for type_, test in tests.items() if test(h)), None) | ||||
							
								
								
									
										30
									
								
								plugins/youtube_download/yt_dlp/compat/shutil.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										30
									
								
								plugins/youtube_download/yt_dlp/compat/shutil.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,30 @@ | ||||
| # flake8: noqa: F405 | ||||
| from shutil import *  # noqa: F403 | ||||
|  | ||||
| from .compat_utils import passthrough_module | ||||
|  | ||||
| passthrough_module(__name__, 'shutil') | ||||
| del passthrough_module | ||||
|  | ||||
|  | ||||
| import sys | ||||
|  | ||||
| if sys.platform.startswith('freebsd'): | ||||
|     import errno | ||||
|     import os | ||||
|     import shutil | ||||
|  | ||||
|     # Workaround for PermissionError when using restricted ACL mode on FreeBSD | ||||
|     def copy2(src, dst, *args, **kwargs): | ||||
|         if os.path.isdir(dst): | ||||
|             dst = os.path.join(dst, os.path.basename(src)) | ||||
|         shutil.copyfile(src, dst, *args, **kwargs) | ||||
|         try: | ||||
|             shutil.copystat(src, dst, *args, **kwargs) | ||||
|         except PermissionError as e: | ||||
|             if e.errno != getattr(errno, 'EPERM', None): | ||||
|                 raise | ||||
|         return dst | ||||
|  | ||||
|     def move(*args, copy_function=copy2, **kwargs): | ||||
|         return shutil.move(*args, copy_function=copy_function, **kwargs) | ||||
							
								
								
									
										13
									
								
								plugins/youtube_download/yt_dlp/compat/types.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								plugins/youtube_download/yt_dlp/compat/types.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | ||||
| # flake8: noqa: F405 | ||||
| from types import *  # noqa: F403 | ||||
|  | ||||
| from .compat_utils import passthrough_module | ||||
|  | ||||
| passthrough_module(__name__, 'types') | ||||
| del passthrough_module | ||||
|  | ||||
| try: | ||||
|     # NB: pypy has builtin NoneType, so checking NameError won't work | ||||
|     from types import NoneType  # >= 3.10 | ||||
| except ImportError: | ||||
|     NoneType = type(None) | ||||
							
								
								
									
										10
									
								
								plugins/youtube_download/yt_dlp/compat/urllib/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								plugins/youtube_download/yt_dlp/compat/urllib/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,10 @@ | ||||
| # flake8: noqa: F405 | ||||
| from urllib import *  # noqa: F403 | ||||
|  | ||||
| del request | ||||
| from . import request  # noqa: F401 | ||||
|  | ||||
| from ..compat_utils import passthrough_module | ||||
|  | ||||
| passthrough_module(__name__, 'urllib') | ||||
| del passthrough_module | ||||
							
								
								
									
										40
									
								
								plugins/youtube_download/yt_dlp/compat/urllib/request.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										40
									
								
								plugins/youtube_download/yt_dlp/compat/urllib/request.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,40 @@ | ||||
| # flake8: noqa: F405 | ||||
| from urllib.request import *  # noqa: F403 | ||||
|  | ||||
| from ..compat_utils import passthrough_module | ||||
|  | ||||
| passthrough_module(__name__, 'urllib.request') | ||||
| del passthrough_module | ||||
|  | ||||
|  | ||||
| from .. import compat_os_name | ||||
|  | ||||
| if compat_os_name == 'nt': | ||||
|     # On older python versions, proxies are extracted from Windows registry erroneously. [1] | ||||
|     # If the https proxy in the registry does not have a scheme, urllib will incorrectly add https:// to it. [2] | ||||
|     # It is unlikely that the user has actually set it to be https, so we should be fine to safely downgrade | ||||
|     # it to http on these older python versions to avoid issues | ||||
|     # This also applies for ftp proxy type, as ftp:// proxy scheme is not supported. | ||||
|     # 1: https://github.com/python/cpython/issues/86793 | ||||
|     # 2: https://github.com/python/cpython/blob/51f1ae5ceb0673316c4e4b0175384e892e33cc6e/Lib/urllib/request.py#L2683-L2698 | ||||
|     import sys | ||||
|     from urllib.request import getproxies_environment, getproxies_registry | ||||
|  | ||||
|     def getproxies_registry_patched(): | ||||
|         proxies = getproxies_registry() | ||||
|         if ( | ||||
|             sys.version_info >= (3, 10, 5)  # https://docs.python.org/3.10/whatsnew/changelog.html#python-3-10-5-final | ||||
|             or (3, 9, 13) <= sys.version_info < (3, 10)  # https://docs.python.org/3.9/whatsnew/changelog.html#python-3-9-13-final | ||||
|         ): | ||||
|             return proxies | ||||
|  | ||||
|         for scheme in ('https', 'ftp'): | ||||
|             if scheme in proxies and proxies[scheme].startswith(f'{scheme}://'): | ||||
|                 proxies[scheme] = 'http' + proxies[scheme][len(scheme):] | ||||
|  | ||||
|         return proxies | ||||
|  | ||||
|     def getproxies(): | ||||
|         return getproxies_environment() or getproxies_registry_patched() | ||||
|  | ||||
| del compat_os_name | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										38
									
								
								plugins/youtube_download/yt_dlp/dependencies/Cryptodome.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										38
									
								
								plugins/youtube_download/yt_dlp/dependencies/Cryptodome.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,38 @@ | ||||
| from ..compat.compat_utils import passthrough_module | ||||
|  | ||||
| try: | ||||
|     import Cryptodome as _parent | ||||
| except ImportError: | ||||
|     try: | ||||
|         import Crypto as _parent | ||||
|     except (ImportError, SyntaxError):  # Old Crypto gives SyntaxError in newer Python | ||||
|         _parent = passthrough_module(__name__, 'no_Cryptodome') | ||||
|         __bool__ = lambda: False | ||||
|  | ||||
| del passthrough_module | ||||
|  | ||||
| __version__ = '' | ||||
| AES = PKCS1_v1_5 = Blowfish = PKCS1_OAEP = SHA1 = CMAC = RSA = None | ||||
| try: | ||||
|     if _parent.__name__ == 'Cryptodome': | ||||
|         from Cryptodome import __version__ | ||||
|         from Cryptodome.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5 | ||||
|         from Cryptodome.Hash import CMAC, SHA1 | ||||
|         from Cryptodome.PublicKey import RSA | ||||
|     elif _parent.__name__ == 'Crypto': | ||||
|         from Crypto import __version__ | ||||
|         from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401 | ||||
|         from Crypto.Hash import CMAC, SHA1  # noqa: F401 | ||||
|         from Crypto.PublicKey import RSA  # noqa: F401 | ||||
| except ImportError: | ||||
|     __version__ = f'broken {__version__}'.strip() | ||||
|  | ||||
|  | ||||
| _yt_dlp__identifier = _parent.__name__ | ||||
| if AES and _yt_dlp__identifier == 'Crypto': | ||||
|     try: | ||||
|         # In pycrypto, mode defaults to ECB. See: | ||||
|         # https://www.pycryptodome.org/en/latest/src/vs_pycrypto.html#:~:text=not%20have%20ECB%20as%20default%20mode | ||||
|         AES.new(b'abcdefghijklmnop') | ||||
|     except TypeError: | ||||
|         _yt_dlp__identifier = 'pycrypto' | ||||
							
								
								
									
										83
									
								
								plugins/youtube_download/yt_dlp/dependencies/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										83
									
								
								plugins/youtube_download/yt_dlp/dependencies/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,83 @@ | ||||
| # flake8: noqa: F401 | ||||
| """Imports all optional dependencies for the project. | ||||
| An attribute "_yt_dlp__identifier" may be inserted into the module if it uses an ambiguous namespace""" | ||||
|  | ||||
| try: | ||||
|     import brotlicffi as brotli | ||||
| except ImportError: | ||||
|     try: | ||||
|         import brotli | ||||
|     except ImportError: | ||||
|         brotli = None | ||||
|  | ||||
|  | ||||
| try: | ||||
|     import certifi | ||||
| except ImportError: | ||||
|     certifi = None | ||||
| else: | ||||
|     from os.path import exists as _path_exists | ||||
|  | ||||
|     # The certificate may not be bundled in executable | ||||
|     if not _path_exists(certifi.where()): | ||||
|         certifi = None | ||||
|  | ||||
|  | ||||
| try: | ||||
|     import mutagen | ||||
| except ImportError: | ||||
|     mutagen = None | ||||
|  | ||||
|  | ||||
| secretstorage = None | ||||
| try: | ||||
|     import secretstorage | ||||
|     _SECRETSTORAGE_UNAVAILABLE_REASON = None | ||||
| except ImportError: | ||||
|     _SECRETSTORAGE_UNAVAILABLE_REASON = ( | ||||
|         'as the `secretstorage` module is not installed. ' | ||||
|         'Please install by running `python3 -m pip install secretstorage`') | ||||
| except Exception as _err: | ||||
|     _SECRETSTORAGE_UNAVAILABLE_REASON = f'as the `secretstorage` module could not be initialized. {_err}' | ||||
|  | ||||
|  | ||||
| try: | ||||
|     import sqlite3 | ||||
| except ImportError: | ||||
|     # although sqlite3 is part of the standard library, it is possible to compile python without | ||||
|     # sqlite support. See: https://github.com/yt-dlp/yt-dlp/issues/544 | ||||
|     sqlite3 = None | ||||
|  | ||||
|  | ||||
| try: | ||||
|     import websockets | ||||
| except (ImportError, SyntaxError): | ||||
|     # websockets 3.10 on python 3.6 causes SyntaxError | ||||
|     # See https://github.com/yt-dlp/yt-dlp/issues/2633 | ||||
|     websockets = None | ||||
|  | ||||
|  | ||||
| try: | ||||
|     import xattr  # xattr or pyxattr | ||||
| except ImportError: | ||||
|     xattr = None | ||||
| else: | ||||
|     if hasattr(xattr, 'set'):  # pyxattr | ||||
|         xattr._yt_dlp__identifier = 'pyxattr' | ||||
|  | ||||
|  | ||||
| from . import Cryptodome | ||||
|  | ||||
| all_dependencies = {k: v for k, v in globals().items() if not k.startswith('_')} | ||||
| available_dependencies = {k: v for k, v in all_dependencies.items() if v} | ||||
|  | ||||
|  | ||||
| # Deprecated | ||||
| Cryptodome_AES = Cryptodome.AES | ||||
|  | ||||
|  | ||||
| __all__ = [ | ||||
|     'all_dependencies', | ||||
|     'available_dependencies', | ||||
|     *all_dependencies.keys(), | ||||
| ] | ||||
| @@ -1,10 +1,4 @@ | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| from ..compat import compat_str | ||||
| from ..utils import ( | ||||
|     determine_protocol, | ||||
|     NO_DEFAULT | ||||
| ) | ||||
| from ..utils import NO_DEFAULT, determine_protocol | ||||
|  | ||||
|  | ||||
| def get_suitable_downloader(info_dict, params={}, default=NO_DEFAULT, protocol=None, to_stdout=False): | ||||
| @@ -29,20 +23,18 @@ def get_suitable_downloader(info_dict, params={}, default=NO_DEFAULT, protocol=N | ||||
| # Some of these require get_suitable_downloader | ||||
| from .common import FileDownloader | ||||
| from .dash import DashSegmentsFD | ||||
| from .external import FFmpegFD, get_external_downloader | ||||
| from .f4m import F4mFD | ||||
| from .fc2 import FC2LiveFD | ||||
| from .hls import HlsFD | ||||
| from .http import HttpFD | ||||
| from .rtmp import RtmpFD | ||||
| from .rtsp import RtspFD | ||||
| from .ism import IsmFD | ||||
| from .mhtml import MhtmlFD | ||||
| from .niconico import NiconicoDmcFD | ||||
| from .niconico import NiconicoDmcFD, NiconicoLiveFD | ||||
| from .rtmp import RtmpFD | ||||
| from .rtsp import RtspFD | ||||
| from .websocket import WebSocketFragmentFD | ||||
| from .youtube_live_chat import YoutubeLiveChatFD | ||||
| from .external import ( | ||||
|     get_external_downloader, | ||||
|     FFmpegFD, | ||||
| ) | ||||
|  | ||||
| PROTOCOL_MAP = { | ||||
|     'rtmp': RtmpFD, | ||||
| @@ -58,6 +50,8 @@ PROTOCOL_MAP = { | ||||
|     'ism': IsmFD, | ||||
|     'mhtml': MhtmlFD, | ||||
|     'niconico_dmc': NiconicoDmcFD, | ||||
|     'niconico_live': NiconicoLiveFD, | ||||
|     'fc2_live': FC2LiveFD, | ||||
|     'websocket_frag': WebSocketFragmentFD, | ||||
|     'youtube_live_chat': YoutubeLiveChatFD, | ||||
|     'youtube_live_chat_replay': YoutubeLiveChatFD, | ||||
| @@ -66,10 +60,11 @@ PROTOCOL_MAP = { | ||||
|  | ||||
| def shorten_protocol_name(proto, simplify=False): | ||||
|     short_protocol_names = { | ||||
|         'm3u8_native': 'm3u8_n', | ||||
|         'rtmp_ffmpeg': 'rtmp_f', | ||||
|         'm3u8_native': 'm3u8', | ||||
|         'm3u8': 'm3u8F', | ||||
|         'rtmp_ffmpeg': 'rtmpF', | ||||
|         'http_dash_segments': 'dash', | ||||
|         'http_dash_segments_generator': 'dash_g', | ||||
|         'http_dash_segments_generator': 'dashG', | ||||
|         'niconico_dmc': 'dmc', | ||||
|         'websocket_frag': 'WSfrag', | ||||
|     } | ||||
| @@ -77,6 +72,7 @@ def shorten_protocol_name(proto, simplify=False): | ||||
|         short_protocol_names.update({ | ||||
|             'https': 'http', | ||||
|             'ftps': 'ftp', | ||||
|             'm3u8': 'm3u8',  # Reverse above m3u8 mapping | ||||
|             'm3u8_native': 'm3u8', | ||||
|             'http_dash_segments_generator': 'dash', | ||||
|             'rtmp_ffmpeg': 'rtmp', | ||||
| @@ -91,13 +87,13 @@ def _get_suitable_downloader(info_dict, protocol, params, default): | ||||
|     if default is NO_DEFAULT: | ||||
|         default = HttpFD | ||||
|  | ||||
|     # if (info_dict.get('start_time') or info_dict.get('end_time')) and not info_dict.get('requested_formats') and FFmpegFD.can_download(info_dict): | ||||
|     #     return FFmpegFD | ||||
|     if (info_dict.get('section_start') or info_dict.get('section_end')) and FFmpegFD.can_download(info_dict): | ||||
|         return FFmpegFD | ||||
|  | ||||
|     info_dict['protocol'] = protocol | ||||
|     downloaders = params.get('external_downloader') | ||||
|     external_downloader = ( | ||||
|         downloaders if isinstance(downloaders, compat_str) or downloaders is None | ||||
|         downloaders if isinstance(downloaders, str) or downloaders is None | ||||
|         else downloaders.get(shorten_protocol_name(protocol, True), downloaders.get('default'))) | ||||
|  | ||||
|     if external_downloader is None: | ||||
| @@ -117,7 +113,7 @@ def _get_suitable_downloader(info_dict, protocol, params, default): | ||||
|             return FFmpegFD | ||||
|         elif (external_downloader or '').lower() == 'native': | ||||
|             return HlsFD | ||||
|         elif get_suitable_downloader( | ||||
|         elif protocol == 'm3u8_native' and get_suitable_downloader( | ||||
|                 info_dict, params, None, protocol='m3u8_frag_urls', to_stdout=info_dict['to_stdout']): | ||||
|             return HlsFD | ||||
|         elif params.get('hls_prefer_native') is True: | ||||
|   | ||||
| @@ -1,30 +1,40 @@ | ||||
| from __future__ import division, unicode_literals | ||||
|  | ||||
| import contextlib | ||||
| import errno | ||||
| import functools | ||||
| import os | ||||
| import random | ||||
| import re | ||||
| import time | ||||
| import random | ||||
| import errno | ||||
|  | ||||
| from ..minicurses import ( | ||||
|     BreaklineStatusPrinter, | ||||
|     MultilineLogger, | ||||
|     MultilinePrinter, | ||||
|     QuietMultilinePrinter, | ||||
| ) | ||||
| from ..utils import ( | ||||
|     IDENTITY, | ||||
|     NO_DEFAULT, | ||||
|     LockingUnsupportedError, | ||||
|     Namespace, | ||||
|     RetryManager, | ||||
|     classproperty, | ||||
|     decodeArgument, | ||||
|     deprecation_warning, | ||||
|     encodeFilename, | ||||
|     error_to_compat_str, | ||||
|     format_bytes, | ||||
|     join_nonempty, | ||||
|     parse_bytes, | ||||
|     remove_start, | ||||
|     sanitize_open, | ||||
|     shell_quote, | ||||
|     timeconvert, | ||||
|     timetuple_from_msec, | ||||
| ) | ||||
| from ..minicurses import ( | ||||
|     MultilineLogger, | ||||
|     MultilinePrinter, | ||||
|     QuietMultilinePrinter, | ||||
|     BreaklineStatusPrinter | ||||
|     try_call, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class FileDownloader(object): | ||||
| class FileDownloader: | ||||
|     """File Downloader class. | ||||
|  | ||||
|     File downloader objects are the ones responsible of downloading the | ||||
| @@ -40,8 +50,9 @@ class FileDownloader(object): | ||||
|     quiet:              Do not print messages to stdout. | ||||
|     ratelimit:          Download speed limit, in bytes/sec. | ||||
|     throttledratelimit: Assume the download is being throttled below this speed (bytes/sec) | ||||
|     retries:            Number of times to retry for HTTP error 5xx | ||||
|     file_access_retries:   Number of times to retry on file access error | ||||
|     retries:            Number of times to retry for expected network errors. | ||||
|                         Default is 0 for API, but 10 for CLI | ||||
|     file_access_retries:   Number of times to retry on file access error (default: 3) | ||||
|     buffersize:         Size of download buffer in bytes. | ||||
|     noresizebuffer:     Do not automatically resize the download buffer. | ||||
|     continuedl:         Try to continue downloads if possible. | ||||
| @@ -62,6 +73,7 @@ class FileDownloader(object): | ||||
|                         useful for bypassing bandwidth throttling imposed by | ||||
|                         a webserver (experimental) | ||||
|     progress_template:  See YoutubeDL.py | ||||
|     retry_sleep_functions: See YoutubeDL.py | ||||
|  | ||||
|     Subclasses of this one must re-define the real_download method. | ||||
|     """ | ||||
| @@ -71,21 +83,51 @@ class FileDownloader(object): | ||||
|  | ||||
|     def __init__(self, ydl, params): | ||||
|         """Create a FileDownloader object with the given options.""" | ||||
|         self.ydl = ydl | ||||
|         self._set_ydl(ydl) | ||||
|         self._progress_hooks = [] | ||||
|         self.params = params | ||||
|         self._prepare_multiline_status() | ||||
|         self.add_progress_hook(self.report_progress) | ||||
|  | ||||
|     def _set_ydl(self, ydl): | ||||
|         self.ydl = ydl | ||||
|  | ||||
|         for func in ( | ||||
|             'deprecation_warning', | ||||
|             'deprecated_feature', | ||||
|             'report_error', | ||||
|             'report_file_already_downloaded', | ||||
|             'report_warning', | ||||
|             'to_console_title', | ||||
|             'to_stderr', | ||||
|             'trouble', | ||||
|             'write_debug', | ||||
|         ): | ||||
|             if not hasattr(self, func): | ||||
|                 setattr(self, func, getattr(ydl, func)) | ||||
|  | ||||
|     def to_screen(self, *args, **kargs): | ||||
|         self.ydl.to_screen(*args, quiet=self.params.get('quiet'), **kargs) | ||||
|  | ||||
|     __to_screen = to_screen | ||||
|  | ||||
|     @classproperty | ||||
|     def FD_NAME(cls): | ||||
|         return re.sub(r'(?<=[a-z])(?=[A-Z])', '_', cls.__name__[:-2]).lower() | ||||
|  | ||||
|     @staticmethod | ||||
|     def format_seconds(seconds): | ||||
|         if seconds is None: | ||||
|             return ' Unknown' | ||||
|         time = timetuple_from_msec(seconds * 1000) | ||||
|         if time.hours > 99: | ||||
|             return '--:--:--' | ||||
|         if not time.hours: | ||||
|             return '%02d:%02d' % time[1:-1] | ||||
|         return '%02d:%02d:%02d' % time[:-1] | ||||
|  | ||||
|     @classmethod | ||||
|     def format_eta(cls, seconds): | ||||
|         return f'{remove_start(cls.format_seconds(seconds), "00:"):>8s}' | ||||
|  | ||||
|     @staticmethod | ||||
|     def calc_percent(byte_counter, data_len): | ||||
|         if data_len is None: | ||||
| @@ -94,29 +136,23 @@ class FileDownloader(object): | ||||
|  | ||||
|     @staticmethod | ||||
|     def format_percent(percent): | ||||
|         if percent is None: | ||||
|             return '---.-%' | ||||
|         elif percent == 100: | ||||
|             return '100%' | ||||
|         return '%6s' % ('%3.1f%%' % percent) | ||||
|         return '  N/A%' if percent is None else f'{percent:>5.1f}%' | ||||
|  | ||||
|     @staticmethod | ||||
|     def calc_eta(start, now, total, current): | ||||
|     @classmethod | ||||
|     def calc_eta(cls, start_or_rate, now_or_remaining, total=NO_DEFAULT, current=NO_DEFAULT): | ||||
|         if total is NO_DEFAULT: | ||||
|             rate, remaining = start_or_rate, now_or_remaining | ||||
|             if None in (rate, remaining): | ||||
|                 return None | ||||
|             return int(float(remaining) / rate) | ||||
|  | ||||
|         start, now = start_or_rate, now_or_remaining | ||||
|         if total is None: | ||||
|             return None | ||||
|         if now is None: | ||||
|             now = time.time() | ||||
|         dif = now - start | ||||
|         if current == 0 or dif < 0.001:  # One millisecond | ||||
|             return None | ||||
|         rate = float(current) / dif | ||||
|         return int((float(total) - float(current)) / rate) | ||||
|  | ||||
|     @staticmethod | ||||
|     def format_eta(eta): | ||||
|         if eta is None: | ||||
|             return '--:--' | ||||
|         return FileDownloader.format_seconds(eta) | ||||
|         rate = cls.calc_speed(start, now, current) | ||||
|         return rate and int((float(total) - float(current)) / rate) | ||||
|  | ||||
|     @staticmethod | ||||
|     def calc_speed(start, now, bytes): | ||||
| @@ -127,13 +163,17 @@ class FileDownloader(object): | ||||
|  | ||||
|     @staticmethod | ||||
|     def format_speed(speed): | ||||
|         if speed is None: | ||||
|             return '%10s' % '---b/s' | ||||
|         return '%10s' % ('%s/s' % format_bytes(speed)) | ||||
|         return ' Unknown B/s' if speed is None else f'{format_bytes(speed):>10s}/s' | ||||
|  | ||||
|     @staticmethod | ||||
|     def format_retries(retries): | ||||
|         return 'inf' if retries == float('inf') else '%.0f' % retries | ||||
|         return 'inf' if retries == float('inf') else int(retries) | ||||
|  | ||||
|     @staticmethod | ||||
|     def filesize_or_none(unencoded_filename): | ||||
|         if os.path.isfile(unencoded_filename): | ||||
|             return os.path.getsize(unencoded_filename) | ||||
|         return 0 | ||||
|  | ||||
|     @staticmethod | ||||
|     def best_block_size(elapsed_time, bytes): | ||||
| @@ -151,33 +191,9 @@ class FileDownloader(object): | ||||
|     @staticmethod | ||||
|     def parse_bytes(bytestr): | ||||
|         """Parse a string indicating a byte quantity into an integer.""" | ||||
|         matchobj = re.match(r'(?i)^(\d+(?:\.\d+)?)([kMGTPEZY]?)$', bytestr) | ||||
|         if matchobj is None: | ||||
|             return None | ||||
|         number = float(matchobj.group(1)) | ||||
|         multiplier = 1024.0 ** 'bkmgtpezy'.index(matchobj.group(2).lower()) | ||||
|         return int(round(number * multiplier)) | ||||
|  | ||||
|     def to_screen(self, *args, **kargs): | ||||
|         self.ydl.to_stdout(*args, quiet=self.params.get('quiet'), **kargs) | ||||
|  | ||||
|     def to_stderr(self, message): | ||||
|         self.ydl.to_stderr(message) | ||||
|  | ||||
|     def to_console_title(self, message): | ||||
|         self.ydl.to_console_title(message) | ||||
|  | ||||
|     def trouble(self, *args, **kargs): | ||||
|         self.ydl.trouble(*args, **kargs) | ||||
|  | ||||
|     def report_warning(self, *args, **kargs): | ||||
|         self.ydl.report_warning(*args, **kargs) | ||||
|  | ||||
|     def report_error(self, *args, **kargs): | ||||
|         self.ydl.report_error(*args, **kargs) | ||||
|  | ||||
|     def write_debug(self, *args, **kargs): | ||||
|         self.ydl.write_debug(*args, **kargs) | ||||
|         deprecation_warning('yt_dlp.FileDownloader.parse_bytes is deprecated and ' | ||||
|                             'may be removed in the future. Use yt_dlp.utils.parse_bytes instead') | ||||
|         return parse_bytes(bytestr) | ||||
|  | ||||
|     def slow_down(self, start_time, now, byte_counter): | ||||
|         """Sleep if the download speed is over the rate limit.""" | ||||
| @@ -210,28 +226,43 @@ class FileDownloader(object): | ||||
|     def ytdl_filename(self, filename): | ||||
|         return filename + '.ytdl' | ||||
|  | ||||
|     def sanitize_open(self, filename, open_mode): | ||||
|         file_access_retries = self.params.get('file_access_retries', 10) | ||||
|         retry = 0 | ||||
|         while True: | ||||
|             try: | ||||
|                 return sanitize_open(filename, open_mode) | ||||
|             except (IOError, OSError) as err: | ||||
|                 retry = retry + 1 | ||||
|                 if retry > file_access_retries or err.errno not in (errno.EACCES,): | ||||
|                     raise | ||||
|                 self.to_screen( | ||||
|                     '[download] Got file access error. Retrying (attempt %d of %s) ...' | ||||
|                     % (retry, self.format_retries(file_access_retries))) | ||||
|                 time.sleep(0.01) | ||||
|     def wrap_file_access(action, *, fatal=False): | ||||
|         def error_callback(err, count, retries, *, fd): | ||||
|             return RetryManager.report_retry( | ||||
|                 err, count, retries, info=fd.__to_screen, | ||||
|                 warn=lambda e: (time.sleep(0.01), fd.to_screen(f'[download] Unable to {action} file: {e}')), | ||||
|                 error=None if fatal else lambda e: fd.report_error(f'Unable to {action} file: {e}'), | ||||
|                 sleep_func=fd.params.get('retry_sleep_functions', {}).get('file_access')) | ||||
|  | ||||
|         def wrapper(self, func, *args, **kwargs): | ||||
|             for retry in RetryManager(self.params.get('file_access_retries', 3), error_callback, fd=self): | ||||
|                 try: | ||||
|                     return func(self, *args, **kwargs) | ||||
|                 except OSError as err: | ||||
|                     if err.errno in (errno.EACCES, errno.EINVAL): | ||||
|                         retry.error = err | ||||
|                         continue | ||||
|                     retry.error_callback(err, 1, 0) | ||||
|  | ||||
|         return functools.partial(functools.partialmethod, wrapper) | ||||
|  | ||||
|     @wrap_file_access('open', fatal=True) | ||||
|     def sanitize_open(self, filename, open_mode): | ||||
|         f, filename = sanitize_open(filename, open_mode) | ||||
|         if not getattr(f, 'locked', None): | ||||
|             self.write_debug(f'{LockingUnsupportedError.msg}. Proceeding without locking', only_once=True) | ||||
|         return f, filename | ||||
|  | ||||
|     @wrap_file_access('remove') | ||||
|     def try_remove(self, filename): | ||||
|         if os.path.isfile(filename): | ||||
|             os.remove(filename) | ||||
|  | ||||
|     @wrap_file_access('rename') | ||||
|     def try_rename(self, old_filename, new_filename): | ||||
|         if old_filename == new_filename: | ||||
|             return | ||||
|         try: | ||||
|             os.replace(old_filename, new_filename) | ||||
|         except (IOError, OSError) as err: | ||||
|             self.report_error(f'unable to rename file: {err}') | ||||
|         os.replace(old_filename, new_filename) | ||||
|  | ||||
|     def try_utime(self, filename, last_modified_hdr): | ||||
|         """Try to set the last-modified time of the given file.""" | ||||
| @@ -248,10 +279,8 @@ class FileDownloader(object): | ||||
|         # Ignore obviously invalid dates | ||||
|         if filetime == 0: | ||||
|             return | ||||
|         try: | ||||
|         with contextlib.suppress(Exception): | ||||
|             os.utime(filename, (time.time(), filetime)) | ||||
|         except Exception: | ||||
|             pass | ||||
|         return filetime | ||||
|  | ||||
|     def report_destination(self, filename): | ||||
| @@ -264,26 +293,27 @@ class FileDownloader(object): | ||||
|         elif self.ydl.params.get('logger'): | ||||
|             self._multiline = MultilineLogger(self.ydl.params['logger'], lines) | ||||
|         elif self.params.get('progress_with_newline'): | ||||
|             self._multiline = BreaklineStatusPrinter(self.ydl._screen_file, lines) | ||||
|             self._multiline = BreaklineStatusPrinter(self.ydl._out_files.out, lines) | ||||
|         else: | ||||
|             self._multiline = MultilinePrinter(self.ydl._screen_file, lines, not self.params.get('quiet')) | ||||
|         self._multiline.allow_colors = self._multiline._HAVE_FULLCAP and not self.params.get('no_color') | ||||
|             self._multiline = MultilinePrinter(self.ydl._out_files.out, lines, not self.params.get('quiet')) | ||||
|         self._multiline.allow_colors = self.ydl._allow_colors.out and self.ydl._allow_colors.out != 'no_color' | ||||
|         self._multiline._HAVE_FULLCAP = self.ydl._allow_colors.out | ||||
|  | ||||
|     def _finish_multiline_status(self): | ||||
|         self._multiline.end() | ||||
|  | ||||
|     _progress_styles = { | ||||
|         'downloaded_bytes': 'light blue', | ||||
|         'percent': 'light blue', | ||||
|         'eta': 'yellow', | ||||
|         'speed': 'green', | ||||
|         'elapsed': 'bold white', | ||||
|         'total_bytes': '', | ||||
|         'total_bytes_estimate': '', | ||||
|     } | ||||
|     ProgressStyles = Namespace( | ||||
|         downloaded_bytes='light blue', | ||||
|         percent='light blue', | ||||
|         eta='yellow', | ||||
|         speed='green', | ||||
|         elapsed='bold white', | ||||
|         total_bytes='', | ||||
|         total_bytes_estimate='', | ||||
|     ) | ||||
|  | ||||
|     def _report_progress_status(self, s, default_template): | ||||
|         for name, style in self._progress_styles.items(): | ||||
|         for name, style in self.ProgressStyles.items_: | ||||
|             name = f'_{name}_str' | ||||
|             if name not in s: | ||||
|                 continue | ||||
| @@ -307,78 +337,73 @@ class FileDownloader(object): | ||||
|             self._multiline.stream, self._multiline.allow_colors, *args, **kwargs) | ||||
|  | ||||
|     def report_progress(self, s): | ||||
|         def with_fields(*tups, default=''): | ||||
|             for *fields, tmpl in tups: | ||||
|                 if all(s.get(f) is not None for f in fields): | ||||
|                     return tmpl | ||||
|             return default | ||||
|  | ||||
|         _format_bytes = lambda k: f'{format_bytes(s.get(k)):>10s}' | ||||
|  | ||||
|         if s['status'] == 'finished': | ||||
|             if self.params.get('noprogress'): | ||||
|                 self.to_screen('[download] Download completed') | ||||
|             msg_template = '100%%' | ||||
|             if s.get('total_bytes') is not None: | ||||
|                 s['_total_bytes_str'] = format_bytes(s['total_bytes']) | ||||
|                 msg_template += ' of %(_total_bytes_str)s' | ||||
|             if s.get('elapsed') is not None: | ||||
|                 s['_elapsed_str'] = self.format_seconds(s['elapsed']) | ||||
|                 msg_template += ' in %(_elapsed_str)s' | ||||
|             s['_percent_str'] = self.format_percent(100) | ||||
|             self._report_progress_status(s, msg_template) | ||||
|             return | ||||
|             speed = try_call(lambda: s['total_bytes'] / s['elapsed']) | ||||
|             s.update({ | ||||
|                 'speed': speed, | ||||
|                 '_speed_str': self.format_speed(speed).strip(), | ||||
|                 '_total_bytes_str': _format_bytes('total_bytes'), | ||||
|                 '_elapsed_str': self.format_seconds(s.get('elapsed')), | ||||
|                 '_percent_str': self.format_percent(100), | ||||
|             }) | ||||
|             self._report_progress_status(s, join_nonempty( | ||||
|                 '100%%', | ||||
|                 with_fields(('total_bytes', 'of %(_total_bytes_str)s')), | ||||
|                 with_fields(('elapsed', 'in %(_elapsed_str)s')), | ||||
|                 with_fields(('speed', 'at %(_speed_str)s')), | ||||
|                 delim=' ')) | ||||
|  | ||||
|         if s['status'] != 'downloading': | ||||
|             return | ||||
|  | ||||
|         if s.get('eta') is not None: | ||||
|             s['_eta_str'] = self.format_eta(s['eta']) | ||||
|         else: | ||||
|             s['_eta_str'] = 'Unknown' | ||||
|         s.update({ | ||||
|             '_eta_str': self.format_eta(s.get('eta')).strip(), | ||||
|             '_speed_str': self.format_speed(s.get('speed')), | ||||
|             '_percent_str': self.format_percent(try_call( | ||||
|                 lambda: 100 * s['downloaded_bytes'] / s['total_bytes'], | ||||
|                 lambda: 100 * s['downloaded_bytes'] / s['total_bytes_estimate'], | ||||
|                 lambda: s['downloaded_bytes'] == 0 and 0)), | ||||
|             '_total_bytes_str': _format_bytes('total_bytes'), | ||||
|             '_total_bytes_estimate_str': _format_bytes('total_bytes_estimate'), | ||||
|             '_downloaded_bytes_str': _format_bytes('downloaded_bytes'), | ||||
|             '_elapsed_str': self.format_seconds(s.get('elapsed')), | ||||
|         }) | ||||
|  | ||||
|         if s.get('total_bytes') and s.get('downloaded_bytes') is not None: | ||||
|             s['_percent_str'] = self.format_percent(100 * s['downloaded_bytes'] / s['total_bytes']) | ||||
|         elif s.get('total_bytes_estimate') and s.get('downloaded_bytes') is not None: | ||||
|             s['_percent_str'] = self.format_percent(100 * s['downloaded_bytes'] / s['total_bytes_estimate']) | ||||
|         else: | ||||
|             if s.get('downloaded_bytes') == 0: | ||||
|                 s['_percent_str'] = self.format_percent(0) | ||||
|             else: | ||||
|                 s['_percent_str'] = 'Unknown %' | ||||
|         msg_template = with_fields( | ||||
|             ('total_bytes', '%(_percent_str)s of %(_total_bytes_str)s at %(_speed_str)s ETA %(_eta_str)s'), | ||||
|             ('total_bytes_estimate', '%(_percent_str)s of ~%(_total_bytes_estimate_str)s at %(_speed_str)s ETA %(_eta_str)s'), | ||||
|             ('downloaded_bytes', 'elapsed', '%(_downloaded_bytes_str)s at %(_speed_str)s (%(_elapsed_str)s)'), | ||||
|             ('downloaded_bytes', '%(_downloaded_bytes_str)s at %(_speed_str)s'), | ||||
|             default='%(_percent_str)s at %(_speed_str)s ETA %(_eta_str)s') | ||||
|  | ||||
|         if s.get('speed') is not None: | ||||
|             s['_speed_str'] = self.format_speed(s['speed']) | ||||
|         else: | ||||
|             s['_speed_str'] = 'Unknown speed' | ||||
|  | ||||
|         if s.get('total_bytes') is not None: | ||||
|             s['_total_bytes_str'] = format_bytes(s['total_bytes']) | ||||
|             msg_template = '%(_percent_str)s of %(_total_bytes_str)s at %(_speed_str)s ETA %(_eta_str)s' | ||||
|         elif s.get('total_bytes_estimate') is not None: | ||||
|             s['_total_bytes_estimate_str'] = format_bytes(s['total_bytes_estimate']) | ||||
|             msg_template = '%(_percent_str)s of ~%(_total_bytes_estimate_str)s at %(_speed_str)s ETA %(_eta_str)s' | ||||
|         else: | ||||
|             if s.get('downloaded_bytes') is not None: | ||||
|                 s['_downloaded_bytes_str'] = format_bytes(s['downloaded_bytes']) | ||||
|                 if s.get('elapsed'): | ||||
|                     s['_elapsed_str'] = self.format_seconds(s['elapsed']) | ||||
|                     msg_template = '%(_downloaded_bytes_str)s at %(_speed_str)s (%(_elapsed_str)s)' | ||||
|                 else: | ||||
|                     msg_template = '%(_downloaded_bytes_str)s at %(_speed_str)s' | ||||
|             else: | ||||
|                 msg_template = '%(_percent_str)s at %(_speed_str)s ETA %(_eta_str)s' | ||||
|         if s.get('fragment_index') and s.get('fragment_count'): | ||||
|             msg_template += ' (frag %(fragment_index)s/%(fragment_count)s)' | ||||
|         elif s.get('fragment_index'): | ||||
|             msg_template += ' (frag %(fragment_index)s)' | ||||
|         msg_template += with_fields( | ||||
|             ('fragment_index', 'fragment_count', ' (frag %(fragment_index)s/%(fragment_count)s)'), | ||||
|             ('fragment_index', ' (frag %(fragment_index)s)')) | ||||
|         self._report_progress_status(s, msg_template) | ||||
|  | ||||
|     def report_resuming_byte(self, resume_len): | ||||
|         """Report attempt to resume at given byte.""" | ||||
|         self.to_screen('[download] Resuming download at byte %s' % resume_len) | ||||
|  | ||||
|     def report_retry(self, err, count, retries): | ||||
|         """Report retry in case of HTTP error 5xx""" | ||||
|         self.to_screen( | ||||
|             '[download] Got server HTTP error: %s. Retrying (attempt %d of %s) ...' | ||||
|             % (error_to_compat_str(err), count, self.format_retries(retries))) | ||||
|  | ||||
|     def report_file_already_downloaded(self, *args, **kwargs): | ||||
|         """Report file has already been fully downloaded.""" | ||||
|         return self.ydl.report_file_already_downloaded(*args, **kwargs) | ||||
|     def report_retry(self, err, count, retries, frag_index=NO_DEFAULT, fatal=True): | ||||
|         """Report retry""" | ||||
|         is_frag = False if frag_index is NO_DEFAULT else 'fragment' | ||||
|         RetryManager.report_retry( | ||||
|             err, count, retries, info=self.__to_screen, | ||||
|             warn=lambda msg: self.__to_screen(f'[download] Got error: {msg}'), | ||||
|             error=IDENTITY if not fatal else lambda e: self.report_error(f'\r[download] Got error: {e}'), | ||||
|             sleep_func=self.params.get('retry_sleep_functions', {}).get(is_frag or 'http'), | ||||
|             suffix=f'fragment{"s" if frag_index is None else f" {frag_index}"}' if is_frag else None) | ||||
|  | ||||
|     def report_unable_to_resume(self): | ||||
|         """Report it was impossible to resume download.""" | ||||
| @@ -394,7 +419,6 @@ class FileDownloader(object): | ||||
|         """Download to a filename using the info from info_dict | ||||
|         Return True on success and False otherwise | ||||
|         """ | ||||
|  | ||||
|         nooverwrites_and_exists = ( | ||||
|             not self.params.get('overwrites', True) | ||||
|             and os.path.exists(encodeFilename(filename)) | ||||
| @@ -418,25 +442,16 @@ class FileDownloader(object): | ||||
|                 self._finish_multiline_status() | ||||
|                 return True, False | ||||
|  | ||||
|         if subtitle is False: | ||||
|             min_sleep_interval = self.params.get('sleep_interval') | ||||
|             if min_sleep_interval: | ||||
|                 max_sleep_interval = self.params.get('max_sleep_interval', min_sleep_interval) | ||||
|                 sleep_interval = random.uniform(min_sleep_interval, max_sleep_interval) | ||||
|                 self.to_screen( | ||||
|                     '[download] Sleeping %s seconds ...' % ( | ||||
|                         int(sleep_interval) if sleep_interval.is_integer() | ||||
|                         else '%.2f' % sleep_interval)) | ||||
|                 time.sleep(sleep_interval) | ||||
|         if subtitle: | ||||
|             sleep_interval = self.params.get('sleep_interval_subtitles') or 0 | ||||
|         else: | ||||
|             sleep_interval_sub = 0 | ||||
|             if type(self.params.get('sleep_interval_subtitles')) is int: | ||||
|                 sleep_interval_sub = self.params.get('sleep_interval_subtitles') | ||||
|             if sleep_interval_sub > 0: | ||||
|                 self.to_screen( | ||||
|                     '[download] Sleeping %s seconds ...' % ( | ||||
|                         sleep_interval_sub)) | ||||
|                 time.sleep(sleep_interval_sub) | ||||
|             min_sleep_interval = self.params.get('sleep_interval') or 0 | ||||
|             sleep_interval = random.uniform( | ||||
|                 min_sleep_interval, self.params.get('max_sleep_interval') or min_sleep_interval) | ||||
|         if sleep_interval > 0: | ||||
|             self.to_screen(f'[download] Sleeping {sleep_interval:.2f} seconds ...') | ||||
|             time.sleep(sleep_interval) | ||||
|  | ||||
|         ret = self.real_download(filename, info_dict) | ||||
|         self._finish_multiline_status() | ||||
|         return ret, True | ||||
| @@ -446,8 +461,7 @@ class FileDownloader(object): | ||||
|         raise NotImplementedError('This method must be implemented by subclasses') | ||||
|  | ||||
|     def _hook_progress(self, status, info_dict): | ||||
|         if not self._progress_hooks: | ||||
|             return | ||||
|         # Ideally we want to make a copy of the dict, but that is too slow | ||||
|         status['info_dict'] = info_dict | ||||
|         # youtube-dl passes the same status object to all the hooks. | ||||
|         # Some third party scripts seems to be relying on this. | ||||
| @@ -469,4 +483,4 @@ class FileDownloader(object): | ||||
|         if exe is None: | ||||
|             exe = os.path.basename(str_args[0]) | ||||
|  | ||||
|         self.write_debug('%s command line: %s' % (exe, shell_quote(str_args))) | ||||
|         self.write_debug(f'{exe} command line: {shell_quote(str_args)}') | ||||
|   | ||||
| @@ -1,10 +1,9 @@ | ||||
| from __future__ import unicode_literals | ||||
| import time | ||||
| import urllib.parse | ||||
|  | ||||
| from ..downloader import get_suitable_downloader | ||||
| from . import get_suitable_downloader | ||||
| from .fragment import FragmentFD | ||||
|  | ||||
| from ..utils import urljoin | ||||
| from ..utils import update_url_query, urljoin | ||||
|  | ||||
|  | ||||
| class DashSegmentsFD(FragmentFD): | ||||
| @@ -42,24 +41,29 @@ class DashSegmentsFD(FragmentFD): | ||||
|                 self._prepare_and_start_frag_download(ctx, fmt) | ||||
|             ctx['start'] = real_start | ||||
|  | ||||
|             fragments_to_download = self._get_fragments(fmt, ctx) | ||||
|             extra_query = None | ||||
|             extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url') | ||||
|             if extra_param_to_segment_url: | ||||
|                 extra_query = urllib.parse.parse_qs(extra_param_to_segment_url) | ||||
|  | ||||
|             fragments_to_download = self._get_fragments(fmt, ctx, extra_query) | ||||
|  | ||||
|             if real_downloader: | ||||
|                 self.to_screen( | ||||
|                     '[%s] Fragment downloads will be delegated to %s' % (self.FD_NAME, real_downloader.get_basename())) | ||||
|                     f'[{self.FD_NAME}] Fragment downloads will be delegated to {real_downloader.get_basename()}') | ||||
|                 info_dict['fragments'] = list(fragments_to_download) | ||||
|                 fd = real_downloader(self.ydl, self.params) | ||||
|                 return fd.real_download(filename, info_dict) | ||||
|  | ||||
|             args.append([ctx, fragments_to_download, fmt]) | ||||
|  | ||||
|         return self.download_and_append_fragments_multiple(*args) | ||||
|         return self.download_and_append_fragments_multiple(*args, is_fatal=lambda idx: idx == 0) | ||||
|  | ||||
|     def _resolve_fragments(self, fragments, ctx): | ||||
|         fragments = fragments(ctx) if callable(fragments) else fragments | ||||
|         return [next(iter(fragments))] if self.params.get('test') else fragments | ||||
|  | ||||
|     def _get_fragments(self, fmt, ctx): | ||||
|     def _get_fragments(self, fmt, ctx, extra_query): | ||||
|         fragment_base_url = fmt.get('fragment_base_url') | ||||
|         fragments = self._resolve_fragments(fmt['fragments'], ctx) | ||||
|  | ||||
| @@ -72,9 +76,12 @@ class DashSegmentsFD(FragmentFD): | ||||
|             if not fragment_url: | ||||
|                 assert fragment_base_url | ||||
|                 fragment_url = urljoin(fragment_base_url, fragment['path']) | ||||
|             if extra_query: | ||||
|                 fragment_url = update_url_query(fragment_url, extra_query) | ||||
|  | ||||
|             yield { | ||||
|                 'frag_index': frag_index, | ||||
|                 'fragment_count': fragment.get('fragment_count'), | ||||
|                 'index': i, | ||||
|                 'url': fragment_url, | ||||
|             } | ||||
|   | ||||
| @@ -1,39 +1,49 @@ | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| import os.path | ||||
| import enum | ||||
| import json | ||||
| import os | ||||
| import re | ||||
| import subprocess | ||||
| import sys | ||||
| import tempfile | ||||
| import time | ||||
| import uuid | ||||
|  | ||||
| from .fragment import FragmentFD | ||||
| from ..compat import ( | ||||
|     compat_setenv, | ||||
|     compat_str, | ||||
| ) | ||||
| from ..postprocessor.ffmpeg import FFmpegPostProcessor, EXT_TO_OUT_FORMATS | ||||
| from ..compat import functools | ||||
| from ..networking import Request | ||||
| from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor | ||||
| from ..utils import ( | ||||
|     Popen, | ||||
|     RetryManager, | ||||
|     _configuration_args, | ||||
|     check_executable, | ||||
|     classproperty, | ||||
|     cli_bool_option, | ||||
|     cli_option, | ||||
|     cli_valueless_option, | ||||
|     cli_bool_option, | ||||
|     _configuration_args, | ||||
|     determine_ext, | ||||
|     encodeFilename, | ||||
|     encodeArgument, | ||||
|     handle_youtubedl_headers, | ||||
|     check_executable, | ||||
|     Popen, | ||||
|     encodeFilename, | ||||
|     find_available_port, | ||||
|     remove_end, | ||||
|     traverse_obj, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class Features(enum.Enum): | ||||
|     TO_STDOUT = enum.auto() | ||||
|     MULTIPLE_FORMATS = enum.auto() | ||||
|  | ||||
|  | ||||
| class ExternalFD(FragmentFD): | ||||
|     SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps') | ||||
|     can_download_to_stdout = False | ||||
|     SUPPORTED_FEATURES = () | ||||
|     _CAPTURE_STDERR = True | ||||
|  | ||||
|     def real_download(self, filename, info_dict): | ||||
|         self.report_destination(filename) | ||||
|         tmpfilename = self.temp_name(filename) | ||||
|         self._cookies_tempfile = None | ||||
|  | ||||
|         try: | ||||
|             started = time.time() | ||||
| @@ -46,6 +56,9 @@ class ExternalFD(FragmentFD): | ||||
|             # should take place | ||||
|             retval = 0 | ||||
|             self.to_screen('[%s] Interrupted by user' % self.get_basename()) | ||||
|         finally: | ||||
|             if self._cookies_tempfile: | ||||
|                 self.try_remove(self._cookies_tempfile) | ||||
|  | ||||
|         if retval == 0: | ||||
|             status = { | ||||
| @@ -55,7 +68,6 @@ class ExternalFD(FragmentFD): | ||||
|             } | ||||
|             if filename != '-': | ||||
|                 fsize = os.path.getsize(encodeFilename(tmpfilename)) | ||||
|                 self.to_screen('\r[%s] Downloaded %s bytes' % (self.get_basename(), fsize)) | ||||
|                 self.try_rename(tmpfilename, filename) | ||||
|                 status.update({ | ||||
|                     'downloaded_bytes': fsize, | ||||
| @@ -73,23 +85,32 @@ class ExternalFD(FragmentFD): | ||||
|     def get_basename(cls): | ||||
|         return cls.__name__[:-2].lower() | ||||
|  | ||||
|     @property | ||||
|     @classproperty | ||||
|     def EXE_NAME(cls): | ||||
|         return cls.get_basename() | ||||
|  | ||||
|     @functools.cached_property | ||||
|     def exe(self): | ||||
|         return self.get_basename() | ||||
|         return self.EXE_NAME | ||||
|  | ||||
|     @classmethod | ||||
|     def available(cls, path=None): | ||||
|         path = check_executable(path or cls.get_basename(), [cls.AVAILABLE_OPT]) | ||||
|         if path: | ||||
|             cls.exe = path | ||||
|             return path | ||||
|         return False | ||||
|         path = check_executable( | ||||
|             cls.EXE_NAME if path in (None, cls.get_basename()) else path, | ||||
|             [cls.AVAILABLE_OPT]) | ||||
|         if not path: | ||||
|             return False | ||||
|         cls.exe = path | ||||
|         return path | ||||
|  | ||||
|     @classmethod | ||||
|     def supports(cls, info_dict): | ||||
|         return ( | ||||
|             (cls.can_download_to_stdout or not info_dict.get('to_stdout')) | ||||
|             and info_dict['protocol'] in cls.SUPPORTED_PROTOCOLS) | ||||
|         return all(( | ||||
|             not info_dict.get('to_stdout') or Features.TO_STDOUT in cls.SUPPORTED_FEATURES, | ||||
|             '+' not in info_dict['protocol'] or Features.MULTIPLE_FORMATS in cls.SUPPORTED_FEATURES, | ||||
|             not traverse_obj(info_dict, ('hls_aes', ...), 'extra_param_to_segment_url'), | ||||
|             all(proto in cls.SUPPORTED_PROTOCOLS for proto in info_dict['protocol'].split('+')), | ||||
|         )) | ||||
|  | ||||
|     @classmethod | ||||
|     def can_download(cls, info_dict, path=None): | ||||
| @@ -106,9 +127,19 @@ class ExternalFD(FragmentFD): | ||||
|  | ||||
|     def _configuration_args(self, keys=None, *args, **kwargs): | ||||
|         return _configuration_args( | ||||
|             self.get_basename(), self.params.get('external_downloader_args'), self.get_basename(), | ||||
|             self.get_basename(), self.params.get('external_downloader_args'), self.EXE_NAME, | ||||
|             keys, *args, **kwargs) | ||||
|  | ||||
|     def _write_cookies(self): | ||||
|         if not self.ydl.cookiejar.filename: | ||||
|             tmp_cookies = tempfile.NamedTemporaryFile(suffix='.cookies', delete=False) | ||||
|             tmp_cookies.close() | ||||
|             self._cookies_tempfile = tmp_cookies.name | ||||
|             self.to_screen(f'[download] Writing temporary cookies file to "{self._cookies_tempfile}"') | ||||
|         # real_download resets _cookies_tempfile; if it's None then save() will write to cookiejar.filename | ||||
|         self.ydl.cookiejar.save(self._cookies_tempfile) | ||||
|         return self.ydl.cookiejar.filename or self._cookies_tempfile | ||||
|  | ||||
|     def _call_downloader(self, tmpfilename, info_dict): | ||||
|         """ Either overwrite this or implement _make_cmd """ | ||||
|         cmd = [encodeArgument(a) for a in self._make_cmd(tmpfilename, info_dict)] | ||||
| @@ -116,33 +147,27 @@ class ExternalFD(FragmentFD): | ||||
|         self._debug_cmd(cmd) | ||||
|  | ||||
|         if 'fragments' not in info_dict: | ||||
|             p = Popen(cmd, stderr=subprocess.PIPE) | ||||
|             _, stderr = p.communicate_or_kill() | ||||
|             if p.returncode != 0: | ||||
|                 self.to_stderr(stderr.decode('utf-8', 'replace')) | ||||
|             return p.returncode | ||||
|             _, stderr, returncode = self._call_process(cmd, info_dict) | ||||
|             if returncode and stderr: | ||||
|                 self.to_stderr(stderr) | ||||
|             return returncode | ||||
|  | ||||
|         fragment_retries = self.params.get('fragment_retries', 0) | ||||
|         skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True) | ||||
|  | ||||
|         count = 0 | ||||
|         while count <= fragment_retries: | ||||
|             p = Popen(cmd, stderr=subprocess.PIPE) | ||||
|             _, stderr = p.communicate_or_kill() | ||||
|             if p.returncode == 0: | ||||
|         retry_manager = RetryManager(self.params.get('fragment_retries'), self.report_retry, | ||||
|                                      frag_index=None, fatal=not skip_unavailable_fragments) | ||||
|         for retry in retry_manager: | ||||
|             _, stderr, returncode = self._call_process(cmd, info_dict) | ||||
|             if not returncode: | ||||
|                 break | ||||
|             # TODO: Decide whether to retry based on error code | ||||
|             # https://aria2.github.io/manual/en/html/aria2c.html#exit-status | ||||
|             self.to_stderr(stderr.decode('utf-8', 'replace')) | ||||
|             count += 1 | ||||
|             if count <= fragment_retries: | ||||
|                 self.to_screen( | ||||
|                     '[%s] Got error. Retrying fragments (attempt %d of %s)...' | ||||
|                     % (self.get_basename(), count, self.format_retries(fragment_retries))) | ||||
|         if count > fragment_retries: | ||||
|             if not skip_unavailable_fragments: | ||||
|                 self.report_error('Giving up after %s fragment retries' % fragment_retries) | ||||
|                 return -1 | ||||
|             if stderr: | ||||
|                 self.to_stderr(stderr) | ||||
|             retry.error = Exception() | ||||
|             continue | ||||
|         if not skip_unavailable_fragments and retry_manager.error: | ||||
|             return -1 | ||||
|  | ||||
|         decrypt_fragment = self.decrypter(info_dict) | ||||
|         dest, _ = self.sanitize_open(tmpfilename, 'wb') | ||||
| @@ -150,7 +175,7 @@ class ExternalFD(FragmentFD): | ||||
|             fragment_filename = '%s-Frag%d' % (tmpfilename, frag_index) | ||||
|             try: | ||||
|                 src, _ = self.sanitize_open(fragment_filename, 'rb') | ||||
|             except IOError as err: | ||||
|             except OSError as err: | ||||
|                 if skip_unavailable_fragments and frag_index > 1: | ||||
|                     self.report_skip_fragment(frag_index, err) | ||||
|                     continue | ||||
| @@ -159,20 +184,27 @@ class ExternalFD(FragmentFD): | ||||
|             dest.write(decrypt_fragment(fragment, src.read())) | ||||
|             src.close() | ||||
|             if not self.params.get('keep_fragments', False): | ||||
|                 os.remove(encodeFilename(fragment_filename)) | ||||
|                 self.try_remove(encodeFilename(fragment_filename)) | ||||
|         dest.close() | ||||
|         os.remove(encodeFilename('%s.frag.urls' % tmpfilename)) | ||||
|         self.try_remove(encodeFilename('%s.frag.urls' % tmpfilename)) | ||||
|         return 0 | ||||
|  | ||||
|     def _call_process(self, cmd, info_dict): | ||||
|         return Popen.run(cmd, text=True, stderr=subprocess.PIPE if self._CAPTURE_STDERR else None) | ||||
|  | ||||
|  | ||||
| class CurlFD(ExternalFD): | ||||
|     AVAILABLE_OPT = '-V' | ||||
|     _CAPTURE_STDERR = False  # curl writes the progress to stderr | ||||
|  | ||||
|     def _make_cmd(self, tmpfilename, info_dict): | ||||
|         cmd = [self.exe, '--location', '-o', tmpfilename] | ||||
|         cmd = [self.exe, '--location', '-o', tmpfilename, '--compressed'] | ||||
|         cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url']) | ||||
|         if cookie_header: | ||||
|             cmd += ['--cookie', cookie_header] | ||||
|         if info_dict.get('http_headers') is not None: | ||||
|             for key, val in info_dict['http_headers'].items(): | ||||
|                 cmd += ['--header', '%s: %s' % (key, val)] | ||||
|                 cmd += ['--header', f'{key}: {val}'] | ||||
|  | ||||
|         cmd += self._bool_option('--continue-at', 'continuedl', '-', '0') | ||||
|         cmd += self._valueless_option('--silent', 'noprogress') | ||||
| @@ -191,16 +223,6 @@ class CurlFD(ExternalFD): | ||||
|         cmd += ['--', info_dict['url']] | ||||
|         return cmd | ||||
|  | ||||
|     def _call_downloader(self, tmpfilename, info_dict): | ||||
|         cmd = [encodeArgument(a) for a in self._make_cmd(tmpfilename, info_dict)] | ||||
|  | ||||
|         self._debug_cmd(cmd) | ||||
|  | ||||
|         # curl writes the progress to stderr so don't capture it. | ||||
|         p = Popen(cmd) | ||||
|         p.communicate_or_kill() | ||||
|         return p.returncode | ||||
|  | ||||
|  | ||||
| class AxelFD(ExternalFD): | ||||
|     AVAILABLE_OPT = '-V' | ||||
| @@ -209,7 +231,10 @@ class AxelFD(ExternalFD): | ||||
|         cmd = [self.exe, '-o', tmpfilename] | ||||
|         if info_dict.get('http_headers') is not None: | ||||
|             for key, val in info_dict['http_headers'].items(): | ||||
|                 cmd += ['-H', '%s: %s' % (key, val)] | ||||
|                 cmd += ['-H', f'{key}: {val}'] | ||||
|         cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url']) | ||||
|         if cookie_header: | ||||
|             cmd += ['-H', f'Cookie: {cookie_header}', '--max-redirect=0'] | ||||
|         cmd += self._configuration_args() | ||||
|         cmd += ['--', info_dict['url']] | ||||
|         return cmd | ||||
| @@ -219,10 +244,12 @@ class WgetFD(ExternalFD): | ||||
|     AVAILABLE_OPT = '--version' | ||||
|  | ||||
|     def _make_cmd(self, tmpfilename, info_dict): | ||||
|         cmd = [self.exe, '-O', tmpfilename, '-nv', '--no-cookies'] | ||||
|         cmd = [self.exe, '-O', tmpfilename, '-nv', '--compression=auto'] | ||||
|         if self.ydl.cookiejar.get_cookie_header(info_dict['url']): | ||||
|             cmd += ['--load-cookies', self._write_cookies()] | ||||
|         if info_dict.get('http_headers') is not None: | ||||
|             for key, val in info_dict['http_headers'].items(): | ||||
|                 cmd += ['--header', '%s: %s' % (key, val)] | ||||
|                 cmd += ['--header', f'{key}: {val}'] | ||||
|         cmd += self._option('--limit-rate', 'ratelimit') | ||||
|         retry = self._option('--tries', 'retries') | ||||
|         if len(retry) == 2: | ||||
| @@ -230,7 +257,10 @@ class WgetFD(ExternalFD): | ||||
|                 retry[1] = '0' | ||||
|             cmd += retry | ||||
|         cmd += self._option('--bind-address', 'source_address') | ||||
|         cmd += self._option('--proxy', 'proxy') | ||||
|         proxy = self.params.get('proxy') | ||||
|         if proxy: | ||||
|             for var in ('http_proxy', 'https_proxy'): | ||||
|                 cmd += ['--execute', f'{var}={proxy}'] | ||||
|         cmd += self._valueless_option('--no-check-certificate', 'nocheckcertificate') | ||||
|         cmd += self._configuration_args() | ||||
|         cmd += ['--', info_dict['url']] | ||||
| @@ -250,18 +280,33 @@ class Aria2cFD(ExternalFD): | ||||
|         check_results = (not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES) | ||||
|         return all(check_results) | ||||
|  | ||||
|     @staticmethod | ||||
|     def _aria2c_filename(fn): | ||||
|         return fn if os.path.isabs(fn) else f'.{os.path.sep}{fn}' | ||||
|  | ||||
|     def _call_downloader(self, tmpfilename, info_dict): | ||||
|         # FIXME: Disabled due to https://github.com/yt-dlp/yt-dlp/issues/5931 | ||||
|         if False and 'no-external-downloader-progress' not in self.params.get('compat_opts', []): | ||||
|             info_dict['__rpc'] = { | ||||
|                 'port': find_available_port() or 19190, | ||||
|                 'secret': str(uuid.uuid4()), | ||||
|             } | ||||
|         return super()._call_downloader(tmpfilename, info_dict) | ||||
|  | ||||
|     def _make_cmd(self, tmpfilename, info_dict): | ||||
|         cmd = [self.exe, '-c', | ||||
|         cmd = [self.exe, '-c', '--no-conf', | ||||
|                '--console-log-level=warn', '--summary-interval=0', '--download-result=hide', | ||||
|                '--file-allocation=none', '-x16', '-j16', '-s16'] | ||||
|                '--http-accept-gzip=true', '--file-allocation=none', '-x16', '-j16', '-s16'] | ||||
|         if 'fragments' in info_dict: | ||||
|             cmd += ['--allow-overwrite=true', '--allow-piece-length-change=true'] | ||||
|         else: | ||||
|             cmd += ['--min-split-size', '1M'] | ||||
|  | ||||
|         if self.ydl.cookiejar.get_cookie_header(info_dict['url']): | ||||
|             cmd += [f'--load-cookies={self._write_cookies()}'] | ||||
|         if info_dict.get('http_headers') is not None: | ||||
|             for key, val in info_dict['http_headers'].items(): | ||||
|                 cmd += ['--header', '%s: %s' % (key, val)] | ||||
|                 cmd += ['--header', f'{key}: {val}'] | ||||
|         cmd += self._option('--max-overall-download-limit', 'ratelimit') | ||||
|         cmd += self._option('--interface', 'source_address') | ||||
|         cmd += self._option('--all-proxy', 'proxy') | ||||
| @@ -270,6 +315,12 @@ class Aria2cFD(ExternalFD): | ||||
|         cmd += self._bool_option('--show-console-readout', 'noprogress', 'false', 'true', '=') | ||||
|         cmd += self._configuration_args() | ||||
|  | ||||
|         if '__rpc' in info_dict: | ||||
|             cmd += [ | ||||
|                 '--enable-rpc', | ||||
|                 f'--rpc-listen-port={info_dict["__rpc"]["port"]}', | ||||
|                 f'--rpc-secret={info_dict["__rpc"]["secret"]}'] | ||||
|  | ||||
|         # aria2c strips out spaces from the beginning/end of filenames and paths. | ||||
|         # We work around this issue by adding a "./" to the beginning of the | ||||
|         # filename and relative path, and adding a "/" at the end of the path. | ||||
| @@ -278,11 +329,9 @@ class Aria2cFD(ExternalFD): | ||||
|         # https://github.com/aria2/aria2/issues/1373 | ||||
|         dn = os.path.dirname(tmpfilename) | ||||
|         if dn: | ||||
|             if not os.path.isabs(dn): | ||||
|                 dn = '.%s%s' % (os.path.sep, dn) | ||||
|             cmd += ['--dir', dn + os.path.sep] | ||||
|             cmd += ['--dir', self._aria2c_filename(dn) + os.path.sep] | ||||
|         if 'fragments' not in info_dict: | ||||
|             cmd += ['--out', '.%s%s' % (os.path.sep, os.path.basename(tmpfilename))] | ||||
|             cmd += ['--out', self._aria2c_filename(os.path.basename(tmpfilename))] | ||||
|         cmd += ['--auto-file-renaming=false'] | ||||
|  | ||||
|         if 'fragments' in info_dict: | ||||
| @@ -291,35 +340,121 @@ class Aria2cFD(ExternalFD): | ||||
|             url_list = [] | ||||
|             for frag_index, fragment in enumerate(info_dict['fragments']): | ||||
|                 fragment_filename = '%s-Frag%d' % (os.path.basename(tmpfilename), frag_index) | ||||
|                 url_list.append('%s\n\tout=%s' % (fragment['url'], fragment_filename)) | ||||
|                 url_list.append('%s\n\tout=%s' % (fragment['url'], self._aria2c_filename(fragment_filename))) | ||||
|             stream, _ = self.sanitize_open(url_list_file, 'wb') | ||||
|             stream.write('\n'.join(url_list).encode('utf-8')) | ||||
|             stream.write('\n'.join(url_list).encode()) | ||||
|             stream.close() | ||||
|             cmd += ['-i', url_list_file] | ||||
|             cmd += ['-i', self._aria2c_filename(url_list_file)] | ||||
|         else: | ||||
|             cmd += ['--', info_dict['url']] | ||||
|         return cmd | ||||
|  | ||||
|     def aria2c_rpc(self, rpc_port, rpc_secret, method, params=()): | ||||
|         # Does not actually need to be UUID, just unique | ||||
|         sanitycheck = str(uuid.uuid4()) | ||||
|         d = json.dumps({ | ||||
|             'jsonrpc': '2.0', | ||||
|             'id': sanitycheck, | ||||
|             'method': method, | ||||
|             'params': [f'token:{rpc_secret}', *params], | ||||
|         }).encode('utf-8') | ||||
|         request = Request( | ||||
|             f'http://localhost:{rpc_port}/jsonrpc', | ||||
|             data=d, headers={ | ||||
|                 'Content-Type': 'application/json', | ||||
|                 'Content-Length': f'{len(d)}', | ||||
|             }, proxies={'all': None}) | ||||
|         with self.ydl.urlopen(request) as r: | ||||
|             resp = json.load(r) | ||||
|         assert resp.get('id') == sanitycheck, 'Something went wrong with RPC server' | ||||
|         return resp['result'] | ||||
|  | ||||
|     def _call_process(self, cmd, info_dict): | ||||
|         if '__rpc' not in info_dict: | ||||
|             return super()._call_process(cmd, info_dict) | ||||
|  | ||||
|         send_rpc = functools.partial(self.aria2c_rpc, info_dict['__rpc']['port'], info_dict['__rpc']['secret']) | ||||
|         started = time.time() | ||||
|  | ||||
|         fragmented = 'fragments' in info_dict | ||||
|         frag_count = len(info_dict['fragments']) if fragmented else 1 | ||||
|         status = { | ||||
|             'filename': info_dict.get('_filename'), | ||||
|             'status': 'downloading', | ||||
|             'elapsed': 0, | ||||
|             'downloaded_bytes': 0, | ||||
|             'fragment_count': frag_count if fragmented else None, | ||||
|             'fragment_index': 0 if fragmented else None, | ||||
|         } | ||||
|         self._hook_progress(status, info_dict) | ||||
|  | ||||
|         def get_stat(key, *obj, average=False): | ||||
|             val = tuple(filter(None, map(float, traverse_obj(obj, (..., ..., key))))) or [0] | ||||
|             return sum(val) / (len(val) if average else 1) | ||||
|  | ||||
|         with Popen(cmd, text=True, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE) as p: | ||||
|             # Add a small sleep so that RPC client can receive response, | ||||
|             # or the connection stalls infinitely | ||||
|             time.sleep(0.2) | ||||
|             retval = p.poll() | ||||
|             while retval is None: | ||||
|                 # We don't use tellStatus as we won't know the GID without reading stdout | ||||
|                 # Ref: https://aria2.github.io/manual/en/html/aria2c.html#aria2.tellActive | ||||
|                 active = send_rpc('aria2.tellActive') | ||||
|                 completed = send_rpc('aria2.tellStopped', [0, frag_count]) | ||||
|  | ||||
|                 downloaded = get_stat('totalLength', completed) + get_stat('completedLength', active) | ||||
|                 speed = get_stat('downloadSpeed', active) | ||||
|                 total = frag_count * get_stat('totalLength', active, completed, average=True) | ||||
|                 if total < downloaded: | ||||
|                     total = None | ||||
|  | ||||
|                 status.update({ | ||||
|                     'downloaded_bytes': int(downloaded), | ||||
|                     'speed': speed, | ||||
|                     'total_bytes': None if fragmented else total, | ||||
|                     'total_bytes_estimate': total, | ||||
|                     'eta': (total - downloaded) / (speed or 1), | ||||
|                     'fragment_index': min(frag_count, len(completed) + 1) if fragmented else None, | ||||
|                     'elapsed': time.time() - started | ||||
|                 }) | ||||
|                 self._hook_progress(status, info_dict) | ||||
|  | ||||
|                 if not active and len(completed) >= frag_count: | ||||
|                     send_rpc('aria2.shutdown') | ||||
|                     retval = p.wait() | ||||
|                     break | ||||
|  | ||||
|                 time.sleep(0.1) | ||||
|                 retval = p.poll() | ||||
|  | ||||
|             return '', p.stderr.read(), retval | ||||
|  | ||||
|  | ||||
| class HttpieFD(ExternalFD): | ||||
|     AVAILABLE_OPT = '--version' | ||||
|  | ||||
|     @classmethod | ||||
|     def available(cls, path=None): | ||||
|         return super().available(path or 'http') | ||||
|     EXE_NAME = 'http' | ||||
|  | ||||
|     def _make_cmd(self, tmpfilename, info_dict): | ||||
|         cmd = ['http', '--download', '--output', tmpfilename, info_dict['url']] | ||||
|  | ||||
|         if info_dict.get('http_headers') is not None: | ||||
|             for key, val in info_dict['http_headers'].items(): | ||||
|                 cmd += ['%s:%s' % (key, val)] | ||||
|                 cmd += [f'{key}:{val}'] | ||||
|  | ||||
|         # httpie 3.1.0+ removes the Cookie header on redirect, so this should be safe for now. [1] | ||||
|         # If we ever need cookie handling for redirects, we can export the cookiejar into a session. [2] | ||||
|         # 1: https://github.com/httpie/httpie/security/advisories/GHSA-9w4w-cpc8-h2fq | ||||
|         # 2: https://httpie.io/docs/cli/sessions | ||||
|         cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url']) | ||||
|         if cookie_header: | ||||
|             cmd += [f'Cookie:{cookie_header}'] | ||||
|         return cmd | ||||
|  | ||||
|  | ||||
| class FFmpegFD(ExternalFD): | ||||
|     SUPPORTED_PROTOCOLS = ('http', 'https', 'ftp', 'ftps', 'm3u8', 'm3u8_native', 'rtsp', 'rtmp', 'rtmp_ffmpeg', 'mms', 'http_dash_segments') | ||||
|     can_download_to_stdout = True | ||||
|     SUPPORTED_FEATURES = (Features.TO_STDOUT, Features.MULTIPLE_FORMATS) | ||||
|  | ||||
|     @classmethod | ||||
|     def available(cls, path=None): | ||||
| @@ -327,10 +462,6 @@ class FFmpegFD(ExternalFD): | ||||
|         # Fixme: This may be wrong when --ffmpeg-location is used | ||||
|         return FFmpegPostProcessor().available | ||||
|  | ||||
|     @classmethod | ||||
|     def supports(cls, info_dict): | ||||
|         return all(proto in cls.SUPPORTED_PROTOCOLS for proto in info_dict['protocol'].split('+')) | ||||
|  | ||||
|     def on_process_started(self, proc, stdin): | ||||
|         """ Override this in subclasses  """ | ||||
|         pass | ||||
| @@ -345,7 +476,6 @@ class FFmpegFD(ExternalFD): | ||||
|             and cls.can_download(info_dict)) | ||||
|  | ||||
|     def _call_downloader(self, tmpfilename, info_dict): | ||||
|         urls = [f['url'] for f in info_dict.get('requested_formats', [])] or [info_dict['url']] | ||||
|         ffpp = FFmpegPostProcessor(downloader=self) | ||||
|         if not ffpp.available: | ||||
|             self.report_error('m3u8 download detected but ffmpeg could not be found. Please install') | ||||
| @@ -361,9 +491,11 @@ class FFmpegFD(ExternalFD): | ||||
|         if not self.params.get('verbose'): | ||||
|             args += ['-hide_banner'] | ||||
|  | ||||
|         args += info_dict.get('_ffmpeg_args', []) | ||||
|         args += traverse_obj(info_dict, ('downloader_options', 'ffmpeg_args'), default=[]) | ||||
|  | ||||
|         # This option exists only for compatibility. Extractors should use `_ffmpeg_args` instead | ||||
|         # These exists only for compatibility. Extractors should use | ||||
|         # info_dict['downloader_options']['ffmpeg_args'] instead | ||||
|         args += info_dict.get('_ffmpeg_args') or [] | ||||
|         seekable = info_dict.get('_seekable') | ||||
|         if seekable is not None: | ||||
|             # setting -seekable prevents ffmpeg from guessing if the server | ||||
| @@ -373,21 +505,6 @@ class FFmpegFD(ExternalFD): | ||||
|             # http://trac.ffmpeg.org/ticket/6125#comment:10 | ||||
|             args += ['-seekable', '1' if seekable else '0'] | ||||
|  | ||||
|         # start_time = info_dict.get('start_time') or 0 | ||||
|         # if start_time: | ||||
|         #     args += ['-ss', compat_str(start_time)] | ||||
|         # end_time = info_dict.get('end_time') | ||||
|         # if end_time: | ||||
|         #     args += ['-t', compat_str(end_time - start_time)] | ||||
|  | ||||
|         if info_dict.get('http_headers') is not None and re.match(r'^https?://', urls[0]): | ||||
|             # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv: | ||||
|             # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header. | ||||
|             headers = handle_youtubedl_headers(info_dict['http_headers']) | ||||
|             args += [ | ||||
|                 '-headers', | ||||
|                 ''.join('%s: %s\r\n' % (key, val) for key, val in headers.items())] | ||||
|  | ||||
|         env = None | ||||
|         proxy = self.params.get('proxy') | ||||
|         if proxy: | ||||
| @@ -404,8 +521,8 @@ class FFmpegFD(ExternalFD): | ||||
|             # We could switch to the following code if we are able to detect version properly | ||||
|             # args += ['-http_proxy', proxy] | ||||
|             env = os.environ.copy() | ||||
|             compat_setenv('HTTP_PROXY', proxy, env=env) | ||||
|             compat_setenv('http_proxy', proxy, env=env) | ||||
|             env['HTTP_PROXY'] = proxy | ||||
|             env['http_proxy'] = proxy | ||||
|  | ||||
|         protocol = info_dict.get('protocol') | ||||
|  | ||||
| @@ -435,20 +552,41 @@ class FFmpegFD(ExternalFD): | ||||
|             if isinstance(conn, list): | ||||
|                 for entry in conn: | ||||
|                     args += ['-rtmp_conn', entry] | ||||
|             elif isinstance(conn, compat_str): | ||||
|             elif isinstance(conn, str): | ||||
|                 args += ['-rtmp_conn', conn] | ||||
|  | ||||
|         for i, url in enumerate(urls): | ||||
|             args += self._configuration_args((f'_i{i + 1}', '_i')) + ['-i', url] | ||||
|         start_time, end_time = info_dict.get('section_start') or 0, info_dict.get('section_end') | ||||
|  | ||||
|         selected_formats = info_dict.get('requested_formats') or [info_dict] | ||||
|         for i, fmt in enumerate(selected_formats): | ||||
|             is_http = re.match(r'^https?://', fmt['url']) | ||||
|             cookies = self.ydl.cookiejar.get_cookies_for_url(fmt['url']) if is_http else [] | ||||
|             if cookies: | ||||
|                 args.extend(['-cookies', ''.join( | ||||
|                     f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n' | ||||
|                     for cookie in cookies)]) | ||||
|             if fmt.get('http_headers') and is_http: | ||||
|                 # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv: | ||||
|                 # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header. | ||||
|                 args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())]) | ||||
|  | ||||
|             if start_time: | ||||
|                 args += ['-ss', str(start_time)] | ||||
|             if end_time: | ||||
|                 args += ['-t', str(end_time - start_time)] | ||||
|  | ||||
|             args += self._configuration_args((f'_i{i + 1}', '_i')) + ['-i', fmt['url']] | ||||
|  | ||||
|         if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'): | ||||
|             args += ['-c', 'copy'] | ||||
|  | ||||
|         args += ['-c', 'copy'] | ||||
|         if info_dict.get('requested_formats') or protocol == 'http_dash_segments': | ||||
|             for (i, fmt) in enumerate(info_dict.get('requested_formats') or [info_dict]): | ||||
|             for i, fmt in enumerate(selected_formats): | ||||
|                 stream_number = fmt.get('manifest_stream_number', 0) | ||||
|                 args.extend(['-map', f'{i}:{stream_number}']) | ||||
|  | ||||
|         if self.params.get('test', False): | ||||
|             args += ['-fs', compat_str(self._TEST_FILE_SIZE)] | ||||
|             args += ['-fs', str(self._TEST_FILE_SIZE)] | ||||
|  | ||||
|         ext = info_dict['ext'] | ||||
|         if protocol in ('m3u8', 'm3u8_native'): | ||||
| @@ -483,35 +621,35 @@ class FFmpegFD(ExternalFD): | ||||
|         args.append(encodeFilename(ffpp._ffmpeg_filename_argument(tmpfilename), True)) | ||||
|         self._debug_cmd(args) | ||||
|  | ||||
|         proc = Popen(args, stdin=subprocess.PIPE, env=env) | ||||
|         if url in ('-', 'pipe:'): | ||||
|             self.on_process_started(proc, proc.stdin) | ||||
|         try: | ||||
|             retval = proc.wait() | ||||
|         except BaseException as e: | ||||
|             # subprocces.run would send the SIGKILL signal to ffmpeg and the | ||||
|             # mp4 file couldn't be played, but if we ask ffmpeg to quit it | ||||
|             # produces a file that is playable (this is mostly useful for live | ||||
|             # streams). Note that Windows is not affected and produces playable | ||||
|             # files (see https://github.com/ytdl-org/youtube-dl/issues/8300). | ||||
|             if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and url not in ('-', 'pipe:'): | ||||
|                 proc.communicate_or_kill(b'q') | ||||
|             else: | ||||
|                 proc.kill() | ||||
|                 proc.wait() | ||||
|             raise | ||||
|         return retval | ||||
|         piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats) | ||||
|         with Popen(args, stdin=subprocess.PIPE, env=env) as proc: | ||||
|             if piped: | ||||
|                 self.on_process_started(proc, proc.stdin) | ||||
|             try: | ||||
|                 retval = proc.wait() | ||||
|             except BaseException as e: | ||||
|                 # subprocces.run would send the SIGKILL signal to ffmpeg and the | ||||
|                 # mp4 file couldn't be played, but if we ask ffmpeg to quit it | ||||
|                 # produces a file that is playable (this is mostly useful for live | ||||
|                 # streams). Note that Windows is not affected and produces playable | ||||
|                 # files (see https://github.com/ytdl-org/youtube-dl/issues/8300). | ||||
|                 if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and not piped: | ||||
|                     proc.communicate_or_kill(b'q') | ||||
|                 else: | ||||
|                     proc.kill(timeout=None) | ||||
|                 raise | ||||
|             return retval | ||||
|  | ||||
|  | ||||
class AVconvFD(FFmpegFD):
    # Behaves exactly like FFmpegFD — adds nothing of its own.
    # NOTE(review): presumably kept as a distinct class so the name
    # "avconv" resolves to this downloader via the *FD registry — confirm
    # against get_basename()/_BY_NAME before relying on it.
    pass
|  | ||||
|  | ||||
| _BY_NAME = dict( | ||||
|     (klass.get_basename(), klass) | ||||
| _BY_NAME = { | ||||
|     klass.get_basename(): klass | ||||
|     for name, klass in globals().items() | ||||
|     if name.endswith('FD') and name not in ('ExternalFD', 'FragmentFD') | ||||
| ) | ||||
| } | ||||
|  | ||||
|  | ||||
| def list_external_downloaders(): | ||||
| @@ -519,8 +657,8 @@ def list_external_downloaders(): | ||||
|  | ||||
|  | ||||
| def get_external_downloader(external_downloader): | ||||
|     """ Given the name of the executable, see whether we support the given | ||||
|         downloader . """ | ||||
|     # Drop .exe extension on Windows | ||||
|     """ Given the name of the executable, see whether we support the given downloader """ | ||||
|     bn = os.path.splitext(os.path.basename(external_downloader))[0] | ||||
|     return _BY_NAME.get(bn) | ||||
|     return _BY_NAME.get(bn) or next(( | ||||
|         klass for klass in _BY_NAME.values() if klass.EXE_NAME in bn | ||||
|     ), None) | ||||
|   | ||||
| @@ -1,23 +1,14 @@ | ||||
| from __future__ import division, unicode_literals | ||||
|  | ||||
| import base64 | ||||
| import io | ||||
| import itertools | ||||
| import struct | ||||
| import time | ||||
| import urllib.parse | ||||
|  | ||||
| from .fragment import FragmentFD | ||||
| from ..compat import ( | ||||
|     compat_b64decode, | ||||
|     compat_etree_fromstring, | ||||
|     compat_urlparse, | ||||
|     compat_urllib_error, | ||||
|     compat_urllib_parse_urlparse, | ||||
|     compat_struct_pack, | ||||
|     compat_struct_unpack, | ||||
| ) | ||||
| from ..utils import ( | ||||
|     fix_xml_ampersands, | ||||
|     xpath_text, | ||||
| ) | ||||
| from ..compat import compat_etree_fromstring | ||||
| from ..networking.exceptions import HTTPError | ||||
| from ..utils import fix_xml_ampersands, xpath_text | ||||
|  | ||||
|  | ||||
| class DataTruncatedError(Exception): | ||||
| @@ -40,13 +31,13 @@ class FlvReader(io.BytesIO): | ||||
|  | ||||
|     # Utility functions for reading numbers and strings | ||||
|     def read_unsigned_long_long(self): | ||||
|         return compat_struct_unpack('!Q', self.read_bytes(8))[0] | ||||
|         return struct.unpack('!Q', self.read_bytes(8))[0] | ||||
|  | ||||
|     def read_unsigned_int(self): | ||||
|         return compat_struct_unpack('!I', self.read_bytes(4))[0] | ||||
|         return struct.unpack('!I', self.read_bytes(4))[0] | ||||
|  | ||||
|     def read_unsigned_char(self): | ||||
|         return compat_struct_unpack('!B', self.read_bytes(1))[0] | ||||
|         return struct.unpack('!B', self.read_bytes(1))[0] | ||||
|  | ||||
|     def read_string(self): | ||||
|         res = b'' | ||||
| @@ -193,7 +184,7 @@ def build_fragments_list(boot_info): | ||||
|     first_frag_number = fragment_run_entry_table[0]['first'] | ||||
|     fragments_counter = itertools.count(first_frag_number) | ||||
|     for segment, fragments_count in segment_run_table['segment_run']: | ||||
|         # In some live HDS streams (for example Rai), `fragments_count` is | ||||
|         # In some live HDS streams (e.g. Rai), `fragments_count` is | ||||
|         # abnormal and causing out-of-memory errors. It's OK to change the | ||||
|         # number of fragments for live streams as they are updated periodically | ||||
|         if fragments_count == 4294967295 and boot_info['live']: | ||||
| @@ -208,11 +199,11 @@ def build_fragments_list(boot_info): | ||||
|  | ||||
|  | ||||
| def write_unsigned_int(stream, val): | ||||
|     stream.write(compat_struct_pack('!I', val)) | ||||
|     stream.write(struct.pack('!I', val)) | ||||
|  | ||||
|  | ||||
| def write_unsigned_int_24(stream, val): | ||||
|     stream.write(compat_struct_pack('!I', val)[1:]) | ||||
|     stream.write(struct.pack('!I', val)[1:]) | ||||
|  | ||||
|  | ||||
| def write_flv_header(stream): | ||||
| @@ -261,8 +252,6 @@ class F4mFD(FragmentFD): | ||||
|     A downloader for f4m manifests or AdobeHDS. | ||||
|     """ | ||||
|  | ||||
|     FD_NAME = 'f4m' | ||||
|  | ||||
|     def _get_unencrypted_media(self, doc): | ||||
|         media = doc.findall(_add_ns('media')) | ||||
|         if not media: | ||||
| @@ -308,12 +297,12 @@ class F4mFD(FragmentFD): | ||||
|         # 1. http://live-1-1.rutube.ru/stream/1024/HDS/SD/C2NKsS85HQNckgn5HdEmOQ/1454167650/S-s604419906/move/four/dirs/upper/1024-576p.f4m | ||||
|         bootstrap_url = node.get('url') | ||||
|         if bootstrap_url: | ||||
|             bootstrap_url = compat_urlparse.urljoin( | ||||
|             bootstrap_url = urllib.parse.urljoin( | ||||
|                 base_url, bootstrap_url) | ||||
|             boot_info = self._get_bootstrap_from_url(bootstrap_url) | ||||
|         else: | ||||
|             bootstrap_url = None | ||||
|             bootstrap = compat_b64decode(node.text) | ||||
|             bootstrap = base64.b64decode(node.text) | ||||
|             boot_info = read_bootstrap_info(bootstrap) | ||||
|         return boot_info, bootstrap_url | ||||
|  | ||||
| @@ -323,7 +312,7 @@ class F4mFD(FragmentFD): | ||||
|         self.to_screen('[%s] Downloading f4m manifest' % self.FD_NAME) | ||||
|  | ||||
|         urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url)) | ||||
|         man_url = urlh.geturl() | ||||
|         man_url = urlh.url | ||||
|         # Some manifests may be malformed, e.g. prosiebensat1 generated manifests | ||||
|         # (see https://github.com/ytdl-org/youtube-dl/issues/6215#issuecomment-121704244 | ||||
|         # and https://github.com/ytdl-org/youtube-dl/issues/7823) | ||||
| @@ -343,14 +332,14 @@ class F4mFD(FragmentFD): | ||||
|         # Prefer baseURL for relative URLs as per 11.2 of F4M 3.0 spec. | ||||
|         man_base_url = get_base_url(doc) or man_url | ||||
|  | ||||
|         base_url = compat_urlparse.urljoin(man_base_url, media.attrib['url']) | ||||
|         base_url = urllib.parse.urljoin(man_base_url, media.attrib['url']) | ||||
|         bootstrap_node = doc.find(_add_ns('bootstrapInfo')) | ||||
|         boot_info, bootstrap_url = self._parse_bootstrap_node( | ||||
|             bootstrap_node, man_base_url) | ||||
|         live = boot_info['live'] | ||||
|         metadata_node = media.find(_add_ns('metadata')) | ||||
|         if metadata_node is not None: | ||||
|             metadata = compat_b64decode(metadata_node.text) | ||||
|             metadata = base64.b64decode(metadata_node.text) | ||||
|         else: | ||||
|             metadata = None | ||||
|  | ||||
| @@ -378,7 +367,7 @@ class F4mFD(FragmentFD): | ||||
|             if not live: | ||||
|                 write_metadata_tag(dest_stream, metadata) | ||||
|  | ||||
|         base_url_parsed = compat_urllib_parse_urlparse(base_url) | ||||
|         base_url_parsed = urllib.parse.urlparse(base_url) | ||||
|  | ||||
|         self._start_frag_download(ctx, info_dict) | ||||
|  | ||||
| @@ -398,9 +387,10 @@ class F4mFD(FragmentFD): | ||||
|                 query.append(info_dict['extra_param_to_segment_url']) | ||||
|             url_parsed = base_url_parsed._replace(path=base_url_parsed.path + name, query='&'.join(query)) | ||||
|             try: | ||||
|                 success, down_data = self._download_fragment(ctx, url_parsed.geturl(), info_dict) | ||||
|                 success = self._download_fragment(ctx, url_parsed.geturl(), info_dict) | ||||
|                 if not success: | ||||
|                     return False | ||||
|                 down_data = self._read_fragment(ctx) | ||||
|                 reader = FlvReader(down_data) | ||||
|                 while True: | ||||
|                     try: | ||||
| @@ -417,8 +407,8 @@ class F4mFD(FragmentFD): | ||||
|                     if box_type == b'mdat': | ||||
|                         self._append_fragment(ctx, box_data) | ||||
|                         break | ||||
|             except (compat_urllib_error.HTTPError, ) as err: | ||||
|                 if live and (err.code == 404 or err.code == 410): | ||||
|             except HTTPError as err: | ||||
|                 if live and (err.status == 404 or err.status == 410): | ||||
|                     # We didn't keep up with the live window. Continue | ||||
|                     # with the next available fragment. | ||||
|                     msg = 'Fragment %d unavailable' % frag_i | ||||
| @@ -434,6 +424,4 @@ class F4mFD(FragmentFD): | ||||
|                     msg = 'Missed %d fragments' % (fragments_list[0][1] - (frag_i + 1)) | ||||
|                     self.report_warning(msg) | ||||
|  | ||||
|         self._finish_frag_download(ctx, info_dict) | ||||
|  | ||||
|         return True | ||||
|         return self._finish_frag_download(ctx, info_dict) | ||||
|   | ||||
							
								
								
									
										46
									
								
								plugins/youtube_download/yt_dlp/downloader/fc2.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										46
									
								
								plugins/youtube_download/yt_dlp/downloader/fc2.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,46 @@ | ||||
| import threading | ||||
|  | ||||
| from .common import FileDownloader | ||||
| from .external import FFmpegFD | ||||
|  | ||||
|  | ||||
class FC2LiveFD(FileDownloader):
    """
    Downloads FC2 live without being stopped.

    Keeps the control websocket (``info_dict['ws']``) alive by sending a
    periodic ``heartbeat`` message every 30 seconds on a timer, while the
    actual media download is delegated to FFmpegFD over the
    ``live_ffmpeg`` protocol.

    Note, this is not a part of public API, and will be removed without
    notice. DO NOT USE.
    """

    def real_download(self, filename, info_dict):
        ws = info_dict['ws']

        heartbeat_lock = threading.Lock()
        # heartbeat_state = [pending Timer, message sequence number];
        # a negative sequence number signals the heartbeat loop to stop.
        heartbeat_state = [None, 1]

        def heartbeat():
            if heartbeat_state[1] < 0:
                return

            try:
                heartbeat_state[1] += 1
                ws.send('{"name":"heartbeat","arguments":{},"id":%d}' % heartbeat_state[1])
            except Exception:
                # Best-effort: a single failed heartbeat is only reported,
                # the timer chain below keeps retrying.
                self.to_screen('[fc2:live] Heartbeat failed')

            with heartbeat_lock:
                heartbeat_state[0] = threading.Timer(30, heartbeat)
                # Use the public `daemon` property rather than poking the
                # private `_daemonic` attribute; it is settable before
                # start() and keeps the timer from blocking shutdown.
                heartbeat_state[0].daemon = True
                heartbeat_state[0].start()

        heartbeat()

        new_info_dict = info_dict.copy()
        new_info_dict.update({
            'ws': None,  # drop the live websocket before handing off to ffmpeg
            'protocol': 'live_ffmpeg',
        })
        try:
            return FFmpegFD(self.ydl, self.params or {}).download(filename, new_info_dict)
        finally:
            # stop heartbeating: a negative sequence number makes any
            # already-scheduled timer callback return immediately.
            heartbeat_state[1] = -1
| @@ -1,40 +1,26 @@ | ||||
| from __future__ import division, unicode_literals | ||||
|  | ||||
| import http.client | ||||
| import concurrent.futures | ||||
| import contextlib | ||||
| import json | ||||
| import math | ||||
| import os | ||||
| import struct | ||||
| import time | ||||
|  | ||||
| try: | ||||
|     import concurrent.futures | ||||
|     can_threaded_download = True | ||||
| except ImportError: | ||||
|     can_threaded_download = False | ||||
|  | ||||
| from .common import FileDownloader | ||||
| from .http import HttpFD | ||||
| from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7 | ||||
| from ..compat import ( | ||||
|     compat_os_name, | ||||
|     compat_urllib_error, | ||||
|     compat_struct_pack, | ||||
| ) | ||||
| from ..utils import ( | ||||
|     DownloadError, | ||||
|     error_to_compat_str, | ||||
|     encodeFilename, | ||||
|     sanitized_Request, | ||||
| ) | ||||
| from ..compat import compat_os_name | ||||
| from ..networking import Request | ||||
| from ..networking.exceptions import HTTPError, IncompleteRead | ||||
| from ..utils import DownloadError, RetryManager, encodeFilename, traverse_obj | ||||
| from ..utils.networking import HTTPHeaderDict | ||||
|  | ||||
|  | ||||
| class HttpQuietDownloader(HttpFD): | ||||
|     def to_screen(self, *args, **kargs): | ||||
|         pass | ||||
|  | ||||
|     def report_retry(self, err, count, retries): | ||||
|         super().to_screen( | ||||
|             f'[download] Got server HTTP error: {err}. Retrying (attempt {count} of {self.format_retries(retries)}) ...') | ||||
|     to_console_title = to_screen | ||||
|  | ||||
|  | ||||
| class FragmentFD(FileDownloader): | ||||
| @@ -43,8 +29,8 @@ class FragmentFD(FileDownloader): | ||||
|  | ||||
|     Available options: | ||||
|  | ||||
|     fragment_retries:   Number of times to retry a fragment for HTTP error (DASH | ||||
|                         and hlsnative only) | ||||
|     fragment_retries:   Number of times to retry a fragment for HTTP error | ||||
|                         (DASH and hlsnative only). Default is 0 for API, but 10 for CLI | ||||
|     skip_unavailable_fragments: | ||||
|                         Skip unavailable fragments (DASH and hlsnative only) | ||||
|     keep_fragments:     Keep downloaded fragments on disk after downloading is | ||||
| @@ -74,9 +60,9 @@ class FragmentFD(FileDownloader): | ||||
|     """ | ||||
|  | ||||
|     def report_retry_fragment(self, err, frag_index, count, retries): | ||||
|         self.to_screen( | ||||
|             '\r[download] Got server HTTP error: %s. Retrying fragment %d (attempt %d of %s) ...' | ||||
|             % (error_to_compat_str(err), frag_index, count, self.format_retries(retries))) | ||||
|         self.deprecation_warning('yt_dlp.downloader.FragmentFD.report_retry_fragment is deprecated. ' | ||||
|                                  'Use yt_dlp.downloader.FileDownloader.report_retry instead') | ||||
|         return self.report_retry(err, count, retries, frag_index) | ||||
|  | ||||
|     def report_skip_fragment(self, frag_index, err=None): | ||||
|         err = f' {err};' if err else '' | ||||
| @@ -84,7 +70,7 @@ class FragmentFD(FileDownloader): | ||||
|  | ||||
|     def _prepare_url(self, info_dict, url): | ||||
|         headers = info_dict.get('http_headers') | ||||
|         return sanitized_Request(url, None, headers) if headers else url | ||||
|         return Request(url, None, headers) if headers else url | ||||
|  | ||||
|     def _prepare_and_start_frag_download(self, ctx, info_dict): | ||||
|         self._prepare_frag_download(ctx) | ||||
| @@ -130,16 +116,28 @@ class FragmentFD(FileDownloader): | ||||
|             'request_data': request_data, | ||||
|             'ctx_id': ctx.get('ctx_id'), | ||||
|         } | ||||
|         success = ctx['dl'].download(fragment_filename, fragment_info_dict) | ||||
|         frag_resume_len = 0 | ||||
|         if ctx['dl'].params.get('continuedl', True): | ||||
|             frag_resume_len = self.filesize_or_none(self.temp_name(fragment_filename)) | ||||
|         fragment_info_dict['frag_resume_len'] = ctx['frag_resume_len'] = frag_resume_len | ||||
|  | ||||
|         success, _ = ctx['dl'].download(fragment_filename, fragment_info_dict) | ||||
|         if not success: | ||||
|             return False, None | ||||
|             return False | ||||
|         if fragment_info_dict.get('filetime'): | ||||
|             ctx['fragment_filetime'] = fragment_info_dict.get('filetime') | ||||
|         ctx['fragment_filename_sanitized'] = fragment_filename | ||||
|         return True, self._read_fragment(ctx) | ||||
|         return True | ||||
|  | ||||
|     def _read_fragment(self, ctx): | ||||
|         down, frag_sanitized = self.sanitize_open(ctx['fragment_filename_sanitized'], 'rb') | ||||
|         if not ctx.get('fragment_filename_sanitized'): | ||||
|             return None | ||||
|         try: | ||||
|             down, frag_sanitized = self.sanitize_open(ctx['fragment_filename_sanitized'], 'rb') | ||||
|         except FileNotFoundError: | ||||
|             if ctx.get('live'): | ||||
|                 return None | ||||
|             raise | ||||
|         ctx['fragment_filename_sanitized'] = frag_sanitized | ||||
|         frag_content = down.read() | ||||
|         down.close() | ||||
| @@ -153,42 +151,34 @@ class FragmentFD(FileDownloader): | ||||
|             if self.__do_ytdl_file(ctx): | ||||
|                 self._write_ytdl_file(ctx) | ||||
|             if not self.params.get('keep_fragments', False): | ||||
|                 os.remove(encodeFilename(ctx['fragment_filename_sanitized'])) | ||||
|                 self.try_remove(encodeFilename(ctx['fragment_filename_sanitized'])) | ||||
|             del ctx['fragment_filename_sanitized'] | ||||
|  | ||||
|     def _prepare_frag_download(self, ctx): | ||||
|         if 'live' not in ctx: | ||||
|             ctx['live'] = False | ||||
|         if not ctx['live']: | ||||
|         if not ctx.setdefault('live', False): | ||||
|             total_frags_str = '%d' % ctx['total_frags'] | ||||
|             ad_frags = ctx.get('ad_frags', 0) | ||||
|             if ad_frags: | ||||
|                 total_frags_str += ' (not including %d ad)' % ad_frags | ||||
|         else: | ||||
|             total_frags_str = 'unknown (live)' | ||||
|         self.to_screen( | ||||
|             '[%s] Total fragments: %s' % (self.FD_NAME, total_frags_str)) | ||||
|         self.to_screen(f'[{self.FD_NAME}] Total fragments: {total_frags_str}') | ||||
|         self.report_destination(ctx['filename']) | ||||
|         dl = HttpQuietDownloader( | ||||
|             self.ydl, | ||||
|             { | ||||
|                 'continuedl': True, | ||||
|                 'quiet': self.params.get('quiet'), | ||||
|                 'noprogress': True, | ||||
|                 'ratelimit': self.params.get('ratelimit'), | ||||
|                 'retries': self.params.get('retries', 0), | ||||
|                 'nopart': self.params.get('nopart', False), | ||||
|                 'test': self.params.get('test', False), | ||||
|             } | ||||
|         ) | ||||
|         dl = HttpQuietDownloader(self.ydl, { | ||||
|             **self.params, | ||||
|             'noprogress': True, | ||||
|             'test': False, | ||||
|             'sleep_interval': 0, | ||||
|             'max_sleep_interval': 0, | ||||
|             'sleep_interval_subtitles': 0, | ||||
|         }) | ||||
|         tmpfilename = self.temp_name(ctx['filename']) | ||||
|         open_mode = 'wb' | ||||
|         resume_len = 0 | ||||
|  | ||||
|         # Establish possible resume length | ||||
|         if os.path.isfile(encodeFilename(tmpfilename)): | ||||
|         resume_len = self.filesize_or_none(tmpfilename) | ||||
|         if resume_len > 0: | ||||
|             open_mode = 'ab' | ||||
|             resume_len = os.path.getsize(encodeFilename(tmpfilename)) | ||||
|  | ||||
|         # Should be initialized before ytdl file check | ||||
|         ctx.update({ | ||||
| @@ -197,7 +187,9 @@ class FragmentFD(FileDownloader): | ||||
|         }) | ||||
|  | ||||
|         if self.__do_ytdl_file(ctx): | ||||
|             if os.path.isfile(encodeFilename(self.ytdl_filename(ctx['filename']))): | ||||
|             ytdl_file_exists = os.path.isfile(encodeFilename(self.ytdl_filename(ctx['filename']))) | ||||
|             continuedl = self.params.get('continuedl', True) | ||||
|             if continuedl and ytdl_file_exists: | ||||
|                 self._read_ytdl_file(ctx) | ||||
|                 is_corrupt = ctx.get('ytdl_corrupt') is True | ||||
|                 is_inconsistent = ctx['fragment_index'] > 0 and resume_len == 0 | ||||
| @@ -211,7 +203,12 @@ class FragmentFD(FileDownloader): | ||||
|                     if 'ytdl_corrupt' in ctx: | ||||
|                         del ctx['ytdl_corrupt'] | ||||
|                     self._write_ytdl_file(ctx) | ||||
|  | ||||
|             else: | ||||
|                 if not continuedl: | ||||
|                     if ytdl_file_exists: | ||||
|                         self._read_ytdl_file(ctx) | ||||
|                     ctx['fragment_index'] = resume_len = 0 | ||||
|                 self._write_ytdl_file(ctx) | ||||
|                 assert ctx['fragment_index'] == 0 | ||||
|  | ||||
| @@ -253,6 +250,9 @@ class FragmentFD(FileDownloader): | ||||
|             if s['status'] not in ('downloading', 'finished'): | ||||
|                 return | ||||
|  | ||||
|             if not total_frags and ctx.get('fragment_count'): | ||||
|                 state['fragment_count'] = ctx['fragment_count'] | ||||
|  | ||||
|             if ctx_id is not None and s.get('ctx_id') != ctx_id: | ||||
|                 return | ||||
|  | ||||
| @@ -281,12 +281,10 @@ class FragmentFD(FileDownloader): | ||||
|             else: | ||||
|                 frag_downloaded_bytes = s['downloaded_bytes'] | ||||
|                 state['downloaded_bytes'] += frag_downloaded_bytes - ctx['prev_frag_downloaded_bytes'] | ||||
|                 if not ctx['live']: | ||||
|                     state['eta'] = self.calc_eta( | ||||
|                         start, time_now, estimated_size - resume_len, | ||||
|                         state['downloaded_bytes'] - resume_len) | ||||
|                 ctx['speed'] = state['speed'] = self.calc_speed( | ||||
|                     ctx['fragment_started'], time_now, frag_downloaded_bytes) | ||||
|                     ctx['fragment_started'], time_now, frag_downloaded_bytes - ctx.get('frag_resume_len', 0)) | ||||
|                 if not ctx['live']: | ||||
|                     state['eta'] = self.calc_eta(state['speed'], estimated_size - state['downloaded_bytes']) | ||||
|                 ctx['prev_frag_downloaded_bytes'] = frag_downloaded_bytes | ||||
|             self._hook_progress(state, info_dict) | ||||
|  | ||||
| @@ -297,23 +295,26 @@ class FragmentFD(FileDownloader): | ||||
|     def _finish_frag_download(self, ctx, info_dict): | ||||
|         ctx['dest_stream'].close() | ||||
|         if self.__do_ytdl_file(ctx): | ||||
|             ytdl_filename = encodeFilename(self.ytdl_filename(ctx['filename'])) | ||||
|             if os.path.isfile(ytdl_filename): | ||||
|                 os.remove(ytdl_filename) | ||||
|             self.try_remove(self.ytdl_filename(ctx['filename'])) | ||||
|         elapsed = time.time() - ctx['started'] | ||||
|  | ||||
|         if ctx['tmpfilename'] == '-': | ||||
|             downloaded_bytes = ctx['complete_frags_downloaded_bytes'] | ||||
|         to_file = ctx['tmpfilename'] != '-' | ||||
|         if to_file: | ||||
|             downloaded_bytes = self.filesize_or_none(ctx['tmpfilename']) | ||||
|         else: | ||||
|             downloaded_bytes = ctx['complete_frags_downloaded_bytes'] | ||||
|  | ||||
|         if not downloaded_bytes: | ||||
|             if to_file: | ||||
|                 self.try_remove(ctx['tmpfilename']) | ||||
|             self.report_error('The downloaded file is empty') | ||||
|             return False | ||||
|         elif to_file: | ||||
|             self.try_rename(ctx['tmpfilename'], ctx['filename']) | ||||
|             if self.params.get('updatetime', True): | ||||
|                 filetime = ctx.get('fragment_filetime') | ||||
|                 if filetime: | ||||
|                     try: | ||||
|                         os.utime(ctx['filename'], (time.time(), filetime)) | ||||
|                     except Exception: | ||||
|                         pass | ||||
|             downloaded_bytes = os.path.getsize(encodeFilename(ctx['filename'])) | ||||
|             filetime = ctx.get('fragment_filetime') | ||||
|             if self.params.get('updatetime', True) and filetime: | ||||
|                 with contextlib.suppress(Exception): | ||||
|                     os.utime(ctx['filename'], (time.time(), filetime)) | ||||
|  | ||||
|         self._hook_progress({ | ||||
|             'downloaded_bytes': downloaded_bytes, | ||||
| @@ -325,6 +326,7 @@ class FragmentFD(FileDownloader): | ||||
|             'max_progress': ctx.get('max_progress'), | ||||
|             'progress_idx': ctx.get('progress_idx'), | ||||
|         }, info_dict) | ||||
|         return True | ||||
|  | ||||
|     def _prepare_external_frag_download(self, ctx): | ||||
|         if 'live' not in ctx: | ||||
| @@ -336,8 +338,7 @@ class FragmentFD(FileDownloader): | ||||
|                 total_frags_str += ' (not including %d ad)' % ad_frags | ||||
|         else: | ||||
|             total_frags_str = 'unknown (live)' | ||||
|         self.to_screen( | ||||
|             '[%s] Total fragments: %s' % (self.FD_NAME, total_frags_str)) | ||||
|         self.to_screen(f'[{self.FD_NAME}] Total fragments: {total_frags_str}') | ||||
|  | ||||
|         tmpfilename = self.temp_name(ctx['filename']) | ||||
|  | ||||
| @@ -356,11 +357,14 @@ class FragmentFD(FileDownloader): | ||||
|             return _key_cache[url] | ||||
|  | ||||
|         def decrypt_fragment(fragment, frag_content): | ||||
|             if frag_content is None: | ||||
|                 return | ||||
|             decrypt_info = fragment.get('decrypt_info') | ||||
|             if not decrypt_info or decrypt_info['METHOD'] != 'AES-128': | ||||
|                 return frag_content | ||||
|             iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', fragment['media_sequence']) | ||||
|             decrypt_info['KEY'] = decrypt_info.get('KEY') or _get_key(info_dict.get('_decryption_key_url') or decrypt_info['URI']) | ||||
|             iv = decrypt_info.get('IV') or struct.pack('>8xq', fragment['media_sequence']) | ||||
|             decrypt_info['KEY'] = (decrypt_info.get('KEY') | ||||
|                                    or _get_key(traverse_obj(info_dict, ('hls_aes', 'uri')) or decrypt_info['URI'])) | ||||
|             # Don't decrypt the content in tests since the data is explicitly truncated and it's not to a valid block | ||||
|             # size (see https://github.com/ytdl-org/youtube-dl/pull/27660). Tests only care that the correct data downloaded, | ||||
|             # not what it decrypts to. | ||||
| @@ -370,7 +374,7 @@ class FragmentFD(FileDownloader): | ||||
|  | ||||
|         return decrypt_fragment | ||||
|  | ||||
|     def download_and_append_fragments_multiple(self, *args, pack_func=None, finish_func=None): | ||||
|     def download_and_append_fragments_multiple(self, *args, **kwargs): | ||||
|         ''' | ||||
|         @params (ctx1, fragments1, info_dict1), (ctx2, fragments2, info_dict2), ... | ||||
|                 all args must be either tuple or list | ||||
| @@ -378,146 +382,156 @@ class FragmentFD(FileDownloader): | ||||
|         interrupt_trigger = [True] | ||||
|         max_progress = len(args) | ||||
|         if max_progress == 1: | ||||
|             return self.download_and_append_fragments(*args[0], pack_func=pack_func, finish_func=finish_func) | ||||
|             return self.download_and_append_fragments(*args[0], **kwargs) | ||||
|         max_workers = self.params.get('concurrent_fragment_downloads', 1) | ||||
|         if max_progress > 1: | ||||
|             self._prepare_multiline_status(max_progress) | ||||
|         is_live = any(traverse_obj(args, (..., 2, 'is_live'))) | ||||
|  | ||||
|         def thread_func(idx, ctx, fragments, info_dict, tpe): | ||||
|             ctx['max_progress'] = max_progress | ||||
|             ctx['progress_idx'] = idx | ||||
|             return self.download_and_append_fragments( | ||||
|                 ctx, fragments, info_dict, pack_func=pack_func, finish_func=finish_func, | ||||
|                 tpe=tpe, interrupt_trigger=interrupt_trigger) | ||||
|                 ctx, fragments, info_dict, **kwargs, tpe=tpe, interrupt_trigger=interrupt_trigger) | ||||
|  | ||||
|         class FTPE(concurrent.futures.ThreadPoolExecutor): | ||||
|             # has to stop this or it's going to wait on the worker thread itself | ||||
|             def __exit__(self, exc_type, exc_val, exc_tb): | ||||
|                 pass | ||||
|  | ||||
|         spins = [] | ||||
|         if compat_os_name == 'nt': | ||||
|             self.report_warning('Ctrl+C does not work on Windows when used with parallel threads. ' | ||||
|                                 'This is a known issue and patches are welcome') | ||||
|             def future_result(future): | ||||
|                 while True: | ||||
|                     try: | ||||
|                         return future.result(0.1) | ||||
|                     except KeyboardInterrupt: | ||||
|                         raise | ||||
|                     except concurrent.futures.TimeoutError: | ||||
|                         continue | ||||
|         else: | ||||
|             def future_result(future): | ||||
|                 return future.result() | ||||
|  | ||||
|         def interrupt_trigger_iter(fg): | ||||
|             for f in fg: | ||||
|                 if not interrupt_trigger[0]: | ||||
|                     break | ||||
|                 yield f | ||||
|  | ||||
|         spins = [] | ||||
|         for idx, (ctx, fragments, info_dict) in enumerate(args): | ||||
|             tpe = FTPE(math.ceil(max_workers / max_progress)) | ||||
|             job = tpe.submit(thread_func, idx, ctx, fragments, info_dict, tpe) | ||||
|             job = tpe.submit(thread_func, idx, ctx, interrupt_trigger_iter(fragments), info_dict, tpe) | ||||
|             spins.append((tpe, job)) | ||||
|  | ||||
|         result = True | ||||
|         for tpe, job in spins: | ||||
|             try: | ||||
|                 result = result and job.result() | ||||
|                 result = result and future_result(job) | ||||
|             except KeyboardInterrupt: | ||||
|                 interrupt_trigger[0] = False | ||||
|             finally: | ||||
|                 tpe.shutdown(wait=True) | ||||
|         if not interrupt_trigger[0]: | ||||
|         if not interrupt_trigger[0] and not is_live: | ||||
|             raise KeyboardInterrupt() | ||||
|         # we expect the user wants to stop and DO WANT the preceding postprocessors to run; | ||||
|         # so returning a intermediate result here instead of KeyboardInterrupt on live | ||||
|         return result | ||||
|  | ||||
|     def download_and_append_fragments( | ||||
|             self, ctx, fragments, info_dict, *, pack_func=None, finish_func=None, | ||||
|             tpe=None, interrupt_trigger=None): | ||||
|         if not interrupt_trigger: | ||||
|             interrupt_trigger = (True, ) | ||||
|             self, ctx, fragments, info_dict, *, is_fatal=(lambda idx: False), | ||||
|             pack_func=(lambda content, idx: content), finish_func=None, | ||||
|             tpe=None, interrupt_trigger=(True, )): | ||||
|  | ||||
|         fragment_retries = self.params.get('fragment_retries', 0) | ||||
|         is_fatal = ( | ||||
|             ((lambda _: False) if info_dict.get('is_live') else (lambda idx: idx == 0)) | ||||
|             if self.params.get('skip_unavailable_fragments', True) else (lambda _: True)) | ||||
|  | ||||
|         if not pack_func: | ||||
|             pack_func = lambda frag_content, _: frag_content | ||||
|         if not self.params.get('skip_unavailable_fragments', True): | ||||
|             is_fatal = lambda _: True | ||||
|  | ||||
|         def download_fragment(fragment, ctx): | ||||
|             if not interrupt_trigger[0]: | ||||
|                 return | ||||
|  | ||||
|             frag_index = ctx['fragment_index'] = fragment['frag_index'] | ||||
|             ctx['last_error'] = None | ||||
|             if not interrupt_trigger[0]: | ||||
|                 return False, frag_index | ||||
|             headers = info_dict.get('http_headers', {}).copy() | ||||
|             headers = HTTPHeaderDict(info_dict.get('http_headers')) | ||||
|             byte_range = fragment.get('byte_range') | ||||
|             if byte_range: | ||||
|                 headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'] - 1) | ||||
|  | ||||
|             # Never skip the first fragment | ||||
|             fatal = is_fatal(fragment.get('index') or (frag_index - 1)) | ||||
|             count, frag_content = 0, None | ||||
|             while count <= fragment_retries: | ||||
|                 try: | ||||
|                     success, frag_content = self._download_fragment(ctx, fragment['url'], info_dict, headers) | ||||
|                     if not success: | ||||
|                         return False, frag_index | ||||
|                     break | ||||
|                 except (compat_urllib_error.HTTPError, http.client.IncompleteRead) as err: | ||||
|                     # Unavailable (possibly temporary) fragments may be served. | ||||
|                     # First we try to retry then either skip or abort. | ||||
|                     # See https://github.com/ytdl-org/youtube-dl/issues/10165, | ||||
|                     # https://github.com/ytdl-org/youtube-dl/issues/10448). | ||||
|                     count += 1 | ||||
|                     ctx['last_error'] = err | ||||
|                     if count <= fragment_retries: | ||||
|                         self.report_retry_fragment(err, frag_index, count, fragment_retries) | ||||
|                 except DownloadError: | ||||
|                     # Don't retry fragment if error occurred during HTTP downloading | ||||
|                     # itself since it has own retry settings | ||||
|                     if not fatal: | ||||
|                         break | ||||
|                     raise | ||||
|  | ||||
|             if count > fragment_retries: | ||||
|                 if not fatal: | ||||
|                     return False, frag_index | ||||
|                 ctx['dest_stream'].close() | ||||
|                 self.report_error('Giving up after %s fragment retries' % fragment_retries) | ||||
|                 return False, frag_index | ||||
|             return frag_content, frag_index | ||||
|             def error_callback(err, count, retries): | ||||
|                 if fatal and count > retries: | ||||
|                     ctx['dest_stream'].close() | ||||
|                 self.report_retry(err, count, retries, frag_index, fatal) | ||||
|                 ctx['last_error'] = err | ||||
|  | ||||
|             for retry in RetryManager(self.params.get('fragment_retries'), error_callback): | ||||
|                 try: | ||||
|                     ctx['fragment_count'] = fragment.get('fragment_count') | ||||
|                     if not self._download_fragment( | ||||
|                             ctx, fragment['url'], info_dict, headers, info_dict.get('request_data')): | ||||
|                         return | ||||
|                 except (HTTPError, IncompleteRead) as err: | ||||
|                     retry.error = err | ||||
|                     continue | ||||
|                 except DownloadError:  # has own retry settings | ||||
|                     if fatal: | ||||
|                         raise | ||||
|  | ||||
|         def append_fragment(frag_content, frag_index, ctx): | ||||
|             if not frag_content: | ||||
|                 if not is_fatal(frag_index - 1): | ||||
|                     self.report_skip_fragment(frag_index, 'fragment not found') | ||||
|                     return True | ||||
|                 else: | ||||
|                     ctx['dest_stream'].close() | ||||
|                     self.report_error( | ||||
|                         'fragment %s not found, unable to continue' % frag_index) | ||||
|                     return False | ||||
|             self._append_fragment(ctx, pack_func(frag_content, frag_index)) | ||||
|             if frag_content: | ||||
|                 self._append_fragment(ctx, pack_func(frag_content, frag_index)) | ||||
|             elif not is_fatal(frag_index - 1): | ||||
|                 self.report_skip_fragment(frag_index, 'fragment not found') | ||||
|             else: | ||||
|                 ctx['dest_stream'].close() | ||||
|                 self.report_error(f'fragment {frag_index} not found, unable to continue') | ||||
|                 return False | ||||
|             return True | ||||
|  | ||||
|         decrypt_fragment = self.decrypter(info_dict) | ||||
|  | ||||
|         max_workers = math.ceil( | ||||
|             self.params.get('concurrent_fragment_downloads', 1) / ctx.get('max_progress', 1)) | ||||
|         if can_threaded_download and max_workers > 1: | ||||
|  | ||||
|         if max_workers > 1: | ||||
|             def _download_fragment(fragment): | ||||
|                 ctx_copy = ctx.copy() | ||||
|                 frag_content, frag_index = download_fragment(fragment, ctx_copy) | ||||
|                 return fragment, frag_content, frag_index, ctx_copy.get('fragment_filename_sanitized') | ||||
|                 download_fragment(fragment, ctx_copy) | ||||
|                 return fragment, fragment['frag_index'], ctx_copy.get('fragment_filename_sanitized') | ||||
|  | ||||
|             self.report_warning('The download speed shown is only of one thread. This is a known issue and patches are welcome') | ||||
|             self.report_warning('The download speed shown is only of one thread. This is a known issue') | ||||
|             with tpe or concurrent.futures.ThreadPoolExecutor(max_workers) as pool: | ||||
|                 for fragment, frag_content, frag_index, frag_filename in pool.map(_download_fragment, fragments): | ||||
|                     if not interrupt_trigger[0]: | ||||
|                         break | ||||
|                     ctx['fragment_filename_sanitized'] = frag_filename | ||||
|                     ctx['fragment_index'] = frag_index | ||||
|                     result = append_fragment(decrypt_fragment(fragment, frag_content), frag_index, ctx) | ||||
|                     if not result: | ||||
|                         return False | ||||
|                 try: | ||||
|                     for fragment, frag_index, frag_filename in pool.map(_download_fragment, fragments): | ||||
|                         ctx.update({ | ||||
|                             'fragment_filename_sanitized': frag_filename, | ||||
|                             'fragment_index': frag_index, | ||||
|                         }) | ||||
|                         if not append_fragment(decrypt_fragment(fragment, self._read_fragment(ctx)), frag_index, ctx): | ||||
|                             return False | ||||
|                 except KeyboardInterrupt: | ||||
|                     self._finish_multiline_status() | ||||
|                     self.report_error( | ||||
|                         'Interrupted by user. Waiting for all threads to shutdown...', is_error=False, tb=False) | ||||
|                     pool.shutdown(wait=False) | ||||
|                     raise | ||||
|         else: | ||||
|             for fragment in fragments: | ||||
|                 if not interrupt_trigger[0]: | ||||
|                     break | ||||
|                 frag_content, frag_index = download_fragment(fragment, ctx) | ||||
|                 result = append_fragment(decrypt_fragment(fragment, frag_content), frag_index, ctx) | ||||
|                 try: | ||||
|                     download_fragment(fragment, ctx) | ||||
|                     result = append_fragment( | ||||
|                         decrypt_fragment(fragment, self._read_fragment(ctx)), fragment['frag_index'], ctx) | ||||
|                 except KeyboardInterrupt: | ||||
|                     if info_dict.get('is_live'): | ||||
|                         break | ||||
|                     raise | ||||
|                 if not result: | ||||
|                     return False | ||||
|  | ||||
|         if finish_func is not None: | ||||
|             ctx['dest_stream'].write(finish_func()) | ||||
|             ctx['dest_stream'].flush() | ||||
|         self._finish_frag_download(ctx, info_dict) | ||||
|         return True | ||||
|         return self._finish_frag_download(ctx, info_dict) | ||||
|   | ||||
| @@ -1,23 +1,21 @@ | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| import re | ||||
| import io | ||||
| import binascii | ||||
| import io | ||||
| import re | ||||
| import urllib.parse | ||||
|  | ||||
| from ..downloader import get_suitable_downloader | ||||
| from .fragment import FragmentFD | ||||
| from . import get_suitable_downloader | ||||
| from .external import FFmpegFD | ||||
|  | ||||
| from ..compat import ( | ||||
|     compat_pycrypto_AES, | ||||
|     compat_urlparse, | ||||
| ) | ||||
| from ..utils import ( | ||||
|     parse_m3u8_attributes, | ||||
|     update_url_query, | ||||
|     bug_reports_message, | ||||
| ) | ||||
| from .fragment import FragmentFD | ||||
| from .. import webvtt | ||||
| from ..dependencies import Cryptodome | ||||
| from ..utils import ( | ||||
|     bug_reports_message, | ||||
|     parse_m3u8_attributes, | ||||
|     remove_start, | ||||
|     traverse_obj, | ||||
|     update_url_query, | ||||
|     urljoin, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class HlsFD(FragmentFD): | ||||
| @@ -30,7 +28,16 @@ class HlsFD(FragmentFD): | ||||
|     FD_NAME = 'hlsnative' | ||||
|  | ||||
|     @staticmethod | ||||
|     def can_download(manifest, info_dict, allow_unplayable_formats=False): | ||||
|     def _has_drm(manifest):  # TODO: https://github.com/yt-dlp/yt-dlp/pull/5039 | ||||
|         return bool(re.search('|'.join(( | ||||
|             r'#EXT-X-(?:SESSION-)?KEY:.*?URI="skd://',  # Apple FairPlay | ||||
|             r'#EXT-X-(?:SESSION-)?KEY:.*?KEYFORMAT="com\.apple\.streamingkeydelivery"',  # Apple FairPlay | ||||
|             r'#EXT-X-(?:SESSION-)?KEY:.*?KEYFORMAT="com\.microsoft\.playready"',  # Microsoft PlayReady | ||||
|             r'#EXT-X-FAXS-CM:',  # Adobe Flash Access | ||||
|         )), manifest)) | ||||
|  | ||||
|     @classmethod | ||||
|     def can_download(cls, manifest, info_dict, allow_unplayable_formats=False): | ||||
|         UNSUPPORTED_FEATURES = [ | ||||
|             # r'#EXT-X-BYTERANGE',  # playlists composed of byte ranges of media files [2] | ||||
|  | ||||
| @@ -52,13 +59,15 @@ class HlsFD(FragmentFD): | ||||
|         ] | ||||
|         if not allow_unplayable_formats: | ||||
|             UNSUPPORTED_FEATURES += [ | ||||
|                 r'#EXT-X-KEY:METHOD=(?!NONE|AES-128)',  # encrypted streams [1] | ||||
|                 r'#EXT-X-KEY:METHOD=(?!NONE|AES-128)',  # encrypted streams [1], but not necessarily DRM | ||||
|             ] | ||||
|  | ||||
|         def check_results(): | ||||
|             yield not info_dict.get('is_live') | ||||
|             for feature in UNSUPPORTED_FEATURES: | ||||
|                 yield not re.search(feature, manifest) | ||||
|             if not allow_unplayable_formats: | ||||
|                 yield not cls._has_drm(manifest) | ||||
|         return all(check_results()) | ||||
|  | ||||
|     def real_download(self, filename, info_dict): | ||||
| @@ -66,25 +75,30 @@ class HlsFD(FragmentFD): | ||||
|         self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME) | ||||
|  | ||||
|         urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url)) | ||||
|         man_url = urlh.geturl() | ||||
|         man_url = urlh.url | ||||
|         s = urlh.read().decode('utf-8', 'ignore') | ||||
|  | ||||
|         can_download, message = self.can_download(s, info_dict, self.params.get('allow_unplayable_formats')), None | ||||
|         if can_download and not compat_pycrypto_AES and '#EXT-X-KEY:METHOD=AES-128' in s: | ||||
|             if FFmpegFD.available(): | ||||
|         if can_download: | ||||
|             has_ffmpeg = FFmpegFD.available() | ||||
|             no_crypto = not Cryptodome.AES and '#EXT-X-KEY:METHOD=AES-128' in s | ||||
|             if no_crypto and has_ffmpeg: | ||||
|                 can_download, message = False, 'The stream has AES-128 encryption and pycryptodomex is not available' | ||||
|             else: | ||||
|             elif no_crypto: | ||||
|                 message = ('The stream has AES-128 encryption and neither ffmpeg nor pycryptodomex are available; ' | ||||
|                            'Decryption will be performed natively, but will be extremely slow') | ||||
|             elif info_dict.get('extractor_key') == 'Generic' and re.search(r'(?m)#EXT-X-MEDIA-SEQUENCE:(?!0$)', s): | ||||
|                 install_ffmpeg = '' if has_ffmpeg else 'install ffmpeg and ' | ||||
|                 message = ('Live HLS streams are not supported by the native downloader. If this is a livestream, ' | ||||
|                            f'please {install_ffmpeg}add "--downloader ffmpeg --hls-use-mpegts" to your command') | ||||
|         if not can_download: | ||||
|             has_drm = re.search('|'.join([ | ||||
|                 r'#EXT-X-FAXS-CM:',  # Adobe Flash Access | ||||
|                 r'#EXT-X-(?:SESSION-)?KEY:.*?URI="skd://',  # Apple FairPlay | ||||
|             ]), s) | ||||
|             if has_drm and not self.params.get('allow_unplayable_formats'): | ||||
|                 self.report_error( | ||||
|                     'This video is DRM protected; Try selecting another format with --format or ' | ||||
|                     'add --check-formats to automatically fallback to the next best format') | ||||
|             if self._has_drm(s) and not self.params.get('allow_unplayable_formats'): | ||||
|                 if info_dict.get('has_drm') and self.params.get('test'): | ||||
|                     self.to_screen(f'[{self.FD_NAME}] This format is DRM protected', skip_eol=True) | ||||
|                 else: | ||||
|                     self.report_error( | ||||
|                         'This format is DRM protected; Try selecting another format with --format or ' | ||||
|                         'add --check-formats to automatically fallback to the next best format', tb=False) | ||||
|                 return False | ||||
|             message = message or 'Unsupported features have been detected' | ||||
|             fd = FFmpegFD(self.ydl, self.params) | ||||
| @@ -102,8 +116,7 @@ class HlsFD(FragmentFD): | ||||
|         if real_downloader and not real_downloader.supports_manifest(s): | ||||
|             real_downloader = None | ||||
|         if real_downloader: | ||||
|             self.to_screen( | ||||
|                 '[%s] Fragment downloads will be delegated to %s' % (self.FD_NAME, real_downloader.get_basename())) | ||||
|             self.to_screen(f'[{self.FD_NAME}] Fragment downloads will be delegated to {real_downloader.get_basename()}') | ||||
|  | ||||
|         def is_ad_fragment_start(s): | ||||
|             return (s.startswith('#ANVATO-SEGMENT-INFO') and 'type=ad' in s | ||||
| @@ -150,10 +163,17 @@ class HlsFD(FragmentFD): | ||||
|         extra_query = None | ||||
|         extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url') | ||||
|         if extra_param_to_segment_url: | ||||
|             extra_query = compat_urlparse.parse_qs(extra_param_to_segment_url) | ||||
|             extra_query = urllib.parse.parse_qs(extra_param_to_segment_url) | ||||
|         i = 0 | ||||
|         media_sequence = 0 | ||||
|         decrypt_info = {'METHOD': 'NONE'} | ||||
|         external_aes_key = traverse_obj(info_dict, ('hls_aes', 'key')) | ||||
|         if external_aes_key: | ||||
|             external_aes_key = binascii.unhexlify(remove_start(external_aes_key, '0x')) | ||||
|             assert len(external_aes_key) in (16, 24, 32), 'Invalid length for HLS AES-128 key' | ||||
|         external_aes_iv = traverse_obj(info_dict, ('hls_aes', 'iv')) | ||||
|         if external_aes_iv: | ||||
|             external_aes_iv = binascii.unhexlify(remove_start(external_aes_iv, '0x').zfill(32)) | ||||
|         byte_range = {} | ||||
|         discontinuity_count = 0 | ||||
|         frag_index = 0 | ||||
| @@ -169,10 +189,7 @@ class HlsFD(FragmentFD): | ||||
|                     frag_index += 1 | ||||
|                     if frag_index <= ctx['fragment_index']: | ||||
|                         continue | ||||
|                     frag_url = ( | ||||
|                         line | ||||
|                         if re.match(r'^https?://', line) | ||||
|                         else compat_urlparse.urljoin(man_url, line)) | ||||
|                     frag_url = urljoin(man_url, line) | ||||
|                     if extra_query: | ||||
|                         frag_url = update_url_query(frag_url, extra_query) | ||||
|  | ||||
| @@ -194,13 +211,18 @@ class HlsFD(FragmentFD): | ||||
|                         return False | ||||
|                     frag_index += 1 | ||||
|                     map_info = parse_m3u8_attributes(line[11:]) | ||||
|                     frag_url = ( | ||||
|                         map_info.get('URI') | ||||
|                         if re.match(r'^https?://', map_info.get('URI')) | ||||
|                         else compat_urlparse.urljoin(man_url, map_info.get('URI'))) | ||||
|                     frag_url = urljoin(man_url, map_info.get('URI')) | ||||
|                     if extra_query: | ||||
|                         frag_url = update_url_query(frag_url, extra_query) | ||||
|  | ||||
|                     if map_info.get('BYTERANGE'): | ||||
|                         splitted_byte_range = map_info.get('BYTERANGE').split('@') | ||||
|                         sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range['end'] | ||||
|                         byte_range = { | ||||
|                             'start': sub_range_start, | ||||
|                             'end': sub_range_start + int(splitted_byte_range[0]), | ||||
|                         } | ||||
|  | ||||
|                     fragments.append({ | ||||
|                         'frag_index': frag_index, | ||||
|                         'url': frag_url, | ||||
| @@ -210,27 +232,22 @@ class HlsFD(FragmentFD): | ||||
|                     }) | ||||
|                     media_sequence += 1 | ||||
|  | ||||
|                     if map_info.get('BYTERANGE'): | ||||
|                         splitted_byte_range = map_info.get('BYTERANGE').split('@') | ||||
|                         sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range['end'] | ||||
|                         byte_range = { | ||||
|                             'start': sub_range_start, | ||||
|                             'end': sub_range_start + int(splitted_byte_range[0]), | ||||
|                         } | ||||
|  | ||||
|                 elif line.startswith('#EXT-X-KEY'): | ||||
|                     decrypt_url = decrypt_info.get('URI') | ||||
|                     decrypt_info = parse_m3u8_attributes(line[11:]) | ||||
|                     if decrypt_info['METHOD'] == 'AES-128': | ||||
|                         if 'IV' in decrypt_info: | ||||
|                         if external_aes_iv: | ||||
|                             decrypt_info['IV'] = external_aes_iv | ||||
|                         elif 'IV' in decrypt_info: | ||||
|                             decrypt_info['IV'] = binascii.unhexlify(decrypt_info['IV'][2:].zfill(32)) | ||||
|                         if not re.match(r'^https?://', decrypt_info['URI']): | ||||
|                             decrypt_info['URI'] = compat_urlparse.urljoin( | ||||
|                                 man_url, decrypt_info['URI']) | ||||
|                         if extra_query: | ||||
|                             decrypt_info['URI'] = update_url_query(decrypt_info['URI'], extra_query) | ||||
|                         if decrypt_url != decrypt_info['URI']: | ||||
|                             decrypt_info['KEY'] = None | ||||
|                         if external_aes_key: | ||||
|                             decrypt_info['KEY'] = external_aes_key | ||||
|                         else: | ||||
|                             decrypt_info['URI'] = urljoin(man_url, decrypt_info['URI']) | ||||
|                             if extra_query: | ||||
|                                 decrypt_info['URI'] = update_url_query(decrypt_info['URI'], extra_query) | ||||
|                             if decrypt_url != decrypt_info['URI']: | ||||
|                                 decrypt_info['KEY'] = None | ||||
|  | ||||
|                 elif line.startswith('#EXT-X-MEDIA-SEQUENCE'): | ||||
|                     media_sequence = int(line[22:]) | ||||
| @@ -339,7 +356,7 @@ class HlsFD(FragmentFD): | ||||
|                             continue | ||||
|                     block.write_into(output) | ||||
|  | ||||
|                 return output.getvalue().encode('utf-8') | ||||
|                 return output.getvalue().encode() | ||||
|  | ||||
|             def fin_fragments(): | ||||
|                 dedup_window = extra_state.get('webvtt_dedup_window') | ||||
| @@ -350,7 +367,7 @@ class HlsFD(FragmentFD): | ||||
|                 for cue in dedup_window: | ||||
|                     webvtt.CueBlock.from_json(cue).write_into(output) | ||||
|  | ||||
|                 return output.getvalue().encode('utf-8') | ||||
|                 return output.getvalue().encode() | ||||
|  | ||||
|             self.download_and_append_fragments( | ||||
|                 ctx, fragments, info_dict, pack_func=pack_fragment, finish_func=fin_fragments) | ||||
|   | ||||
| @@ -1,27 +1,27 @@ | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| import errno | ||||
| import os | ||||
| import socket | ||||
| import time | ||||
| import random | ||||
| import re | ||||
| import time | ||||
|  | ||||
| from .common import FileDownloader | ||||
| from ..compat import ( | ||||
|     compat_str, | ||||
|     compat_urllib_error, | ||||
| from ..networking import Request | ||||
| from ..networking.exceptions import ( | ||||
|     CertificateVerifyError, | ||||
|     HTTPError, | ||||
|     TransportError, | ||||
| ) | ||||
| from ..utils import ( | ||||
|     ContentTooShortError, | ||||
|     encodeFilename, | ||||
|     int_or_none, | ||||
|     sanitized_Request, | ||||
|     RetryManager, | ||||
|     ThrottledDownload, | ||||
|     write_xattr, | ||||
|     XAttrMetadataError, | ||||
|     XAttrUnavailableError, | ||||
|     encodeFilename, | ||||
|     int_or_none, | ||||
|     parse_http_range, | ||||
|     try_call, | ||||
|     write_xattr, | ||||
| ) | ||||
| from ..utils.networking import HTTPHeaderDict | ||||
|  | ||||
|  | ||||
| class HttpFD(FileDownloader): | ||||
| @@ -39,11 +39,8 @@ class HttpFD(FileDownloader): | ||||
|         ctx.tmpfilename = self.temp_name(filename) | ||||
|         ctx.stream = None | ||||
|  | ||||
|         # Do not include the Accept-Encoding header | ||||
|         headers = {'Youtubedl-no-compression': 'True'} | ||||
|         add_headers = info_dict.get('http_headers') | ||||
|         if add_headers: | ||||
|             headers.update(add_headers) | ||||
|         # Disable compression | ||||
|         headers = HTTPHeaderDict({'Accept-Encoding': 'identity'}, info_dict.get('http_headers')) | ||||
|  | ||||
|         is_test = self.params.get('test', False) | ||||
|         chunk_size = self._TEST_FILE_SIZE if is_test else ( | ||||
| @@ -53,11 +50,11 @@ class HttpFD(FileDownloader): | ||||
|  | ||||
|         ctx.open_mode = 'wb' | ||||
|         ctx.resume_len = 0 | ||||
|         ctx.data_len = None | ||||
|         ctx.block_size = self.params.get('buffersize', 1024) | ||||
|         ctx.start_time = time.time() | ||||
|         ctx.chunk_size = None | ||||
|         throttle_start = None | ||||
|  | ||||
|         # parse given Range | ||||
|         req_start, req_end, _ = parse_http_range(headers.get('Range')) | ||||
|  | ||||
|         if self.params.get('continuedl', True): | ||||
|             # Establish possible resume length | ||||
| @@ -67,9 +64,6 @@ class HttpFD(FileDownloader): | ||||
|  | ||||
|         ctx.is_resume = ctx.resume_len > 0 | ||||
|  | ||||
|         count = 0 | ||||
|         retries = self.params.get('retries', 0) | ||||
|  | ||||
|         class SucceedDownload(Exception): | ||||
|             pass | ||||
|  | ||||
| @@ -80,43 +74,50 @@ class HttpFD(FileDownloader): | ||||
|         class NextFragment(Exception): | ||||
|             pass | ||||
|  | ||||
|         def set_range(req, start, end): | ||||
|             range_header = 'bytes=%d-' % start | ||||
|             if end: | ||||
|                 range_header += compat_str(end) | ||||
|             req.add_header('Range', range_header) | ||||
|  | ||||
|         def establish_connection(): | ||||
|             ctx.chunk_size = (random.randint(int(chunk_size * 0.95), chunk_size) | ||||
|                               if not is_test and chunk_size else chunk_size) | ||||
|             if ctx.resume_len > 0: | ||||
|                 range_start = ctx.resume_len | ||||
|                 if req_start is not None: | ||||
|                     # offset the beginning of Range to be within request | ||||
|                     range_start += req_start | ||||
|                 if ctx.is_resume: | ||||
|                     self.report_resuming_byte(ctx.resume_len) | ||||
|                 ctx.open_mode = 'ab' | ||||
|             elif req_start is not None: | ||||
|                 range_start = req_start | ||||
|             elif ctx.chunk_size > 0: | ||||
|                 range_start = 0 | ||||
|             else: | ||||
|                 range_start = None | ||||
|             ctx.is_resume = False | ||||
|             range_end = range_start + ctx.chunk_size - 1 if ctx.chunk_size else None | ||||
|             if range_end and ctx.data_len is not None and range_end >= ctx.data_len: | ||||
|                 range_end = ctx.data_len - 1 | ||||
|  | ||||
|             if ctx.chunk_size: | ||||
|                 chunk_aware_end = range_start + ctx.chunk_size - 1 | ||||
|                 # we're not allowed to download outside Range | ||||
|                 range_end = chunk_aware_end if req_end is None else min(chunk_aware_end, req_end) | ||||
|             elif req_end is not None: | ||||
|                 # there's no need for chunked downloads, so download until the end of Range | ||||
|                 range_end = req_end | ||||
|             else: | ||||
|                 range_end = None | ||||
|  | ||||
|             if try_call(lambda: range_start > range_end): | ||||
|                 ctx.resume_len = 0 | ||||
|                 ctx.open_mode = 'wb' | ||||
|                 raise RetryDownload(Exception(f'Conflicting range. (start={range_start} > end={range_end})')) | ||||
|  | ||||
|             if try_call(lambda: range_end >= ctx.content_len): | ||||
|                 range_end = ctx.content_len - 1 | ||||
|  | ||||
|             request = Request(url, request_data, headers) | ||||
|             has_range = range_start is not None | ||||
|             ctx.has_range = has_range | ||||
|             request = sanitized_Request(url, request_data, headers) | ||||
|             if has_range: | ||||
|                 set_range(request, range_start, range_end) | ||||
|                 request.headers['Range'] = f'bytes={int(range_start)}-{int_or_none(range_end) or ""}' | ||||
|             # Establish connection | ||||
|             try: | ||||
|                 try: | ||||
|                     ctx.data = self.ydl.urlopen(request) | ||||
|                 except (compat_urllib_error.URLError, ) as err: | ||||
|                     # reason may not be available, e.g. for urllib2.HTTPError on python 2.6 | ||||
|                     reason = getattr(err, 'reason', None) | ||||
|                     if isinstance(reason, socket.timeout): | ||||
|                         raise RetryDownload(err) | ||||
|                     raise err | ||||
|                 ctx.data = self.ydl.urlopen(request) | ||||
|                 # When trying to resume, Content-Range HTTP header of response has to be checked | ||||
|                 # to match the value of requested Range HTTP header. This is due to a webservers | ||||
|                 # that don't support resuming and serve a whole file with no Content-Range | ||||
| @@ -124,41 +125,37 @@ class HttpFD(FileDownloader): | ||||
|                 # https://github.com/ytdl-org/youtube-dl/issues/6057#issuecomment-126129799) | ||||
|                 if has_range: | ||||
|                     content_range = ctx.data.headers.get('Content-Range') | ||||
|                     if content_range: | ||||
|                         content_range_m = re.search(r'bytes (\d+)-(\d+)?(?:/(\d+))?', content_range) | ||||
|                         # Content-Range is present and matches requested Range, resume is possible | ||||
|                         if content_range_m: | ||||
|                             if range_start == int(content_range_m.group(1)): | ||||
|                                 content_range_end = int_or_none(content_range_m.group(2)) | ||||
|                                 content_len = int_or_none(content_range_m.group(3)) | ||||
|                                 accept_content_len = ( | ||||
|                                     # Non-chunked download | ||||
|                                     not ctx.chunk_size | ||||
|                                     # Chunked download and requested piece or | ||||
|                                     # its part is promised to be served | ||||
|                                     or content_range_end == range_end | ||||
|                                     or content_len < range_end) | ||||
|                                 if accept_content_len: | ||||
|                                     ctx.data_len = content_len | ||||
|                                     return | ||||
|                     content_range_start, content_range_end, content_len = parse_http_range(content_range) | ||||
|                     # Content-Range is present and matches requested Range, resume is possible | ||||
|                     if range_start == content_range_start and ( | ||||
|                             # Non-chunked download | ||||
|                             not ctx.chunk_size | ||||
|                             # Chunked download and requested piece or | ||||
|                             # its part is promised to be served | ||||
|                             or content_range_end == range_end | ||||
|                             or content_len < range_end): | ||||
|                         ctx.content_len = content_len | ||||
|                         if content_len or req_end: | ||||
|                             ctx.data_len = min(content_len or req_end, req_end or content_len) - (req_start or 0) | ||||
|                         return | ||||
|                     # Content-Range is either not present or invalid. Assuming remote webserver is | ||||
|                     # trying to send the whole file, resume is not possible, so wiping the local file | ||||
|                     # and performing entire redownload | ||||
|                     self.report_unable_to_resume() | ||||
|                     elif range_start > 0: | ||||
|                         self.report_unable_to_resume() | ||||
|                     ctx.resume_len = 0 | ||||
|                     ctx.open_mode = 'wb' | ||||
|                 ctx.data_len = int_or_none(ctx.data.info().get('Content-length', None)) | ||||
|                 return | ||||
|             except (compat_urllib_error.HTTPError, ) as err: | ||||
|                 if err.code == 416: | ||||
|                 ctx.data_len = ctx.content_len = int_or_none(ctx.data.headers.get('Content-length', None)) | ||||
|             except HTTPError as err: | ||||
|                 if err.status == 416: | ||||
|                     # Unable to resume (requested range not satisfiable) | ||||
|                     try: | ||||
|                         # Open the connection again without the range header | ||||
|                         ctx.data = self.ydl.urlopen( | ||||
|                             sanitized_Request(url, request_data, headers)) | ||||
|                         content_length = ctx.data.info()['Content-Length'] | ||||
|                     except (compat_urllib_error.HTTPError, ) as err: | ||||
|                         if err.code < 500 or err.code >= 600: | ||||
|                             Request(url, request_data, headers)) | ||||
|                         content_length = ctx.data.headers['Content-Length'] | ||||
|                     except HTTPError as err: | ||||
|                         if err.status < 500 or err.status >= 600: | ||||
|                             raise | ||||
|                     else: | ||||
|                         # Examine the reported length | ||||
| @@ -186,21 +183,28 @@ class HttpFD(FileDownloader): | ||||
|                             ctx.resume_len = 0 | ||||
|                             ctx.open_mode = 'wb' | ||||
|                             return | ||||
|                 elif err.code < 500 or err.code >= 600: | ||||
|                 elif err.status < 500 or err.status >= 600: | ||||
|                     # Unexpected HTTP error | ||||
|                     raise | ||||
|                 raise RetryDownload(err) | ||||
|             except socket.timeout as err: | ||||
|                 raise RetryDownload(err) | ||||
|             except socket.error as err: | ||||
|                 if err.errno in (errno.ECONNRESET, errno.ETIMEDOUT): | ||||
|                     # Connection reset is no problem, just retry | ||||
|                     raise RetryDownload(err) | ||||
|             except CertificateVerifyError: | ||||
|                 raise | ||||
|             except TransportError as err: | ||||
|                 raise RetryDownload(err) | ||||
|  | ||||
|         def close_stream(): | ||||
|             if ctx.stream is not None: | ||||
|                 if not ctx.tmpfilename == '-': | ||||
|                     ctx.stream.close() | ||||
|                 ctx.stream = None | ||||
|  | ||||
|         def download(): | ||||
|             nonlocal throttle_start | ||||
|             data_len = ctx.data.info().get('Content-length', None) | ||||
|             data_len = ctx.data.headers.get('Content-length') | ||||
|  | ||||
|             if ctx.data.headers.get('Content-encoding'): | ||||
|                 # Content-encoding is present, Content-length is not reliable anymore as we are | ||||
|                 # doing auto decompression. (See: https://github.com/yt-dlp/yt-dlp/pull/6176) | ||||
|                 data_len = None | ||||
|  | ||||
|             # Range HTTP header may be ignored/unsupported by a webserver | ||||
|             # (e.g. extractor/scivee.py, extractor/bambuser.py). | ||||
| @@ -215,10 +219,12 @@ class HttpFD(FileDownloader): | ||||
|                 min_data_len = self.params.get('min_filesize') | ||||
|                 max_data_len = self.params.get('max_filesize') | ||||
|                 if min_data_len is not None and data_len < min_data_len: | ||||
|                     self.to_screen('\r[download] File is smaller than min-filesize (%s bytes < %s bytes). Aborting.' % (data_len, min_data_len)) | ||||
|                     self.to_screen( | ||||
|                         f'\r[download] File is smaller than min-filesize ({data_len} bytes < {min_data_len} bytes). Aborting.') | ||||
|                     return False | ||||
|                 if max_data_len is not None and data_len > max_data_len: | ||||
|                     self.to_screen('\r[download] File is larger than max-filesize (%s bytes > %s bytes). Aborting.' % (data_len, max_data_len)) | ||||
|                     self.to_screen( | ||||
|                         f'\r[download] File is larger than max-filesize ({data_len} bytes > {max_data_len} bytes). Aborting.') | ||||
|                     return False | ||||
|  | ||||
|             byte_counter = 0 + ctx.resume_len | ||||
| @@ -230,28 +236,17 @@ class HttpFD(FileDownloader): | ||||
|             before = start  # start measuring | ||||
|  | ||||
|             def retry(e): | ||||
|                 to_stdout = ctx.tmpfilename == '-' | ||||
|                 if ctx.stream is not None: | ||||
|                     if not to_stdout: | ||||
|                         ctx.stream.close() | ||||
|                     ctx.stream = None | ||||
|                 ctx.resume_len = byte_counter if to_stdout else os.path.getsize(encodeFilename(ctx.tmpfilename)) | ||||
|                 close_stream() | ||||
|                 ctx.resume_len = (byte_counter if ctx.tmpfilename == '-' | ||||
|                                   else os.path.getsize(encodeFilename(ctx.tmpfilename))) | ||||
|                 raise RetryDownload(e) | ||||
|  | ||||
|             while True: | ||||
|                 try: | ||||
|                     # Download and write | ||||
|                     data_block = ctx.data.read(block_size if not is_test else min(block_size, data_len - byte_counter)) | ||||
|                 # socket.timeout is a subclass of socket.error but may not have | ||||
|                 # errno set | ||||
|                 except socket.timeout as e: | ||||
|                     retry(e) | ||||
|                 except socket.error as e: | ||||
|                     # SSLError on python 2 (inherits socket.error) may have | ||||
|                     # no errno set but this error message | ||||
|                     if e.errno in (errno.ECONNRESET, errno.ETIMEDOUT) or getattr(e, 'message', None) == 'The read operation timed out': | ||||
|                         retry(e) | ||||
|                     raise | ||||
|                 except TransportError as err: | ||||
|                     retry(err) | ||||
|  | ||||
|                 byte_counter += len(data_block) | ||||
|  | ||||
| @@ -267,19 +262,19 @@ class HttpFD(FileDownloader): | ||||
|                         assert ctx.stream is not None | ||||
|                         ctx.filename = self.undo_temp_name(ctx.tmpfilename) | ||||
|                         self.report_destination(ctx.filename) | ||||
|                     except (OSError, IOError) as err: | ||||
|                     except OSError as err: | ||||
|                         self.report_error('unable to open for writing: %s' % str(err)) | ||||
|                         return False | ||||
|  | ||||
|                     if self.params.get('xattr_set_filesize', False) and data_len is not None: | ||||
|                         try: | ||||
|                             write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode('utf-8')) | ||||
|                             write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode()) | ||||
|                         except (XAttrUnavailableError, XAttrMetadataError) as err: | ||||
|                             self.report_error('unable to set filesize xattr: %s' % str(err)) | ||||
|  | ||||
|                 try: | ||||
|                     ctx.stream.write(data_block) | ||||
|                 except (IOError, OSError) as err: | ||||
|                 except OSError as err: | ||||
|                     self.to_stderr('\n') | ||||
|                     self.report_error('unable to write data: %s' % str(err)) | ||||
|                     return False | ||||
| @@ -322,38 +317,36 @@ class HttpFD(FileDownloader): | ||||
|                 if speed and speed < (self.params.get('throttledratelimit') or 0): | ||||
|                     # The speed must stay below the limit for 3 seconds | ||||
|                     # This prevents raising error when the speed temporarily goes down | ||||
|                     if throttle_start is None: | ||||
|                         throttle_start = now | ||||
|                     elif now - throttle_start > 3: | ||||
|                     if ctx.throttle_start is None: | ||||
|                         ctx.throttle_start = now | ||||
|                     elif now - ctx.throttle_start > 3: | ||||
|                         if ctx.stream is not None and ctx.tmpfilename != '-': | ||||
|                             ctx.stream.close() | ||||
|                         raise ThrottledDownload() | ||||
|                 elif speed: | ||||
|                     throttle_start = None | ||||
|  | ||||
|             if not is_test and ctx.chunk_size and ctx.data_len is not None and byte_counter < ctx.data_len: | ||||
|                 ctx.resume_len = byte_counter | ||||
|                 # ctx.block_size = block_size | ||||
|                 raise NextFragment() | ||||
|                     ctx.throttle_start = None | ||||
|  | ||||
|             if ctx.stream is None: | ||||
|                 self.to_stderr('\n') | ||||
|                 self.report_error('Did not get any data blocks') | ||||
|                 return False | ||||
|  | ||||
|             if not is_test and ctx.chunk_size and ctx.content_len is not None and byte_counter < ctx.content_len: | ||||
|                 ctx.resume_len = byte_counter | ||||
|                 raise NextFragment() | ||||
|  | ||||
|             if ctx.tmpfilename != '-': | ||||
|                 ctx.stream.close() | ||||
|  | ||||
|             if data_len is not None and byte_counter != data_len: | ||||
|                 err = ContentTooShortError(byte_counter, int(data_len)) | ||||
|                 if count <= retries: | ||||
|                     retry(err) | ||||
|                 raise err | ||||
|                 retry(err) | ||||
|  | ||||
|             self.try_rename(ctx.tmpfilename, ctx.filename) | ||||
|  | ||||
|             # Update file modification time | ||||
|             if self.params.get('updatetime', True): | ||||
|                 info_dict['filetime'] = self.try_utime(ctx.filename, ctx.data.info().get('last-modified', None)) | ||||
|                 info_dict['filetime'] = self.try_utime(ctx.filename, ctx.data.headers.get('last-modified', None)) | ||||
|  | ||||
|             self._hook_progress({ | ||||
|                 'downloaded_bytes': byte_counter, | ||||
| @@ -366,21 +359,20 @@ class HttpFD(FileDownloader): | ||||
|  | ||||
|             return True | ||||
|  | ||||
|         while count <= retries: | ||||
|         for retry in RetryManager(self.params.get('retries'), self.report_retry): | ||||
|             try: | ||||
|                 establish_connection() | ||||
|                 return download() | ||||
|             except RetryDownload as e: | ||||
|                 count += 1 | ||||
|                 if count <= retries: | ||||
|                     self.report_retry(e.source_error, count, retries) | ||||
|                 else: | ||||
|                     self.to_screen(f'[download] Got server HTTP error: {e.source_error}') | ||||
|             except RetryDownload as err: | ||||
|                 retry.error = err.source_error | ||||
|                 continue | ||||
|             except NextFragment: | ||||
|                 retry.error = None | ||||
|                 retry.attempt -= 1 | ||||
|                 continue | ||||
|             except SucceedDownload: | ||||
|                 return True | ||||
|  | ||||
|         self.report_error('giving up after %s retries' % retries) | ||||
|             except:  # noqa: E722 | ||||
|                 close_stream() | ||||
|                 raise | ||||
|         return False | ||||
|   | ||||
| @@ -1,27 +1,23 @@ | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| import time | ||||
| import binascii | ||||
| import io | ||||
| import struct | ||||
| import time | ||||
|  | ||||
| from .fragment import FragmentFD | ||||
| from ..compat import ( | ||||
|     compat_Struct, | ||||
|     compat_urllib_error, | ||||
| ) | ||||
| from ..networking.exceptions import HTTPError | ||||
| from ..utils import RetryManager | ||||
|  | ||||
| u8 = struct.Struct('>B') | ||||
| u88 = struct.Struct('>Bx') | ||||
| u16 = struct.Struct('>H') | ||||
| u1616 = struct.Struct('>Hxx') | ||||
| u32 = struct.Struct('>I') | ||||
| u64 = struct.Struct('>Q') | ||||
|  | ||||
| u8 = compat_Struct('>B') | ||||
| u88 = compat_Struct('>Bx') | ||||
| u16 = compat_Struct('>H') | ||||
| u1616 = compat_Struct('>Hxx') | ||||
| u32 = compat_Struct('>I') | ||||
| u64 = compat_Struct('>Q') | ||||
|  | ||||
| s88 = compat_Struct('>bx') | ||||
| s16 = compat_Struct('>h') | ||||
| s1616 = compat_Struct('>hxx') | ||||
| s32 = compat_Struct('>i') | ||||
| s88 = struct.Struct('>bx') | ||||
| s16 = struct.Struct('>h') | ||||
| s1616 = struct.Struct('>hxx') | ||||
| s32 = struct.Struct('>i') | ||||
|  | ||||
| unity_matrix = (s32.pack(0x10000) + s32.pack(0) * 3) * 2 + s32.pack(0x40000000) | ||||
|  | ||||
| @@ -142,6 +138,8 @@ def write_piff_header(stream, params): | ||||
|  | ||||
|         if fourcc == 'AACL': | ||||
|             sample_entry_box = box(b'mp4a', sample_entry_payload) | ||||
|         if fourcc == 'EC-3': | ||||
|             sample_entry_box = box(b'ec-3', sample_entry_payload) | ||||
|     elif stream_type == 'video': | ||||
|         sample_entry_payload += u16.pack(0)  # pre defined | ||||
|         sample_entry_payload += u16.pack(0)  # reserved | ||||
| @@ -156,7 +154,7 @@ def write_piff_header(stream, params): | ||||
|         sample_entry_payload += u16.pack(0x18)  # depth | ||||
|         sample_entry_payload += s16.pack(-1)  # pre defined | ||||
|  | ||||
|         codec_private_data = binascii.unhexlify(params['codec_private_data'].encode('utf-8')) | ||||
|         codec_private_data = binascii.unhexlify(params['codec_private_data'].encode()) | ||||
|         if fourcc in ('H264', 'AVC1'): | ||||
|             sps, pps = codec_private_data.split(u32.pack(1))[1:] | ||||
|             avcc_payload = u8.pack(1)  # configuration version | ||||
| @@ -235,8 +233,6 @@ class IsmFD(FragmentFD): | ||||
|     Download segments in a ISM manifest | ||||
|     """ | ||||
|  | ||||
|     FD_NAME = 'ism' | ||||
|  | ||||
|     def real_download(self, filename, info_dict): | ||||
|         segments = info_dict['fragments'][:1] if self.params.get( | ||||
|             'test', False) else info_dict['fragments'] | ||||
| @@ -252,7 +248,6 @@ class IsmFD(FragmentFD): | ||||
|             'ism_track_written': False, | ||||
|         }) | ||||
|  | ||||
|         fragment_retries = self.params.get('fragment_retries', 0) | ||||
|         skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True) | ||||
|  | ||||
|         frag_index = 0 | ||||
| @@ -260,30 +255,29 @@ class IsmFD(FragmentFD): | ||||
|             frag_index += 1 | ||||
|             if frag_index <= ctx['fragment_index']: | ||||
|                 continue | ||||
|             count = 0 | ||||
|             while count <= fragment_retries: | ||||
|  | ||||
|             retry_manager = RetryManager(self.params.get('fragment_retries'), self.report_retry, | ||||
|                                          frag_index=frag_index, fatal=not skip_unavailable_fragments) | ||||
|             for retry in retry_manager: | ||||
|                 try: | ||||
|                     success, frag_content = self._download_fragment(ctx, segment['url'], info_dict) | ||||
|                     success = self._download_fragment(ctx, segment['url'], info_dict) | ||||
|                     if not success: | ||||
|                         return False | ||||
|                     frag_content = self._read_fragment(ctx) | ||||
|  | ||||
|                     if not extra_state['ism_track_written']: | ||||
|                         tfhd_data = extract_box_data(frag_content, [b'moof', b'traf', b'tfhd']) | ||||
|                         info_dict['_download_params']['track_id'] = u32.unpack(tfhd_data[4:8])[0] | ||||
|                         write_piff_header(ctx['dest_stream'], info_dict['_download_params']) | ||||
|                         extra_state['ism_track_written'] = True | ||||
|                     self._append_fragment(ctx, frag_content) | ||||
|                     break | ||||
|                 except compat_urllib_error.HTTPError as err: | ||||
|                     count += 1 | ||||
|                     if count <= fragment_retries: | ||||
|                         self.report_retry_fragment(err, frag_index, count, fragment_retries) | ||||
|             if count > fragment_retries: | ||||
|                 if skip_unavailable_fragments: | ||||
|                     self.report_skip_fragment(frag_index) | ||||
|                 except HTTPError as err: | ||||
|                     retry.error = err | ||||
|                     continue | ||||
|                 self.report_error('giving up after %s fragment retries' % fragment_retries) | ||||
|                 return False | ||||
|  | ||||
|         self._finish_frag_download(ctx, info_dict) | ||||
|             if retry_manager.error: | ||||
|                 if not skip_unavailable_fragments: | ||||
|                     return False | ||||
|                 self.report_skip_fragment(frag_index) | ||||
|  | ||||
|         return True | ||||
|         return self._finish_frag_download(ctx, info_dict) | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user