Cleared out old files

parent 7fbca79104, commit c11e384925
@@ -1,53 +0,0 @@
# Python imports
import builtins
import threading
import sys

# Lib imports

# Application imports
from utils.event_system import EventSystem
from utils.endpoint_registry import EndpointRegistry
from utils.keybindings import Keybindings
from utils.logger import Logger
from utils.settings import Settings



# NOTE: Threads WILL NOT die with parent's destruction.
def threaded_wrapper(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start()
    return wrapper

# NOTE: Threads WILL die with parent's destruction.
def daemon_threaded_wrapper(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start()
    return wrapper

def sizeof_fmt_def(num, suffix="B"):
    for unit in ["", "K", "M", "G", "T", "Pi", "Ei", "Zi"]:
        if abs(num) < 1024.0:
            return f"{num:3.1f} {unit}{suffix}"
        num /= 1024.0
    return f"{num:.1f} Yi{suffix}"



# NOTE: Just reminding myself we can add to builtins two different ways...
# __builtins__.update({"event_system": Builtins()})
builtins.app_name = "Mirage2"
builtins.keybindings = Keybindings()
builtins.event_system = EventSystem()
builtins.endpoint_registry = EndpointRegistry()
builtins.settings = Settings()
builtins.logger = Logger(settings.get_home_config_path(), \
                         _ch_log_lvl=settings.get_ch_log_lvl(), \
                         _fh_log_lvl=settings.get_fh_log_lvl()).get_logger()

builtins.threaded = threaded_wrapper
builtins.daemon_threaded = daemon_threaded_wrapper
builtins.debug = False
builtins.trace_debug = False

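For orientation, here is a minimal sketch of how the globals injected above are meant to be consumed elsewhere in the app. The consumer function name is hypothetical; it assumes the module above has already been imported once at startup, which is how `daemon_threaded`, `event_system`, `logger`, and `sizeof_fmt_def` become available as bare names.

import time

@daemon_threaded                   # injected as builtins.daemon_threaded above
def watch_events():
    while True:
        time.sleep(0.2)
        event = event_system.consume_gui_event()   # global from builtins
        if event:
            logger.debug(f"Got event: {event}")

watch_events()                     # returns immediately; the loop runs on a daemon thread
print(sizeof_fmt_def(1536))        # -> '1.5 KB'
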
@@ -1,3 +0,0 @@
"""
    Base module
"""

@@ -1,52 +0,0 @@
#!/usr/bin/python3


# Python imports
import argparse
import faulthandler
import traceback
from setproctitle import setproctitle

# Lib imports
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk

# Application imports
from __builtins__ import *
from app import Application



def run():
    try:
        setproctitle(f"{app_name}")
        faulthandler.enable()  # For better debug info

        parser = argparse.ArgumentParser()
        # Add long and short arguments
        parser.add_argument("--debug", "-d", default="false", help="Do extra console messaging.")
        parser.add_argument("--trace-debug", "-td", default="false", help="Disable saves, ignore IPC lock, do extra console messaging.")
        parser.add_argument("--no-plugins", "-np", default="false", help="Do not load plugins.")
        parser.add_argument("--file", "-f", default=None, help="Open an image.")
        parser.add_argument("--dir", "-D", default=None, help="Load a dir with images.")  # "-d" is already taken by --debug

        # Read arguments (If any...)
        args, unknownargs = parser.parse_known_args()

        if args.debug == "true":
            settings.set_debug(True)

        if args.trace_debug == "true":
            settings.set_trace_debug(True)

        Application(args, unknownargs)
        Gtk.main()
    except Exception as e:
        traceback.print_exc()
        quit()


if __name__ == "__main__":
    """ Set process title, get arguments, and create GTK main thread. """
    run()

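As a side note on the argument handling above: parse_known_args() is what lets bare paths fall through to the controller as unknownargs. A small self-contained sketch of that behaviour (the argv values are illustrative only):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--debug", "-d", default="false")
args, unknownargs = parser.parse_known_args(["--debug", "true", "/home/user/Pictures", "photo.png"])

# args.debug    == "true"
# unknownargs   == ["/home/user/Pictures", "photo.png"]
# The leftover strings are later treated as directories or files to load.
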
@@ -1,49 +0,0 @@
# Python imports
import os
import inspect

# Lib imports

# Application imports
from utils.event_system import EventSystem
from utils.ipc_server import IPCServer
from context.controller import Controller




class Application(EventSystem):
    def __init__(self, args, unknownargs):
        if not debug:
            event_system.create_ipc_server()

        # NOTE: Keeping here just in case I change my mind...
        # time.sleep(0.2)
        # if not trace_debug:
        #     if not event_system.is_ipc_alive:
        #         if unknownargs:
        #             for arg in unknownargs:
        #                 if os.path.isdir(arg):
        #                     message = f"FILE|{arg}"
        #                     event_system.send_ipc_message(message)
        #
        #         raise Exception("IPC Server Exists: Will send data to it and close...")


        settings.create_window()

        controller = Controller(settings, args, unknownargs)
        if not controller:
            raise Exception("Controller exited and doesn't exist...")

        # Get the methods from the classes and set them as handlers.
        # Then, the builder from settings will connect any signals it needs.
        classes = [controller]
        handlers = {}
        for c in classes:
            methods = None
            try:
                methods = inspect.getmembers(c, predicate=inspect.ismethod)
                handlers.update(methods)
            except Exception as e:
                print(repr(e))

        settings.get_builder().connect_signals(handlers)

@@ -1,3 +0,0 @@
"""
    Gtk Bound Signal Module
"""

@@ -1,185 +0,0 @@
# Python imports
import os
import threading, subprocess, time


# Gtk imports
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GLib, GdkPixbuf

# Application imports
from .controller_data import Controller_Data
from .mixins.tree_view_update_mixin import TreeViewUpdateMixin


try:
    from PIL import Image as PImage
except Exception as e:
    PImage = None


def threaded(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs).start()

    return wrapper


class Controller(TreeViewUpdateMixin, Controller_Data):
    def __init__(self, _settings, args, unknownargs):
        self.setup_controller_data(_settings)
        self.window.show_all()
        self.handle_args(args, unknownargs)


    def tear_down(self, widget=None, eve=None):
        event_system.send_ipc_message("close server")

        time.sleep(event_sleep_time)
        Gtk.main_quit()


    @threaded
    def gui_event_observer(self):
        while True:
            time.sleep(event_sleep_time)
            event = event_system.consume_gui_event()
            if event:
                try:
                    type, target, data = event
                    if not type:
                        method = getattr(self.__class__, target)
                        GLib.idle_add(method, *(self, *data,))
                    else:
                        method = getattr(self.__class__, "handle_gui_event_and_call_back")
                        GLib.idle_add(method, *(self, type, target, data))
                except Exception as e:
                    print(repr(e))
                    self.logger.debug(e)

    def handle_gui_event_and_call_back(self, type, target, parameters):
        method = getattr(self.__class__, target)
        data = method(*(self, *parameters))
        event_system.push_module_event([type, None, (data,)])


    def handle_args(self, args=None, unknownargs=None):
        print(f"Args: {args}")
        print(f"Unknownargs: {unknownargs}")
        if args.dir and os.path.isdir(args.dir):
            self.load_store(self.view, self.thumbnail_store, args.dir)

        if args.file and os.path.isfile(args.file):
            path = '/'.join(args.file.split("/")[:-1])
            self.load_store(self.view, self.thumbnail_store, path)
            self.process_path(args.file)

        if unknownargs:
            for arg in unknownargs:
                if os.path.isdir(arg):
                    self.load_store(self.view, self.thumbnail_store, arg)
                elif os.path.isfile(arg):
                    path = '/'.join(arg.split("/")[:-1])
                    self.load_store(self.view, self.thumbnail_store, path)
                    self.process_path(arg)


    def _on_drag_data_received(self, widget, drag_context, x, y, data, info, time):
        if info == 80:
            uri = data.get_uris()[0].split("file://")[1]
            if os.path.isfile(uri) and uri.endswith(self.images_filter):
                try:
                    image = Gtk.Image.new_from_pixbuf(self._get_pixbuf(uri))
                except Exception as e:
                    image = Gtk.Image.new_from_pixbuf(self._get_pixbuf(self.blank_image))

                self.current_img = image
                self._load_image()
            elif os.path.isdir(uri):
                self.load_store(self.view, self.thumbnail_store, uri)


    def load_image_from_treeview(self, widget):
        store, iter = widget.get_selection().get_selected()
        uri = store.get_value(iter, 1)

        if uri == self.current_img_uri:
            return

        self.process_path(uri)

    def process_path(self, uri):
        self.current_img_uri = uri
        self.current_path_label.set_label(uri)
        if not uri.endswith(".gif"):
            self.is_img_gif = False
            self.current_img = Gtk.Image.new_from_pixbuf(self._get_pixbuf(uri))
            self._load_image()
        else:
            self.is_img_gif = True
            self.current_img = Gtk.Image.new_from_file(uri)
            self.gif_animation = self.current_img.get_animation()
            self._load_image()

    def _get_pixbuf(self, uri):
        geom_rec = self.image_area.get_parent().get_parent().get_allocated_size()[0]
        width = geom_rec.width - 15
        height = geom_rec.height - 15
        wxh = [width, height]

        self.image_area.set_size_request(width, height)
        if PImage and uri.lower().endswith(".webp"):
            return self.image2pixbuf(uri, wxh)
        else:
            return GdkPixbuf.Pixbuf.new_from_file_at_scale(uri, width, height, True)

    def image2pixbuf(self, path, wxh):
        """Convert Pillow image to GdkPixbuf"""
        im = PImage.open(path)
        data = im.tobytes()
        data = GLib.Bytes.new(data)
        w, h = im.size

        pixbuf = GdkPixbuf.Pixbuf.new_from_bytes(data, GdkPixbuf.Colorspace.RGB,
                                                 False, 8, w, h, w * 3)

        return pixbuf.scale_simple(wxh[0], wxh[1], 2)  # BILINEAR = 2

    def _load_image(self):
        self.clear_children(self.image_area)
        self.image_area.add(self.current_img)
        self.image_area.show_all()

    @threaded
    def scale_image_from_parent_resize(self, widget, allocation):
        if not self.image_update_lock:
            GLib.idle_add(self._on_scale_image_from_parent_resize, ())


    def _on_scale_image_from_parent_resize(self, eve):
        if self.current_img_uri:
            self.image_update_lock = True

            if not self.is_img_gif:
                self.current_img = Gtk.Image.new_from_pixbuf(self._get_pixbuf(self.current_img_uri))
                self._load_image()
            else:
                try:
                    self.gif_animation.advance()
                    self.current_img.set_from_animation(self.gif_animation)
                except Exception:
                    pass

            self.image_update_lock = False

    def get_clipboard_data(self):
        proc = subprocess.Popen(['xclip', '-selection', 'clipboard', '-o'], stdout=subprocess.PIPE)
        retcode = proc.wait()
        data = proc.stdout.read()
        return data.decode("utf-8").strip()

    def set_clipboard_data(self, data):
        proc = subprocess.Popen(['xclip', '-selection', 'clipboard'], stdin=subprocess.PIPE)
        proc.stdin.write(data)
        proc.stdin.close()
        retcode = proc.wait()

@@ -1,68 +0,0 @@
# Python imports
import os, signal

# Lib imports
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gdk', '3.0')
from gi.repository import Gtk, Gdk, GLib

# Application imports
from .view import View




class Controller_Data:
    def clear_children(self, widget):
        ''' Clear children of a gtk widget. '''
        for child in widget.get_children():
            widget.remove(child)

    def clear_console(self):
        os.system('cls' if os.name == 'nt' else 'clear')

    def call_method(self, _method_name, data = None):
        method_name = str(_method_name)
        method = getattr(self, method_name, lambda data: f"No valid key passed...\nkey={method_name}\nargs={data}")
        return method(data) if data else method()

    def has_method(self, obj, name):
        return callable(getattr(obj, name, None))

    def setup_controller_data(self, _settings):
        self.settings = _settings
        self.builder = self.settings.get_builder()
        self.window = self.settings.get_main_window()
        self.logger = self.settings.get_logger()

        self.home_path = self.settings.get_home_path()
        self.success_color = self.settings.get_success_color()
        self.warning_color = self.settings.get_warning_color()
        self.error_color = self.settings.get_error_color()

        self.current_path_label = self.builder.get_object("current_path_label")
        self.thumbnails_view = self.builder.get_object("thumbnails_view")
        self.thumbnail_store = self.builder.get_object("thumbnail_store")
        self.image_area = self.builder.get_object("image_area")
        self.blank_image = self.settings.get_blank_image()

        self.thumbnails_view.connect("drag-data-received", self._on_drag_data_received)
        URI_TARGET_TYPE = 80
        uri_target = Gtk.TargetEntry.new('text/uri-list', Gtk.TargetFlags(0), URI_TARGET_TYPE)
        targets = [ uri_target ]
        action = Gdk.DragAction.COPY
        self.thumbnails_view.enable_model_drag_dest(targets, action)
        self.thumbnails_view.enable_model_drag_source(0, targets, action)

        self.images_filter = self.settings.get_images_filter()
        self.view = View(self.images_filter, self.blank_image)
        self.current_img_uri = None
        self.current_img = None
        self.gif_animation = None
        self.is_img_gif = False
        self.image_update_lock = False


        self.window.connect("delete-event", self.tear_down)
        GLib.unix_signal_add(GLib.PRIORITY_DEFAULT, signal.SIGINT, self.tear_down)

@@ -1,3 +0,0 @@
"""
    Icons module
"""

@@ -1,99 +0,0 @@
# Python Imports
import os, subprocess, threading, hashlib
from os.path import isfile

# Gtk imports
import gi
gi.require_version('GdkPixbuf', '2.0')
from gi.repository import GdkPixbuf, GLib

# Application imports
from .mixins.desktopiconmixin import DesktopIconMixin
from .mixins.videoiconmixin import VideoIconMixin


try:
    from PIL import Image as PImage
except Exception as e:
    PImage = None


def threaded(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs).start()
    return wrapper


class Icon(DesktopIconMixin, VideoIconMixin):
    def create_icon(self, dir, file):
        full_path = f"{dir}/{file}"
        return self.get_icon_image(dir, file, full_path)

    def get_icon_image(self, dir, file, full_path):
        try:
            thumbnl = None

            if file.lower().endswith(self.fimages):  # Image Icon
                thumbnl = self.create_scaled_image(full_path, self.VIDEO_ICON_WH)

            return thumbnl
        except Exception as e:
            return None

    def create_thumbnail(self, dir, file):
        full_path = f"{dir}/{file}"
        try:
            file_hash = hashlib.sha256(str.encode(full_path)).hexdigest()
            hash_img_pth = f"{self.ABS_THUMBS_PTH}/{file_hash}.jpg"
            if isfile(hash_img_pth) == False:
                self.generate_video_thumbnail(full_path, hash_img_pth)

            thumbnl = self.create_scaled_image(hash_img_pth, self.VIDEO_ICON_WH)
            if thumbnl == None:  # If no icon whatsoever, return internal default
                thumbnl = GdkPixbuf.Pixbuf.new_from_file(f"{self.DEFAULT_ICONS}/video.png")

            return thumbnl
        except Exception as e:
            print("Thumbnail generation issue:")
            print( repr(e) )
            return GdkPixbuf.Pixbuf.new_from_file(f"{self.DEFAULT_ICONS}/video.png")


    def create_scaled_image(self, path, wxh):
        try:
            if path.lower().endswith(".gif"):
                return GdkPixbuf.PixbufAnimation.new_from_file(path) \
                                                .get_static_image() \
                                                .scale_simple(wxh[0], wxh[1], GdkPixbuf.InterpType.BILINEAR)
            else:
                if PImage and path.lower().endswith(".webp"):
                    return self.image2pixbuf(path, wxh)
                else:
                    return GdkPixbuf.Pixbuf.new_from_file_at_scale(path, wxh[0], wxh[1], True)
        except Exception as e:
            print("Image Scaling Issue:")
            print( repr(e) )
            return None

    def image2pixbuf(self, path, wxh):
        """Convert Pillow image to GdkPixbuf"""
        im = PImage.open(path)
        data = im.tobytes()
        data = GLib.Bytes.new(data)
        w, h = im.size

        pixbuf = GdkPixbuf.Pixbuf.new_from_bytes(data, GdkPixbuf.Colorspace.RGB,
                                                 False, 8, w, h, w * 3)

        return pixbuf.scale_simple(wxh[0], wxh[1], 2)  # BILINEAR = 2

    def create_from_file(self, path):
        try:
            return GdkPixbuf.Pixbuf.new_from_file(path)
        except Exception as e:
            print("Image from file Issue:")
            print( repr(e) )
            return None

    def return_generic_icon(self):
        return GdkPixbuf.Pixbuf.new_from_file(self.DEFAULT_ICON)

@@ -1,3 +0,0 @@
"""
    Mixins
"""

@@ -1,62 +0,0 @@
# Python Imports
import os, subprocess, hashlib
from os.path import isfile

# Gtk imports

# Application imports
from .xdg.DesktopEntry import DesktopEntry



class DesktopIconMixin:
    def parse_desktop_files(self, full_path):
        try:
            xdgObj = DesktopEntry(full_path)
            icon = xdgObj.getIcon()
            alt_icon_path = ""

            if "steam" in icon:
                name = xdgObj.getName()
                file_hash = hashlib.sha256(str.encode(name)).hexdigest()
                hash_img_pth = self.STEAM_ICONS_PTH + "/" + file_hash + ".jpg"

                if isfile(hash_img_pth) == True:
                    # Use video sizes since headers are bigger
                    return self.create_scaled_image(hash_img_pth, self.VIDEO_ICON_WH)

                exec_str = xdgObj.getExec()
                parts = exec_str.split("steam://rungameid/")
                id = parts[len(parts) - 1]
                imageLink = self.STEAM_BASE_URL + id + "/header.jpg"
                proc = subprocess.Popen(["wget", "-O", hash_img_pth, imageLink])
                proc.wait()

                # Use video thumbnail sizes since headers are bigger
                return self.create_scaled_image(hash_img_pth, self.VIDEO_ICON_WH)
            elif os.path.exists(icon):
                return self.create_scaled_image(icon, self.SYS_ICON_WH)
            else:
                alt_icon_path = ""

                for dir in self.ICON_DIRS:
                    alt_icon_path = self.traverse_icons_folder(dir, icon)
                    if alt_icon_path != "":
                        break

                return self.create_scaled_image(alt_icon_path, self.SYS_ICON_WH)
        except Exception as e:
            print(".desktop icon generation issue:")
            print( repr(e) )
            return None

    def traverse_icons_folder(self, path, icon):
        alt_icon_path = ""

        for (dirpath, dirnames, filenames) in os.walk(path):
            for file in filenames:
                appNM = "application-x-" + icon
                if icon in file or appNM in file:
                    alt_icon_path = dirpath + "/" + file
                    break

        return alt_icon_path

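For context, the mixin above relies on the vendored DesktopEntry class that appears later in this diff. A brief, hedged sketch of that API as the mixin uses it (the .desktop path is illustrative only):

from .xdg.DesktopEntry import DesktopEntry   # same import the mixin uses

entry = DesktopEntry("/usr/share/applications/firefox.desktop")   # example path
entry.getName()   # localized Name= value; hashed above to build the cached header file name
entry.getIcon()   # Icon= value: either an absolute path or a theme icon name
entry.getExec()   # Exec= line; parse_desktop_files() splits it on "steam://rungameid/"
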
@@ -1,53 +0,0 @@
# Python Imports
import subprocess

# Gtk imports

# Application imports



class VideoIconMixin:
    def generate_video_thumbnail(self, full_path, hash_img_pth):
        try:
            proc = subprocess.Popen([self.FFMPG_THUMBNLR, "-t", "65%", "-s", "300", "-c", "jpg", "-i", full_path, "-o", hash_img_pth])
            proc.wait()
        except Exception as e:
            self.logger.debug(repr(e))
            self.ffprobe_generate_video_thumbnail(full_path, hash_img_pth)


    def ffprobe_generate_video_thumbnail(self, full_path, hash_img_pth):
        proc = None
        try:
            # Stream duration
            command = ["ffprobe", "-v", "error", "-select_streams", "v:0", "-show_entries", "stream=duration", "-of", "default=noprint_wrappers=1:nokey=1", full_path]
            data = subprocess.run(command, stdout=subprocess.PIPE)
            duration = data.stdout.decode('utf-8')

            # Format (container) duration
            if "N/A" in duration:
                command = ["ffprobe", "-v", "error", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", full_path]
                data = subprocess.run(command, stdout=subprocess.PIPE)
                duration = data.stdout.decode('utf-8')

            # Stream duration type: image2
            if "N/A" in duration:
                command = ["ffprobe", "-v", "error", "-select_streams", "v:0", "-f", "image2", "-show_entries", "stream=duration", "-of", "default=noprint_wrappers=1:nokey=1", full_path]
                data = subprocess.run(command, stdout=subprocess.PIPE)
                duration = data.stdout.decode('utf-8')

            # Format (container) duration type: image2
            if "N/A" in duration:
                command = ["ffprobe", "-v", "error", "-f", "image2", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", full_path]
                data = subprocess.run(command, stdout=subprocess.PIPE)
                duration = data.stdout.decode('utf-8')

            # Get frame roughly 35% through video
            grabTime = str( int( float( duration.split(".")[0] ) * 0.35) )
            command = ["ffmpeg", "-ss", grabTime, "-an", "-i", full_path, "-s", "320x180", "-vframes", "1", hash_img_pth]
            proc = subprocess.Popen(command, stdout=subprocess.PIPE)
            proc.wait()
        except Exception as e:
            print("Video thumbnail generation issue in thread:")
            print( repr(e) )
            self.logger.debug(repr(e))

@@ -1,160 +0,0 @@
"""
This module is based on a rox module (LGPL):

http://cvs.sourceforge.net/viewcvs.py/rox/ROX-Lib2/python/rox/basedir.py?rev=1.9&view=log

The freedesktop.org Base Directory specification provides a way for
applications to locate shared data and configuration:

http://standards.freedesktop.org/basedir-spec/

(based on version 0.6)

This module can be used to load and save from and to these directories.

Typical usage:

    from rox import basedir

    for dir in basedir.load_config_paths('mydomain.org', 'MyProg', 'Options'):
        print "Load settings from", dir

    dir = basedir.save_config_path('mydomain.org', 'MyProg')
    print >>file(os.path.join(dir, 'Options'), 'w'), "foo=2"

Note: see the rox.Options module for a higher-level API for managing options.
"""

import os, stat

_home = os.path.expanduser('~')
xdg_data_home = os.environ.get('XDG_DATA_HOME') or \
    os.path.join(_home, '.local', 'share')

xdg_data_dirs = [xdg_data_home] + \
    (os.environ.get('XDG_DATA_DIRS') or '/usr/local/share:/usr/share').split(':')

xdg_config_home = os.environ.get('XDG_CONFIG_HOME') or \
    os.path.join(_home, '.config')

xdg_config_dirs = [xdg_config_home] + \
    (os.environ.get('XDG_CONFIG_DIRS') or '/etc/xdg').split(':')

xdg_cache_home = os.environ.get('XDG_CACHE_HOME') or \
    os.path.join(_home, '.cache')

xdg_data_dirs = [x for x in xdg_data_dirs if x]
xdg_config_dirs = [x for x in xdg_config_dirs if x]

def save_config_path(*resource):
    """Ensure ``$XDG_CONFIG_HOME/<resource>/`` exists, and return its path.
    'resource' should normally be the name of your application. Use this
    when saving configuration settings.
    """
    resource = os.path.join(*resource)
    assert not resource.startswith('/')
    path = os.path.join(xdg_config_home, resource)
    if not os.path.isdir(path):
        os.makedirs(path, 0o700)
    return path

def save_data_path(*resource):
    """Ensure ``$XDG_DATA_HOME/<resource>/`` exists, and return its path.
    'resource' should normally be the name of your application or a shared
    resource. Use this when saving or updating application data.
    """
    resource = os.path.join(*resource)
    assert not resource.startswith('/')
    path = os.path.join(xdg_data_home, resource)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path

def save_cache_path(*resource):
    """Ensure ``$XDG_CACHE_HOME/<resource>/`` exists, and return its path.
    'resource' should normally be the name of your application or a shared
    resource."""
    resource = os.path.join(*resource)
    assert not resource.startswith('/')
    path = os.path.join(xdg_cache_home, resource)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path

def load_config_paths(*resource):
    """Returns an iterator which gives each directory named 'resource' in the
    configuration search path. Information provided by earlier directories should
    take precedence over later ones, and the user-specific config dir comes
    first."""
    resource = os.path.join(*resource)
    for config_dir in xdg_config_dirs:
        path = os.path.join(config_dir, resource)
        if os.path.exists(path): yield path

def load_first_config(*resource):
    """Returns the first result from load_config_paths, or None if there is nothing
    to load."""
    for x in load_config_paths(*resource):
        return x
    return None

def load_data_paths(*resource):
    """Returns an iterator which gives each directory named 'resource' in the
    application data search path. Information provided by earlier directories
    should take precedence over later ones."""
    resource = os.path.join(*resource)
    for data_dir in xdg_data_dirs:
        path = os.path.join(data_dir, resource)
        if os.path.exists(path): yield path

def get_runtime_dir(strict=True):
    """Returns the value of $XDG_RUNTIME_DIR, a directory path.

    This directory is intended for 'user-specific non-essential runtime files
    and other file objects (such as sockets, named pipes, ...)', and
    'communication and synchronization purposes'.

    As of late 2012, only quite new systems set $XDG_RUNTIME_DIR. If it is not
    set, with ``strict=True`` (the default), a KeyError is raised. With
    ``strict=False``, PyXDG will create a fallback under /tmp for the current
    user. This fallback does *not* provide the same guarantees as the
    specification requires for the runtime directory.

    The strict default is deliberately conservative, so that application
    developers can make a conscious decision to allow the fallback.
    """
    try:
        return os.environ['XDG_RUNTIME_DIR']
    except KeyError:
        if strict:
            raise

        import getpass
        fallback = '/tmp/pyxdg-runtime-dir-fallback-' + getpass.getuser()
        create = False

        try:
            # This must be a real directory, not a symlink, so attackers can't
            # point it elsewhere. So we use lstat to check it.
            st = os.lstat(fallback)
        except OSError as e:
            import errno
            if e.errno == errno.ENOENT:
                create = True
            else:
                raise
        else:
            # The fallback must be a directory
            if not stat.S_ISDIR(st.st_mode):
                os.unlink(fallback)
                create = True
            # Must be owned by the user and not accessible by anyone else
            elif (st.st_uid != os.getuid()) \
                or (st.st_mode & (stat.S_IRWXG | stat.S_IRWXO)):
                os.rmdir(fallback)
                create = True

        if create:
            os.mkdir(fallback, 0o700)

        return fallback

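The module docstring above still shows Python 2 print syntax; the same calls in Python 3 would look roughly like this (the domain and file names are illustrative only):

import os
from xdg import BaseDirectory as basedir   # or this vendored copy of the module

config_dir = basedir.save_config_path('mydomain.org', 'MyProg')   # created 0o700 if missing
with open(os.path.join(config_dir, 'Options'), 'w') as f:
    f.write("foo=2\n")

for d in basedir.load_config_paths('mydomain.org', 'MyProg', 'Options'):
    print("Load settings from", d)
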
@@ -1,39 +0,0 @@
"""
Functions to configure Basic Settings
"""

language = "C"
windowmanager = None
icon_theme = "hicolor"
icon_size = 48
cache_time = 5
root_mode = False

def setWindowManager(wm):
    global windowmanager
    windowmanager = wm

def setIconTheme(theme):
    global icon_theme
    icon_theme = theme
    import xdg.IconTheme
    xdg.IconTheme.themes = []

def setIconSize(size):
    global icon_size
    icon_size = size

def setCacheTime(time):
    global cache_time
    cache_time = time

def setLocale(lang):
    import locale
    lang = locale.normalize(lang)
    locale.setlocale(locale.LC_ALL, lang)
    import xdg.Locale
    xdg.Locale.update(lang)

def setRootMode(boolean):
    global root_mode
    root_mode = boolean

@@ -1,435 +0,0 @@
"""
Complete implementation of the XDG Desktop Entry Specification
http://standards.freedesktop.org/desktop-entry-spec/

Not supported:
- Encoding: Legacy Mixed
- Does not check exec parameters
- Does not check URLs
- Does not completely validate deprecated/kde items
- Does not completely check categories
"""

from .IniFile import IniFile
from . import Locale

from .IniFile import is_ascii

from .Exceptions import ParsingError
from .util import which
import os.path
import re
import warnings

class DesktopEntry(IniFile):
    "Class to parse and validate Desktop Entries"

    defaultGroup = 'Desktop Entry'

    def __init__(self, filename=None):
        """Create a new DesktopEntry.

        If filename exists, it will be parsed as a desktop entry file. If not,
        or if filename is None, a blank DesktopEntry is created.
        """
        self.content = dict()
        if filename and os.path.exists(filename):
            self.parse(filename)
        elif filename:
            self.new(filename)

    def __str__(self):
        return self.getName()

    def parse(self, file):
        """Parse a desktop entry file.

        This can raise :class:`~xdg.Exceptions.ParsingError`,
        :class:`~xdg.Exceptions.DuplicateGroupError` or
        :class:`~xdg.Exceptions.DuplicateKeyError`.
        """
        IniFile.parse(self, file, ["Desktop Entry", "KDE Desktop Entry"])

    def findTryExec(self):
        """Looks in the PATH for the executable given in the TryExec field.

        Returns the full path to the executable if it is found, None if not.
        Raises :class:`~xdg.Exceptions.NoKeyError` if TryExec is not present.
        """
        tryexec = self.get('TryExec', strict=True)
        return which(tryexec)

    # start standard keys
    def getType(self):
        return self.get('Type')
    def getVersion(self):
        """deprecated, use getVersionString instead"""
        return self.get('Version', type="numeric")
    def getVersionString(self):
        return self.get('Version')
    def getName(self):
        return self.get('Name', locale=True)
    def getGenericName(self):
        return self.get('GenericName', locale=True)
    def getNoDisplay(self):
        return self.get('NoDisplay', type="boolean")
    def getComment(self):
        return self.get('Comment', locale=True)
    def getIcon(self):
        return self.get('Icon', locale=True)
    def getHidden(self):
        return self.get('Hidden', type="boolean")
    def getOnlyShowIn(self):
        return self.get('OnlyShowIn', list=True)
    def getNotShowIn(self):
        return self.get('NotShowIn', list=True)
    def getTryExec(self):
        return self.get('TryExec')
    def getExec(self):
        return self.get('Exec')
    def getPath(self):
        return self.get('Path')
    def getTerminal(self):
        return self.get('Terminal', type="boolean")
    def getMimeType(self):
        """deprecated, use getMimeTypes instead"""
        return self.get('MimeType', list=True, type="regex")
    def getMimeTypes(self):
        return self.get('MimeType', list=True)
    def getCategories(self):
        return self.get('Categories', list=True)
    def getStartupNotify(self):
        return self.get('StartupNotify', type="boolean")
    def getStartupWMClass(self):
        return self.get('StartupWMClass')
    def getURL(self):
        return self.get('URL')
    # end standard keys

    # start kde keys
    def getServiceTypes(self):
        return self.get('ServiceTypes', list=True)
    def getDocPath(self):
        return self.get('DocPath')
    def getKeywords(self):
        return self.get('Keywords', list=True, locale=True)
    def getInitialPreference(self):
        return self.get('InitialPreference')
    def getDev(self):
        return self.get('Dev')
    def getFSType(self):
        return self.get('FSType')
    def getMountPoint(self):
        return self.get('MountPoint')
    def getReadonly(self):
        return self.get('ReadOnly', type="boolean")
    def getUnmountIcon(self):
        return self.get('UnmountIcon', locale=True)
    # end kde keys

    # start deprecated keys
    def getMiniIcon(self):
        return self.get('MiniIcon', locale=True)
    def getTerminalOptions(self):
        return self.get('TerminalOptions')
    def getDefaultApp(self):
        return self.get('DefaultApp')
    def getProtocols(self):
        return self.get('Protocols', list=True)
    def getExtensions(self):
        return self.get('Extensions', list=True)
    def getBinaryPattern(self):
        return self.get('BinaryPattern')
    def getMapNotify(self):
        return self.get('MapNotify')
    def getEncoding(self):
        return self.get('Encoding')
    def getSwallowTitle(self):
        return self.get('SwallowTitle', locale=True)
    def getSwallowExec(self):
        return self.get('SwallowExec')
    def getSortOrder(self):
        return self.get('SortOrder', list=True)
    def getFilePattern(self):
        return self.get('FilePattern', type="regex")
    def getActions(self):
        return self.get('Actions', list=True)
    # end deprecated keys

    # desktop entry edit stuff
    def new(self, filename):
        """Make this instance into a new, blank desktop entry.

        If filename has a .desktop extension, Type is set to Application. If it
        has a .directory extension, Type is Directory. Other extensions will
        cause :class:`~xdg.Exceptions.ParsingError` to be raised.
        """
        if os.path.splitext(filename)[1] == ".desktop":
            type = "Application"
        elif os.path.splitext(filename)[1] == ".directory":
            type = "Directory"
        else:
            raise ParsingError("Unknown extension", filename)

        self.content = dict()
        self.addGroup(self.defaultGroup)
        self.set("Type", type)
        self.filename = filename
    # end desktop entry edit stuff

    # validation stuff
    def checkExtras(self):
        # header
        if self.defaultGroup == "KDE Desktop Entry":
            self.warnings.append('[KDE Desktop Entry]-Header is deprecated')

        # file extension
        if self.fileExtension == ".kdelnk":
            self.warnings.append("File extension .kdelnk is deprecated")
        elif self.fileExtension != ".desktop" and self.fileExtension != ".directory":
            self.warnings.append('Unknown File extension')

        # Type
        try:
            self.type = self.content[self.defaultGroup]["Type"]
        except KeyError:
            self.errors.append("Key 'Type' is missing")

        # Name
        try:
            self.name = self.content[self.defaultGroup]["Name"]
        except KeyError:
            self.errors.append("Key 'Name' is missing")

    def checkGroup(self, group):
        # check if group header is valid
        if not (group == self.defaultGroup \
                or re.match("^Desktop Action [a-zA-Z0-9-]+$", group) \
                or (re.match("^X-", group) and is_ascii(group))):
            self.errors.append("Invalid Group name: %s" % group)
        else:
            # OnlyShowIn and NotShowIn
            if ("OnlyShowIn" in self.content[group]) and ("NotShowIn" in self.content[group]):
                self.errors.append("Group may either have OnlyShowIn or NotShowIn, but not both")

    def checkKey(self, key, value, group):
        # standard keys
        if key == "Type":
            if value == "ServiceType" or value == "Service" or value == "FSDevice":
                self.warnings.append("Type=%s is a KDE extension" % key)
            elif value == "MimeType":
                self.warnings.append("Type=MimeType is deprecated")
            elif not (value == "Application" or value == "Link" or value == "Directory"):
                self.errors.append("Value of key 'Type' must be Application, Link or Directory, but is '%s'" % value)

            if self.fileExtension == ".directory" and not value == "Directory":
                self.warnings.append("File extension is .directory, but Type is '%s'" % value)
            elif self.fileExtension == ".desktop" and value == "Directory":
                self.warnings.append("Files with Type=Directory should have the extension .directory")

            if value == "Application":
                if "Exec" not in self.content[group]:
                    self.warnings.append("Type=Application needs 'Exec' key")
            if value == "Link":
                if "URL" not in self.content[group]:
                    self.warnings.append("Type=Link needs 'URL' key")

        elif key == "Version":
            self.checkValue(key, value)

        elif re.match("^Name"+Locale.regex+"$", key):
            pass  # locale string

        elif re.match("^GenericName"+Locale.regex+"$", key):
            pass  # locale string

        elif key == "NoDisplay":
            self.checkValue(key, value, type="boolean")

        elif re.match("^Comment"+Locale.regex+"$", key):
            pass  # locale string

        elif re.match("^Icon"+Locale.regex+"$", key):
            self.checkValue(key, value)

        elif key == "Hidden":
            self.checkValue(key, value, type="boolean")

        elif key == "OnlyShowIn":
            self.checkValue(key, value, list=True)
            self.checkOnlyShowIn(value)

        elif key == "NotShowIn":
            self.checkValue(key, value, list=True)
            self.checkOnlyShowIn(value)

        elif key == "TryExec":
            self.checkValue(key, value)
            self.checkType(key, "Application")

        elif key == "Exec":
            self.checkValue(key, value)
            self.checkType(key, "Application")

        elif key == "Path":
            self.checkValue(key, value)
            self.checkType(key, "Application")

        elif key == "Terminal":
            self.checkValue(key, value, type="boolean")
            self.checkType(key, "Application")

        elif key == "Actions":
            self.checkValue(key, value, list=True)
            self.checkType(key, "Application")

        elif key == "MimeType":
            self.checkValue(key, value, list=True)
            self.checkType(key, "Application")

        elif key == "Categories":
            self.checkValue(key, value)
            self.checkType(key, "Application")
            self.checkCategories(value)

        elif re.match("^Keywords"+Locale.regex+"$", key):
            self.checkValue(key, value, type="localestring", list=True)
            self.checkType(key, "Application")

        elif key == "StartupNotify":
            self.checkValue(key, value, type="boolean")
            self.checkType(key, "Application")

        elif key == "StartupWMClass":
            self.checkType(key, "Application")

        elif key == "URL":
            self.checkValue(key, value)
            self.checkType(key, "URL")

        # kde extensions
        elif key == "ServiceTypes":
            self.checkValue(key, value, list=True)
            self.warnings.append("Key '%s' is a KDE extension" % key)

        elif key == "DocPath":
            self.checkValue(key, value)
            self.warnings.append("Key '%s' is a KDE extension" % key)

        elif key == "InitialPreference":
            self.checkValue(key, value, type="numeric")
            self.warnings.append("Key '%s' is a KDE extension" % key)

        elif key == "Dev":
            self.checkValue(key, value)
            self.checkType(key, "FSDevice")
            self.warnings.append("Key '%s' is a KDE extension" % key)

        elif key == "FSType":
            self.checkValue(key, value)
            self.checkType(key, "FSDevice")
            self.warnings.append("Key '%s' is a KDE extension" % key)

        elif key == "MountPoint":
            self.checkValue(key, value)
            self.checkType(key, "FSDevice")
            self.warnings.append("Key '%s' is a KDE extension" % key)

        elif key == "ReadOnly":
            self.checkValue(key, value, type="boolean")
            self.checkType(key, "FSDevice")
            self.warnings.append("Key '%s' is a KDE extension" % key)

        elif re.match("^UnmountIcon"+Locale.regex+"$", key):
            self.checkValue(key, value)
            self.checkType(key, "FSDevice")
            self.warnings.append("Key '%s' is a KDE extension" % key)

        # deprecated keys
        elif key == "Encoding":
            self.checkValue(key, value)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif re.match("^MiniIcon"+Locale.regex+"$", key):
            self.checkValue(key, value)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "TerminalOptions":
            self.checkValue(key, value)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "DefaultApp":
            self.checkValue(key, value)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "Protocols":
            self.checkValue(key, value, list=True)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "Extensions":
            self.checkValue(key, value, list=True)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "BinaryPattern":
            self.checkValue(key, value)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "MapNotify":
            self.checkValue(key, value)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif re.match("^SwallowTitle"+Locale.regex+"$", key):
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "SwallowExec":
            self.checkValue(key, value)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "FilePattern":
            self.checkValue(key, value, type="regex", list=True)
            self.warnings.append("Key '%s' is deprecated" % key)

        elif key == "SortOrder":
            self.checkValue(key, value, list=True)
            self.warnings.append("Key '%s' is deprecated" % key)

        # "X-" extensions
        elif re.match("^X-[a-zA-Z0-9-]+", key):
            pass

        else:
            self.errors.append("Invalid key: %s" % key)

    def checkType(self, key, type):
        if not self.getType() == type:
            self.errors.append("Key '%s' only allowed in Type=%s" % (key, type))

    def checkOnlyShowIn(self, value):
        values = self.getList(value)
        valid = ["GNOME", "KDE", "LXDE", "MATE", "Razor", "ROX", "TDE", "Unity",
                 "XFCE", "Old"]
        for item in values:
            if item not in valid and item[0:2] != "X-":
                self.errors.append("'%s' is not a registered OnlyShowIn value" % item)

    def checkCategories(self, value):
        values = self.getList(value)

        main = ["AudioVideo", "Audio", "Video", "Development", "Education", "Game", "Graphics", "Network", "Office", "Science", "Settings", "System", "Utility"]
        if not any(item in main for item in values):
            self.errors.append("Missing main category")

        additional = ['Building', 'Debugger', 'IDE', 'GUIDesigner', 'Profiling', 'RevisionControl', 'Translation', 'Calendar', 'ContactManagement', 'Database', 'Dictionary', 'Chart', 'Email', 'Finance', 'FlowChart', 'PDA', 'ProjectManagement', 'Presentation', 'Spreadsheet', 'WordProcessor', '2DGraphics', 'VectorGraphics', 'RasterGraphics', '3DGraphics', 'Scanning', 'OCR', 'Photography', 'Publishing', 'Viewer', 'TextTools', 'DesktopSettings', 'HardwareSettings', 'Printing', 'PackageManager', 'Dialup', 'InstantMessaging', 'Chat', 'IRCClient', 'Feed', 'FileTransfer', 'HamRadio', 'News', 'P2P', 'RemoteAccess', 'Telephony', 'TelephonyTools', 'VideoConference', 'WebBrowser', 'WebDevelopment', 'Midi', 'Mixer', 'Sequencer', 'Tuner', 'TV', 'AudioVideoEditing', 'Player', 'Recorder', 'DiscBurning', 'ActionGame', 'AdventureGame', 'ArcadeGame', 'BoardGame', 'BlocksGame', 'CardGame', 'KidsGame', 'LogicGame', 'RolePlaying', 'Shooter', 'Simulation', 'SportsGame', 'StrategyGame', 'Art', 'Construction', 'Music', 'Languages', 'ArtificialIntelligence', 'Astronomy', 'Biology', 'Chemistry', 'ComputerScience', 'DataVisualization', 'Economy', 'Electricity', 'Geography', 'Geology', 'Geoscience', 'History', 'Humanities', 'ImageProcessing', 'Literature', 'Maps', 'Math', 'NumericalAnalysis', 'MedicalSoftware', 'Physics', 'Robotics', 'Spirituality', 'Sports', 'ParallelComputing', 'Amusement', 'Archiving', 'Compression', 'Electronics', 'Emulator', 'Engineering', 'FileTools', 'FileManager', 'TerminalEmulator', 'Filesystem', 'Monitor', 'Security', 'Accessibility', 'Calculator', 'Clock', 'TextEditor', 'Documentation', 'Adult', 'Core', 'KDE', 'GNOME', 'XFCE', 'GTK', 'Qt', 'Motif', 'Java', 'ConsoleOnly']
        allcategories = additional + main

        for item in values:
            if item not in allcategories and not item.startswith("X-"):
                self.errors.append("'%s' is not a registered Category" % item)

    def checkCategorie(self, value):
        """Deprecated alias for checkCategories - only exists for backwards
        compatibility.
        """
        warnings.warn("checkCategorie is deprecated, use checkCategories",
                      DeprecationWarning)
        return self.checkCategories(value)

@@ -1,84 +0,0 @@
"""
Exception Classes for the xdg package
"""

debug = False

class Error(Exception):
    """Base class for exceptions defined here."""
    def __init__(self, msg):
        self.msg = msg
        Exception.__init__(self, msg)
    def __str__(self):
        return self.msg

class ValidationError(Error):
    """Raised when a file fails to validate.

    The filename is the .file attribute.
    """
    def __init__(self, msg, file):
        self.msg = msg
        self.file = file
        Error.__init__(self, "ValidationError in file '%s': %s " % (file, msg))

class ParsingError(Error):
    """Raised when a file cannot be parsed.

    The filename is the .file attribute.
    """
    def __init__(self, msg, file):
        self.msg = msg
        self.file = file
        Error.__init__(self, "ParsingError in file '%s', %s" % (file, msg))

class NoKeyError(Error):
    """Raised when trying to access a nonexistent key in an INI-style file.

    Attributes are .key, .group and .file.
    """
    def __init__(self, key, group, file):
        Error.__init__(self, "No key '%s' in group %s of file %s" % (key, group, file))
        self.key = key
        self.group = group
        self.file = file

class DuplicateKeyError(Error):
    """Raised when the same key occurs twice in an INI-style file.

    Attributes are .key, .group and .file.
    """
    def __init__(self, key, group, file):
        Error.__init__(self, "Duplicate key '%s' in group %s of file %s" % (key, group, file))
        self.key = key
        self.group = group
        self.file = file

class NoGroupError(Error):
    """Raised when trying to access a nonexistent group in an INI-style file.

    Attributes are .group and .file.
    """
    def __init__(self, group, file):
        Error.__init__(self, "No group: %s in file %s" % (group, file))
        self.group = group
        self.file = file

class DuplicateGroupError(Error):
    """Raised when the same group occurs twice in an INI-style file.

    Attributes are .group and .file.
    """
    def __init__(self, group, file):
        Error.__init__(self, "Duplicate group: %s in file %s" % (group, file))
        self.group = group
        self.file = file

class NoThemeError(Error):
    """Raised when trying to access a nonexistent icon theme.

    The name of the theme is the .theme attribute.
    """
    def __init__(self, theme):
        Error.__init__(self, "No such icon-theme: %s" % theme)
        self.theme = theme

@@ -1,445 +0,0 @@
"""
Complete implementation of the XDG Icon Spec
http://standards.freedesktop.org/icon-theme-spec/
"""

import os, time
import re

from .IniFile import IniFile, is_ascii
from . import Config, Locale
from .BaseDirectory import xdg_data_dirs
from .Exceptions import NoThemeError, debug


class IconTheme(IniFile):
    "Class to parse and validate IconThemes"
    def __init__(self):
        IniFile.__init__(self)

    def __repr__(self):
        return self.name

    def parse(self, file):
        IniFile.parse(self, file, ["Icon Theme", "KDE Icon Theme"])
        self.dir = os.path.dirname(file)
        (nil, self.name) = os.path.split(self.dir)

    def getDir(self):
        return self.dir

    # Standard Keys
    def getName(self):
        return self.get('Name', locale=True)
    def getComment(self):
        return self.get('Comment', locale=True)
    def getInherits(self):
        return self.get('Inherits', list=True)
    def getDirectories(self):
        return self.get('Directories', list=True)
    def getScaledDirectories(self):
        return self.get('ScaledDirectories', list=True)
    def getHidden(self):
        return self.get('Hidden', type="boolean")
    def getExample(self):
        return self.get('Example')

    # Per Directory Keys
    def getSize(self, directory):
        return self.get('Size', type="integer", group=directory)
    def getContext(self, directory):
        return self.get('Context', group=directory)
    def getType(self, directory):
        value = self.get('Type', group=directory)
        if value:
            return value
        else:
            return "Threshold"
    def getMaxSize(self, directory):
        value = self.get('MaxSize', type="integer", group=directory)
        if value or value == 0:
            return value
        else:
            return self.getSize(directory)
    def getMinSize(self, directory):
        value = self.get('MinSize', type="integer", group=directory)
        if value or value == 0:
            return value
        else:
            return self.getSize(directory)
    def getThreshold(self, directory):
        value = self.get('Threshold', type="integer", group=directory)
        if value or value == 0:
            return value
        else:
            return 2

    def getScale(self, directory):
        value = self.get('Scale', type="integer", group=directory)
        return value or 1

    # validation stuff
    def checkExtras(self):
        # header
        if self.defaultGroup == "KDE Icon Theme":
            self.warnings.append('[KDE Icon Theme]-Header is deprecated')

        # file extension
        if self.fileExtension == ".theme":
            pass
        elif self.fileExtension == ".desktop":
            self.warnings.append('.desktop fileExtension is deprecated')
        else:
            self.warnings.append('Unknown File extension')

        # Check required keys
        # Name
        try:
            self.name = self.content[self.defaultGroup]["Name"]
        except KeyError:
            self.errors.append("Key 'Name' is missing")

        # Comment
        try:
            self.comment = self.content[self.defaultGroup]["Comment"]
        except KeyError:
            self.errors.append("Key 'Comment' is missing")

        # Directories
        try:
            self.directories = self.content[self.defaultGroup]["Directories"]
        except KeyError:
            self.errors.append("Key 'Directories' is missing")

    def checkGroup(self, group):
        # check if group header is valid
        if group == self.defaultGroup:
            try:
                self.name = self.content[group]["Name"]
            except KeyError:
                self.errors.append("Key 'Name' in Group '%s' is missing" % group)
            try:
                self.name = self.content[group]["Comment"]
            except KeyError:
                self.errors.append("Key 'Comment' in Group '%s' is missing" % group)
        elif group in self.getDirectories():
            try:
                self.type = self.content[group]["Type"]
            except KeyError:
                self.type = "Threshold"
            try:
                self.name = self.content[group]["Size"]
            except KeyError:
                self.errors.append("Key 'Size' in Group '%s' is missing" % group)
        elif not (re.match(r"^\[X-", group) and is_ascii(group)):
            self.errors.append("Invalid Group name: %s" % group)

    def checkKey(self, key, value, group):
        # standard keys
        if group == self.defaultGroup:
            if re.match("^Name"+Locale.regex+"$", key):
                pass
            elif re.match("^Comment"+Locale.regex+"$", key):
                pass
            elif key == "Inherits":
                self.checkValue(key, value, list=True)
            elif key == "Directories":
                self.checkValue(key, value, list=True)
            elif key == "ScaledDirectories":
                self.checkValue(key, value, list=True)
            elif key == "Hidden":
                self.checkValue(key, value, type="boolean")
            elif key == "Example":
                self.checkValue(key, value)
            elif re.match("^X-[a-zA-Z0-9-]+", key):
                pass
            else:
                self.errors.append("Invalid key: %s" % key)
        elif group in self.getDirectories():
            if key == "Size":
                self.checkValue(key, value, type="integer")
            elif key == "Context":
                self.checkValue(key, value)
            elif key == "Type":
                self.checkValue(key, value)
                if value not in ["Fixed", "Scalable", "Threshold"]:
                    self.errors.append("Key 'Type' must be one out of 'Fixed','Scalable','Threshold', but is %s" % value)
            elif key == "MaxSize":
                self.checkValue(key, value, type="integer")
                if self.type != "Scalable":
                    self.errors.append("Key 'MaxSize' given, but Type is %s" % self.type)
            elif key == "MinSize":
                self.checkValue(key, value, type="integer")
                if self.type != "Scalable":
                    self.errors.append("Key 'MinSize' given, but Type is %s" % self.type)
            elif key == "Threshold":
                self.checkValue(key, value, type="integer")
                if self.type != "Threshold":
                    self.errors.append("Key 'Threshold' given, but Type is %s" % self.type)
            elif key == "Scale":
                self.checkValue(key, value, type="integer")
            elif re.match("^X-[a-zA-Z0-9-]+", key):
                pass
            else:
                self.errors.append("Invalid key: %s" % key)


class IconData(IniFile):
    "Class to parse and validate IconData Files"
    def __init__(self):
        IniFile.__init__(self)

    def __repr__(self):
        displayname = self.getDisplayName()
        if displayname:
            return "<IconData: %s>" % displayname
        else:
            return "<IconData>"

    def parse(self, file):
        IniFile.parse(self, file, ["Icon Data"])

    # Standard Keys
    def getDisplayName(self):
        """Retrieve the display name from the icon data, if one is specified."""
        return self.get('DisplayName', locale=True)
    def getEmbeddedTextRectangle(self):
        """Retrieve the embedded text rectangle from the icon data as a list of
        numbers (x0, y0, x1, y1), if it is specified."""
        return self.get('EmbeddedTextRectangle', type="integer", list=True)
    def getAttachPoints(self):
        """Retrieve the anchor points for overlays & emblems from the icon data,
        as a list of co-ordinate pairs, if they are specified."""
        return self.get('AttachPoints', type="point", list=True)

    # validation stuff
    def checkExtras(self):
        # file extension
        if self.fileExtension != ".icon":
            self.warnings.append('Unknown File extension')

    def checkGroup(self, group):
        # check if group header is valid
        if not (group == self.defaultGroup \
                or (re.match(r"^\[X-", group) and is_ascii(group))):
            self.errors.append("Invalid Group name: %s" % group.encode("ascii", "replace"))

    def checkKey(self, key, value, group):
        # standard keys
        if re.match("^DisplayName"+Locale.regex+"$", key):
            pass
        elif key == "EmbeddedTextRectangle":
            self.checkValue(key, value, type="integer", list=True)
        elif key == "AttachPoints":
            self.checkValue(key, value, type="point", list=True)
        elif re.match("^X-[a-zA-Z0-9-]+", key):
            pass
        else:
            self.errors.append("Invalid key: %s" % key)



icondirs = []
for basedir in xdg_data_dirs:
    icondirs.append(os.path.join(basedir, "icons"))
    icondirs.append(os.path.join(basedir, "pixmaps"))
icondirs.append(os.path.expanduser("~/.icons"))

# just cache variables, they give a 10x speed improvement
themes = []
theme_cache = {}
dir_cache = {}
icon_cache = {}

def getIconPath(iconname, size = None, theme = None, extensions = ["png", "svg", "xpm"]):
    """Get the path to a specified icon.

    size :
      Icon size in pixels. Defaults to ``xdg.Config.icon_size``.
    theme :
      Icon theme name. Defaults to ``xdg.Config.icon_theme``. If the icon isn't
      found in the specified theme, it will be looked up in the basic 'hicolor'
      theme.
    extensions :
      List of preferred file extensions.

    Example::

        >>> getIconPath("inkscape", 32)
        '/usr/share/icons/hicolor/32x32/apps/inkscape.png'
    """

    global themes

    if size == None:
        size = Config.icon_size
    if theme == None:
        theme = Config.icon_theme

    # if we have an absolute path, just return it
    if os.path.isabs(iconname):
        return iconname

    # check if it has an extension and strip it
    if os.path.splitext(iconname)[1][1:] in extensions:
        iconname = os.path.splitext(iconname)[0]

    # parse theme files
    if (themes == []) or (themes[0].name != theme):
        themes = list(__get_themes(theme))

    # more caching (icon looked up in the last 5 seconds?)
    tmp = (iconname, size, theme, tuple(extensions))
    try:
        timestamp, icon = icon_cache[tmp]
    except KeyError:
        pass
    else:
        if (time.time() - timestamp) >= Config.cache_time:
            del icon_cache[tmp]
        else:
            return icon

    for thme in themes:
        icon = LookupIcon(iconname, size, thme, extensions)
        if icon:
            icon_cache[tmp] = (time.time(), icon)
            return icon

    # cache stuff again (directories looked up in the last 5 seconds?)
    for directory in icondirs:
        if (directory not in dir_cache \
            or (int(time.time() - dir_cache[directory][1]) >= Config.cache_time \
                and dir_cache[directory][2] < os.path.getmtime(directory))) \
            and os.path.isdir(directory):
            dir_cache[directory] = (os.listdir(directory), time.time(), os.path.getmtime(directory))

    for dir, values in dir_cache.items():
        for extension in extensions:
            try:
|
||||
if iconname + "." + extension in values[0]:
|
||||
icon = os.path.join(dir, iconname + "." + extension)
|
||||
icon_cache[tmp] = [time.time(), icon]
|
||||
return icon
|
||||
except UnicodeDecodeError as e:
|
||||
if debug:
|
||||
raise e
|
||||
else:
|
||||
pass
|
||||
|
||||
# we haven't found anything? "hicolor" is our fallback
|
||||
if theme != "hicolor":
|
||||
icon = getIconPath(iconname, size, "hicolor")
|
||||
icon_cache[tmp] = [time.time(), icon]
|
||||
return icon
|
||||
|
||||
def getIconData(path):
|
||||
"""Retrieve the data from the .icon file corresponding to the given file. If
|
||||
there is no .icon file, it returns None.
|
||||
|
||||
Example::
|
||||
|
||||
getIconData("/usr/share/icons/Tango/scalable/places/folder.svg")
|
||||
"""
|
||||
if os.path.isfile(path):
|
||||
icon_file = os.path.splitext(path)[0] + ".icon"
|
||||
if os.path.isfile(icon_file):
|
||||
data = IconData()
|
||||
data.parse(icon_file)
|
||||
return data
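A hedged usage sketch of the two lookup helpers above, assuming this module is importable; "inkscape" and the 32px size are placeholders, and the result depends on the themes installed locally:

icon = getIconPath("inkscape", size=32, theme="hicolor")
if icon:
    data = getIconData(icon)          # IconData instance, or None if no .icon file sits next to it
    if data:
        print(data.getDisplayName())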
|
||||
|
||||
def __get_themes(themename):
|
||||
"""Generator yielding IconTheme objects for a specified theme and any themes
|
||||
from which it inherits.
|
||||
"""
|
||||
for dir in icondirs:
|
||||
theme_file = os.path.join(dir, themename, "index.theme")
|
||||
if os.path.isfile(theme_file):
|
||||
break
|
||||
theme_file = os.path.join(dir, themename, "index.desktop")
|
||||
if os.path.isfile(theme_file):
|
||||
break
|
||||
else:
|
||||
if debug:
|
||||
raise NoThemeError(themename)
|
||||
return
|
||||
|
||||
theme = IconTheme()
|
||||
theme.parse(theme_file)
|
||||
yield theme
|
||||
for subtheme in theme.getInherits():
|
||||
for t in __get_themes(subtheme):
|
||||
yield t
|
||||
|
||||
def LookupIcon(iconname, size, theme, extensions):
|
||||
# look for the cache
|
||||
if theme.name not in theme_cache:
|
||||
theme_cache[theme.name] = []
|
||||
theme_cache[theme.name].append(time.time() - (xdg.Config.cache_time + 1)) # [0] last time of lookup
|
||||
theme_cache[theme.name].append(0) # [1] mtime
|
||||
theme_cache[theme.name].append(dict()) # [2] dir: [subdir, [items]]
|
||||
|
||||
# cache stuff (directory looked up in the last 5 seconds?)
|
||||
if int(time.time() - theme_cache[theme.name][0]) >= xdg.Config.cache_time:
|
||||
theme_cache[theme.name][0] = time.time()
|
||||
for subdir in theme.getDirectories():
|
||||
for directory in icondirs:
|
||||
dir = os.path.join(directory,theme.name,subdir)
|
||||
if (dir not in theme_cache[theme.name][2] \
|
||||
or theme_cache[theme.name][1] < os.path.getmtime(os.path.join(directory,theme.name))) \
|
||||
and subdir != "" \
|
||||
and os.path.isdir(dir):
|
||||
theme_cache[theme.name][2][dir] = [subdir, os.listdir(dir)]
|
||||
theme_cache[theme.name][1] = os.path.getmtime(os.path.join(directory,theme.name))
|
||||
|
||||
for dir, values in theme_cache[theme.name][2].items():
|
||||
if DirectoryMatchesSize(values[0], size, theme):
|
||||
for extension in extensions:
|
||||
if iconname + "." + extension in values[1]:
|
||||
return os.path.join(dir, iconname + "." + extension)
|
||||
|
||||
minimal_size = 2**31
|
||||
closest_filename = ""
|
||||
for dir, values in theme_cache[theme.name][2].items():
|
||||
distance = DirectorySizeDistance(values[0], size, theme)
|
||||
if distance < minimal_size:
|
||||
for extension in extensions:
|
||||
if iconname + "." + extension in values[1]:
|
||||
closest_filename = os.path.join(dir, iconname + "." + extension)
|
||||
minimal_size = distance
|
||||
|
||||
return closest_filename
|
||||
|
||||
def DirectoryMatchesSize(subdir, iconsize, theme):
    Type = theme.getType(subdir)
    Size = theme.getSize(subdir)
    Threshold = theme.getThreshold(subdir)
    MinSize = theme.getMinSize(subdir)
    MaxSize = theme.getMaxSize(subdir)
    if Type == "Fixed":
        return Size == iconsize
    elif Type == "Scalable":
        return MinSize <= iconsize <= MaxSize
    elif Type == "Threshold":
        return Size - Threshold <= iconsize <= Size + Threshold

def DirectorySizeDistance(subdir, iconsize, theme):
    Type = theme.getType(subdir)
    Size = theme.getSize(subdir)
    Threshold = theme.getThreshold(subdir)
    MinSize = theme.getMinSize(subdir)
    MaxSize = theme.getMaxSize(subdir)
    if Type == "Fixed":
        return abs(Size - iconsize)
    elif Type == "Scalable":
        if iconsize < MinSize:
            return MinSize - iconsize
        elif iconsize > MaxSize:
            return iconsize - MaxSize
        return 0
    elif Type == "Threshold":
        if iconsize < Size - Threshold:
            return MinSize - iconsize
        elif iconsize > Size + Threshold:
            return iconsize - MaxSize
        return 0
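A quick worked example of the rules above for a hypothetical Threshold directory with Size=48 and Threshold=2: requests between 46 and 50 pixels match directly, everything else falls back to the distance computation.

48 - 2 <= 47 <= 48 + 2        # True  -> DirectoryMatchesSize picks this directory for a 47px request
48 - 2 <= 52 <= 48 + 2        # False -> DirectorySizeDistance decides which directory is closest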
|
|
@ -1,419 +0,0 @@
|
|||
"""
|
||||
Base Class for DesktopEntry, IconTheme and IconData
|
||||
"""
|
||||
|
||||
import re, os, stat, io
|
||||
from .Exceptions import (ParsingError, DuplicateGroupError, NoGroupError,
|
||||
NoKeyError, DuplicateKeyError, ValidationError,
|
||||
debug)
|
||||
# import xdg.Locale
|
||||
from . import Locale
|
||||
from .util import u
|
||||
|
||||
def is_ascii(s):
|
||||
"""Return True if a string consists entirely of ASCII characters."""
|
||||
try:
|
||||
s.encode('ascii', 'strict')
|
||||
return True
|
||||
except UnicodeError:
|
||||
return False
|
||||
|
||||
class IniFile:
|
||||
defaultGroup = ''
|
||||
fileExtension = ''
|
||||
|
||||
filename = ''
|
||||
|
||||
tainted = False
|
||||
|
||||
def __init__(self, filename=None):
|
||||
self.content = dict()
|
||||
if filename:
|
||||
self.parse(filename)
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.content, other.content)
|
||||
|
||||
def parse(self, filename, headers=None):
|
||||
'''Parse an INI file.
|
||||
|
||||
headers -- list of headers the parser will try to select as a default header
|
||||
'''
|
||||
# for performance reasons
|
||||
content = self.content
|
||||
|
||||
if not os.path.isfile(filename):
|
||||
raise ParsingError("File not found", filename)
|
||||
|
||||
try:
|
||||
# The content should be UTF-8, but legacy files can have other
|
||||
# encodings, including mixed encodings in one file. We don't attempt
|
||||
# to decode them, but we silence the errors.
|
||||
fd = io.open(filename, 'r', encoding='utf-8', errors='replace')
|
||||
except IOError as e:
|
||||
if debug:
|
||||
raise e
|
||||
else:
|
||||
return
|
||||
|
||||
# parse file
|
||||
for line in fd:
|
||||
line = line.strip()
|
||||
# empty line
|
||||
if not line:
|
||||
continue
|
||||
# comment
|
||||
elif line[0] == '#':
|
||||
continue
|
||||
# new group
|
||||
elif line[0] == '[':
|
||||
currentGroup = line.lstrip("[").rstrip("]")
|
||||
if debug and self.hasGroup(currentGroup):
|
||||
raise DuplicateGroupError(currentGroup, filename)
|
||||
else:
|
||||
content[currentGroup] = {}
|
||||
# key
|
||||
else:
|
||||
try:
|
||||
key, value = line.split("=", 1)
|
||||
except ValueError:
|
||||
raise ParsingError("Invalid line: " + line, filename)
|
||||
|
||||
key = key.strip() # Spaces before/after '=' should be ignored
|
||||
try:
|
||||
if debug and self.hasKey(key, currentGroup):
|
||||
raise DuplicateKeyError(key, currentGroup, filename)
|
||||
else:
|
||||
content[currentGroup][key] = value.strip()
|
||||
except (IndexError, UnboundLocalError):
|
||||
raise ParsingError("Parsing error on key, group missing", filename)
|
||||
|
||||
fd.close()
|
||||
|
||||
self.filename = filename
|
||||
self.tainted = False
|
||||
|
||||
# check header
|
||||
if headers:
|
||||
for header in headers:
|
||||
if header in content:
|
||||
self.defaultGroup = header
|
||||
break
|
||||
else:
|
||||
raise ParsingError("[%s]-Header missing" % headers[0], filename)
|
||||
|
||||
# start stuff to access the keys
|
||||
def get(self, key, group=None, locale=False, type="string", list=False, strict=False):
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
# return key (with locale)
|
||||
if (group in self.content) and (key in self.content[group]):
|
||||
if locale:
|
||||
value = self.content[group][self.__addLocale(key, group)]
|
||||
else:
|
||||
value = self.content[group][key]
|
||||
else:
|
||||
if strict or debug:
|
||||
if group not in self.content:
|
||||
raise NoGroupError(group, self.filename)
|
||||
elif key not in self.content[group]:
|
||||
raise NoKeyError(key, group, self.filename)
|
||||
else:
|
||||
value = ""
|
||||
|
||||
if list == True:
|
||||
values = self.getList(value)
|
||||
result = []
|
||||
else:
|
||||
values = [value]
|
||||
|
||||
for value in values:
|
||||
if type == "boolean":
|
||||
value = self.__getBoolean(value)
|
||||
elif type == "integer":
|
||||
try:
|
||||
value = int(value)
|
||||
except ValueError:
|
||||
value = 0
|
||||
elif type == "numeric":
|
||||
try:
|
||||
value = float(value)
|
||||
except ValueError:
|
||||
value = 0.0
|
||||
elif type == "regex":
|
||||
value = re.compile(value)
|
||||
elif type == "point":
|
||||
x, y = value.split(",")
|
||||
value = int(x), int(y)
|
||||
|
||||
if list == True:
|
||||
result.append(value)
|
||||
else:
|
||||
result = value
|
||||
|
||||
return result
|
||||
# end stuff to access the keys
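A minimal sketch of get() together with the type coercion it performs; the group and key names are made up for illustration:

ini = IniFile()
ini.addGroup("Icon Theme")
ini.defaultGroup = "Icon Theme"
ini.set("Directories", "16x16/apps;48x48/apps;")
ini.get("Directories", list=True)      # ['16x16/apps', '48x48/apps']
ini.set("Hidden", "true")
ini.get("Hidden", type="boolean")      # True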
|
||||
|
||||
# start subget
|
||||
def getList(self, string):
|
||||
if re.search(r"(?<!\\)\;", string):
|
||||
list = re.split(r"(?<!\\);", string)
|
||||
elif re.search(r"(?<!\\)\|", string):
|
||||
list = re.split(r"(?<!\\)\|", string)
|
||||
elif re.search(r"(?<!\\),", string):
|
||||
list = re.split(r"(?<!\\),", string)
|
||||
else:
|
||||
list = [string]
|
||||
if list[-1] == "":
|
||||
list.pop()
|
||||
return list
|
||||
|
||||
def __getBoolean(self, boolean):
|
||||
if boolean == 1 or boolean == "true" or boolean == "True":
|
||||
return True
|
||||
elif boolean == 0 or boolean == "false" or boolean == "False":
|
||||
return False
|
||||
return False
|
||||
# end subget
|
||||
|
||||
def __addLocale(self, key, group=None):
|
||||
"add locale to key according the current lc_messages"
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
for lang in Locale.langs:
|
||||
langkey = "%s[%s]" % (key, lang)
|
||||
if langkey in self.content[group]:
|
||||
return langkey
|
||||
|
||||
return key
|
||||
|
||||
# start validation stuff
|
||||
def validate(self, report="All"):
|
||||
"""Validate the contents, raising :class:`~xdg.Exceptions.ValidationError`
|
||||
if there is anything amiss.
|
||||
|
||||
report can be 'All' / 'Warnings' / 'Errors'
|
||||
"""
|
||||
|
||||
self.warnings = []
|
||||
self.errors = []
|
||||
|
||||
# get file extension
|
||||
self.fileExtension = os.path.splitext(self.filename)[1]
|
||||
|
||||
# overwrite this for own checkings
|
||||
self.checkExtras()
|
||||
|
||||
# check all keys
|
||||
for group in self.content:
|
||||
self.checkGroup(group)
|
||||
for key in self.content[group]:
|
||||
self.checkKey(key, self.content[group][key], group)
|
||||
# check if value is empty
|
||||
if self.content[group][key] == "":
|
||||
self.warnings.append("Value of Key '%s' is empty" % key)
|
||||
|
||||
# raise Warnings / Errors
|
||||
msg = ""
|
||||
|
||||
if report == "All" or report == "Warnings":
|
||||
for line in self.warnings:
|
||||
msg += "\n- " + line
|
||||
|
||||
if report == "All" or report == "Errors":
|
||||
for line in self.errors:
|
||||
msg += "\n- " + line
|
||||
|
||||
if msg:
|
||||
raise ValidationError(msg, self.filename)
|
||||
|
||||
# check if group header is valid
|
||||
def checkGroup(self, group):
|
||||
pass
|
||||
|
||||
# check if key is valid
|
||||
def checkKey(self, key, value, group):
|
||||
pass
|
||||
|
||||
# check random stuff
|
||||
def checkValue(self, key, value, type="string", list=False):
|
||||
if list == True:
|
||||
values = self.getList(value)
|
||||
else:
|
||||
values = [value]
|
||||
|
||||
for value in values:
|
||||
if type == "string":
|
||||
code = self.checkString(value)
|
||||
if type == "localestring":
|
||||
continue
|
||||
elif type == "boolean":
|
||||
code = self.checkBoolean(value)
|
||||
elif type == "numeric":
|
||||
code = self.checkNumber(value)
|
||||
elif type == "integer":
|
||||
code = self.checkInteger(value)
|
||||
elif type == "regex":
|
||||
code = self.checkRegex(value)
|
||||
elif type == "point":
|
||||
code = self.checkPoint(value)
|
||||
if code == 1:
|
||||
self.errors.append("'%s' is not a valid %s" % (value, type))
|
||||
elif code == 2:
|
||||
self.warnings.append("Value of key '%s' is deprecated" % key)
|
||||
|
||||
def checkExtras(self):
|
||||
pass
|
||||
|
||||
def checkBoolean(self, value):
|
||||
# 1 or 0 : deprecated
|
||||
if (value == "1" or value == "0"):
|
||||
return 2
|
||||
# true or false: ok
|
||||
elif not (value == "true" or value == "false"):
|
||||
return 1
|
||||
|
||||
def checkNumber(self, value):
|
||||
# float() ValueError
|
||||
try:
|
||||
float(value)
|
||||
except:
|
||||
return 1
|
||||
|
||||
def checkInteger(self, value):
|
||||
# int() ValueError
|
||||
try:
|
||||
int(value)
|
||||
except:
|
||||
return 1
|
||||
|
||||
def checkPoint(self, value):
|
||||
if not re.match("^[0-9]+,[0-9]+$", value):
|
||||
return 1
|
||||
|
||||
def checkString(self, value):
|
||||
return 0 if is_ascii(value) else 1
|
||||
|
||||
def checkRegex(self, value):
|
||||
try:
|
||||
re.compile(value)
|
||||
except:
|
||||
return 1
|
||||
|
||||
# write support
|
||||
def write(self, filename=None, trusted=False):
|
||||
if not filename and not self.filename:
|
||||
raise ParsingError("File not found", "")
|
||||
|
||||
if filename:
|
||||
self.filename = filename
|
||||
else:
|
||||
filename = self.filename
|
||||
|
||||
if os.path.dirname(filename) and not os.path.isdir(os.path.dirname(filename)):
|
||||
os.makedirs(os.path.dirname(filename))
|
||||
|
||||
with io.open(filename, 'w', encoding='utf-8') as fp:
|
||||
|
||||
# An executable bit signifies that the desktop file is
|
||||
# trusted, but then the file can be executed. Add hashbang to
|
||||
# make sure that the file is opened by something that
|
||||
# understands desktop files.
|
||||
if trusted:
|
||||
fp.write(u("#!/usr/bin/env xdg-open\n"))
|
||||
|
||||
if self.defaultGroup:
|
||||
fp.write(u("[%s]\n") % self.defaultGroup)
|
||||
for (key, value) in self.content[self.defaultGroup].items():
|
||||
fp.write(u("%s=%s\n") % (key, value))
|
||||
fp.write(u("\n"))
|
||||
for (name, group) in self.content.items():
|
||||
if name != self.defaultGroup:
|
||||
fp.write(u("[%s]\n") % name)
|
||||
for (key, value) in group.items():
|
||||
fp.write(u("%s=%s\n") % (key, value))
|
||||
fp.write(u("\n"))
|
||||
|
||||
# Add executable bits to the file to show that it's trusted.
|
||||
if trusted:
|
||||
oldmode = os.stat(filename).st_mode
|
||||
mode = oldmode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
|
||||
os.chmod(filename, mode)
|
||||
|
||||
self.tainted = False
|
||||
|
||||
def set(self, key, value, group=None, locale=False):
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
if locale == True and len(Locale.langs) > 0:
    key = key + "[" + Locale.langs[0] + "]"
|
||||
|
||||
try:
|
||||
self.content[group][key] = value
|
||||
except KeyError:
|
||||
raise NoGroupError(group, self.filename)
|
||||
|
||||
self.tainted = (value == self.get(key, group))
|
||||
|
||||
def addGroup(self, group):
|
||||
if self.hasGroup(group):
|
||||
if debug:
|
||||
raise DuplicateGroupError(group, self.filename)
|
||||
else:
|
||||
self.content[group] = {}
|
||||
self.tainted = True
|
||||
|
||||
def removeGroup(self, group):
|
||||
existed = group in self.content
|
||||
if existed:
|
||||
del self.content[group]
|
||||
self.tainted = True
|
||||
else:
|
||||
if debug:
|
||||
raise NoGroupError(group, self.filename)
|
||||
return existed
|
||||
|
||||
def removeKey(self, key, group=None, locales=True):
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
try:
|
||||
if locales:
|
||||
for name in list(self.content[group]):
|
||||
if re.match("^" + key + xdg.Locale.regex + "$", name) and name != key:
|
||||
del self.content[group][name]
|
||||
value = self.content[group].pop(key)
|
||||
self.tainted = True
|
||||
return value
|
||||
except KeyError as e:
|
||||
if debug:
|
||||
if e == group:
|
||||
raise NoGroupError(group, self.filename)
|
||||
else:
|
||||
raise NoKeyError(key, group, self.filename)
|
||||
else:
|
||||
return ""
|
||||
|
||||
# misc
|
||||
def groups(self):
|
||||
return self.content.keys()
|
||||
|
||||
def hasGroup(self, group):
|
||||
return group in self.content
|
||||
|
||||
def hasKey(self, key, group=None):
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
return key in self.content[group]
|
||||
|
||||
def getFileName(self):
|
||||
return self.filename
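And a short, hedged round-trip sketch using the write support above; the output path is a placeholder:

entry = IniFile()
entry.addGroup("Desktop Entry")
entry.defaultGroup = "Desktop Entry"
entry.set("Name", "Example")
entry.set("Type", "Application")
entry.write("/tmp/example.desktop")    # writes a [Desktop Entry] group with both keys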
|
|
@ -1,79 +0,0 @@
|
|||
"""
|
||||
Helper Module for Locale settings
|
||||
|
||||
This module is based on a ROX module (LGPL):
|
||||
|
||||
http://cvs.sourceforge.net/viewcvs.py/rox/ROX-Lib2/python/rox/i18n.py?rev=1.3&view=log
|
||||
"""
|
||||
|
||||
import os
|
||||
from locale import normalize
|
||||
|
||||
regex = r"(\[([a-zA-Z]+)(_[a-zA-Z]+)?(\.[a-zA-Z0-9-]+)?(@[a-zA-Z]+)?\])?"
|
||||
|
||||
def _expand_lang(locale):
|
||||
locale = normalize(locale)
|
||||
COMPONENT_CODESET = 1 << 0
|
||||
COMPONENT_MODIFIER = 1 << 1
|
||||
COMPONENT_TERRITORY = 1 << 2
|
||||
# split up the locale into its base components
|
||||
mask = 0
|
||||
pos = locale.find('@')
|
||||
if pos >= 0:
|
||||
modifier = locale[pos:]
|
||||
locale = locale[:pos]
|
||||
mask |= COMPONENT_MODIFIER
|
||||
else:
|
||||
modifier = ''
|
||||
pos = locale.find('.')
|
||||
codeset = ''
|
||||
if pos >= 0:
|
||||
locale = locale[:pos]
|
||||
pos = locale.find('_')
|
||||
if pos >= 0:
|
||||
territory = locale[pos:]
|
||||
locale = locale[:pos]
|
||||
mask |= COMPONENT_TERRITORY
|
||||
else:
|
||||
territory = ''
|
||||
language = locale
|
||||
ret = []
|
||||
for i in range(mask+1):
|
||||
if not (i & ~mask): # if all components for this combo exist ...
|
||||
val = language
|
||||
if i & COMPONENT_TERRITORY: val += territory
|
||||
if i & COMPONENT_CODESET: val += codeset
|
||||
if i & COMPONENT_MODIFIER: val += modifier
|
||||
ret.append(val)
|
||||
ret.reverse()
|
||||
return ret
|
||||
|
||||
def expand_languages(languages=None):
|
||||
# Get some reasonable defaults for arguments that were not supplied
|
||||
if languages is None:
|
||||
languages = []
|
||||
for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
|
||||
val = os.environ.get(envar)
|
||||
if val:
|
||||
languages = val.split(':')
|
||||
break
|
||||
#if 'C' not in languages:
|
||||
# languages.append('C')
|
||||
|
||||
# now normalize and expand the languages
|
||||
nelangs = []
|
||||
for lang in languages:
|
||||
for nelang in _expand_lang(lang):
|
||||
if nelang not in nelangs:
|
||||
nelangs.append(nelang)
|
||||
return nelangs
|
||||
|
||||
def update(language=None):
|
||||
global langs
|
||||
if language:
|
||||
langs = expand_languages([language])
|
||||
else:
|
||||
langs = expand_languages()
|
||||
|
||||
langs = []
|
||||
update()
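A rough usage sketch; the exact expansion depends on locale.normalize(), but for a German locale it should come out roughly like this, most specific first:

expand_languages(["de_DE@euro"])       # ['de_DE@euro', 'de_DE', 'de@euro', 'de']
update("de_DE@euro")                   # repopulates the module-level langs list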
|
File diff suppressed because it is too large
|
@ -1,541 +0,0 @@
|
|||
""" CLass to edit XDG Menus """
|
||||
import os
|
||||
try:
|
||||
import xml.etree.cElementTree as etree
|
||||
except ImportError:
|
||||
import xml.etree.ElementTree as etree
|
||||
|
||||
from .Menu import Menu, MenuEntry, Layout, Separator, XMLMenuBuilder
|
||||
from .BaseDirectory import xdg_config_dirs, xdg_data_dirs
|
||||
from .Exceptions import ParsingError
|
||||
from .Config import setRootMode
|
||||
|
||||
# XML-Cleanups: Move / Exclude
|
||||
# FIXME: proper revert/delete
|
||||
# FIXME: pass AppDirs/DirectoryDirs around in the edit/move functions
|
||||
# FIXME: catch Exceptions
|
||||
# FIXME: copy functions
|
||||
# FIXME: More Layout stuff
|
||||
# FIXME: undo/redo function / remove menu...
|
||||
# FIXME: Advanced MenuEditing Stuff: LegacyDir/MergeFile
|
||||
# Complex Rules/Deleted/OnlyAllocated/AppDirs/DirectoryDirs
|
||||
|
||||
|
||||
class MenuEditor(object):
|
||||
|
||||
def __init__(self, menu=None, filename=None, root=False):
|
||||
self.menu = None
|
||||
self.filename = None
|
||||
self.tree = None
|
||||
self.parser = XMLMenuBuilder()
|
||||
self.parse(menu, filename, root)
|
||||
|
||||
# fix for creating two menus with the same name on the fly
|
||||
self.filenames = []
|
||||
|
||||
def parse(self, menu=None, filename=None, root=False):
|
||||
if root:
|
||||
setRootMode(True)
|
||||
|
||||
if isinstance(menu, Menu):
|
||||
self.menu = menu
|
||||
elif menu:
|
||||
self.menu = self.parser.parse(menu)
|
||||
else:
|
||||
self.menu = self.parser.parse()
|
||||
|
||||
if root:
|
||||
self.filename = self.menu.Filename
|
||||
elif filename:
|
||||
self.filename = filename
|
||||
else:
|
||||
self.filename = os.path.join(xdg_config_dirs[0], "menus", os.path.split(self.menu.Filename)[1])
|
||||
|
||||
try:
|
||||
self.tree = etree.parse(self.filename)
|
||||
except IOError:
|
||||
root = etree.fromtring("""
|
||||
<!DOCTYPE Menu PUBLIC "-//freedesktop//DTD Menu 1.0//EN" "http://standards.freedesktop.org/menu-spec/menu-1.0.dtd">
|
||||
<Menu>
|
||||
<Name>Applications</Name>
|
||||
<MergeFile type="parent">%s</MergeFile>
|
||||
</Menu>
|
||||
""" % self.menu.Filename)
|
||||
self.tree = etree.ElementTree(root)
|
||||
except ParsingError:
|
||||
raise ParsingError('Not a valid .menu file', self.filename)
|
||||
|
||||
#FIXME: is this needed with etree ?
|
||||
self.__remove_whitespace_nodes(self.tree)
|
||||
|
||||
def save(self):
|
||||
self.__saveEntries(self.menu)
|
||||
self.__saveMenu()
|
||||
|
||||
def createMenuEntry(self, parent, name, command=None, genericname=None, comment=None, icon=None, terminal=None, after=None, before=None):
|
||||
menuentry = MenuEntry(self.__getFileName(name, ".desktop"))
|
||||
menuentry = self.editMenuEntry(menuentry, name, genericname, comment, command, icon, terminal)
|
||||
|
||||
self.__addEntry(parent, menuentry, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menuentry
|
||||
|
||||
def createMenu(self, parent, name, genericname=None, comment=None, icon=None, after=None, before=None):
|
||||
menu = Menu()
|
||||
|
||||
menu.Parent = parent
|
||||
menu.Depth = parent.Depth + 1
|
||||
menu.Layout = parent.DefaultLayout
|
||||
menu.DefaultLayout = parent.DefaultLayout
|
||||
|
||||
menu = self.editMenu(menu, name, genericname, comment, icon)
|
||||
|
||||
self.__addEntry(parent, menu, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menu
|
||||
|
||||
def createSeparator(self, parent, after=None, before=None):
|
||||
separator = Separator(parent)
|
||||
|
||||
self.__addEntry(parent, separator, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return separator
|
||||
|
||||
def moveMenuEntry(self, menuentry, oldparent, newparent, after=None, before=None):
|
||||
self.__deleteEntry(oldparent, menuentry, after, before)
|
||||
self.__addEntry(newparent, menuentry, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menuentry
|
||||
|
||||
def moveMenu(self, menu, oldparent, newparent, after=None, before=None):
|
||||
self.__deleteEntry(oldparent, menu, after, before)
|
||||
self.__addEntry(newparent, menu, after, before)
|
||||
|
||||
root_menu = self.__getXmlMenu(self.menu.Name)
|
||||
if oldparent.getPath(True) != newparent.getPath(True):
|
||||
self.__addXmlMove(root_menu, os.path.join(oldparent.getPath(True), menu.Name), os.path.join(newparent.getPath(True), menu.Name))
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menu
|
||||
|
||||
def moveSeparator(self, separator, parent, after=None, before=None):
|
||||
self.__deleteEntry(parent, separator, after, before)
|
||||
self.__addEntry(parent, separator, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return separator
|
||||
|
||||
def copyMenuEntry(self, menuentry, oldparent, newparent, after=None, before=None):
|
||||
self.__addEntry(newparent, menuentry, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menuentry
|
||||
|
||||
def editMenuEntry(self, menuentry, name=None, genericname=None, comment=None, command=None, icon=None, terminal=None, nodisplay=None, hidden=None):
|
||||
deskentry = menuentry.DesktopEntry
|
||||
|
||||
if name:
|
||||
if not deskentry.hasKey("Name"):
|
||||
deskentry.set("Name", name)
|
||||
deskentry.set("Name", name, locale=True)
|
||||
if comment:
|
||||
if not deskentry.hasKey("Comment"):
|
||||
deskentry.set("Comment", comment)
|
||||
deskentry.set("Comment", comment, locale=True)
|
||||
if genericname:
|
||||
if not deskentry.hasKey("GenericName"):
|
||||
deskentry.set("GenericName", genericname)
|
||||
deskentry.set("GenericName", genericname, locale=True)
|
||||
if command:
|
||||
deskentry.set("Exec", command)
|
||||
if icon:
|
||||
deskentry.set("Icon", icon)
|
||||
|
||||
if terminal:
|
||||
deskentry.set("Terminal", "true")
|
||||
elif not terminal:
|
||||
deskentry.set("Terminal", "false")
|
||||
|
||||
if nodisplay is True:
|
||||
deskentry.set("NoDisplay", "true")
|
||||
elif nodisplay is False:
|
||||
deskentry.set("NoDisplay", "false")
|
||||
|
||||
if hidden is True:
|
||||
deskentry.set("Hidden", "true")
|
||||
elif hidden is False:
|
||||
deskentry.set("Hidden", "false")
|
||||
|
||||
menuentry.updateAttributes()
|
||||
|
||||
if len(menuentry.Parents) > 0:
|
||||
self.menu.sort()
|
||||
|
||||
return menuentry
|
||||
|
||||
def editMenu(self, menu, name=None, genericname=None, comment=None, icon=None, nodisplay=None, hidden=None):
|
||||
# Hack for legacy dirs
|
||||
if isinstance(menu.Directory, MenuEntry) and menu.Directory.Filename == ".directory":
|
||||
xml_menu = self.__getXmlMenu(menu.getPath(True, True))
|
||||
self.__addXmlTextElement(xml_menu, 'Directory', menu.Name + ".directory")
|
||||
menu.Directory.setAttributes(menu.Name + ".directory")
|
||||
# Hack for New Entries
|
||||
elif not isinstance(menu.Directory, MenuEntry):
|
||||
if not name:
|
||||
name = menu.Name
|
||||
filename = self.__getFileName(name, ".directory").replace("/", "")
|
||||
if not menu.Name:
|
||||
menu.Name = filename.replace(".directory", "")
|
||||
xml_menu = self.__getXmlMenu(menu.getPath(True, True))
|
||||
self.__addXmlTextElement(xml_menu, 'Directory', filename)
|
||||
menu.Directory = MenuEntry(filename)
|
||||
|
||||
deskentry = menu.Directory.DesktopEntry
|
||||
|
||||
if name:
|
||||
if not deskentry.hasKey("Name"):
|
||||
deskentry.set("Name", name)
|
||||
deskentry.set("Name", name, locale=True)
|
||||
if genericname:
|
||||
if not deskentry.hasKey("GenericName"):
|
||||
deskentry.set("GenericName", genericname)
|
||||
deskentry.set("GenericName", genericname, locale=True)
|
||||
if comment:
|
||||
if not deskentry.hasKey("Comment"):
|
||||
deskentry.set("Comment", comment)
|
||||
deskentry.set("Comment", comment, locale=True)
|
||||
if icon:
|
||||
deskentry.set("Icon", icon)
|
||||
|
||||
if nodisplay is True:
|
||||
deskentry.set("NoDisplay", "true")
|
||||
elif nodisplay is False:
|
||||
deskentry.set("NoDisplay", "false")
|
||||
|
||||
if hidden is True:
|
||||
deskentry.set("Hidden", "true")
|
||||
elif hidden is False:
|
||||
deskentry.set("Hidden", "false")
|
||||
|
||||
menu.Directory.updateAttributes()
|
||||
|
||||
if isinstance(menu.Parent, Menu):
|
||||
self.menu.sort()
|
||||
|
||||
return menu
|
||||
|
||||
def hideMenuEntry(self, menuentry):
|
||||
self.editMenuEntry(menuentry, nodisplay=True)
|
||||
|
||||
def unhideMenuEntry(self, menuentry):
|
||||
self.editMenuEntry(menuentry, nodisplay=False, hidden=False)
|
||||
|
||||
def hideMenu(self, menu):
|
||||
self.editMenu(menu, nodisplay=True)
|
||||
|
||||
def unhideMenu(self, menu):
|
||||
self.editMenu(menu, nodisplay=False, hidden=False)
|
||||
xml_menu = self.__getXmlMenu(menu.getPath(True, True), False)
|
||||
deleted = xml_menu.findall('Deleted')
|
||||
not_deleted = xml_menu.findall('NotDeleted')
|
||||
for node in deleted + not_deleted:
|
||||
xml_menu.remove(node)
|
||||
|
||||
def deleteMenuEntry(self, menuentry):
|
||||
if self.getAction(menuentry) == "delete":
|
||||
self.__deleteFile(menuentry.DesktopEntry.filename)
|
||||
for parent in menuentry.Parents:
|
||||
self.__deleteEntry(parent, menuentry)
|
||||
self.menu.sort()
|
||||
return menuentry
|
||||
|
||||
def revertMenuEntry(self, menuentry):
|
||||
if self.getAction(menuentry) == "revert":
|
||||
self.__deleteFile(menuentry.DesktopEntry.filename)
|
||||
menuentry.Original.Parents = []
|
||||
for parent in menuentry.Parents:
|
||||
index = parent.Entries.index(menuentry)
|
||||
parent.Entries[index] = menuentry.Original
|
||||
index = parent.MenuEntries.index(menuentry)
|
||||
parent.MenuEntries[index] = menuentry.Original
|
||||
menuentry.Original.Parents.append(parent)
|
||||
self.menu.sort()
|
||||
return menuentry
|
||||
|
||||
def deleteMenu(self, menu):
|
||||
if self.getAction(menu) == "delete":
|
||||
self.__deleteFile(menu.Directory.DesktopEntry.filename)
|
||||
self.__deleteEntry(menu.Parent, menu)
|
||||
xml_menu = self.__getXmlMenu(menu.getPath(True, True))
|
||||
parent = self.__get_parent_node(xml_menu)
|
||||
parent.remove(xml_menu)
|
||||
self.menu.sort()
|
||||
return menu
|
||||
|
||||
def revertMenu(self, menu):
|
||||
if self.getAction(menu) == "revert":
|
||||
self.__deleteFile(menu.Directory.DesktopEntry.filename)
|
||||
menu.Directory = menu.Directory.Original
|
||||
self.menu.sort()
|
||||
return menu
|
||||
|
||||
def deleteSeparator(self, separator):
|
||||
self.__deleteEntry(separator.Parent, separator, after=True)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return separator
|
||||
|
||||
""" Private Stuff """
|
||||
def getAction(self, entry):
|
||||
if isinstance(entry, Menu):
|
||||
if not isinstance(entry.Directory, MenuEntry):
|
||||
return "none"
|
||||
elif entry.Directory.getType() == "Both":
|
||||
return "revert"
|
||||
elif entry.Directory.getType() == "User" and (
|
||||
len(entry.Submenus) + len(entry.MenuEntries)
|
||||
) == 0:
|
||||
return "delete"
|
||||
|
||||
elif isinstance(entry, MenuEntry):
|
||||
if entry.getType() == "Both":
|
||||
return "revert"
|
||||
elif entry.getType() == "User":
|
||||
return "delete"
|
||||
else:
|
||||
return "none"
|
||||
|
||||
return "none"
|
||||
|
||||
def __saveEntries(self, menu):
|
||||
if not menu:
|
||||
menu = self.menu
|
||||
if isinstance(menu.Directory, MenuEntry):
|
||||
menu.Directory.save()
|
||||
for entry in menu.getEntries(hidden=True):
|
||||
if isinstance(entry, MenuEntry):
|
||||
entry.save()
|
||||
elif isinstance(entry, Menu):
|
||||
self.__saveEntries(entry)
|
||||
|
||||
def __saveMenu(self):
|
||||
if not os.path.isdir(os.path.dirname(self.filename)):
|
||||
os.makedirs(os.path.dirname(self.filename))
|
||||
self.tree.write(self.filename, encoding='utf-8')
|
||||
|
||||
def __getFileName(self, name, extension):
|
||||
postfix = 0
|
||||
while 1:
|
||||
if postfix == 0:
|
||||
filename = name + extension
|
||||
else:
|
||||
filename = name + "-" + str(postfix) + extension
|
||||
if extension == ".desktop":
|
||||
dir = "applications"
|
||||
elif extension == ".directory":
|
||||
dir = "desktop-directories"
|
||||
if not filename in self.filenames and not os.path.isfile(
|
||||
os.path.join(xdg_data_dirs[0], dir, filename)
|
||||
):
|
||||
self.filenames.append(filename)
|
||||
break
|
||||
else:
|
||||
postfix += 1
|
||||
|
||||
return filename
|
||||
|
||||
def __getXmlMenu(self, path, create=True, element=None):
|
||||
# FIXME: we should also return the menu's parent,
|
||||
# to avoid looking for it later on
|
||||
# @see Element.getiterator()
|
||||
if not element:
|
||||
element = self.tree
|
||||
|
||||
if "/" in path:
|
||||
(name, path) = path.split("/", 1)
|
||||
else:
|
||||
name = path
|
||||
path = ""
|
||||
|
||||
found = None
|
||||
for node in element.findall("Menu"):
|
||||
name_node = node.find('Name')
|
||||
if name_node.text == name:
|
||||
if path:
|
||||
found = self.__getXmlMenu(path, create, node)
|
||||
else:
|
||||
found = node
|
||||
if found:
|
||||
break
|
||||
if not found and create:
|
||||
node = self.__addXmlMenuElement(element, name)
|
||||
if path:
|
||||
found = self.__getXmlMenu(path, create, node)
|
||||
else:
|
||||
found = node
|
||||
|
||||
return found
|
||||
|
||||
def __addXmlMenuElement(self, element, name):
    menu_node = etree.SubElement(element, 'Menu')
    name_node = etree.SubElement(menu_node, 'Name')
    name_node.text = name
    return menu_node

def __addXmlTextElement(self, element, name, text):
    node = etree.SubElement(element, name)
    node.text = text
    return node
|
||||
|
||||
def __addXmlFilename(self, element, filename, type_="Include"):
|
||||
# remove old filenames
|
||||
includes = element.findall('Include')
|
||||
excludes = element.findall('Exclude')
|
||||
rules = includes + excludes
|
||||
for rule in rules:
|
||||
#FIXME: this finds only Rules whose FIRST child is a Filename element
|
||||
if rule[0].tag == "Filename" and rule[0].text == filename:
|
||||
element.remove(rule)
|
||||
# shouldn't it remove all occurrences, like the following:
|
||||
#filename_nodes = rule.findall('.//Filename'):
|
||||
#for fn in filename_nodes:
|
||||
#if fn.text == filename:
|
||||
##element.remove(rule)
|
||||
#parent = self.__get_parent_node(fn)
|
||||
#parent.remove(fn)
|
||||
|
||||
# add new filename
|
||||
node = etree.SubElement(element, type_)
|
||||
self.__addXmlTextElement(node, 'Filename', filename)
|
||||
return node
|
||||
|
||||
def __addXmlMove(self, element, old, new):
|
||||
node = etree.SubElement("Move", element)
|
||||
self.__addXmlTextElement(node, 'Old', old)
|
||||
self.__addXmlTextElement(node, 'New', new)
|
||||
return node
|
||||
|
||||
def __addXmlLayout(self, element, layout):
|
||||
# remove old layout
|
||||
for node in element.findall("Layout"):
|
||||
element.remove(node)
|
||||
|
||||
# add new layout
|
||||
node = etree.SubElement("Layout", element)
|
||||
for order in layout.order:
|
||||
if order[0] == "Separator":
|
||||
child = etree.SubElement("Separator", node)
|
||||
elif order[0] == "Filename":
|
||||
child = self.__addXmlTextElement(node, "Filename", order[1])
|
||||
elif order[0] == "Menuname":
|
||||
child = self.__addXmlTextElement(node, "Menuname", order[1])
|
||||
elif order[0] == "Merge":
|
||||
child = etree.SubElement("Merge", node)
|
||||
child.attrib["type"] = order[1]
|
||||
return node
|
||||
|
||||
def __addLayout(self, parent):
|
||||
layout = Layout()
|
||||
layout.order = []
|
||||
layout.show_empty = parent.Layout.show_empty
|
||||
layout.inline = parent.Layout.inline
|
||||
layout.inline_header = parent.Layout.inline_header
|
||||
layout.inline_alias = parent.Layout.inline_alias
|
||||
layout.inline_limit = parent.Layout.inline_limit
|
||||
|
||||
layout.order.append(["Merge", "menus"])
|
||||
for entry in parent.Entries:
|
||||
if isinstance(entry, Menu):
|
||||
layout.parseMenuname(entry.Name)
|
||||
elif isinstance(entry, MenuEntry):
|
||||
layout.parseFilename(entry.DesktopFileID)
|
||||
elif isinstance(entry, Separator):
|
||||
layout.parseSeparator()
|
||||
layout.order.append(["Merge", "files"])
|
||||
|
||||
parent.Layout = layout
|
||||
|
||||
return layout
|
||||
|
||||
def __addEntry(self, parent, entry, after=None, before=None):
|
||||
if after or before:
|
||||
if after:
|
||||
index = parent.Entries.index(after) + 1
|
||||
elif before:
|
||||
index = parent.Entries.index(before)
|
||||
parent.Entries.insert(index, entry)
|
||||
else:
|
||||
parent.Entries.append(entry)
|
||||
|
||||
xml_parent = self.__getXmlMenu(parent.getPath(True, True))
|
||||
|
||||
if isinstance(entry, MenuEntry):
|
||||
parent.MenuEntries.append(entry)
|
||||
entry.Parents.append(parent)
|
||||
self.__addXmlFilename(xml_parent, entry.DesktopFileID, "Include")
|
||||
elif isinstance(entry, Menu):
|
||||
parent.addSubmenu(entry)
|
||||
|
||||
if after or before:
|
||||
self.__addLayout(parent)
|
||||
self.__addXmlLayout(xml_parent, parent.Layout)
|
||||
|
||||
def __deleteEntry(self, parent, entry, after=None, before=None):
|
||||
parent.Entries.remove(entry)
|
||||
|
||||
xml_parent = self.__getXmlMenu(parent.getPath(True, True))
|
||||
|
||||
if isinstance(entry, MenuEntry):
|
||||
entry.Parents.remove(parent)
|
||||
parent.MenuEntries.remove(entry)
|
||||
self.__addXmlFilename(xml_parent, entry.DesktopFileID, "Exclude")
|
||||
elif isinstance(entry, Menu):
|
||||
parent.Submenus.remove(entry)
|
||||
|
||||
if after or before:
|
||||
self.__addLayout(parent)
|
||||
self.__addXmlLayout(xml_parent, parent.Layout)
|
||||
|
||||
def __deleteFile(self, filename):
|
||||
try:
|
||||
os.remove(filename)
|
||||
except OSError:
|
||||
pass
|
||||
try:
|
||||
self.filenames.remove(filename)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def __remove_whitespace_nodes(self, node):
|
||||
for child in node:
|
||||
text = child.text.strip()
|
||||
if not text:
|
||||
child.text = ''
|
||||
tail = child.tail.strip()
|
||||
if not tail:
|
||||
child.tail = ''
|
||||
if len(child):
|
||||
self.__remove_whitespace_nodes(child)
|
||||
|
||||
def __get_parent_node(self, node):
    # elements in ElementTree don't hold a reference to their parent
    for parent, child in self.__iter_parent():
        if child is node:
            return parent
|
||||
|
||||
def __iter_parent(self):
|
||||
for parent in self.tree.getiterator():
|
||||
for child in parent:
|
||||
yield parent, child
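A hedged sketch of how this editor is meant to be driven; it parses the system menu, so the outcome depends on the local environment, and "My Tool"/"mytool" are placeholders:

editor = MenuEditor()                  # parses the default applications.menu
entry = editor.createMenuEntry(editor.menu, "My Tool", command="mytool")
editor.save()                          # writes the user's menu file and the new .desktop entry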
|
|
@ -1,780 +0,0 @@
|
|||
"""
|
||||
This module is based on a rox module (LGPL):
|
||||
|
||||
http://cvs.sourceforge.net/viewcvs.py/rox/ROX-Lib2/python/rox/mime.py?rev=1.21&view=log
|
||||
|
||||
This module provides access to the shared MIME database.
|
||||
|
||||
types is a dictionary of all known MIME types, indexed by the type name, e.g.
|
||||
types['application/x-python']
|
||||
|
||||
Applications can install information about MIME types by storing an
|
||||
XML file as <MIME>/packages/<application>.xml and running the
|
||||
update-mime-database command, which is provided by the freedesktop.org
|
||||
shared mime database package.
|
||||
|
||||
See http://www.freedesktop.org/standards/shared-mime-info-spec/ for
|
||||
information about the format of these files.
|
||||
|
||||
(based on version 0.13)
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
import sys
|
||||
import fnmatch
|
||||
|
||||
from . import BaseDirectory, Locale
|
||||
|
||||
from xml.dom import minidom, XML_NAMESPACE
|
||||
from collections import defaultdict
|
||||
|
||||
FREE_NS = 'http://www.freedesktop.org/standards/shared-mime-info'
|
||||
|
||||
types = {} # Maps MIME names to type objects
|
||||
|
||||
exts = None # Maps extensions to types
|
||||
globs = None # List of (glob, type) pairs
|
||||
literals = None # Maps literal names to types
|
||||
magic = None
|
||||
|
||||
PY3 = (sys.version_info[0] >= 3)
|
||||
|
||||
def _get_node_data(node):
|
||||
"""Get text of XML node"""
|
||||
return ''.join([n.nodeValue for n in node.childNodes]).strip()
|
||||
|
||||
def lookup(media, subtype = None):
|
||||
"""Get the MIMEtype object for the given type.
|
||||
|
||||
This remains for backwards compatibility; calling MIMEtype now does
|
||||
the same thing.
|
||||
|
||||
The name can either be passed as one part ('text/plain'), or as two
|
||||
('text', 'plain').
|
||||
"""
|
||||
return MIMEtype(media, subtype)
|
||||
|
||||
class MIMEtype(object):
|
||||
"""Class holding data about a MIME type.
|
||||
|
||||
Calling the class will return a cached instance, so there is only one
|
||||
instance for each MIME type. The name can either be passed as one part
|
||||
('text/plain'), or as two ('text', 'plain').
|
||||
"""
|
||||
def __new__(cls, media, subtype=None):
|
||||
if subtype is None and '/' in media:
|
||||
media, subtype = media.split('/', 1)
|
||||
assert '/' not in subtype
|
||||
media = media.lower()
|
||||
subtype = subtype.lower()
|
||||
|
||||
try:
|
||||
return types[(media, subtype)]
|
||||
except KeyError:
|
||||
mtype = super(MIMEtype, cls).__new__(cls)
|
||||
mtype._init(media, subtype)
|
||||
types[(media, subtype)] = mtype
|
||||
return mtype
|
||||
|
||||
# If this is done in __init__, it is automatically called again each time
|
||||
# the MIMEtype is returned by __new__, which we don't want. So we call it
|
||||
# explicitly only when we construct a new instance.
|
||||
def _init(self, media, subtype):
|
||||
self.media = media
|
||||
self.subtype = subtype
|
||||
self._comment = None
|
||||
|
||||
def _load(self):
|
||||
"Loads comment for current language. Use get_comment() instead."
|
||||
resource = os.path.join('mime', self.media, self.subtype + '.xml')
|
||||
for path in BaseDirectory.load_data_paths(resource):
|
||||
doc = minidom.parse(path)
|
||||
if doc is None:
|
||||
continue
|
||||
for comment in doc.documentElement.getElementsByTagNameNS(FREE_NS, 'comment'):
|
||||
lang = comment.getAttributeNS(XML_NAMESPACE, 'lang') or 'en'
|
||||
goodness = 1 + (lang in Locale.langs)
|
||||
if goodness > self._comment[0]:
|
||||
self._comment = (goodness, _get_node_data(comment))
|
||||
if goodness == 2: return
|
||||
|
||||
# FIXME: add get_icon method
|
||||
def get_comment(self):
|
||||
"""Returns comment for current language, loading it if needed."""
|
||||
# Should we ever reload?
|
||||
if self._comment is None:
|
||||
self._comment = (0, str(self))
|
||||
self._load()
|
||||
return self._comment[1]
|
||||
|
||||
def canonical(self):
|
||||
"""Returns the canonical MimeType object if this is an alias."""
|
||||
update_cache()
|
||||
s = str(self)
|
||||
if s in aliases:
|
||||
return lookup(aliases[s])
|
||||
return self
|
||||
|
||||
def inherits_from(self):
|
||||
"""Returns a set of Mime types which this inherits from."""
|
||||
update_cache()
|
||||
return set(lookup(t) for t in inheritance[str(self)])
|
||||
|
||||
def __str__(self):
|
||||
return self.media + '/' + self.subtype
|
||||
|
||||
def __repr__(self):
|
||||
return 'MIMEtype(%r, %r)' % (self.media, self.subtype)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.media) ^ hash(self.subtype)
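A small illustration of the caching behaviour the class docstring describes:

mt = lookup("text/plain")
same = MIMEtype("text", "plain")
assert mt is same                      # a single shared instance per MIME type
str(mt)                                # 'text/plain'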
|
||||
|
||||
class UnknownMagicRuleFormat(ValueError):
|
||||
pass
|
||||
|
||||
class DiscardMagicRules(Exception):
|
||||
"Raised when __NOMAGIC__ is found, and caught to discard previous rules."
|
||||
pass
|
||||
|
||||
class MagicRule:
|
||||
also = None
|
||||
|
||||
def __init__(self, start, value, mask, word, range):
|
||||
self.start = start
|
||||
self.value = value
|
||||
self.mask = mask
|
||||
self.word = word
|
||||
self.range = range
|
||||
|
||||
rule_ending_re = re.compile(br'(?:~(\d+))?(?:\+(\d+))?\n$')
|
||||
|
||||
@classmethod
|
||||
def from_file(cls, f):
|
||||
"""Read a rule from the binary magics file. Returns a 2-tuple of
|
||||
the nesting depth and the MagicRule."""
|
||||
line = f.readline()
|
||||
#print line
|
||||
|
||||
# [indent] '>'
|
||||
nest_depth, line = line.split(b'>', 1)
|
||||
nest_depth = int(nest_depth) if nest_depth else 0
|
||||
|
||||
# start-offset '='
|
||||
start, line = line.split(b'=', 1)
|
||||
start = int(start)
|
||||
|
||||
if line == b'__NOMAGIC__\n':
|
||||
raise DiscardMagicRules
|
||||
|
||||
# value length (2 bytes, big endian)
|
||||
if sys.version_info[0] >= 3:
|
||||
lenvalue = int.from_bytes(line[:2], byteorder='big')
|
||||
else:
|
||||
lenvalue = (ord(line[0])<<8)+ord(line[1])
|
||||
line = line[2:]
|
||||
|
||||
# value
|
||||
# This can contain newlines, so we may need to read more lines
|
||||
while len(line) <= lenvalue:
|
||||
line += f.readline()
|
||||
value, line = line[:lenvalue], line[lenvalue:]
|
||||
|
||||
# ['&' mask]
|
||||
if line.startswith(b'&'):
|
||||
# This can contain newlines, so we may need to read more lines
|
||||
while len(line) <= lenvalue:
|
||||
line += f.readline()
|
||||
mask, line = line[1:lenvalue+1], line[lenvalue+1:]
|
||||
else:
|
||||
mask = None
|
||||
|
||||
# ['~' word-size] ['+' range-length]
|
||||
ending = cls.rule_ending_re.match(line)
|
||||
if not ending:
|
||||
# Per the spec, this will be caught and ignored, to allow
|
||||
# for future extensions.
|
||||
raise UnknownMagicRuleFormat(repr(line))
|
||||
|
||||
word, range = ending.groups()
|
||||
word = int(word) if (word is not None) else 1
|
||||
range = int(range) if (range is not None) else 1
|
||||
|
||||
return nest_depth, cls(start, value, mask, word, range)
|
||||
|
||||
def maxlen(self):
|
||||
l = self.start + len(self.value) + self.range
|
||||
if self.also:
|
||||
return max(l, self.also.maxlen())
|
||||
return l
|
||||
|
||||
def match(self, buffer):
|
||||
if self.match0(buffer):
|
||||
if self.also:
|
||||
return self.also.match(buffer)
|
||||
return True
|
||||
|
||||
def match0(self, buffer):
|
||||
l=len(buffer)
|
||||
lenvalue = len(self.value)
|
||||
for o in range(self.range):
|
||||
s=self.start+o
|
||||
e=s+lenvalue
|
||||
if l<e:
|
||||
return False
|
||||
if self.mask:
|
||||
test=''
|
||||
for i in range(lenvalue):
|
||||
if PY3:
|
||||
c = buffer[s+i] & self.mask[i]
|
||||
else:
|
||||
c = ord(buffer[s+i]) & ord(self.mask[i])
|
||||
test += chr(c)
|
||||
else:
|
||||
test = buffer[s:e]
|
||||
|
||||
if test==self.value:
|
||||
return True
|
||||
|
||||
def __repr__(self):
|
||||
return 'MagicRule(start=%r, value=%r, mask=%r, word=%r, range=%r)' %(
|
||||
self.start,
|
||||
self.value,
|
||||
self.mask,
|
||||
self.word,
|
||||
self.range)
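A minimal sketch of a rule built by hand rather than read from the binary magic file; the PDF signature is just an illustrative value:

pdf_rule = MagicRule(start=0, value=b"%PDF-", mask=None, word=1, range=1)
pdf_rule.match(b"%PDF-1.7 ...")        # True: the signature is found at offset 0
pdf_rule.match(b"GIF89a")              # no match, falls through and returns None (falsy)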
|
||||
|
||||
|
||||
class MagicMatchAny(object):
|
||||
"""Match any of a set of magic rules.
|
||||
|
||||
This has a similar interface to MagicRule objects (i.e. its match() and
|
||||
maxlen() methods), to allow for duck typing.
|
||||
"""
|
||||
def __init__(self, rules):
|
||||
self.rules = rules
|
||||
|
||||
def match(self, buffer):
|
||||
return any(r.match(buffer) for r in self.rules)
|
||||
|
||||
def maxlen(self):
|
||||
return max(r.maxlen() for r in self.rules)
|
||||
|
||||
@classmethod
|
||||
def from_file(cls, f):
|
||||
"""Read a set of rules from the binary magic file."""
|
||||
c=f.read(1)
|
||||
f.seek(-1, 1)
|
||||
depths_rules = []
|
||||
while c and c != b'[':
|
||||
try:
|
||||
depths_rules.append(MagicRule.from_file(f))
|
||||
except UnknownMagicRuleFormat:
|
||||
# Ignored to allow for extensions to the rule format.
|
||||
pass
|
||||
c=f.read(1)
|
||||
if c:
|
||||
f.seek(-1, 1)
|
||||
|
||||
# Build the rule tree
|
||||
tree = [] # (rule, [(subrule,[subsubrule,...]), ...])
|
||||
insert_points = {0:tree}
|
||||
for depth, rule in depths_rules:
|
||||
subrules = []
|
||||
insert_points[depth].append((rule, subrules))
|
||||
insert_points[depth+1] = subrules
|
||||
|
||||
return cls.from_rule_tree(tree)
|
||||
|
||||
@classmethod
|
||||
def from_rule_tree(cls, tree):
|
||||
"""From a nested list of (rule, subrules) pairs, build a MagicMatchAny
|
||||
instance, recursing down the tree.
|
||||
|
||||
Where there's only one top-level rule, this is returned directly,
|
||||
to simplify the nested structure. Returns None if no rules were read.
|
||||
"""
|
||||
rules = []
|
||||
for rule, subrules in tree:
|
||||
if subrules:
|
||||
rule.also = cls.from_rule_tree(subrules)
|
||||
rules.append(rule)
|
||||
|
||||
if len(rules)==0:
|
||||
return None
|
||||
if len(rules)==1:
|
||||
return rules[0]
|
||||
return cls(rules)
|
||||
|
||||
class MagicDB:
|
||||
def __init__(self):
|
||||
self.bytype = defaultdict(list) # mimetype -> [(priority, rule), ...]
|
||||
|
||||
def merge_file(self, fname):
|
||||
"""Read a magic binary file, and add its rules to this MagicDB."""
|
||||
with open(fname, 'rb') as f:
|
||||
line = f.readline()
|
||||
if line != b'MIME-Magic\0\n':
|
||||
raise IOError('Not a MIME magic file')
|
||||
|
||||
while True:
|
||||
shead = f.readline().decode('ascii')
|
||||
#print(shead)
|
||||
if not shead:
|
||||
break
|
||||
if shead[0] != '[' or shead[-2:] != ']\n':
|
||||
raise ValueError('Malformed section heading', shead)
|
||||
pri, tname = shead[1:-2].split(':')
|
||||
#print shead[1:-2]
|
||||
pri = int(pri)
|
||||
mtype = lookup(tname)
|
||||
try:
|
||||
rule = MagicMatchAny.from_file(f)
|
||||
except DiscardMagicRules:
|
||||
self.bytype.pop(mtype, None)
|
||||
rule = MagicMatchAny.from_file(f)
|
||||
if rule is None:
|
||||
continue
|
||||
#print rule
|
||||
|
||||
self.bytype[mtype].append((pri, rule))
|
||||
|
||||
def finalise(self):
|
||||
"""Prepare the MagicDB for matching.
|
||||
|
||||
This should be called after all rules have been merged into it.
|
||||
"""
|
||||
maxlen = 0
|
||||
self.alltypes = [] # (priority, mimetype, rule)
|
||||
|
||||
for mtype, rules in self.bytype.items():
|
||||
for pri, rule in rules:
|
||||
self.alltypes.append((pri, mtype, rule))
|
||||
maxlen = max(maxlen, rule.maxlen())
|
||||
|
||||
self.maxlen = maxlen # Number of bytes to read from files
|
||||
self.alltypes.sort(key=lambda x: x[0], reverse=True)
|
||||
|
||||
def match_data(self, data, max_pri=100, min_pri=0, possible=None):
|
||||
"""Do magic sniffing on some bytes.
|
||||
|
||||
max_pri & min_pri can be used to specify the maximum & minimum priority
|
||||
rules to look for. possible can be a list of mimetypes to check, or None
|
||||
(the default) to check all mimetypes until one matches.
|
||||
|
||||
Returns the MIMEtype found, or None if no entries match.
|
||||
"""
|
||||
if possible is not None:
|
||||
types = []
|
||||
for mt in possible:
|
||||
for pri, rule in self.bytype[mt]:
|
||||
types.append((pri, mt, rule))
|
||||
types.sort(key=lambda x: x[0])
|
||||
else:
|
||||
types = self.alltypes
|
||||
|
||||
for priority, mimetype, rule in types:
|
||||
#print priority, max_pri, min_pri
|
||||
if priority > max_pri:
|
||||
continue
|
||||
if priority < min_pri:
|
||||
break
|
||||
|
||||
if rule.match(data):
|
||||
return mimetype
|
||||
|
||||
def match(self, path, max_pri=100, min_pri=0, possible=None):
|
||||
"""Read data from the file and do magic sniffing on it.
|
||||
|
||||
max_pri & min_pri can be used to specify the maximum & minimum priority
|
||||
rules to look for. possible can be a list of mimetypes to check, or None
|
||||
(the default) to check all mimetypes until one matches.
|
||||
|
||||
Returns the MIMEtype found, or None if no entries match. Raises IOError
|
||||
if the file can't be opened.
|
||||
"""
|
||||
with open(path, 'rb') as f:
|
||||
buf = f.read(self.maxlen)
|
||||
return self.match_data(buf, max_pri, min_pri, possible)
|
||||
|
||||
def __repr__(self):
|
||||
return '<MagicDB (%d types)>' % len(self.alltypes)
|
||||
|
||||
class GlobDB(object):
|
||||
def __init__(self):
|
||||
"""Prepare the GlobDB. It can't actually be used until .finalise() is
|
||||
called, but merge_file() can be used to add data before that.
|
||||
"""
|
||||
# Maps mimetype to {(weight, glob, flags), ...}
|
||||
self.allglobs = defaultdict(set)
|
||||
|
||||
def merge_file(self, path):
|
||||
"""Loads name matching information from a globs2 file."""#
|
||||
allglobs = self.allglobs
|
||||
with open(path) as f:
|
||||
for line in f:
|
||||
if line.startswith('#'): continue # Comment
|
||||
|
||||
fields = line[:-1].split(':')
|
||||
weight, type_name, pattern = fields[:3]
|
||||
weight = int(weight)
|
||||
mtype = lookup(type_name)
|
||||
if len(fields) > 3:
|
||||
flags = fields[3].split(',')
|
||||
else:
|
||||
flags = ()
|
||||
|
||||
if pattern == '__NOGLOBS__':
|
||||
# This signals to discard any previous globs
|
||||
allglobs.pop(mtype, None)
|
||||
continue
|
||||
|
||||
allglobs[mtype].add((weight, pattern, tuple(flags)))
|
||||
|
||||
def finalise(self):
|
||||
"""Prepare the GlobDB for matching.
|
||||
|
||||
This should be called after all files have been merged into it.
|
||||
"""
|
||||
self.exts = defaultdict(list) # Maps extensions to [(type, weight),...]
|
||||
self.cased_exts = defaultdict(list)
|
||||
self.globs = [] # List of (regex, type, weight) triplets
|
||||
self.literals = {} # Maps literal names to (type, weight)
|
||||
self.cased_literals = {}
|
||||
|
||||
for mtype, globs in self.allglobs.items():
|
||||
mtype = mtype.canonical()
|
||||
for weight, pattern, flags in globs:
|
||||
|
||||
cased = 'cs' in flags
|
||||
|
||||
if pattern.startswith('*.'):
|
||||
# *.foo -- extension pattern
|
||||
rest = pattern[2:]
|
||||
if not ('*' in rest or '[' in rest or '?' in rest):
|
||||
if cased:
|
||||
self.cased_exts[rest].append((mtype, weight))
|
||||
else:
|
||||
self.exts[rest.lower()].append((mtype, weight))
|
||||
continue
|
||||
|
||||
if ('*' in pattern or '[' in pattern or '?' in pattern):
|
||||
# Translate the glob pattern to a regex & compile it
|
||||
re_flags = 0 if cased else re.I
|
||||
pattern = re.compile(fnmatch.translate(pattern), flags=re_flags)
|
||||
self.globs.append((pattern, mtype, weight))
|
||||
else:
|
||||
# No wildcards - literal pattern
|
||||
if cased:
|
||||
self.cased_literals[pattern] = (mtype, weight)
|
||||
else:
|
||||
self.literals[pattern.lower()] = (mtype, weight)
|
||||
|
||||
# Sort globs by weight & length
|
||||
self.globs.sort(reverse=True, key=lambda x: (x[2], len(x[0].pattern)) )
|
||||
|
||||
def first_match(self, path):
|
||||
"""Return the first match found for a given path, or None if no match
|
||||
is found."""
|
||||
try:
|
||||
return next(self._match_path(path))[0]
|
||||
except StopIteration:
|
||||
return None
|
||||
|
||||
def all_matches(self, path):
|
||||
"""Return a list of (MIMEtype, glob weight) pairs for the path."""
|
||||
return list(self._match_path(path))
|
||||
|
||||
def _match_path(self, path):
|
||||
"""Yields pairs of (mimetype, glob weight)."""
|
||||
leaf = os.path.basename(path)
|
||||
|
||||
# Literals (no wildcards)
|
||||
if leaf in self.cased_literals:
|
||||
yield self.cased_literals[leaf]
|
||||
|
||||
lleaf = leaf.lower()
|
||||
if lleaf in self.literals:
|
||||
yield self.literals[lleaf]
|
||||
|
||||
# Extensions
|
||||
ext = leaf
|
||||
while True:
|
||||
p = ext.find('.')
|
||||
if p < 0: break
|
||||
ext = ext[p + 1:]
|
||||
if ext in self.cased_exts:
|
||||
for res in self.cased_exts[ext]:
|
||||
yield res
|
||||
ext = lleaf
|
||||
while True:
|
||||
p = ext.find('.')
|
||||
if p < 0: break
|
||||
ext = ext[p+1:]
|
||||
if ext in self.exts:
|
||||
for res in self.exts[ext]:
|
||||
yield res
|
||||
|
||||
# Other globs
|
||||
for (regex, mime_type, weight) in self.globs:
|
||||
if regex.match(leaf):
|
||||
yield (mime_type, weight)
|
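# --- Editor's illustrative sketch (not part of the original file) ---
# How the GlobDB lookups above could be exercised once finalise() has run;
# the globs2 path and filename are hypothetical examples.
def _globdb_usage_example():
    db = GlobDB()
    db.merge_file("/usr/share/mime/globs2")    # assumed location of a globs2 file
    db.finalise()
    first = db.first_match("photo.PNG")        # first match found, or None
    return first, db.all_matches("photo.PNG")  # list of (MIMEtype, weight) pairs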
||||
|
||||
# Some well-known types
|
||||
text = lookup('text', 'plain')
|
||||
octet_stream = lookup('application', 'octet-stream')
|
||||
inode_block = lookup('inode', 'blockdevice')
|
||||
inode_char = lookup('inode', 'chardevice')
|
||||
inode_dir = lookup('inode', 'directory')
|
||||
inode_fifo = lookup('inode', 'fifo')
|
||||
inode_socket = lookup('inode', 'socket')
|
||||
inode_symlink = lookup('inode', 'symlink')
|
||||
inode_door = lookup('inode', 'door')
|
||||
app_exe = lookup('application', 'executable')
|
||||
|
||||
_cache_uptodate = False
|
||||
|
||||
def _cache_database():
|
||||
global globs, magic, aliases, inheritance, _cache_uptodate
|
||||
|
||||
_cache_uptodate = True
|
||||
|
||||
aliases = {} # Maps alias Mime types to canonical names
|
||||
inheritance = defaultdict(set) # Maps to sets of parent mime types.
|
||||
|
||||
# Load aliases
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'aliases')):
|
||||
with open(path, 'r') as f:
|
||||
for line in f:
|
||||
alias, canonical = line.strip().split(None, 1)
|
||||
aliases[alias] = canonical
|
||||
|
||||
# Load filename patterns (globs)
|
||||
globs = GlobDB()
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'globs2')):
|
||||
globs.merge_file(path)
|
||||
globs.finalise()
|
||||
|
||||
# Load magic sniffing data
|
||||
magic = MagicDB()
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'magic')):
|
||||
magic.merge_file(path)
|
||||
magic.finalise()
|
||||
|
||||
# Load subclasses
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'subclasses')):
|
||||
with open(path, 'r') as f:
|
||||
for line in f:
|
||||
sub, parent = line.strip().split(None, 1)
|
||||
inheritance[sub].add(parent)
|
||||
|
||||
def update_cache():
|
||||
if not _cache_uptodate:
|
||||
_cache_database()
|
||||
|
||||
def get_type_by_name(path):
|
||||
"""Returns type of file by its name, or None if not known"""
|
||||
update_cache()
|
||||
return globs.first_match(path)
|
||||
|
||||
def get_type_by_contents(path, max_pri=100, min_pri=0):
|
||||
"""Returns type of file by its contents, or None if not known"""
|
||||
update_cache()
|
||||
|
||||
return magic.match(path, max_pri, min_pri)
|
||||
|
||||
def get_type_by_data(data, max_pri=100, min_pri=0):
|
||||
"""Returns type of the data, which should be bytes."""
|
||||
update_cache()
|
||||
|
||||
return magic.match_data(data, max_pri, min_pri)
|
||||
|
||||
def _get_type_by_stat(st_mode):
|
||||
"""Match special filesystem objects to Mimetypes."""
|
||||
if stat.S_ISDIR(st_mode): return inode_dir
|
||||
elif stat.S_ISCHR(st_mode): return inode_char
|
||||
elif stat.S_ISBLK(st_mode): return inode_block
|
||||
elif stat.S_ISFIFO(st_mode): return inode_fifo
|
||||
elif stat.S_ISLNK(st_mode): return inode_symlink
|
||||
elif stat.S_ISSOCK(st_mode): return inode_socket
|
||||
return inode_door
|
||||
|
||||
def get_type(path, follow=True, name_pri=100):
|
||||
"""Returns type of file indicated by path.
|
||||
|
||||
This function is *deprecated* - :func:`get_type2` is more accurate.
|
||||
|
||||
:param path: pathname to check (need not exist)
|
||||
:param follow: when reading file, follow symbolic links
|
||||
:param name_pri: Priority to do name matches. 100=override magic
|
||||
|
||||
This tries to use the contents of the file, and falls back to the name. It
|
||||
can also handle special filesystem objects like directories and sockets.
|
||||
"""
|
||||
update_cache()
|
||||
|
||||
try:
|
||||
if follow:
|
||||
st = os.stat(path)
|
||||
else:
|
||||
st = os.lstat(path)
|
||||
except OSError:
|
||||
t = get_type_by_name(path)
|
||||
return t or text
|
||||
|
||||
if stat.S_ISREG(st.st_mode):
|
||||
# Regular file
|
||||
t = get_type_by_contents(path, min_pri=name_pri)
|
||||
if not t: t = get_type_by_name(path)
|
||||
if not t: t = get_type_by_contents(path, max_pri=name_pri)
|
||||
if t is None:
|
||||
if stat.S_IMODE(st.st_mode) & 0o111:
|
||||
return app_exe
|
||||
else:
|
||||
return text
|
||||
return t
|
||||
else:
|
||||
return _get_type_by_stat(st.st_mode)
|
||||
|
||||
def get_type2(path, follow=True):
|
||||
"""Find the MIMEtype of a file using the XDG recommended checking order.
|
||||
|
||||
This first checks the filename, then uses file contents if the name doesn't
|
||||
give an unambiguous MIMEtype. It can also handle special filesystem objects
|
||||
like directories and sockets.
|
||||
|
||||
:param path: file path to examine (need not exist)
|
||||
:param follow: whether to follow symlinks
|
||||
|
||||
:rtype: :class:`MIMEtype`
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
update_cache()
|
||||
|
||||
try:
|
||||
st = os.stat(path) if follow else os.lstat(path)
|
||||
except OSError:
|
||||
return get_type_by_name(path) or octet_stream
|
||||
|
||||
if not stat.S_ISREG(st.st_mode):
|
||||
# Special filesystem objects
|
||||
return _get_type_by_stat(st.st_mode)
|
||||
|
||||
mtypes = sorted(globs.all_matches(path), key=(lambda x: x[1]), reverse=True)
|
||||
if mtypes:
|
||||
max_weight = mtypes[0][1]
|
||||
i = 1
|
||||
for mt, w in mtypes[1:]:
|
||||
if w < max_weight:
|
||||
break
|
||||
i += 1
|
||||
mtypes = mtypes[:i]
|
||||
if len(mtypes) == 1:
|
||||
return mtypes[0][0]
|
||||
|
||||
possible = [mt for mt,w in mtypes]
|
||||
else:
|
||||
possible = None # Try all magic matches
|
||||
|
||||
try:
|
||||
t = magic.match(path, possible=possible)
|
||||
except IOError:
|
||||
t = None
|
||||
|
||||
if t:
|
||||
return t
|
||||
elif mtypes:
|
||||
return mtypes[0][0]
|
||||
elif stat.S_IMODE(st.st_mode) & 0o111:
|
||||
return app_exe
|
||||
else:
|
||||
return text if is_text_file(path) else octet_stream
|
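# --- Editor's illustrative sketch (not part of the original file) ---
# The checking order described in get_type2(): glob matches first, magic
# sniffing only when the name is ambiguous. The paths are hypothetical examples.
def _get_type2_usage_example():
    by_name = get_type_by_name("example.tar.gz")   # name-based lookup only
    combined = get_type2("/tmp/example.tar.gz")    # XDG recommended order
    return by_name, combined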
||||
|
||||
def is_text_file(path):
|
||||
"""Guess whether a file contains text or binary data.
|
||||
|
||||
Heuristic: binary if the first 32 bytes include ASCII control characters.
|
||||
This rule may change in future versions.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
try:
|
||||
f = open(path, 'rb')
|
||||
except IOError:
|
||||
return False
|
||||
|
||||
with f:
|
||||
return _is_text(f.read(32))
|
||||
|
||||
if PY3:
|
||||
def _is_text(data):
|
||||
return not any(b <= 0x8 or 0xe <= b < 0x20 or b == 0x7f for b in data)
|
||||
else:
|
||||
def _is_text(data):
|
||||
return not any(b <= '\x08' or '\x0e' <= b < '\x20' or b == '\x7f' \
|
||||
for b in data)
|
||||
|
||||
_mime2ext_cache = None
|
||||
_mime2ext_cache_uptodate = False
|
||||
|
||||
def get_extensions(mimetype):
|
||||
"""Retrieve the set of filename extensions matching a given MIMEtype.
|
||||
|
||||
Extensions are returned without a leading dot, e.g. 'py'. If no extensions
|
||||
are registered for the MIMEtype, returns an empty set.
|
||||
|
||||
The extensions are stored in a cache the first time this is called.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
global _mime2ext_cache, _mime2ext_cache_uptodate
|
||||
update_cache()
|
||||
if not _mime2ext_cache_uptodate:
|
||||
_mime2ext_cache = defaultdict(set)
|
||||
for ext, mtypes in globs.exts.items():
|
||||
for mtype, prio in mtypes:
|
||||
_mime2ext_cache[mtype].add(ext)
|
||||
_mime2ext_cache_uptodate = True
|
||||
|
||||
return _mime2ext_cache[mimetype]
|
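# --- Editor's illustrative sketch (not part of the original file) ---
# Mapping a MIMEtype back to its registered extensions; the filename is a
# hypothetical example and the result depends on the installed MIME database.
def _get_extensions_usage_example():
    mtype = get_type_by_name("script.py")
    return get_extensions(mtype) if mtype else set()   # e.g. {'py'} if registered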
||||
|
||||
|
||||
def install_mime_info(application, package_file):
|
||||
"""Copy 'package_file' as ``~/.local/share/mime/packages/<application>.xml.``
|
||||
If package_file is None, install ``<app_dir>/<application>.xml``.
|
||||
If already installed, does nothing. May overwrite an existing
|
||||
file with the same name (if the contents are different)"""
|
||||
application += '.xml'
|
||||
|
||||
new_data = open(package_file).read()
|
||||
|
||||
# See if the file is already installed
|
||||
package_dir = os.path.join('mime', 'packages')
|
||||
resource = os.path.join(package_dir, application)
|
||||
for x in BaseDirectory.load_data_paths(resource):
|
||||
try:
|
||||
old_data = open(x).read()
|
||||
except IOError:
|
||||
continue
|
||||
if old_data == new_data:
|
||||
return # Already installed
|
||||
|
||||
global _cache_uptodate
|
||||
_cache_uptodate = False
|
||||
|
||||
# Not already installed; add a new copy
|
||||
# Create the directory structure...
|
||||
new_file = os.path.join(BaseDirectory.save_data_path(package_dir), application)
|
||||
|
||||
# Write the file...
|
||||
open(new_file, 'w').write(new_data)
|
||||
|
||||
# Update the database...
|
||||
command = 'update-mime-database'
|
||||
if os.spawnlp(os.P_WAIT, command, command, BaseDirectory.save_data_path('mime')):
|
||||
os.unlink(new_file)
|
||||
raise Exception("The '%s' command returned an error code!\n" \
|
||||
"Make sure you have the freedesktop.org shared MIME package:\n" \
|
||||
"http://standards.freedesktop.org/shared-mime-info/" % command)
|
|
@ -1,181 +0,0 @@
|
|||
"""
|
||||
Implementation of the XDG Recent File Storage Specification
|
||||
http://standards.freedesktop.org/recent-file-spec
|
||||
"""
|
||||
|
||||
import xml.dom.minidom, xml.sax.saxutils
|
||||
import os, time, fcntl
|
||||
from .Exceptions import ParsingError
|
||||
|
||||
class RecentFiles:
|
||||
def __init__(self):
|
||||
self.RecentFiles = []
|
||||
self.filename = ""
|
||||
|
||||
def parse(self, filename=None):
|
||||
"""Parse a list of recently used files.
|
||||
|
||||
filename defaults to ``~/.recently-used``.
|
||||
"""
|
||||
if not filename:
|
||||
filename = os.path.join(os.getenv("HOME"), ".recently-used")
|
||||
|
||||
try:
|
||||
doc = xml.dom.minidom.parse(filename)
|
||||
except IOError:
|
||||
raise ParsingError('File not found', filename)
|
||||
except xml.parsers.expat.ExpatError:
|
||||
raise ParsingError('Not a valid recently-used file', filename)
|
||||
|
||||
self.filename = filename
|
||||
|
||||
for child in doc.childNodes:
|
||||
if child.nodeType == xml.dom.Node.ELEMENT_NODE:
|
||||
if child.tagName == "RecentFiles":
|
||||
for recent in child.childNodes:
|
||||
if recent.nodeType == xml.dom.Node.ELEMENT_NODE:
|
||||
if recent.tagName == "RecentItem":
|
||||
self.__parseRecentItem(recent)
|
||||
|
||||
self.sort()
|
||||
|
||||
def __parseRecentItem(self, item):
|
||||
recent = RecentFile()
|
||||
self.RecentFiles.append(recent)
|
||||
|
||||
for attribute in item.childNodes:
|
||||
if attribute.nodeType == xml.dom.Node.ELEMENT_NODE:
|
||||
if attribute.tagName == "URI":
|
||||
recent.URI = attribute.childNodes[0].nodeValue
|
||||
elif attribute.tagName == "Mime-Type":
|
||||
recent.MimeType = attribute.childNodes[0].nodeValue
|
||||
elif attribute.tagName == "Timestamp":
|
||||
recent.Timestamp = int(attribute.childNodes[0].nodeValue)
|
||||
elif attribute.tagName == "Private":
|
||||
recent.Private = True
|
||||
elif attribute.tagName == "Groups":
|
||||
|
||||
for group in attribute.childNodes:
|
||||
if group.nodeType == xml.dom.Node.ELEMENT_NODE:
|
||||
if group.tagName == "Group":
|
||||
recent.Groups.append(group.childNodes[0].nodeValue)
|
||||
|
||||
def write(self, filename=None):
|
||||
"""Write the list of recently used files to disk.
|
||||
|
||||
If the instance is already associated with a file, filename can be
|
||||
omitted to save it there again.
|
||||
"""
|
||||
if not filename and not self.filename:
|
||||
raise ParsingError('File not found', filename)
|
||||
elif not filename:
|
||||
filename = self.filename
|
||||
|
||||
f = open(filename, "w")
|
||||
fcntl.lockf(f, fcntl.LOCK_EX)
|
||||
f.write('<?xml version="1.0"?>\n')
|
||||
f.write("<RecentFiles>\n")
|
||||
|
||||
for r in self.RecentFiles:
|
||||
f.write(" <RecentItem>\n")
|
||||
f.write(" <URI>%s</URI>\n" % xml.sax.saxutils.escape(r.URI))
|
||||
f.write(" <Mime-Type>%s</Mime-Type>\n" % r.MimeType)
|
||||
f.write(" <Timestamp>%s</Timestamp>\n" % r.Timestamp)
|
||||
if r.Private == True:
|
||||
f.write(" <Private/>\n")
|
||||
if len(r.Groups) > 0:
|
||||
f.write(" <Groups>\n")
|
||||
for group in r.Groups:
|
||||
f.write(" <Group>%s</Group>\n" % group)
|
||||
f.write(" </Groups>\n")
|
||||
f.write(" </RecentItem>\n")
|
||||
|
||||
f.write("</RecentFiles>\n")
|
||||
fcntl.lockf(f, fcntl.LOCK_UN)
|
||||
f.close()
|
||||
|
||||
def getFiles(self, mimetypes=None, groups=None, limit=0):
|
||||
"""Get a list of recently used files.
|
||||
|
||||
The parameters can be used to filter by mime types, by group, or to
|
||||
limit the number of items returned. By default, the entire list is
|
||||
returned, except for items marked private.
|
||||
"""
|
||||
tmp = []
|
||||
i = 0
|
||||
for item in self.RecentFiles:
|
||||
if groups:
|
||||
for group in groups:
|
||||
if group in item.Groups:
|
||||
tmp.append(item)
|
||||
i += 1
|
||||
elif mimetypes:
|
||||
for mimetype in mimetypes:
|
||||
if mimetype == item.MimeType:
|
||||
tmp.append(item)
|
||||
i += 1
|
||||
else:
|
||||
if item.Private == False:
|
||||
tmp.append(item)
|
||||
i += 1
|
||||
if limit != 0 and i == limit:
|
||||
break
|
||||
|
||||
return tmp
|
||||
|
||||
def addFile(self, item, mimetype, groups=None, private=False):
|
||||
"""Add a recently used file.
|
||||
|
||||
item should be the URI of the file, typically starting with ``file:///``.
|
||||
"""
|
||||
# check if entry already there
|
||||
if item in self.RecentFiles:
|
||||
index = self.RecentFiles.index(item)
|
||||
recent = self.RecentFiles[index]
|
||||
else:
|
||||
# drop the oldest entry once the list already holds 500 files
|
||||
if len(self.RecentFiles) == 500:
|
||||
self.RecentFiles.pop()
|
||||
# add entry
|
||||
recent = RecentFile()
|
||||
self.RecentFiles.append(recent)
|
||||
|
||||
recent.URI = item
|
||||
recent.MimeType = mimetype
|
||||
recent.Timestamp = int(time.time())
|
||||
recent.Private = private
|
||||
if groups:
|
||||
recent.Groups = groups
|
||||
|
||||
self.sort()
|
||||
|
||||
def deleteFile(self, item):
|
||||
"""Remove a recently used file, by URI, from the list.
|
||||
"""
|
||||
if item in self.RecentFiles:
|
||||
self.RecentFiles.remove(item)
|
||||
|
||||
def sort(self):
|
||||
self.RecentFiles.sort()
|
||||
self.RecentFiles.reverse()
|
||||
|
||||
|
||||
class RecentFile:
|
||||
def __init__(self):
|
||||
self.URI = ""
|
||||
self.MimeType = ""
|
||||
self.Timestamp = ""
|
||||
self.Private = False
|
||||
self.Groups = []
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.Timestamp, other.Timestamp)
|
||||
|
||||
def __lt__ (self, other):
|
||||
return self.Timestamp < other.Timestamp
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.URI == str(other)
|
||||
|
||||
def __str__(self):
|
||||
return self.URI
|
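# --- Editor's illustrative sketch (not part of the original file) ---
# A typical parse/add/write round trip with RecentFiles; the URI, mimetype and
# group below are hypothetical examples.
def _recent_files_usage_example():
    rf = RecentFiles()
    rf.parse()                      # reads ~/.recently-used by default
    rf.addFile("file:///tmp/example.png", "image/png", groups=["Graphics"])
    rf.write()
    return rf.getFiles(mimetypes=["image/png"], limit=5)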
|
@ -1,3 +0,0 @@
|
|||
__all__ = [ "BaseDirectory", "DesktopEntry", "Menu", "Exceptions", "IniFile", "IconTheme", "Locale", "Config", "Mime", "RecentFiles", "MenuEditor" ]
|
||||
|
||||
__version__ = "0.26"
|
|
@ -1,75 +0,0 @@
|
|||
import sys
|
||||
|
||||
PY3 = sys.version_info[0] >= 3
|
||||
|
||||
if PY3:
|
||||
def u(s):
|
||||
return s
|
||||
else:
|
||||
# Unicode-like literals
|
||||
def u(s):
|
||||
return s.decode('utf-8')
|
||||
|
||||
try:
|
||||
# which() is available from Python 3.3
|
||||
from shutil import which
|
||||
except ImportError:
|
||||
import os
|
||||
# This is a copy of which() from Python 3.3
|
||||
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
|
||||
"""Given a command, mode, and a PATH string, return the path which
|
||||
conforms to the given mode on the PATH, or None if there is no such
|
||||
file.
|
||||
|
||||
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
|
||||
of os.environ.get("PATH"), or can be overridden with a custom search
|
||||
path.
|
||||
|
||||
"""
|
||||
# Check that a given file can be accessed with the correct mode.
|
||||
# Additionally check that `file` is not a directory, as on Windows
|
||||
# directories pass the os.access check.
|
||||
def _access_check(fn, mode):
|
||||
return (os.path.exists(fn) and os.access(fn, mode)
|
||||
and not os.path.isdir(fn))
|
||||
|
||||
# If we're given a path with a directory part, look it up directly rather
|
||||
# than referring to PATH directories. This includes checking relative to the
|
||||
# current directory, e.g. ./script
|
||||
if os.path.dirname(cmd):
|
||||
if _access_check(cmd, mode):
|
||||
return cmd
|
||||
return None
|
||||
|
||||
path = (path or os.environ.get("PATH", os.defpath)).split(os.pathsep)
|
||||
|
||||
if sys.platform == "win32":
|
||||
# The current directory takes precedence on Windows.
|
||||
if os.curdir not in path:
|
||||
path.insert(0, os.curdir)
|
||||
|
||||
# PATHEXT is necessary to check on Windows.
|
||||
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
|
||||
# See if the given file matches any of the expected path extensions.
|
||||
# This will allow us to short circuit when given "python.exe".
|
||||
# If it does match, only test that one, otherwise we have to try
|
||||
# others.
|
||||
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
|
||||
files = [cmd]
|
||||
else:
|
||||
files = [cmd + ext for ext in pathext]
|
||||
else:
|
||||
# On other platforms you don't have things like PATHEXT to tell you
|
||||
# what file suffixes are executable, so just pass on cmd as-is.
|
||||
files = [cmd]
|
||||
|
||||
seen = set()
|
||||
for dir in path:
|
||||
normdir = os.path.normcase(dir)
|
||||
if normdir not in seen:
|
||||
seen.add(normdir)
|
||||
for thefile in files:
|
||||
name = os.path.join(dir, thefile)
|
||||
if _access_check(name, mode):
|
||||
return name
|
||||
return None
|
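# --- Editor's illustrative sketch (not part of the original file) ---
# The shim above mirrors shutil.which() from Python 3.3, so callers get the
# same behaviour on older interpreters. "gedit" is just an example command.
def _which_usage_example():
    return which("gedit")   # absolute path to the executable, or None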
|
@ -1,3 +0,0 @@
|
|||
"""
|
||||
Mixins module
|
||||
"""
|
|
@ -1,49 +0,0 @@
|
|||
# Python imports
|
||||
import threading
|
||||
|
||||
|
||||
# Gtk imports
|
||||
import gi
|
||||
gi.require_version("Gtk", "3.0")
|
||||
gi.require_version('Gdk', '3.0')
|
||||
from gi.repository import Gtk, Gdk, GLib, Gio, GdkPixbuf
|
||||
|
||||
# Application imports
|
||||
|
||||
|
||||
|
||||
def threaded(fn):
|
||||
def wrapper(*args, **kwargs):
|
||||
threading.Thread(target=fn, args=args, kwargs=kwargs).start()
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class TreeViewUpdateMixin:
|
||||
"""docstring for DummyMixin"""
|
||||
def load_store(self, view, store, dir):
|
||||
store.clear()
|
||||
view.load_directory(dir)
|
||||
files = view.get_images()
|
||||
|
||||
for i, file in enumerate(files):
|
||||
store.append([None, f"{dir}/{file[0]}"])
|
||||
self.create_icon(i, view, store, dir, file[0])
|
||||
|
||||
@threaded
|
||||
def create_icon(self, i, view, store, dir, file):
|
||||
icon = view.create_icon(dir, file)
|
||||
fpath = f"{dir}/{file}"
|
||||
GLib.idle_add(self.update_store, (i, store, icon, view, fpath,))
|
||||
|
||||
def update_store(self, item):
|
||||
i, store, icon, view, fpath = item
|
||||
itr = store.get_iter(i)
|
||||
|
||||
if not icon:
|
||||
if fpath.endswith(".gif"):
|
||||
icon = GdkPixbuf.PixbufAnimation.get_static_image(fpath)
|
||||
else:
|
||||
icon = GdkPixbuf.Pixbuf.new_from_file(view.DEFAULT_ICON)
|
||||
|
||||
store.set_value(itr, 0, icon)
|
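# --- Editor's illustrative sketch (not part of the original file) ---
# How a controller could drive the mixin above. It assumes 'view' exposes
# load_directory()/get_images()/create_icon() and that 'store' is a
# Gtk.ListStore(GdkPixbuf.Pixbuf, str), as load_store()/update_store() expect.
class _ExampleController(TreeViewUpdateMixin):
    def refresh(self, view, store, directory):
        self.load_store(view, store, directory)   # icons are filled in via GLib.idle_add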
|
@ -1,78 +0,0 @@
|
|||
# Python imports
|
||||
import hashlib, re
|
||||
from os import listdir
|
||||
from os.path import isdir, isfile, join
|
||||
|
||||
# Lib imports
|
||||
|
||||
|
||||
# Application imports
|
||||
from .icons.icon import Icon
|
||||
|
||||
|
||||
class View(Icon):
|
||||
def __init__(self, img_filter, default_icon):
|
||||
self.DEFAULT_ICON = default_icon
|
||||
self._hide_hidden = True
|
||||
self._images = []
|
||||
self.fimages = img_filter
|
||||
self.VIDEO_ICON_WH = [256, 128]
|
||||
|
||||
def load_directory(self, _path):
|
||||
path = _path
|
||||
self._images = []
|
||||
|
||||
if not isdir(path):
|
||||
return ""
|
||||
|
||||
for f in listdir(path):
|
||||
file = join(path, f)
|
||||
if self._hide_hidden:
|
||||
if f.startswith('.'):
|
||||
continue
|
||||
|
||||
if isfile(file):
|
||||
lowerName = file.lower()
|
||||
if lowerName.endswith(self.fimages):
|
||||
self._images.append(f)
|
||||
|
||||
self._images.sort(key=self._natural_keys)
|
||||
|
||||
|
||||
def get_pixbuf_icon_str_combo(self):
|
||||
data = []
|
||||
dir = self.get_current_directory()
|
||||
for file in self._files:
|
||||
icon = self.create_icon(dir, file).get_pixbuf()
|
||||
data.append([icon, file])
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def get_gtk_icon_str_combo(self):
|
||||
data = []
|
||||
dir = self.get_current_directory()
|
||||
for file in self._files:
|
||||
icon = self.create_icon(dir, file)
|
||||
data.append([icon, file[0]])
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def get_images(self):
|
||||
return self._hash_set(self._images)
|
||||
|
||||
def _hash_text(self, text):
|
||||
return hashlib.sha256(str.encode(text)).hexdigest()[:18]
|
||||
|
||||
def _hash_set(self, arry):
|
||||
data = []
|
||||
for arr in arry:
|
||||
data.append([arr, self._hash_text(arr)])
|
||||
return data
|
||||
|
||||
def _atoi(self, text):
|
||||
return int(text) if text.isdigit() else text
|
||||
|
||||
def _natural_keys(self, text):
|
||||
return [ self._atoi(c) for c in re.split(r'(\d+)', text) ]
|
|
@ -1,3 +0,0 @@
|
|||
"""
|
||||
Utils module
|
||||
"""
|
|
@ -1,22 +0,0 @@
|
|||
# Python imports
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
from .singleton import Singleton
|
||||
|
||||
|
||||
|
||||
class EndpointRegistry(Singleton):
|
||||
def __init__(self):
|
||||
self._endpoints = {}
|
||||
|
||||
def register(self, rule, **options):
|
||||
def decorator(f):
|
||||
self._endpoints[rule] = f
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
def get_endpoints(self):
|
||||
return self._endpoints
|
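# --- Editor's illustrative sketch (not part of the original file) ---
# Registering a handler under a rule and retrieving it later; the rule name is
# a hypothetical example.
def _endpoint_registry_usage_example():
    registry = EndpointRegistry()

    @registry.register(rule="open-file")
    def open_file(path):
        return path

    return registry.get_endpoints()["open-file"]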
|
@ -1,54 +0,0 @@
|
|||
# Python imports
|
||||
from collections import defaultdict
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
from .singleton import Singleton
|
||||
|
||||
|
||||
|
||||
class EventSystem(Singleton):
|
||||
""" Create event system. """
|
||||
|
||||
def __init__(self):
|
||||
self.subscribers = defaultdict(list)
|
||||
|
||||
|
||||
def subscribe(self, event_type, fn):
|
||||
self.subscribers[event_type].append(fn)
|
||||
|
||||
def unsubscribe(self, event_type, fn):
|
||||
self.subscribers[event_type].remove(fn)
|
||||
|
||||
def unsubscribe_all(self, event_type):
|
||||
self.subscribers.pop(event_type, None)
|
||||
|
||||
def emit(self, event_type, data = None):
|
||||
if event_type in self.subscribers:
|
||||
for fn in self.subscribers[event_type]:
|
||||
if data:
|
||||
if hasattr(data, '__iter__') and not isinstance(data, str):
|
||||
fn(*data)
|
||||
else:
|
||||
fn(data)
|
||||
else:
|
||||
fn()
|
||||
|
||||
def emit_and_await(self, event_type, data = None):
|
||||
""" NOTE: Should be used when signal has only one listener and vis-a-vis """
|
||||
if event_type in self.subscribers:
|
||||
response = None
|
||||
for fn in self.subscribers[event_type]:
|
||||
if data:
|
||||
if hasattr(data, '__iter__') and not isinstance(data, str):
|
||||
response = fn(*data)
|
||||
else:
|
||||
response = fn(data)
|
||||
else:
|
||||
response = fn()
|
||||
|
||||
if response not in (None, ''):
|
||||
break
|
||||
|
||||
return response
|
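# --- Editor's illustrative sketch (not part of the original file) ---
# Subscribing and emitting with the class above; the event name and payloads
# are hypothetical examples.
def _event_system_usage_example():
    events = EventSystem()
    events.subscribe("file-opened", print)
    events.emit("file-opened", ("/tmp/example.png",))   # iterable payloads are unpacked
    events.emit("file-opened", "/tmp/example.png")      # strings are passed as a single argument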
|
@ -1,91 +0,0 @@
|
|||
# Python imports
|
||||
import os, threading, time
|
||||
from multiprocessing.connection import Listener, Client
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
from .singleton import Singleton
|
||||
|
||||
|
||||
def threaded(fn):
|
||||
def wrapper(*args, **kwargs):
|
||||
threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start()
|
||||
return wrapper
|
||||
|
||||
|
||||
|
||||
|
||||
class IPCServer(Singleton):
|
||||
""" Create a listener so that other SolarFM instances send requests back to existing instance. """
|
||||
def __init__(self, conn_type="socket"):
|
||||
self.is_ipc_alive = False
|
||||
self._conn_type = conn_type
|
||||
self.ipc_authkey = b'mirage2-ipc'
|
||||
self.ipc_timeout = 15.0
|
||||
|
||||
if conn_type == "socket":
|
||||
self.ipc_address = '/tmp/mirage2-ipc.sock'
|
||||
else:
|
||||
self.ipc_address = '127.0.0.1'
|
||||
self.ipc_port = 4848
|
||||
|
||||
|
||||
@threaded
|
||||
def create_ipc_server(self):
|
||||
if self._conn_type == "socket":
|
||||
if os.path.exists(self.ipc_address):
|
||||
return
|
||||
|
||||
listener = Listener(address=self.ipc_address, family="AF_UNIX", authkey=self.ipc_authkey)
|
||||
else:
|
||||
listener = Listener((self.ipc_address, self.ipc_port), authkey=self.ipc_authkey)
|
||||
|
||||
|
||||
self.is_ipc_alive = True
|
||||
while True:
|
||||
conn = listener.accept()
|
||||
start_time = time.time()
|
||||
|
||||
print(f"New Connection: {listener.last_accepted}")
|
||||
while True:
|
||||
msg = conn.recv()
|
||||
if debug:
|
||||
print(msg)
|
||||
|
||||
if "FILE|" in msg:
|
||||
file = msg.split("FILE|")[1].strip()
|
||||
if file:
|
||||
event_system.push_gui_event([None, "handle_file_from_ipc", (file,)])
|
||||
|
||||
conn.close()
|
||||
break
|
||||
|
||||
|
||||
if msg == 'close connection':
|
||||
conn.close()
|
||||
break
|
||||
if msg == 'close server':
|
||||
conn.close()
|
||||
break
|
||||
|
||||
# NOTE: Not perfect, but ensures we don't hold the connection open for too long.
|
||||
end_time = time.time()
|
||||
if (end_time - start_time) > self.ipc_timeout:
|
||||
conn.close()
|
||||
|
||||
listener.close()
|
||||
|
||||
|
||||
def send_ipc_message(self, message="Empty Data..."):
|
||||
try:
|
||||
if self._conn_type == "socket":
|
||||
conn = Client(address=self.ipc_address, family="AF_UNIX", authkey=self.ipc_authkey)
|
||||
else:
|
||||
conn = Client((self.ipc_address, self.ipc_port), authkey=self.ipc_authkey)
|
||||
|
||||
|
||||
conn.send(message)
|
||||
conn.send('close connection')
|
||||
except Exception as e:
|
||||
print(repr(e))
|
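# --- Editor's illustrative sketch (not part of the original file) ---
# One process owns the listener, a later instance only sends; the message
# below is a hypothetical example of the "FILE|" protocol handled above.
def _ipc_usage_example():
    server = IPCServer()
    server.create_ipc_server()   # listener runs in a daemon thread
    # A second process/instance would then call:
    #     IPCServer().send_ipc_message("FILE|/tmp/example.png")
    return server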
|
@ -1,138 +0,0 @@
|
|||
# Python imports
|
||||
import re
|
||||
|
||||
# Gtk imports
|
||||
import gi
|
||||
gi.require_version('Gdk', '3.0')
|
||||
from gi.repository import Gdk
|
||||
|
||||
# Application imports
|
||||
from .singleton import Singleton
|
||||
|
||||
|
||||
|
||||
def logger(log = ""):
|
||||
print(log)
|
||||
|
||||
|
||||
class KeymapError(Exception):
|
||||
""" Custom exception for errors in keybinding configurations """
|
||||
|
||||
MODIFIER = re.compile('<([^<]+)>')
|
||||
class Keybindings(Singleton):
|
||||
""" Class to handle loading and lookup of Terminator keybindings """
|
||||
|
||||
modifiers = {
|
||||
'ctrl': Gdk.ModifierType.CONTROL_MASK,
|
||||
'control': Gdk.ModifierType.CONTROL_MASK,
|
||||
'primary': Gdk.ModifierType.CONTROL_MASK,
|
||||
'shift': Gdk.ModifierType.SHIFT_MASK,
|
||||
'alt': Gdk.ModifierType.MOD1_MASK,
|
||||
'super': Gdk.ModifierType.SUPER_MASK,
|
||||
'hyper': Gdk.ModifierType.HYPER_MASK,
|
||||
'mod2': Gdk.ModifierType.MOD2_MASK
|
||||
}
|
||||
|
||||
empty = {}
|
||||
keys = None
|
||||
_masks = None
|
||||
_lookup = None
|
||||
|
||||
def __init__(self):
|
||||
self.keymap = Gdk.Keymap.get_default()
|
||||
self.configure({})
|
||||
|
||||
def print_keys(self):
|
||||
print(self.keys)
|
||||
|
||||
def append_bindings(self, combos):
|
||||
""" Accept new binding(s) and reload """
|
||||
for item in combos:
|
||||
method, keys = item.split(":")
|
||||
self.keys[method] = keys
|
||||
|
||||
self.reload()
|
||||
|
||||
def configure(self, bindings):
|
||||
""" Accept new bindings and reconfigure with them """
|
||||
self.keys = bindings
|
||||
self.reload()
|
||||
|
||||
def reload(self):
|
||||
""" Parse bindings and mangle into an appropriate form """
|
||||
self._lookup = {}
|
||||
self._masks = 0
|
||||
|
||||
for action, bindings in list(self.keys.items()):
|
||||
if isinstance(bindings, list):
|
||||
bindings = (*bindings,)
|
||||
elif not isinstance(bindings, tuple):
|
||||
bindings = (bindings,)
|
||||
|
||||
|
||||
for binding in bindings:
|
||||
if not binding or binding == "None":
|
||||
continue
|
||||
|
||||
try:
|
||||
keyval, mask = self._parsebinding(binding)
|
||||
# Does much the same, but with poorer error handling.
|
||||
# keyval, mask = Gtk.accelerator_parse(binding)
|
||||
except KeymapError as e:
|
||||
logger(f"Keybinding reload failed to parse binding '{binding}': {e}")
|
||||
else:
|
||||
if mask & Gdk.ModifierType.SHIFT_MASK:
|
||||
if keyval == Gdk.KEY_Tab:
|
||||
keyval = Gdk.KEY_ISO_Left_Tab
|
||||
mask &= ~Gdk.ModifierType.SHIFT_MASK
|
||||
else:
|
||||
keyvals = Gdk.keyval_convert_case(keyval)
|
||||
if keyvals[0] != keyvals[1]:
|
||||
keyval = keyvals[1]
|
||||
mask &= ~Gdk.ModifierType.SHIFT_MASK
|
||||
else:
|
||||
keyval = Gdk.keyval_to_lower(keyval)
|
||||
|
||||
self._lookup.setdefault(mask, {})
|
||||
self._lookup[mask][keyval] = action
|
||||
self._masks |= mask
|
||||
|
||||
def _parsebinding(self, binding):
|
||||
""" Parse an individual binding using Gtk's binding function """
|
||||
mask = 0
|
||||
modifiers = re.findall(MODIFIER, binding)
|
||||
|
||||
if modifiers:
|
||||
for modifier in modifiers:
|
||||
mask |= self._lookup_modifier(modifier)
|
||||
|
||||
key = re.sub(MODIFIER, '', binding)
|
||||
if key == '':
|
||||
raise KeymapError('No key found!')
|
||||
|
||||
keyval = Gdk.keyval_from_name(key)
|
||||
|
||||
if keyval == 0:
|
||||
raise KeymapError(f"Key '{key}' is unrecognised...")
|
||||
return (keyval, mask)
|
||||
|
||||
def _lookup_modifier(self, modifier):
|
||||
""" Map modifier names to gtk values """
|
||||
try:
|
||||
return self.modifiers[modifier.lower()]
|
||||
except KeyError:
|
||||
raise KeymapError(f"Unhandled modifier '<{modifier}>'")
|
||||
|
||||
def lookup(self, event):
|
||||
""" Translate a keyboard event into a mapped key """
|
||||
try:
|
||||
_found, keyval, _egp, _lvl, consumed = self.keymap.translate_keyboard_state(
|
||||
event.hardware_keycode,
|
||||
Gdk.ModifierType(event.get_state() & ~Gdk.ModifierType.LOCK_MASK),
|
||||
event.group)
|
||||
except TypeError:
|
||||
logger("Keybinding lookup failed to translate keyboard event: {dir(event)}")
|
||||
return None
|
||||
|
||||
mask = (event.get_state() & ~consumed) & self._masks
|
||||
return self._lookup.get(mask, self.empty).get(keyval, None)
|
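# --- Editor's illustrative sketch (not part of the original file) ---
# Configuring bindings and resolving a key event to an action name; the
# binding strings are hypothetical examples.
def _keybindings_usage_example():
    kb = Keybindings()
    kb.configure({"open_file": "<Ctrl>o", "quit": "<Ctrl>q"})
    # Inside a Gtk key-press handler an action is then resolved with:
    #     action = kb.lookup(event)   # -> "open_file", "quit", or None
    return kb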
|
@ -1,59 +0,0 @@
|
|||
# Python imports
|
||||
import os
|
||||
import logging
|
||||
|
||||
# Application imports
|
||||
from .singleton import Singleton
|
||||
|
||||
|
||||
|
||||
class Logger(Singleton):
|
||||
"""
|
||||
Create a new logging object and return it.
|
||||
:note:
|
||||
NOTSET 0 # Lowest level; loggers left at NOTSET defer to their ancestors' effective level.
|
||||
Log Levels (most severe to least)
|
||||
Type Value
|
||||
CRITICAL 50
|
||||
ERROR 40
|
||||
WARNING 30
|
||||
INFO 20
|
||||
DEBUG 10
|
||||
:param loggerName: Sets the name of the logger object. (Used in log lines)
|
||||
:param createFile: Whether we create a log file or just pump to terminal
|
||||
|
||||
:return: the logging object we created
|
||||
"""
|
||||
|
||||
def __init__(self, config_path: str, _ch_log_lvl = logging.CRITICAL, _fh_log_lvl = logging.INFO):
|
||||
self._CONFIG_PATH = config_path
|
||||
self.global_lvl = logging.DEBUG # Keep this at the most verbose level so handlers can filter down to their own levels
|
||||
self.ch_log_lvl = _ch_log_lvl # Pretty much the only one we ever change
|
||||
self.fh_log_lvl = _fh_log_lvl
|
||||
|
||||
def get_logger(self, loggerName: str = "NO_LOGGER_NAME_PASSED", createFile: bool = True) -> logging.Logger:
|
||||
log = logging.getLogger(loggerName)
|
||||
log.setLevel(self.global_lvl)
|
||||
|
||||
# Set our log output styles
|
||||
fFormatter = logging.Formatter('[%(asctime)s] %(pathname)s:%(lineno)d %(levelname)s - %(message)s', '%m-%d %H:%M:%S')
|
||||
cFormatter = logging.Formatter('%(pathname)s:%(lineno)d] %(levelname)s - %(message)s')
|
||||
|
||||
ch = logging.StreamHandler()
|
||||
ch.setLevel(level=self.ch_log_lvl)
|
||||
ch.setFormatter(cFormatter)
|
||||
log.addHandler(ch)
|
||||
|
||||
if createFile:
|
||||
folder = self._CONFIG_PATH
|
||||
file = f"{folder}/application.log"
|
||||
|
||||
if not os.path.exists(folder):
|
||||
os.mkdir(folder)
|
||||
|
||||
fh = logging.FileHandler(file)
|
||||
fh.setLevel(level=self.fh_log_lvl)
|
||||
fh.setFormatter(fFormatter)
|
||||
log.addHandler(fh)
|
||||
|
||||
return log
|
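# --- Editor's illustrative sketch (not part of the original file) ---
# Building a logger that prints warnings to the console and writes INFO and
# above to <config>/application.log; the config path is a hypothetical example.
def _logger_usage_example():
    log = Logger("/tmp/example-config", _ch_log_lvl=logging.WARNING).get_logger("example")
    log.info("started")                                  # file handler only
    log.warning("shown on the console and in the file")
    return log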
|
@ -1,113 +0,0 @@
|
|||
# Python imports
|
||||
import os
|
||||
|
||||
# Gtk imports
|
||||
import gi, cairo
|
||||
gi.require_version('Gtk', '3.0')
|
||||
gi.require_version('Gdk', '3.0')
|
||||
|
||||
from gi.repository import Gtk
|
||||
from gi.repository import Gdk
|
||||
|
||||
|
||||
# Application imports
|
||||
from .singleton import Singleton
|
||||
from .logger import Logger
|
||||
|
||||
|
||||
|
||||
class Settings(Singleton):
|
||||
def __init__(self):
|
||||
self._SCRIPT_PTH = os.path.dirname(os.path.realpath(__file__))
|
||||
self._USER_HOME = os.path.expanduser('~')
|
||||
self._CONFIG_PATH = f"{self._USER_HOME}/.config/{app_name.lower()}"
|
||||
self._HOME_CONFIG_PATH = f"{self._USER_HOME}/.config/{app_name.lower()}"
|
||||
self._GLADE_FILE = f"{self._CONFIG_PATH}/Main_Window.glade"
|
||||
self._CSS_FILE = f"{self._CONFIG_PATH}/stylesheet.css"
|
||||
self._DEFAULT_ICONS = f"{self._CONFIG_PATH}/icons"
|
||||
self._WINDOW_ICON = f"{self._DEFAULT_ICONS}/{app_name.lower()}.png"
|
||||
self._BLANK_ICON = f"{self._DEFAULT_ICONS}/mirage_blank.png"
|
||||
self._USR_PATH = f"/usr/share/{app_name.lower()}"
|
||||
|
||||
if not os.path.exists(self._CONFIG_PATH):
|
||||
os.mkdir(self._CONFIG_PATH)
|
||||
if not os.path.exists(self._GLADE_FILE):
|
||||
self._GLADE_FILE = f"{self._USR_PATH}/Main_Window.glade"
|
||||
if not os.path.exists(self._CSS_FILE):
|
||||
self._CSS_FILE = f"{self._USR_PATH}/stylesheet.css"
|
||||
if not os.path.exists(self._WINDOW_ICON):
|
||||
self._WINDOW_ICON = f"{self._USR_PATH}/icons/{app_name.lower()}.png"
|
||||
if not os.path.exists(self._BLANK_ICON):
|
||||
self._BLANK_ICON = f"{self._USR_PATH}/icons/mirage_blank.png"
|
||||
if not os.path.exists(self._DEFAULT_ICONS):
|
||||
self._DEFAULT_ICONS = f"{self._USR_PATH}/icons"
|
||||
|
||||
# '_filters'
|
||||
self._images_filter = ('.png', '.jpg', '.jpeg', '.gif', '.ico', '.tga')
|
||||
|
||||
self._success_color = "#88cc27"
|
||||
self._warning_color = "#ffa800"
|
||||
self._error_color = "#ff0000"
|
||||
|
||||
self._main_window = None
|
||||
self._logger = Logger(self._CONFIG_PATH).get_logger()
|
||||
self._builder = Gtk.Builder()
|
||||
self._builder.add_from_file(self._GLADE_FILE)
|
||||
|
||||
|
||||
|
||||
def create_window(self):
|
||||
# Get window and connect signals
|
||||
self._main_window = self._builder.get_object("Main_Window")
|
||||
self.set_window_data()
|
||||
|
||||
def set_window_data(self):
|
||||
self._main_window.set_icon_from_file(self._WINDOW_ICON)
|
||||
screen = self._main_window.get_screen()
|
||||
visual = screen.get_rgba_visual()
|
||||
|
||||
if visual is not None and screen.is_composited():
|
||||
self._main_window.set_visual(visual)
|
||||
self._main_window.set_app_paintable(True)
|
||||
self._main_window.connect("draw", self.draw_area)
|
||||
|
||||
# bind css file
|
||||
cssProvider = Gtk.CssProvider()
|
||||
cssProvider.load_from_path(self._CSS_FILE)
|
||||
screen = Gdk.Screen.get_default()
|
||||
styleContext = Gtk.StyleContext()
|
||||
styleContext.add_provider_for_screen(screen, cssProvider, Gtk.STYLE_PROVIDER_PRIORITY_USER)
|
||||
|
||||
def get_monitor_data(self):
|
||||
screen = self._builder.get_object("Main_Window").get_screen()
|
||||
monitors = []
|
||||
for m in range(screen.get_n_monitors()):
|
||||
monitors.append(screen.get_monitor_geometry(m))
|
||||
|
||||
for monitor in monitors:
|
||||
print("{}x{}|{}+{}".format(monitor.width, monitor.height, monitor.x, monitor.y))
|
||||
|
||||
return monitors
|
||||
|
||||
def draw_area(self, widget, cr):
|
||||
cr.set_source_rgba(0, 0, 0, 0.54)
|
||||
cr.set_operator(cairo.OPERATOR_SOURCE)
|
||||
cr.paint()
|
||||
cr.set_operator(cairo.OPERATOR_OVER)
|
||||
|
||||
|
||||
|
||||
|
||||
def get_builder(self): return self._builder
|
||||
def get_logger(self): return self._logger
|
||||
def get_main_window(self): return self._main_window
|
||||
def get_home_path(self): return self._USER_HOME
|
||||
def get_home_config_path(self) -> str: return self._HOME_CONFIG_PATH
|
||||
|
||||
# Filter returns
|
||||
def get_images_filter(self): return self._images_filter
|
||||
|
||||
def get_blank_image(self): return self._BLANK_ICON
|
||||
def get_success_color(self): return self._success_color
|
||||
def get_warning_color(self): return self._warning_color
|
||||
def get_error_color(self): return self._error_color
|
|
@ -1,23 +0,0 @@
|
|||
# Python imports
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
|
||||
|
||||
class SingletonError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
|
||||
class Singleton:
|
||||
ccount = 0
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
obj = super(Singleton, cls).__new__(cls)
|
||||
cls.ccount += 1
|
||||
|
||||
if cls.ccount == 2:
|
||||
raise SingletonError(f"Exceeded {cls.__name__} instantiation limit...")
|
||||
|
||||
return obj
|
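# --- Editor's illustrative sketch (not part of the original file) ---
# Each Singleton subclass may be instantiated exactly once; a second attempt
# raises SingletonError rather than returning the first instance.
def _singleton_usage_example():
    class ExampleService(Singleton):
        pass

    first = ExampleService()
    try:
        ExampleService()
    except SingletonError:
        pass
    return first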