Merge pull request 'Major refactoring, cleanup' (#3) from develop into master
Reviewed-on: #3
commit d6a3187f2c
@@ -1,25 +1,36 @@
bcrypt==4.0.1
certifi==2022.12.7
charset-normalizer==3.0.1
click==7.1.2
dnspython==2.1.0
eventlet==0.30.1
dnspython==1.16.0
email-validator==1.1.2
eventlet==0.30.1
Flask==1.1.2
Flask-Bcrypt==0.7.1
Flask-Login==0.5.0
flask-oidc==1.4.0
Flask-Bcrypt==0.7.1
Flask-SQLAlchemy==2.4.4
Flask-Uploads==0.2.1
Flask-WTF==0.14.3
greenlet==1.0.0
gunicorn==20.0.4
httplib2==0.19.0
idna==3.4
itsdangerous==1.1.0
Jinja2==2.11.3
MarkupSafe==1.1.1
oauth2client==4.1.3
Pillow==9.4.0
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycairo==1.23.0
PyGObject==3.42.2
pyparsing==2.4.7
pyxdg==0.28
requests==2.28.2
rsa==4.7
six==1.15.0
SQLAlchemy==1.3.23
urllib3==1.26.14
Werkzeug==1.0.1
WTForms==2.3.3
WTForms==2.3.3
@@ -0,0 +1,94 @@
# Python imports
import os
import builtins
import threading
import re
import secrets

# Lib imports
from flask import session

# Application imports
from core import app
from core.utils import Logger




# NOTE: Threads WILL NOT die with parent's destruction.
def threaded_wrapper(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start()
    return wrapper

# NOTE: Threads WILL die with parent's destruction.
def daemon_threaded_wrapper(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start()
    return wrapper

def sizeof_fmt_def(num, suffix="B"):
    for unit in ["", "K", "M", "G", "T", "Pi", "Ei", "Zi"]:
        if abs(num) < 1024.0:
            return f"{num:3.1f} {unit}{suffix}"
        num /= 1024.0
    return f"{num:.1f} Yi{suffix}"


def _get_file_size(file):
    return "4K" if os.path.isdir(file) else sizeof_fmt_def(os.path.getsize(file))



# NOTE: Just reminding myself we can add to builtins two different ways...
# __builtins__.update({"event_system": Builtins()})
builtins.app_name = "WebFM"
builtins.threaded = threaded_wrapper
builtins.daemon_threaded = daemon_threaded_wrapper
builtins.sizeof_fmt = sizeof_fmt_def
builtins.get_file_size = _get_file_size
builtins.ROOT_FILE_PTH = os.path.dirname(os.path.realpath(__file__))
builtins.BG_IMGS_PATH = ROOT_FILE_PTH + "/static/imgs/backgrounds/"
builtins.BG_FILE_TYPE = (".webm", ".mp4", ".gif", ".jpg", ".png", ".webp")
builtins.valid_fname_pat = re.compile(r"[a-z0-9A-Z-_\[\]\(\)\| ]{4,20}")
builtins.logger = Logger().get_logger()



# NOTE: Need threads defined before instantiating
from core.utils.shellfm.windows.controller import WindowController  # Get file manager controller

window_controllers = {}

def _get_view():
    controller = None
    try:
        controller = window_controllers[ session["win_controller_id"] ].get_window_by_index(0).get_tab_by_index(0)
    except Exception as e:
        id = secrets.token_hex(16)
        controller = WindowController()
        view = controller.create_window().create_tab()

        try:
            view.ABS_THUMBS_PTH = app.config['ABS_THUMBS_PTH']
        except Exception as e:
            print("No ABS_THUMBS_PTH set by WebFM...")

        try:
            view.REMUX_FOLDER = app.config['REMUX_FOLDER']
        except Exception as e:
            print("No REMUX_FOLDER set by WebFM...")

        try:
            view.FFMPG_THUMBNLR = app.config['FFMPG_THUMBNLR']
        except Exception as e:
            print("No FFMPG_THUMBNLR set by WebFM...")

        view.logger = logger

        session['win_controller_id'] = id
        window_controllers.update( {id: controller} )
        controller = window_controllers[ session["win_controller_id"] ].get_window_by_index(0).get_tab_by_index(0)

    return controller



builtins.get_view = _get_view
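
A minimal usage sketch of the helpers injected above (the function name below is hypothetical; once core/__init__.py runs `from .__builtins__ import *`, the decorators and `get_view` are available everywhere without an import):

@daemon_threaded
def log_background_task():
    # daemon=True: this thread will not outlive the web server process
    logger.info("background task running")

view = get_view()  # resolves the per-session tab; needs an active Flask request context
print(view.get_current_directory())
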
@@ -1,6 +1,4 @@
# Python imports
import os


# Lib imports
from flask import Flask

@@ -10,20 +8,18 @@ from flask_oidc import OpenIDConnect
from flask_bcrypt import Bcrypt
from flask_login import current_user, login_user, logout_user, LoginManager


# Application imports
from core.utils import Logger


ROOT_FILE_PTH = os.path.dirname(os.path.realpath(__file__))
app = Flask(__name__)
app.config.from_object("core.config.ProductionConfig")
# app.config.from_object("core.config.DevelopmentConfig")

# Application imports
from .__builtins__ import *



oidc = OpenIDConnect(app)
login_manager = LoginManager(app)
bcrypt = Bcrypt(app)
logger = Logger().get_logger()

def oidc_loggedin():
    return oidc.user_loggedin

@@ -0,0 +1,119 @@
# Python imports
import os
import re

# Lib imports
from flask import request

from flask_uploads import ALL
from flask_uploads import configure_uploads
from flask_uploads import UploadSet


# App imports
# Get from __init__
from core import app
from core import db
from core import Favorites
from core import oidc

from core.utils import MessageHandler  # Get simple message processor

json_message = MessageHandler()



@app.route('/api/delete/<_hash>', methods=['GET', 'POST'])
def delete_item(_hash = None):
    if request.method == 'POST':
        msg = "Log in with an Admin privileged user to delete files!"
        if not oidc.user_loggedin:
            return json_message.create("danger", msg)
        elif oidc.user_loggedin:
            isAdmin = oidc.user_getfield("isAdmin")
            if isAdmin != "yes":
                return json_message.create("danger", msg)

        view = get_view()
        folder = view.get_current_directory()
        file = view.get_path_part_from_hash(_hash)
        fpath = os.path.join(folder, file)
        try:
            msg = f"[Success] Deleted the file/folder -->: {file} !"
            view.delete_file(fpath)
            return json_message.create("success", msg)
        except Exception as e:
            msg = "[Error] Unable to delete the file/folder...."
            return json_message.create("danger", msg)



@app.route('/api/create/<_type>', methods=['GET', 'POST'])
def create_item(_type = None):
    if request.method == 'POST':
        msg = "Log in with an Admin privileged user to upload files!"
        if not oidc.user_loggedin:
            return json_message.create("danger", msg)
        elif oidc.user_loggedin:
            isAdmin = oidc.user_getfield("isAdmin")
            if isAdmin != "yes":
                return json_message.create("danger", msg)

        TYPE = _type.strip()
        if not TYPE in ["dir", "file"]:
            msg = "Couldn't handle action type for api create..."
            return json_message.create("danger", msg)

        FNAME = str(request.values['fname']).strip()
        if not re.fullmatch(valid_fname_pat, FNAME):
            msg = "A new item name can only contain alphanumeric, -, _, |, [], (), or spaces and must be a minimum of 4 and a maximum of 20 characters..."
            return json_message.create("danger", msg)

        try:
            view = get_view()
            folder = view.get_current_directory()
            new_item = f"{folder}/{FNAME}"
            view.create_file(new_item, TYPE)
        except Exception as e:
            print(repr(e))
            msg = "Couldn't create file/folder. An unexpected error occurred..."
            return json_message.create("danger", msg)


        msg = "[Success] created the file/dir..."
        return json_message.create("success", msg)
    else:
        msg = "Can't manage the request type..."
        return json_message.create("danger", msg)


@app.route('/api/upload', methods=['GET', 'POST'])
def upload():
    if request.method == 'POST' and len(request.files) > 0:
        msg = "Log in with an Admin privileged user to upload files!"
        if not oidc.user_loggedin:
            return json_message.create("danger", msg)
        elif oidc.user_loggedin:
            isAdmin = oidc.user_getfield("isAdmin")
            if isAdmin != "yes":
                return json_message.create("danger", msg)

        view = get_view()
        folder = view.get_current_directory()
        UPLOADS_PTH = f'{folder}/'
        files = UploadSet('files', ALL, default_dest=lambda x: UPLOADS_PTH)
        configure_uploads(app, files)

        try:
            for file in request.files:
                files.save(request.files[file])
        except Exception as e:
            print(repr(e))
            msg = "[Error] Failed to upload some or all of the file(s)..."
            return json_message.create("danger", msg)

        msg = "[Success] Uploaded file(s)..."
        return json_message.create("success", msg)
    else:
        msg = "Can't manage the request type..."
        return json_message.create("danger", msg)

@@ -0,0 +1,71 @@
# Python imports

# Lib imports
from flask import request

# App imports
from core import app
from core import db
from core import Favorites            # Get from __init__
from core.utils import MessageHandler  # Get simple message processor


json_message = MessageHandler()




@app.route('/api/list-favorites', methods=['GET', 'POST'])
def listFavorites():
    if request.method == 'POST':
        list = db.session.query(Favorites).all()
        faves = []
        for fave in list:
            faves.append([fave.link, fave.id])

        return json_message.faves_list(faves)
    else:
        msg = "Can't manage the request type..."
        return json_message.create("danger", msg)

@app.route('/api/load-favorite/<_id>', methods=['GET', 'POST'])
def loadFavorite(_id):
    if request.method == 'POST':
        try:
            ID = int(_id)
            fave = db.session.query(Favorites).filter_by(id = ID).first()
            view = get_view()
            view.set_path_with_sub_path(fave.link)
            return '{"refresh": "true"}'
        except Exception as e:
            print(repr(e))
            msg = "Incorrect Favorites ID..."
            return json_message.create("danger", msg)
    else:
        msg = "Can't manage the request type..."
        return json_message.create("danger", msg)


@app.route('/api/manage-favorites/<_action>', methods=['GET', 'POST'])
def manageFavorites(_action):
    if request.method == 'POST':
        ACTION = _action.strip()
        view = get_view()
        sub_path = view.get_current_sub_path()

        if ACTION == "add":
            fave = Favorites(link = sub_path)
            db.session.add(fave)
            msg = "Added to Favorites successfully..."
        elif ACTION == "delete":
            fave = db.session.query(Favorites).filter_by(link = sub_path).first()
            db.session.delete(fave)
            msg = "Deleted from Favorites successfully..."
        else:
            msg = "Couldn't handle action for favorites item..."
            return json_message.create("danger", msg)

        db.session.commit()
        return json_message.create("success", msg)
    else:
        msg = "Can't manage the request type..."
        return json_message.create("danger", msg)

@@ -0,0 +1,92 @@
# Python imports
import os
import requests
import shutil

# Lib imports
from flask import request


# App imports
# Get from __init__
from core import app

from core.utils import MessageHandler       # Get simple message processor
from core.utils.tmdbscraper import scraper  # Get media art scraper


json_message = MessageHandler()
tmdb = scraper.get_tmdb_scraper()


@app.route('/api/get-background-poster-trailer', methods=['GET', 'POST'])
def getPosterTrailer():
    if request.method == 'GET':
        info = {}
        view = get_view()
        dot_dots = view.get_dot_dots()

        sub_path = view.get_current_sub_path()
        file = sub_path.split("/")[-1]
        trailer = None
        if "(" in file and ")" in file:
            title = file.split("(")[0].strip()
            startIndex = file.index('(') + 1
            endIndex = file.index(')')
            date = file[startIndex:endIndex]

            try:
                video_data = tmdb.search(title, date)[0]
                video_id = video_data["id"]
                background_url = video_data["backdrop_path"]
                background_pth = f"{view.get_current_directory()}/000.jpg"

                tmdb_videos = tmdb.tmdbapi.get_movie(str(video_id), append_to_response="videos")["videos"]["results"]
                for tmdb_video in tmdb_videos:
                    if "YouTube" in tmdb_video["site"]:
                        trailer_key = tmdb_video["key"]
                        trailer = f"https://www.youtube-nocookie.com/embed/{trailer_key}?start=0&autoplay=1"

                if not trailer:
                    raise Exception("No key found. Deferring to none...")
            except Exception as e:
                print("No trailer found...")
                trailer = None

            if not os.path.isfile(background_pth):
                r = requests.get(background_url, stream = True)

                if r.status_code == 200:
                    r.raw.decode_content = True
                    with open(background_pth, 'wb') as f:
                        shutil.copyfileobj(r.raw, f)

                    view.load_directory()
                    print('Cover Background Image successfully retrieved...')
                else:
                    print('Cover Background Image couldn\'t be retrieved...')

        info.update({'trailer': trailer})
        info.update({'poster': background_url})

        return info


@app.route('/backgrounds', methods=['GET', 'POST'])
def backgrounds():
    files = []
    data = os.listdir(BG_IMGS_PATH)
    for file in data:
        if file.lower().endswith(BG_FILE_TYPE):
            files.append(file)

    return json_message.backgrounds(files)

@app.route('/api/get-thumbnails', methods=['GET', 'POST'])
def getThumbnails():
    if request.method == 'GET':
        view = get_view()
        return json_message.thumbnails( view.get_video_icons() )
    else:
        msg = "Can't manage the request type..."
        return json_message.create("danger", msg)

@@ -1,65 +1,31 @@
# Python imports
import os, json, secrets, requests, shutil, re
import os

# Lib imports
from flask import request, session, render_template, send_from_directory, redirect
from flask_uploads import UploadSet, configure_uploads, ALL
from flask_login import current_user

from flask import redirect
from flask import request
from flask import render_template
from flask import send_from_directory

# App imports
from core import app, logger, oidc, db, Favorites, ROOT_FILE_PTH  # Get from __init__
from core.utils import MessageHandler            # Get simple message processor
from core.utils.shellfm import WindowController  # Get file manager controller
from core.utils.tmdbscraper import scraper       # Get media art scraper
# Get from __init__
from core import app
from core import db
from core import Favorites
from core import oidc

from core.utils import MessageHandler  # Get simple message processor


BG_IMGS_PATH = ROOT_FILE_PTH + "/static/imgs/backgrounds/"
BG_FILE_TYPE = (".webm", ".mp4", ".gif", ".jpg", ".png", ".webp")
msgHandler = MessageHandler()
tmdb = scraper.get_tmdb_scraper()
window_controllers = {}
# valid_fname_pat = re.compile(r"/^[a-zA-Z0-9-_\[\]\(\)| ]+$/")
valid_fname_pat = re.compile(r"[a-z0-9A-Z-_\[\]\(\)\| ]{4,20}")

json_message = MessageHandler()

def get_window_controller():
    controller = None
    try:
        controller = window_controllers[ session["win_controller_id"] ]
    except Exception as e:
        id = secrets.token_hex(16)
        controller = WindowController()
        view = controller.get_window(1).get_view(0)

        try:
            view.ABS_THUMBS_PTH = app.config['ABS_THUMBS_PTH']
        except Exception as e:
            ...

        try:
            view.REMUX_FOLDER = app.config['REMUX_FOLDER']
        except Exception as e:
            ...

        try:
            view.FFMPG_THUMBNLR = app.config['FFMPG_THUMBNLR']
        except Exception as e:
            ...

        view.logger = logger

        session['win_controller_id'] = id
        window_controllers.update( {id: controller} )
        controller = window_controllers[ session["win_controller_id"] ]

    return controller


@app.route('/', methods=['GET', 'POST'])
def home():
    if request.method == 'GET':
        view = get_window_controller().get_window(1).get_view(0)
        view = get_view()
        _dot_dots = view.get_dot_dots()
        _current_directory = view.get_current_directory()
        return render_template('pages/index.html', current_directory = _current_directory, dot_dots = _dot_dots)

@@ -68,20 +34,11 @@ def home():
                               message = 'Must use GET request type...')


@app.route('/backgrounds', methods=['GET', 'POST'])
def backgrounds():
    files = []
    data = os.listdir(BG_IMGS_PATH)
    for file in data:
        if file.lower().endswith(BG_FILE_TYPE):
            files.append(file)

    return '{ "backgrounds": ' + json.dumps(files) + '}'

@app.route('/api/list-files/<_hash>', methods=['GET', 'POST'])
def listFiles(_hash = None):
    if request.method == 'POST':
        view = get_window_controller().get_window(1).get_view(0)
        view = get_view()
        dot_dots = view.get_dot_dots()

        if dot_dots[0][1] == _hash:  # Refresh

@@ -92,91 +49,41 @@ def listFiles(_hash = None):
        msg = "Log in with an Admin privileged user to view the requested path!"
        is_locked = view.is_folder_locked(_hash)
        if is_locked and not oidc.user_loggedin:
            return msgHandler.create_JSON_message("danger", msg)
            return json_message.create("danger", msg)
        elif is_locked and oidc.user_loggedin:
            isAdmin = oidc.user_getfield("isAdmin")
            if isAdmin != "yes":
                return msgHandler.create_JSON_message("danger", msg)
                return json_message.create("danger", msg)

        if dot_dots[0][1] != _hash and dot_dots[1][1] != _hash:
            path = view.get_path_part_from_hash(_hash)
            view.push_to_path(path)

        error_msg = view.get_error_message()
        if error_msg != None:
        if error_msg:
            view.unset_error_message()
            return msgHandler.create_JSON_message("danger", error_msg)


        sub_path = view.get_current_sub_path()
        current_directory = sub_path.split("/")[-1]
        trailer = None
        if "(" in current_directory and ")" in current_directory:
            title = current_directory.split("(")[0].strip()
            startIndex = current_directory.index('(') + 1
            endIndex = current_directory.index(')')
            date = current_directory[startIndex:endIndex]
            video_data = tmdb.search(title, date)[0]
            video_id = video_data["id"]
            background_url = video_data["backdrop_path"]
            background_pth = f"{view.get_current_directory()}/000.jpg"

            try:
                tmdb_videos = tmdb.tmdbapi.get_movie(str(video_id), append_to_response="videos")["videos"]["results"]
                for tmdb_video in tmdb_videos:
                    if "YouTube" in tmdb_video["site"]:
                        trailer_key = tmdb_video["key"]
                        trailer = f"https://www.youtube-nocookie.com/embed/{trailer_key}?start=0&autoplay=1"

                if not trailer:
                    raise Exception("No key found. Deferring to none...")
            except Exception as e:
                print("No trailer found...")
                trailer = None

            if not os.path.isfile(background_pth):
                r = requests.get(background_url, stream = True)

                if r.status_code == 200:
                    r.raw.decode_content = True
                    with open(background_pth, 'wb') as f:
                        shutil.copyfileobj(r.raw, f)

                    view.load_directory()
                    print('Cover Background Image successfully retrieved...')
                else:
                    print('Cover Background Image couldn\'t be retrieved...')

            return json_message.create("danger", error_msg)

        sub_path = view.get_current_sub_path()
        files = view.get_files_formatted()
        fave = db.session.query(Favorites).filter_by(link = sub_path).first()
        in_fave = "true" if fave else "false"

        files.update({'in_fave': in_fave})
        files.update({'trailer': trailer})
        return files
    else:
        msg = "Can't manage the request type..."
        return msgHandler.create_JSON_message("danger", msg)
        return json_message.create("danger", msg)


@app.route('/api/get-posters', methods=['GET', 'POST'])
def getPosters():
    if request.method == 'POST':
        view = get_window_controller().get_window(1).get_view(0)
        videos = view.get_videos()
        return videos
    else:
        msg = "Can't manage the request type..."
        return msgHandler.create_JSON_message("danger", msg)

@app.route('/api/file-manager-action/<_type>/<_hash>', methods=['GET', 'POST'])
def fileManagerAction(_type, _hash = None):
    view = get_window_controller().get_window(1).get_view(0)
    view = get_view()

    if _type == "reset-path" and _hash == "None":
        view.set_to_home()
        msg = "Returning to home directory..."
        return msgHandler.create_JSON_message("success", msg)
        return json_message.create("success", msg)

    folder = view.get_current_directory()
    file = view.get_path_part_from_hash(_hash)

@@ -195,7 +102,7 @@ def fileManagerAction(_type, _hash = None):
            return '{"path":"static/remuxs/' + _hash + '.mp4"}'
        else:
            msg = "Remuxing: Remux failed or took too long; please refresh the page and try again..."
            return msgHandler.create_JSON_message("success", msg)
            return json_message.create("success", msg)

    if _type == "remux":
        stream_target = view.remux_video(_hash, fpath)

@@ -205,158 +112,14 @@ def fileManagerAction(_type, _hash = None):
    # Be aware of ordering!
    msg = "Log in with an Admin privileged user to do this action!"
    if not oidc.user_loggedin:
        return msgHandler.create_JSON_message("danger", msg)
        return json_message.create("danger", msg)
    elif oidc.user_loggedin:
        isAdmin = oidc.user_getfield("isAdmin")
        if isAdmin != "yes":
            return msgHandler.create_JSON_message("danger", msg)
            return json_message.create("danger", msg)


    if _type == "run-locally":
        msg = "Opened media..."
        view.open_file_locally(fpath)
        return msgHandler.create_JSON_message("success", msg)

    if _type == "delete":
        try:
            msg = f"[Success] Deleted the file/folder -->: {file} !"
            if os.path.isfile(fpath):
                os.unlink(fpath)
            else:
                shutil.rmtree(fpath)
            return msgHandler.create_JSON_message("success", msg)
        except Exception as e:
            msg = "[Error] Unable to delete the file/folder...."
            return msgHandler.create_JSON_message("danger", msg)


@app.route('/api/list-favorites', methods=['GET', 'POST'])
def listFavorites():
    if request.method == 'POST':
        list = db.session.query(Favorites).all()
        faves = []
        for fave in list:
            faves.append([fave.link, fave.id])

        return '{"faves_list":' + json.dumps(faves) + '}'
    else:
        msg = "Can't manage the request type..."
        return msgHandler.create_JSON_message("danger", msg)

@app.route('/api/load-favorite/<_id>', methods=['GET', 'POST'])
def loadFavorite(_id):
    if request.method == 'POST':
        try:
            ID = int(_id)
            fave = db.session.query(Favorites).filter_by(id = ID).first()
            view = get_window_controller().get_window(1).get_view(0)
            view.set_path_with_sub_path(fave.link)
            return '{"refresh": "true"}'
        except Exception as e:
            print(repr(e))
            msg = "Incorrect Favorites ID..."
            return msgHandler.create_JSON_message("danger", msg)
    else:
        msg = "Can't manage the request type..."
        return msgHandler.create_JSON_message("danger", msg)


@app.route('/api/manage-favorites/<_action>', methods=['GET', 'POST'])
def manageFavorites(_action):
    if request.method == 'POST':
        ACTION = _action.strip()
        view = get_window_controller().get_window(1).get_view(0)
        sub_path = view.get_current_sub_path()

        if ACTION == "add":
            fave = Favorites(link = sub_path)
            db.session.add(fave)
            msg = "Added to Favorites successfully..."
        elif ACTION == "delete":
            fave = db.session.query(Favorites).filter_by(link = sub_path).first()
            db.session.delete(fave)
            msg = "Deleted from Favorites successfully..."
        else:
            msg = "Couldn't handle action for favorites item..."
            return msgHandler.create_JSON_message("danger", msg)

        db.session.commit()
        return msgHandler.create_JSON_message("success", msg)
    else:
        msg = "Can't manage the request type..."
        return msgHandler.create_JSON_message("danger", msg)


@app.route('/api/create/<_type>', methods=['GET', 'POST'])
def create_item(_type = None):
    if request.method == 'POST':
        msg = "Log in with an Admin privileged user to upload files!"
        if not oidc.user_loggedin:
            return msgHandler.create_JSON_message("danger", msg)
        elif oidc.user_loggedin:
            isAdmin = oidc.user_getfield("isAdmin")
            if isAdmin != "yes":
                return msgHandler.create_JSON_message("danger", msg)

        TYPE = _type.strip()
        FNAME = str(request.values['fname']).strip()

        if not re.fullmatch(valid_fname_pat, FNAME):
            msg = "A new item name can only contain alphanumeric, -, _, |, [], (), or spaces and must be a minimum of 4 and a maximum of 20 characters..."
            return msgHandler.create_JSON_message("danger", msg)

        view = get_window_controller().get_window(1).get_view(0)
        folder = view.get_current_directory()
        new_item = f"{folder}/{FNAME}"

        try:
            if TYPE == "dir":
                os.mkdir(new_item)
            elif TYPE == "file":
                open(f"{new_item}.txt", 'a').close()
            else:
                msg = "Couldn't handle action type for api create..."
                return msgHandler.create_JSON_message("danger", msg)
        except Exception as e:
            print(repr(e))
            msg = "Couldn't create file/folder. An unexpected error occurred..."
            return msgHandler.create_JSON_message("danger", msg)


        msg = "[Success] created the file/dir..."
        return msgHandler.create_JSON_message("success", msg)
    else:
        msg = "Can't manage the request type..."
        return msgHandler.create_JSON_message("danger", msg)


@app.route('/upload', methods=['GET', 'POST'])
def upload():
    if request.method == 'POST' and len(request.files) > 0:
        msg = "Log in with an Admin privileged user to upload files!"
        if not oidc.user_loggedin:
            return msgHandler.create_JSON_message("danger", msg)
        elif oidc.user_loggedin:
            isAdmin = oidc.user_getfield("isAdmin")
            if isAdmin != "yes":
                return msgHandler.create_JSON_message("danger", msg)

        view = get_window_controller().get_window(1).get_view(0)
        folder = view.get_current_directory()
        UPLOADS_PTH = f'{folder}/'
        files = UploadSet('files', ALL, default_dest=lambda x: UPLOADS_PTH)
        configure_uploads(app, files)

        for file in request.files:
            try:
                files.save(request.files[file])
            except Exception as e:
                print(repr(e))
                msg = "[Error] Failed to upload some or all of the file(s)..."
                return msgHandler.create_JSON_message("danger", msg)

        msg = "[Success] Uploaded file(s)..."
        return msgHandler.create_JSON_message("success", msg)
    else:
        msg = "Can't manage the request type..."
        return msgHandler.create_JSON_message("danger", msg)
        return json_message.create("success", msg)

@@ -1,4 +1,7 @@
from . import Images
from . import CRUD
from . import Routes
from . import Favorites
from .pages import Flask_Login
from .pages import Flask_Register
from .pages import OIDC_Login

Binary file not shown.
@@ -5,9 +5,14 @@ const goHomeAjax = async (hash) => {
    doAjax("api/file-manager-action/reset-path/None", data, "reset-path");
}

const createItemAjax = async (type, fname) => {
    const data = "fname=" + fname;
    doAjax("api/create/" + type, data, "create-file");
}

const deleteItemAjax = async (hash) => {
    const data = "empty=NULL";
    doAjax("api/file-manager-action/delete/" + hash, data, "delete-file");
    doAjax("api/delete/" + hash, data, "delete-file");
}

const listFilesAjax = async (hash) => {

@@ -51,18 +51,6 @@ const updateHTMLDirList = async (data) => {
    let isInFaves = data.in_fave;
    let background_image = (images[0]) ? images[0][0] : "";

    if (data.hasOwnProperty("trailer")) {
        let trailerBttn = document.getElementById("trailer-btn");
        let trailerLink = document.getElementById("trailer-link");
        if (data.trailer !== null) {
            trailerBttn.style.display = "";
            trailerLink.href = `javascript: showFile( "Trailer", "${data.trailer}", "", "trailer" )`;
        } else {
            trailerBttn.style.display = "none";
            trailerLink.href = "#";
        }
    }

    document.getElementById("path").innerText = data.path_head;
    // Setup background if there is a 000.* in selection
    if (background_image.match(/000\.(jpg|png|gif)\b/) != null) {

@@ -80,5 +68,8 @@ const updateHTMLDirList = async (data) => {
    else
        tggl_faves_btn.classList.remove("btn-info");


    renderFilesList(data.list);
    loadBackgroundPoster();
    loadThumbnails();
}

@@ -42,12 +42,12 @@ const uploadFiles = (files = null) => {
            file = files[i];
            name = file.name;
            data = createFormDataFiles([file]);
            doAjaxUpload('upload', data, name, "upload-file");
            doAjaxUpload('api/upload', data, name, "upload-file");
        }
    } else {  // Single upload...
        data = createFormDataFiles(files);
        name = files[0].name;
        doAjaxUpload('upload', data, name, "upload-file");
        doAjaxUpload('api/upload', data, name, "upload-file");
    }
}

@@ -69,7 +69,7 @@ class FilesList extends React.Component {
        if (filetype === "video") {
            card_header = name;
            card_body = <React.Fragment>
                            <img class="card-img-top" src={"static/imgs/thumbnails/" + hash + ".jpg?d=" + Date.now()} alt={name} />
                            <img class="card-img-top card-image" src="" alt={name} />
                            <span class="float-right">{fsize}</span>
                        </React.Fragment>;
        } else if (filetype === "image") {

@@ -163,6 +163,7 @@ const renderFilesList = (data = null) => {
        React.createElement(FilesList, data),
        filesListElm
    )

    videoPlaylist = document.body.querySelectorAll('[ftype="video"][value="Open"]');  // With attributes ftype and value
}

@@ -212,6 +212,38 @@ const clearSearch = () => {
    }
}

const loadThumbnails = () => {
    fetchData("api/get-thumbnails").then((data) => {
        const cards_imgs = document.body.querySelectorAll('.card-image');
        const thumbnails = data["thumbnails"];
        for (var i = 0; i < cards_imgs.length; i++) {
            cards_imgs[i].src = "static/imgs/thumbnails/" + thumbnails[i][0] + ".jpg?d=" + Date.now();
        }
    });

}

const loadBackgroundPoster = () => {
    fetchData("api/get-background-poster-trailer").then((data) => {
        if (data.hasOwnProperty("trailer")) {
            let trailerBttn = document.getElementById("trailer-btn");
            let trailerLink = document.getElementById("trailer-link");
            if (data.trailer !== null) {
                trailerBttn.style.display = "";
                trailerLink.href = `javascript: showFile( "Trailer", "${data.trailer}", "", "trailer" )`;
            } else {
                trailerBttn.style.display = "none";
                trailerLink.href = "#";
            }

            if (data.poster !== null) {
                background_image = "api/file-manager-action/files/000.jpg?d=" + Date.now();
                updateBackground(background_image, false);
            }
        }
    });
}


const updateBackground = (srcLink, isvideo = true) => {
    try {

@@ -55,7 +55,7 @@
            <!-- Server Messages -->
            <div id="settings-alert-zone-files"></div>
            <!-- To Upload List -->
            <div id="uploadListTitlesContainer scroller">
            <div id="uploadListTitlesContainer" class="scroller">
                <ul id="uploadListTitles" class="list-group">
                </ul>
            </div>

@@ -1,6 +1,7 @@
# Gtk imports

# Python imports
import json

# Lib imports

# Application imports

@@ -10,5 +11,14 @@ class MessageHandler:
        print("MessageHandler initialized...")


    def create_JSON_message(self, type, text):
    def create(self, type, text):
        return '{"message": { "type": "' + type + '", "text": "' + text + '" } }'

    def backgrounds(self, files):
        return '{ "backgrounds": ' + json.dumps(files) + '}'

    def thumbnails(self, files):
        return '{ "thumbnails": ' + json.dumps(files) + '}'

    def faves_list(self, faves):
        return '{"faves_list":' + json.dumps(faves) + '}'

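
For reference, a minimal sketch of the payloads these helpers produce (values illustrative):

json_message = MessageHandler()
json_message.create("danger", "Not allowed")
# -> '{"message": { "type": "danger", "text": "Not allowed" } }'
json_message.backgrounds(["clouds.webm", "city.jpg"])
# -> '{ "backgrounds": ["clouds.webm", "city.jpg"]}'
json_message.faves_list([["Movies/Action", 1]])
# -> '{"faves_list":[["Movies/Action", 1]]}'
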
@@ -1 +1,3 @@
from .windows import WindowController
"""
Root of ShellFM
"""

@@ -1,22 +0,0 @@
from .view import View


class Window:
    def __init__(self):
        self.name = ""
        self.nickname = ""
        self.id = 0
        self.views = []

    def create_view(self):
        view = View()
        self.views.append(view)

    def pop_view(self):
        self.views.pop()

    def delete_view(self, index):
        del self.views[index]

    def get_view(self, index):
        return self.views[index]

@@ -1,67 +0,0 @@
from . import Window


class WindowController:
    def __init__(self):
        self.windows = []
        self.add_window()

    def get_window(self, win_id):
        for window in self.windows:
            if window.id == win_id:
                return window

        raise(f"No Window by ID {win_id} found!")

    def get_windows(self):
        return self.windows

    def add_window(self):
        window = Window()
        window.id = len(self.windows) + 1
        window.name = f"window_{window.id}"
        window.create_view()
        self.windows.append(window)

    def add_view_for_window(self, win_id):
        for window in self.windows:
            if window.id == win_id:
                window.create_view()
                break

    def pop_window(self):
        self.windows.pop()

    def delete_window_by_id(self, win_id):
        i = 0
        for window in self.windows:
            if window.id == win_id:
                self.window.remove(win_id)
                break
            i += 1

    def set_window_nickname(self, win_id = None, nickname = ""):
        for window in self.windows:
            if window.id == win_id:
                window.nickname = nickname

    def list_windows(self):
        for window in self.windows:
            print("\n[ Window ]")
            print("ID: {window.id}")
            print("Name: {window.name}")
            print("Nickname: {window.nickname}")
            print("View Count: {len(window.views)}")


    def list_views_from_window(self, win_id):
        for window in self.windows:
            if window.id == win_id:
                for view in window.views:
                    print(view.files)
                break

    def return_views_from_window(self, win_id):
        for window in self.windows:
            if window.id == win_id:
                return window.views

@@ -1,2 +1,3 @@
from .Window import Window
from .WindowController import WindowController
"""
Window module
"""

@@ -0,0 +1,179 @@
# Python imports
import json
from os import path

# Lib imports

# Application imports
from .window import Window




class WindowController:
    def __init__(self):
        USER_HOME: str = path.expanduser('~')
        CONFIG_PATH: str = f"{USER_HOME}/.config/{app_name.lower()}"
        self._session_file: str = f"{CONFIG_PATH}/session.json"

        self._event_sleep_time: int = 1
        self._active_window_id: str = ""
        self._active_tab_id: str = ""
        self._windows: list = []


    def set_wid_and_tid(self, wid: int, tid: int) -> None:
        self._active_window_id = str(wid)
        self._active_tab_id = str(tid)

    def get_active_wid_and_tid(self) -> list:
        return self._active_window_id, self._active_tab_id

    def create_window(self) -> Window:
        window = Window()
        window.set_nickname(f"window_{str(len(self._windows) + 1)}")
        self._windows.append(window)
        return window


    def add_tab_for_window(self, win_id: str) -> None:
        for window in self._windows:
            if window.get_id() == win_id:
                return window.create_tab()

    def add_tab_for_window_by_name(self, name: str) -> None:
        for window in self._windows:
            if window.get_name() == name:
                return window.create_tab()

    def add_tab_for_window_by_nickname(self, nickname: str) -> None:
        for window in self._windows:
            if window.get_nickname() == nickname:
                return window.create_tab()

    def pop_window(self) -> None:
        self._windows.pop()

    def delete_window_by_id(self, win_id: str) -> None:
        for window in self._windows:
            if window.get_id() == win_id:
                self._windows.remove(window)
                break

    def delete_window_by_name(self, name: str) -> str:
        for window in self._windows:
            if window.get_name() == name:
                self._windows.remove(window)
                break

    def delete_window_by_nickname(self, nickname: str) -> str:
        for window in self._windows:
            if window.get_nickname() == nickname:
                self._windows.remove(window)
                break

    def get_window_by_id(self, win_id: str) -> Window:
        for window in self._windows:
            if window.get_id() == win_id:
                return window

        raise(f"No Window by ID {win_id} found!")

    def get_window_by_name(self, name: str) -> Window:
        for window in self._windows:
            if window.get_name() == name:
                return window

        raise(f"No Window by Name {name} found!")

    def get_window_by_nickname(self, nickname: str) -> Window:
        for window in self._windows:
            if window.get_nickname() == nickname:
                return window

        raise(f"No Window by Nickname {nickname} found!")

    def get_window_by_index(self, index: int) -> Window:
        return self._windows[index]

    def get_all_windows(self) -> list:
        return self._windows


    def set_window_nickname(self, win_id: str = None, nickname: str = "") -> None:
        for window in self._windows:
            if window.get_id() == win_id:
                window.set_nickname(nickname)

    def list_windows(self) -> None:
        print("\n[ ---- Windows ---- ]\n")
        for window in self._windows:
            print(f"\nID: {window.get_id()}")
            print(f"Name: {window.get_name()}")
            print(f"Nickname: {window.get_nickname()}")
            print(f"Is Hidden: {window.is_hidden()}")
            print(f"Tab Count: {window.get_tabs_count()}")
            print("\n-------------------------\n")



    def list_files_from_tabs_of_window(self, win_id: str) -> None:
        for window in self._windows:
            if window.get_id() == win_id:
                window.list_files_from_tabs()
                break

    def get_tabs_count(self, win_id: str) -> int:
        for window in self._windows:
            if window.get_id() == win_id:
                return window.get_tabs_count()

    def get_tabs_from_window(self, win_id: str) -> list:
        for window in self._windows:
            if window.get_id() == win_id:
                return window.get_all_tabs()




    def unload_tabs_and_windows(self) -> None:
        for window in self._windows:
            window.get_all_tabs().clear()

        self._windows.clear()

    def save_state(self, session_file: str = None) -> None:
        if not session_file:
            session_file = self._session_file

        if len(self._windows) > 0:
            windows = []
            for window in self._windows:
                tabs = []
                for tab in window.get_all_tabs():
                    tabs.append(tab.get_current_directory())

                windows.append(
                    {
                        'window': {
                            "ID": window.get_id(),
                            "Name": window.get_name(),
                            "Nickname": window.get_nickname(),
                            "isHidden": f"{window.is_hidden()}",
                            'tabs': tabs
                        }
                    }
                )

            with open(session_file, 'w') as outfile:
                json.dump(windows, outfile, separators=(',', ':'), indent=4)
        else:
            raise Exception("Window data corrupted! Can not save session!")

    def get_state_from_file(self, session_file: str = None) -> dict:
        if not session_file:
            session_file = self._session_file

        if path.isfile(session_file):
            with open(session_file) as infile:
                return json.load(infile)

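
For reference, save_state() above serializes the open windows to session.json roughly in this shape (IDs and paths are illustrative):

[
    {
        "window": {
            "ID": "f3b19c",
            "Name": "window_1",
            "Nickname": "window_1",
            "isHidden": "False",
            "tabs": [
                "/home/user/Videos"
            ]
        }
    }
]
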
@@ -0,0 +1,3 @@
"""
Tabs module
"""

@@ -0,0 +1,3 @@
"""
Icons module
"""

@@ -0,0 +1,192 @@
# Python imports
import os
from os.path import isfile
import hashlib
import threading

# Lib imports
import gi
gi.require_version('GdkPixbuf', '2.0')
from gi.repository import GLib
from gi.repository import Gio
from gi.repository import GdkPixbuf

try:
    from PIL import Image as PImage
except ModuleNotFoundError as e:
    PImage = None

# Application imports
from .mixins.videoiconmixin import VideoIconMixin
from .mixins.meshsiconmixin import MeshsIconMixin
from .mixins.desktopiconmixin import DesktopIconMixin




class IconException(Exception):
    ...



class Icon(DesktopIconMixin, VideoIconMixin, MeshsIconMixin):
    def create_icon(self, dir, file):
        full_path = f"{dir}/{file}"
        return self.get_icon_image(dir, file, full_path)

    def get_icon_image(self, dir, file, full_path):
        try:
            thumbnl = self._get_system_thumbnail_gtk_thread(full_path, self.sys_icon_wh[0])

            if file.lower().endswith(self.fmeshs):              # 3D Mesh icon
                ...
            if file.lower().endswith(self.fvideos):             # Video icon
                thumbnl = self.create_video_thumbnail(full_path)
            elif file.lower().endswith(self.fimages):           # Image Icon
                thumbnl = self.create_scaled_image(full_path)
            elif file.lower().endswith( (".blend",) ):          # Blender icon
                thumbnl = self.create_blender_thumbnail(full_path)
            elif full_path.lower().endswith( ('.desktop',) ):   # .desktop file parsing
                thumbnl = self.find_thumbnail_from_desktop_file(full_path)

            if not thumbnl:
                raise IconException("No known icons found.")

            return thumbnl
        except IconException:
            ...

        return self.get_generic_icon()

    def create_blender_thumbnail(self, full_path, returnHashInstead=False):
        try:
            path_exists, img_hash, hash_img_path = self.generate_hash_and_path(full_path)
            if not path_exists:
                self.generate_blender_thumbnail(full_path, hash_img_path)

            if returnHashInstead:
                return img_hash, hash_img_path

            return self.create_scaled_image(hash_img_path, self.video_icon_wh)
        except IconException as e:
            print("Blender thumbnail generation issue:")
            print( repr(e) )

        return None

    def create_video_thumbnail(self, full_path, scrub_percent = "65%", replace=False, returnHashInstead=False):
        try:
            path_exists, img_hash, hash_img_path = self.generate_hash_and_path(full_path)
            if path_exists and replace:
                os.remove(hash_img_path)
                path_exists = False

            if not path_exists:
                self.generate_video_thumbnail(full_path, hash_img_path, scrub_percent)

            if returnHashInstead:
                return img_hash, hash_img_path

            return self.create_scaled_image(hash_img_path, self.video_icon_wh)
        except IconException as e:
            print("Image/Video thumbnail generation issue:")
            print( repr(e) )

        return None


    def create_scaled_image(self, full_path, wxh = None):
        if not wxh:
            wxh = self.video_icon_wh

        if full_path:
            try:
                if full_path.lower().endswith(".gif"):
                    return GdkPixbuf.PixbufAnimation.new_from_file(full_path) \
                                        .get_static_image() \
                                        .scale_simple(wxh[0], wxh[1], GdkPixbuf.InterpType.BILINEAR)
                elif full_path.lower().endswith(".webp") and PImage:
                    return self.image2pixbuf(full_path, wxh)

                return GdkPixbuf.Pixbuf.new_from_file_at_scale(full_path, wxh[0], wxh[1], True)
            except IconException as e:
                print("Image Scaling Issue:")
                print( repr(e) )

        return None

    def create_from_file(self, full_path):
        try:
            return GdkPixbuf.Pixbuf.new_from_file(full_path)
        except IconException as e:
            print("Image from file Issue:")
            print( repr(e) )

        return None

    def _get_system_thumbnail_gtk_thread(self, full_path, size):
        def _call_gtk_thread(event, result):
            result.append( self.get_system_thumbnail(full_path, size) )
            event.set()

        result = []
        event = threading.Event()
        GLib.idle_add(_call_gtk_thread, event, result)
        event.wait()
        return result[0]


    def get_system_thumbnail(self, full_path, size):
        try:
            gio_file = Gio.File.new_for_path(full_path)
            info = gio_file.query_info('standard::icon', 0, None)
            icon = info.get_icon().get_names()[0]
            data = settings.get_icon_theme().lookup_icon(icon, size, 0)

            if data:
                icon_path = data.get_filename()
                return GdkPixbuf.Pixbuf.new_from_file(icon_path)

            raise IconException("No system icon found...")
        except IconException:
            ...

        return None

    def get_generic_icon(self):
        return GdkPixbuf.Pixbuf.new_from_file(self.DEFAULT_ICON)

    def generate_hash_and_path(self, full_path):
        img_hash = self.fast_hash(full_path)
        hash_img_path = f"{self.ABS_THUMBS_PTH}/{img_hash}.jpg"
        path_exists = True if isfile(hash_img_path) else False

        return path_exists, img_hash, hash_img_path


    def fast_hash(self, filename, hash_factory=hashlib.md5, chunk_num_blocks=128, i=1):
        h = hash_factory()
        with open(filename, 'rb') as f:
            f.seek(0, 2)
            mid = int(f.tell() / 2)
            f.seek(mid, 0)

            while chunk := f.read(chunk_num_blocks*h.block_size):
                h.update(chunk)
                if (i == 12):
                    break

                i += 1

        return h.hexdigest()

    def image2pixbuf(self, full_path, wxh):
        """Convert Pillow image to GdkPixbuf"""
        im = PImage.open(full_path)
        data = im.tobytes()
        data = GLib.Bytes.new(data)
        w, h = im.size

        pixbuf = GdkPixbuf.Pixbuf.new_from_bytes(data, GdkPixbuf.Colorspace.RGB,
                                                 False, 8, w, h, w * 3)

        return pixbuf.scale_simple(wxh[0], wxh[1], 2)  # BILINEAR = 2

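
Worth noting: fast_hash() above seeks to the middle of the file and digests at most twelve chunks, so thumbnails are keyed by a cheap partial digest rather than a hash of the whole file. A standalone sketch of the same idea (the function name and file path are hypothetical):

import hashlib

def partial_md5(path, chunk_num_blocks=128, max_chunks=12):
    h = hashlib.md5()
    with open(path, "rb") as f:
        f.seek(0, 2)             # jump to EOF to learn the file size
        f.seek(f.tell() // 2)    # start hashing from the midpoint
        for _ in range(max_chunks):
            chunk = f.read(chunk_num_blocks * h.block_size)
            if not chunk:
                break
            h.update(chunk)
    return h.hexdigest()         # hex digest string used as the thumbnail key
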
@@ -0,0 +1,3 @@
"""
Icons mixins module
"""

@@ -0,0 +1,76 @@
# Python imports
import os
from os.path import isfile
import subprocess
import hashlib

# Lib imports
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from gi.repository import Gio

# Application imports
from .xdg.DesktopEntry import DesktopEntry




class DesktopIconMixin:
    def find_thumbnail_from_desktop_file(self, full_path):
        try:
            xdgObj = DesktopEntry(full_path)
            icon = xdgObj.getIcon()
            alt_icon_path = ""

            if "steam" in icon:
                return self.get_steam_img(xdgObj)
            elif os.path.exists(icon):
                return self.create_scaled_image(icon, self.sys_icon_wh)
            else:
                pixbuf, alt_icon_path = self.get_icon_from_traversal(icon)
                if pixbuf:
                    return pixbuf

                return self.create_scaled_image(alt_icon_path, self.sys_icon_wh)
        except Exception as e:
            print(".desktop icon generation issue:")
            print( repr(e) )

        return None


    def get_steam_img(self, xdgObj):
        name = xdgObj.getName()
        file_hash = hashlib.sha256(str.encode(name)).hexdigest()
        hash_img_pth = f"{self.STEAM_ICONS_PTH}/{file_hash}.jpg"

        if not isfile(hash_img_pth):
            exec_str = xdgObj.getExec()
            parts = exec_str.split("steam://rungameid/")
            id = parts[len(parts) - 1]
            imageLink = f"{self.STEAM_CDN_URL}{id}/header.jpg"
            proc = subprocess.Popen(["wget", "-O", hash_img_pth, imageLink])
            proc.wait()

        return self.create_scaled_image(hash_img_pth, self.video_icon_wh)

    def get_icon_from_traversal(self, icon):
        gio_icon = Gio.Icon.new_for_string(icon)
        gicon = Gtk.Image.new_from_gicon(gio_icon, 32)
        pixbuf = gicon.get_pixbuf()

        alt_icon_path = ""
        for dir in self.ICON_DIRS:
            alt_icon_path = self.traverse_icons_folder(dir, icon)
            if alt_icon_path != "":
                break

        return pixbuf, alt_icon_path

    def traverse_icons_folder(self, path, icon):
        for (dirpath, dirnames, filenames) in os.walk(path):
            for file in filenames:
                appNM = "application-x-" + icon
                if icon in file or appNM in file:
                    return f"{dirpath}/{file}"

@@ -0,0 +1,17 @@
# Python imports
import subprocess

# Lib imports

# Application imports




class MeshsIconMixin:
    def generate_blender_thumbnail(self, full_path, hash_img_path):
        try:
            proc = subprocess.Popen([self.BLENDER_THUMBNLR, full_path, hash_img_path])
            proc.wait()
        except Exception as e:
            self.logger.debug(repr(e))

@@ -0,0 +1,55 @@
# Python imports
import subprocess

# Lib imports

# Application imports




class VideoIconMixin:
    def generate_video_thumbnail(self, full_path, hash_img_path, scrub_percent = "65%"):
        try:
            proc = subprocess.Popen([self.FFMPG_THUMBNLR, "-t", scrub_percent, "-s", "300", "-c", "jpg", "-i", full_path, "-o", hash_img_path])
            proc.wait()
        except Exception as e:
            self.logger.debug(repr(e))
            self.ffprobe_generate_video_thumbnail(full_path, hash_img_path)


    def ffprobe_generate_video_thumbnail(self, full_path, hash_img_path):
        proc = None
        try:
            # Stream duration
            command = ["ffprobe", "-v", "error", "-select_streams", "v:0", "-show_entries", "stream=duration", "-of", "default=noprint_wrappers=1:nokey=1", full_path]
            data = subprocess.run(command, stdout=subprocess.PIPE)
            duration = data.stdout.decode('utf-8')

            # Format (container) duration
            if "N/A" in duration:
                command = ["ffprobe", "-v", "error", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", full_path]
                data = subprocess.run(command, stdout=subprocess.PIPE)
                duration = data.stdout.decode('utf-8')

            # Stream duration type: image2
            if "N/A" in duration:
                command = ["ffprobe", "-v", "error", "-select_streams", "v:0", "-f", "image2", "-show_entries", "stream=duration", "-of", "default=noprint_wrappers=1:nokey=1", full_path]
                data = subprocess.run(command, stdout=subprocess.PIPE)
                duration = data.stdout.decode('utf-8')

            # Format (container) duration type: image2
            if "N/A" in duration:
                command = ["ffprobe", "-v", "error", "-f", "image2", "-show_entries", "format=duration", "-of", "default=noprint_wrappers=1:nokey=1", full_path]
                data = subprocess.run(command, stdout=subprocess.PIPE)
                duration = data.stdout.decode('utf-8')

            # Get frame roughly 35% through video
            grabTime = str( int( float( duration.split(".")[0] ) * 0.35) )
            command = ["ffmpeg", "-ss", grabTime, "-an", "-i", full_path, "-s", "320x180", "-vframes", "1", hash_img_path]
            proc = subprocess.Popen(command, stdout=subprocess.PIPE)
            proc.wait()
        except Exception as e:
            print("Video thumbnail generation issue in thread:")
            print( repr(e) )
            self.logger.debug(repr(e))

@@ -0,0 +1,160 @@
"""
This module is based on a rox module (LGPL):

http://cvs.sourceforge.net/viewcvs.py/rox/ROX-Lib2/python/rox/basedir.py?rev=1.9&view=log

The freedesktop.org Base Directory specification provides a way for
applications to locate shared data and configuration:

http://standards.freedesktop.org/basedir-spec/

(based on version 0.6)

This module can be used to load and save from and to these directories.

Typical usage:

    from rox import basedir

    for dir in basedir.load_config_paths('mydomain.org', 'MyProg', 'Options'):
        print "Load settings from", dir

    dir = basedir.save_config_path('mydomain.org', 'MyProg')
    print >>file(os.path.join(dir, 'Options'), 'w'), "foo=2"

Note: see the rox.Options module for a higher-level API for managing options.
"""

import os, stat

_home = os.path.expanduser('~')
xdg_data_home = os.environ.get('XDG_DATA_HOME') or \
            os.path.join(_home, '.local', 'share')

xdg_data_dirs = [xdg_data_home] + \
    (os.environ.get('XDG_DATA_DIRS') or '/usr/local/share:/usr/share').split(':')

xdg_config_home = os.environ.get('XDG_CONFIG_HOME') or \
            os.path.join(_home, '.config')

xdg_config_dirs = [xdg_config_home] + \
    (os.environ.get('XDG_CONFIG_DIRS') or '/etc/xdg').split(':')

xdg_cache_home = os.environ.get('XDG_CACHE_HOME') or \
            os.path.join(_home, '.cache')

xdg_data_dirs = [x for x in xdg_data_dirs if x]
xdg_config_dirs = [x for x in xdg_config_dirs if x]

def save_config_path(*resource):
    """Ensure ``$XDG_CONFIG_HOME/<resource>/`` exists, and return its path.
    'resource' should normally be the name of your application. Use this
    when saving configuration settings.
    """
    resource = os.path.join(*resource)
    assert not resource.startswith('/')
    path = os.path.join(xdg_config_home, resource)
    if not os.path.isdir(path):
        os.makedirs(path, 0o700)
    return path

def save_data_path(*resource):
    """Ensure ``$XDG_DATA_HOME/<resource>/`` exists, and return its path.
    'resource' should normally be the name of your application or a shared
    resource. Use this when saving or updating application data.
    """
    resource = os.path.join(*resource)
    assert not resource.startswith('/')
    path = os.path.join(xdg_data_home, resource)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path

def save_cache_path(*resource):
    """Ensure ``$XDG_CACHE_HOME/<resource>/`` exists, and return its path.
    'resource' should normally be the name of your application or a shared
    resource."""
    resource = os.path.join(*resource)
    assert not resource.startswith('/')
    path = os.path.join(xdg_cache_home, resource)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path

def load_config_paths(*resource):
    """Returns an iterator which gives each directory named 'resource' in the
    configuration search path. Information provided by earlier directories should
    take precedence over later ones, and the user-specific config dir comes
    first."""
    resource = os.path.join(*resource)
    for config_dir in xdg_config_dirs:
        path = os.path.join(config_dir, resource)
        if os.path.exists(path): yield path

def load_first_config(*resource):
    """Returns the first result from load_config_paths, or None if there is nothing
    to load."""
    for x in load_config_paths(*resource):
        return x
    return None

def load_data_paths(*resource):
    """Returns an iterator which gives each directory named 'resource' in the
    application data search path. Information provided by earlier directories
    should take precedence over later ones."""
    resource = os.path.join(*resource)
    for data_dir in xdg_data_dirs:
        path = os.path.join(data_dir, resource)
        if os.path.exists(path): yield path

def get_runtime_dir(strict=True):
    """Returns the value of $XDG_RUNTIME_DIR, a directory path.

    This directory is intended for 'user-specific non-essential runtime files
    and other file objects (such as sockets, named pipes, ...)', and
    'communication and synchronization purposes'.

    As of late 2012, only quite new systems set $XDG_RUNTIME_DIR. If it is not
    set, with ``strict=True`` (the default), a KeyError is raised. With
    ``strict=False``, PyXDG will create a fallback under /tmp for the current
    user. This fallback does *not* provide the same guarantees as the
    specification requires for the runtime directory.

    The strict default is deliberately conservative, so that application
    developers can make a conscious decision to allow the fallback.
    """
    try:
        return os.environ['XDG_RUNTIME_DIR']
    except KeyError:
        if strict:
            raise

        import getpass
        fallback = '/tmp/pyxdg-runtime-dir-fallback-' + getpass.getuser()
        create = False

        try:
            # This must be a real directory, not a symlink, so attackers can't
            # point it elsewhere. So we use lstat to check it.
            st = os.lstat(fallback)
        except OSError as e:
            import errno
|
||||
if e.errno == errno.ENOENT:
|
||||
create = True
|
||||
else:
|
||||
raise
|
||||
else:
|
||||
# The fallback must be a directory
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
os.unlink(fallback)
|
||||
create = True
|
||||
# Must be owned by the user and not accessible by anyone else
|
||||
elif (st.st_uid != os.getuid()) \
|
||||
or (st.st_mode & (stat.S_IRWXG | stat.S_IRWXO)):
|
||||
os.rmdir(fallback)
|
||||
create = True
|
||||
|
||||
if create:
|
||||
os.mkdir(fallback, 0o700)
|
||||
|
||||
return fallback
|
|
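A minimal usage sketch for the directory helpers above, assuming the vendored package is importable as "xdg"; the resource name "myapp" is only an illustration:

from xdg import BaseDirectory

config_dir = BaseDirectory.save_config_path("myapp")        # ensures ~/.config/myapp exists (mode 0700)
for data_dir in BaseDirectory.load_data_paths("myapp", "icons"):
    print("data dir:", data_dir)                            # user dir first, then system dirs
runtime_dir = BaseDirectory.get_runtime_dir(strict=False)   # /tmp fallback if $XDG_RUNTIME_DIR is unset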
@ -0,0 +1,39 @@
|
|||
"""
|
||||
Functions to configure Basic Settings
|
||||
"""
|
||||
|
||||
language = "C"
|
||||
windowmanager = None
|
||||
icon_theme = "hicolor"
|
||||
icon_size = 48
|
||||
cache_time = 5
|
||||
root_mode = False
|
||||
|
||||
def setWindowManager(wm):
|
||||
global windowmanager
|
||||
windowmanager = wm
|
||||
|
||||
def setIconTheme(theme):
|
||||
global icon_theme
|
||||
icon_theme = theme
|
||||
from . import IconTheme
|
||||
IconTheme.themes = []
|
||||
|
||||
def setIconSize(size):
|
||||
global icon_size
|
||||
icon_size = size
|
||||
|
||||
def setCacheTime(time):
|
||||
global cache_time
|
||||
cache_time = time
|
||||
|
||||
def setLocale(lang):
|
||||
import locale
|
||||
lang = locale.normalize(lang)
|
||||
locale.setlocale(locale.LC_ALL, lang)
|
||||
from . import Locale
|
||||
Locale.update(lang)
|
||||
|
||||
def setRootMode(boolean):
|
||||
global root_mode
|
||||
root_mode = boolean
|
|
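These setters only mutate module-level defaults that the icon lookup code further below reads at call time. A small sketch, assuming the package imports as "xdg"; the theme name is just an example:

from xdg import Config, IconTheme

Config.setIconTheme("Adwaita")            # also clears IconTheme's parsed-theme cache
Config.setIconSize(32)
print(IconTheme.getIconPath("folder"))    # now resolved against the new theme and size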
@ -0,0 +1,435 @@
|
|||
"""
|
||||
Complete implementation of the XDG Desktop Entry Specification
|
||||
http://standards.freedesktop.org/desktop-entry-spec/
|
||||
|
||||
Not supported:
|
||||
- Encoding: Legacy Mixed
|
||||
- Does not check exec parameters
|
||||
- Does not check URLs
|
||||
- Does not completely validate deprecated/KDE items
|
||||
- Does not completely check categories
|
||||
"""
|
||||
|
||||
from .IniFile import IniFile
|
||||
from . import Locale
|
||||
|
||||
from .IniFile import is_ascii
|
||||
|
||||
from .Exceptions import ParsingError
|
||||
from .util import which
|
||||
import os.path
|
||||
import re
|
||||
import warnings
|
||||
|
||||
class DesktopEntry(IniFile):
|
||||
"Class to parse and validate Desktop Entries"
|
||||
|
||||
defaultGroup = 'Desktop Entry'
|
||||
|
||||
def __init__(self, filename=None):
|
||||
"""Create a new DesktopEntry.
|
||||
|
||||
If filename exists, it will be parsed as a desktop entry file. If not,
|
||||
or if filename is None, a blank DesktopEntry is created.
|
||||
"""
|
||||
self.content = dict()
|
||||
if filename and os.path.exists(filename):
|
||||
self.parse(filename)
|
||||
elif filename:
|
||||
self.new(filename)
|
||||
|
||||
def __str__(self):
|
||||
return self.getName()
|
||||
|
||||
def parse(self, file):
|
||||
"""Parse a desktop entry file.
|
||||
|
||||
This can raise :class:`~xdg.Exceptions.ParsingError`,
|
||||
:class:`~xdg.Exceptions.DuplicateGroupError` or
|
||||
:class:`~xdg.Exceptions.DuplicateKeyError`.
|
||||
"""
|
||||
IniFile.parse(self, file, ["Desktop Entry", "KDE Desktop Entry"])
|
||||
|
||||
def findTryExec(self):
|
||||
"""Looks in the PATH for the executable given in the TryExec field.
|
||||
|
||||
Returns the full path to the executable if it is found, None if not.
|
||||
Raises :class:`~xdg.Exceptions.NoKeyError` if TryExec is not present.
|
||||
"""
|
||||
tryexec = self.get('TryExec', strict=True)
|
||||
return which(tryexec)
|
||||
|
||||
# start standard keys
|
||||
def getType(self):
|
||||
return self.get('Type')
|
||||
def getVersion(self):
|
||||
"""deprecated, use getVersionString instead """
|
||||
return self.get('Version', type="numeric")
|
||||
def getVersionString(self):
|
||||
return self.get('Version')
|
||||
def getName(self):
|
||||
return self.get('Name', locale=True)
|
||||
def getGenericName(self):
|
||||
return self.get('GenericName', locale=True)
|
||||
def getNoDisplay(self):
|
||||
return self.get('NoDisplay', type="boolean")
|
||||
def getComment(self):
|
||||
return self.get('Comment', locale=True)
|
||||
def getIcon(self):
|
||||
return self.get('Icon', locale=True)
|
||||
def getHidden(self):
|
||||
return self.get('Hidden', type="boolean")
|
||||
def getOnlyShowIn(self):
|
||||
return self.get('OnlyShowIn', list=True)
|
||||
def getNotShowIn(self):
|
||||
return self.get('NotShowIn', list=True)
|
||||
def getTryExec(self):
|
||||
return self.get('TryExec')
|
||||
def getExec(self):
|
||||
return self.get('Exec')
|
||||
def getPath(self):
|
||||
return self.get('Path')
|
||||
def getTerminal(self):
|
||||
return self.get('Terminal', type="boolean")
|
||||
def getMimeType(self):
|
||||
"""deprecated, use getMimeTypes instead """
|
||||
return self.get('MimeType', list=True, type="regex")
|
||||
def getMimeTypes(self):
|
||||
return self.get('MimeType', list=True)
|
||||
def getCategories(self):
|
||||
return self.get('Categories', list=True)
|
||||
def getStartupNotify(self):
|
||||
return self.get('StartupNotify', type="boolean")
|
||||
def getStartupWMClass(self):
|
||||
return self.get('StartupWMClass')
|
||||
def getURL(self):
|
||||
return self.get('URL')
|
||||
# end standard keys
|
||||
|
||||
# start kde keys
|
||||
def getServiceTypes(self):
|
||||
return self.get('ServiceTypes', list=True)
|
||||
def getDocPath(self):
|
||||
return self.get('DocPath')
|
||||
def getKeywords(self):
|
||||
return self.get('Keywords', list=True, locale=True)
|
||||
def getInitialPreference(self):
|
||||
return self.get('InitialPreference')
|
||||
def getDev(self):
|
||||
return self.get('Dev')
|
||||
def getFSType(self):
|
||||
return self.get('FSType')
|
||||
def getMountPoint(self):
|
||||
return self.get('MountPoint')
|
||||
def getReadonly(self):
|
||||
return self.get('ReadOnly', type="boolean")
|
||||
def getUnmountIcon(self):
|
||||
return self.get('UnmountIcon', locale=True)
|
||||
# end kde keys
|
||||
|
||||
# start deprecated keys
|
||||
def getMiniIcon(self):
|
||||
return self.get('MiniIcon', locale=True)
|
||||
def getTerminalOptions(self):
|
||||
return self.get('TerminalOptions')
|
||||
def getDefaultApp(self):
|
||||
return self.get('DefaultApp')
|
||||
def getProtocols(self):
|
||||
return self.get('Protocols', list=True)
|
||||
def getExtensions(self):
|
||||
return self.get('Extensions', list=True)
|
||||
def getBinaryPattern(self):
|
||||
return self.get('BinaryPattern')
|
||||
def getMapNotify(self):
|
||||
return self.get('MapNotify')
|
||||
def getEncoding(self):
|
||||
return self.get('Encoding')
|
||||
def getSwallowTitle(self):
|
||||
return self.get('SwallowTitle', locale=True)
|
||||
def getSwallowExec(self):
|
||||
return self.get('SwallowExec')
|
||||
def getSortOrder(self):
|
||||
return self.get('SortOrder', list=True)
|
||||
def getFilePattern(self):
|
||||
return self.get('FilePattern', type="regex")
|
||||
def getActions(self):
|
||||
return self.get('Actions', list=True)
|
||||
# end deprecated keys
|
||||
|
||||
# desktop entry edit stuff
|
||||
def new(self, filename):
|
||||
"""Make this instance into a new, blank desktop entry.
|
||||
|
||||
If filename has a .desktop extension, Type is set to Application. If it
|
||||
has a .directory extension, Type is Directory. Other extensions will
|
||||
cause :class:`~xdg.Exceptions.ParsingError` to be raised.
|
||||
"""
|
||||
if os.path.splitext(filename)[1] == ".desktop":
|
||||
type = "Application"
|
||||
elif os.path.splitext(filename)[1] == ".directory":
|
||||
type = "Directory"
|
||||
else:
|
||||
raise ParsingError("Unknown extension", filename)
|
||||
|
||||
self.content = dict()
|
||||
self.addGroup(self.defaultGroup)
|
||||
self.set("Type", type)
|
||||
self.filename = filename
|
||||
# end desktop entry edit stuff
|
||||
|
||||
# validation stuff
|
||||
def checkExtras(self):
|
||||
# header
|
||||
if self.defaultGroup == "KDE Desktop Entry":
|
||||
self.warnings.append('[KDE Desktop Entry]-Header is deprecated')
|
||||
|
||||
# file extension
|
||||
if self.fileExtension == ".kdelnk":
|
||||
self.warnings.append("File extension .kdelnk is deprecated")
|
||||
elif self.fileExtension != ".desktop" and self.fileExtension != ".directory":
|
||||
self.warnings.append('Unknown File extension')
|
||||
|
||||
# Type
|
||||
try:
|
||||
self.type = self.content[self.defaultGroup]["Type"]
|
||||
except KeyError:
|
||||
self.errors.append("Key 'Type' is missing")
|
||||
|
||||
# Name
|
||||
try:
|
||||
self.name = self.content[self.defaultGroup]["Name"]
|
||||
except KeyError:
|
||||
self.errors.append("Key 'Name' is missing")
|
||||
|
||||
def checkGroup(self, group):
|
||||
# check if group header is valid
|
||||
if not (group == self.defaultGroup \
|
||||
or re.match("^Desktop Action [a-zA-Z0-9-]+$", group) \
|
||||
or (re.match("^X-", group) and is_ascii(group))):
|
||||
self.errors.append("Invalid Group name: %s" % group)
|
||||
else:
|
||||
#OnlyShowIn and NotShowIn
|
||||
if ("OnlyShowIn" in self.content[group]) and ("NotShowIn" in self.content[group]):
|
||||
self.errors.append("Group may either have OnlyShowIn or NotShowIn, but not both")
|
||||
|
||||
def checkKey(self, key, value, group):
|
||||
# standard keys
|
||||
if key == "Type":
|
||||
if value == "ServiceType" or value == "Service" or value == "FSDevice":
|
||||
self.warnings.append("Type=%s is a KDE extension" % key)
|
||||
elif value == "MimeType":
|
||||
self.warnings.append("Type=MimeType is deprecated")
|
||||
elif not (value == "Application" or value == "Link" or value == "Directory"):
|
||||
self.errors.append("Value of key 'Type' must be Application, Link or Directory, but is '%s'" % value)
|
||||
|
||||
if self.fileExtension == ".directory" and not value == "Directory":
|
||||
self.warnings.append("File extension is .directory, but Type is '%s'" % value)
|
||||
elif self.fileExtension == ".desktop" and value == "Directory":
|
||||
self.warnings.append("Files with Type=Directory should have the extension .directory")
|
||||
|
||||
if value == "Application":
|
||||
if "Exec" not in self.content[group]:
|
||||
self.warnings.append("Type=Application needs 'Exec' key")
|
||||
if value == "Link":
|
||||
if "URL" not in self.content[group]:
|
||||
self.warnings.append("Type=Link needs 'URL' key")
|
||||
|
||||
elif key == "Version":
|
||||
self.checkValue(key, value)
|
||||
|
||||
elif re.match("^Name"+xdg.Locale.regex+"$", key):
|
||||
pass # locale string
|
||||
|
||||
elif re.match("^GenericName"+xdg.Locale.regex+"$", key):
|
||||
pass # locale string
|
||||
|
||||
elif key == "NoDisplay":
|
||||
self.checkValue(key, value, type="boolean")
|
||||
|
||||
elif re.match("^Comment"+xdg.Locale.regex+"$", key):
|
||||
pass # locale string
|
||||
|
||||
elif re.match("^Icon"+xdg.Locale.regex+"$", key):
|
||||
self.checkValue(key, value)
|
||||
|
||||
elif key == "Hidden":
|
||||
self.checkValue(key, value, type="boolean")
|
||||
|
||||
elif key == "OnlyShowIn":
|
||||
self.checkValue(key, value, list=True)
|
||||
self.checkOnlyShowIn(value)
|
||||
|
||||
elif key == "NotShowIn":
|
||||
self.checkValue(key, value, list=True)
|
||||
self.checkOnlyShowIn(value)
|
||||
|
||||
elif key == "TryExec":
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "Exec":
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "Path":
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "Terminal":
|
||||
self.checkValue(key, value, type="boolean")
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "Actions":
|
||||
self.checkValue(key, value, list=True)
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "MimeType":
|
||||
self.checkValue(key, value, list=True)
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "Categories":
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "Application")
|
||||
self.checkCategories(value)
|
||||
|
||||
elif re.match("^Keywords"+xdg.Locale.regex+"$", key):
|
||||
self.checkValue(key, value, type="localestring", list=True)
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "StartupNotify":
|
||||
self.checkValue(key, value, type="boolean")
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "StartupWMClass":
|
||||
self.checkType(key, "Application")
|
||||
|
||||
elif key == "URL":
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "URL")
|
||||
|
||||
# kde extensions
|
||||
elif key == "ServiceTypes":
|
||||
self.checkValue(key, value, list=True)
|
||||
self.warnings.append("Key '%s' is a KDE extension" % key)
|
||||
|
||||
elif key == "DocPath":
|
||||
self.checkValue(key, value)
|
||||
self.warnings.append("Key '%s' is a KDE extension" % key)
|
||||
|
||||
elif key == "InitialPreference":
|
||||
self.checkValue(key, value, type="numeric")
|
||||
self.warnings.append("Key '%s' is a KDE extension" % key)
|
||||
|
||||
elif key == "Dev":
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "FSDevice")
|
||||
self.warnings.append("Key '%s' is a KDE extension" % key)
|
||||
|
||||
elif key == "FSType":
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "FSDevice")
|
||||
self.warnings.append("Key '%s' is a KDE extension" % key)
|
||||
|
||||
elif key == "MountPoint":
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "FSDevice")
|
||||
self.warnings.append("Key '%s' is a KDE extension" % key)
|
||||
|
||||
elif key == "ReadOnly":
|
||||
self.checkValue(key, value, type="boolean")
|
||||
self.checkType(key, "FSDevice")
|
||||
self.warnings.append("Key '%s' is a KDE extension" % key)
|
||||
|
||||
elif re.match("^UnmountIcon"+xdg.Locale.regex+"$", key):
|
||||
self.checkValue(key, value)
|
||||
self.checkType(key, "FSDevice")
|
||||
self.warnings.append("Key '%s' is a KDE extension" % key)
|
||||
|
||||
# deprecated keys
|
||||
elif key == "Encoding":
|
||||
self.checkValue(key, value)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif re.match("^MiniIcon"+xdg.Locale.regex+"$", key):
|
||||
self.checkValue(key, value)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "TerminalOptions":
|
||||
self.checkValue(key, value)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "DefaultApp":
|
||||
self.checkValue(key, value)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "Protocols":
|
||||
self.checkValue(key, value, list=True)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "Extensions":
|
||||
self.checkValue(key, value, list=True)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "BinaryPattern":
|
||||
self.checkValue(key, value)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "MapNotify":
|
||||
self.checkValue(key, value)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif re.match("^SwallowTitle"+xdg.Locale.regex+"$", key):
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "SwallowExec":
|
||||
self.checkValue(key, value)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "FilePattern":
|
||||
self.checkValue(key, value, type="regex", list=True)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
elif key == "SortOrder":
|
||||
self.checkValue(key, value, list=True)
|
||||
self.warnings.append("Key '%s' is deprecated" % key)
|
||||
|
||||
# "X-" extensions
|
||||
elif re.match("^X-[a-zA-Z0-9-]+", key):
|
||||
pass
|
||||
|
||||
else:
|
||||
self.errors.append("Invalid key: %s" % key)
|
||||
|
||||
def checkType(self, key, type):
|
||||
if not self.getType() == type:
|
||||
self.errors.append("Key '%s' only allowed in Type=%s" % (key, type))
|
||||
|
||||
def checkOnlyShowIn(self, value):
|
||||
values = self.getList(value)
|
||||
valid = ["GNOME", "KDE", "LXDE", "MATE", "Razor", "ROX", "TDE", "Unity",
|
||||
"XFCE", "Old"]
|
||||
for item in values:
|
||||
if item not in valid and item[0:2] != "X-":
|
||||
self.errors.append("'%s' is not a registered OnlyShowIn value" % item);
|
||||
|
||||
def checkCategories(self, value):
|
||||
values = self.getList(value)
|
||||
|
||||
main = ["AudioVideo", "Audio", "Video", "Development", "Education", "Game", "Graphics", "Network", "Office", "Science", "Settings", "System", "Utility"]
|
||||
if not any(item in main for item in values):
|
||||
self.errors.append("Missing main category")
|
||||
|
||||
additional = ['Building', 'Debugger', 'IDE', 'GUIDesigner', 'Profiling', 'RevisionControl', 'Translation', 'Calendar', 'ContactManagement', 'Database', 'Dictionary', 'Chart', 'Email', 'Finance', 'FlowChart', 'PDA', 'ProjectManagement', 'Presentation', 'Spreadsheet', 'WordProcessor', '2DGraphics', 'VectorGraphics', 'RasterGraphics', '3DGraphics', 'Scanning', 'OCR', 'Photography', 'Publishing', 'Viewer', 'TextTools', 'DesktopSettings', 'HardwareSettings', 'Printing', 'PackageManager', 'Dialup', 'InstantMessaging', 'Chat', 'IRCClient', 'Feed', 'FileTransfer', 'HamRadio', 'News', 'P2P', 'RemoteAccess', 'Telephony', 'TelephonyTools', 'VideoConference', 'WebBrowser', 'WebDevelopment', 'Midi', 'Mixer', 'Sequencer', 'Tuner', 'TV', 'AudioVideoEditing', 'Player', 'Recorder', 'DiscBurning', 'ActionGame', 'AdventureGame', 'ArcadeGame', 'BoardGame', 'BlocksGame', 'CardGame', 'KidsGame', 'LogicGame', 'RolePlaying', 'Shooter', 'Simulation', 'SportsGame', 'StrategyGame', 'Art', 'Construction', 'Music', 'Languages', 'ArtificialIntelligence', 'Astronomy', 'Biology', 'Chemistry', 'ComputerScience', 'DataVisualization', 'Economy', 'Electricity', 'Geography', 'Geology', 'Geoscience', 'History', 'Humanities', 'ImageProcessing', 'Literature', 'Maps', 'Math', 'NumericalAnalysis', 'MedicalSoftware', 'Physics', 'Robotics', 'Spirituality', 'Sports', 'ParallelComputing', 'Amusement', 'Archiving', 'Compression', 'Electronics', 'Emulator', 'Engineering', 'FileTools', 'FileManager', 'TerminalEmulator', 'Filesystem', 'Monitor', 'Security', 'Accessibility', 'Calculator', 'Clock', 'TextEditor', 'Documentation', 'Adult', 'Core', 'KDE', 'GNOME', 'XFCE', 'GTK', 'Qt', 'Motif', 'Java', 'ConsoleOnly']
|
||||
allcategories = additional + main
|
||||
|
||||
for item in values:
|
||||
if item not in allcategories and not item.startswith("X-"):
|
||||
self.errors.append("'%s' is not a registered Category" % item);
|
||||
|
||||
def checkCategorie(self, value):
|
||||
"""Deprecated alias for checkCategories - only exists for backwards
|
||||
compatibility.
|
||||
"""
|
||||
warnings.warn("checkCategorie is deprecated, use checkCategories",
|
||||
DeprecationWarning)
|
||||
return self.checkCategories(value)
|
|
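A short sketch of reading and validating a launcher with the DesktopEntry class above; the .desktop path is only an example:

from xdg.DesktopEntry import DesktopEntry
from xdg.Exceptions import ValidationError

entry = DesktopEntry("/usr/share/applications/firefox.desktop")
print(entry.getName(), "->", entry.getExec())
try:
    entry.validate()                      # collects warnings/errors and raises if any were found
except ValidationError as err:
    print(err)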
@ -0,0 +1,84 @@
|
|||
"""
|
||||
Exception Classes for the xdg package
|
||||
"""
|
||||
|
||||
debug = False
|
||||
|
||||
class Error(Exception):
|
||||
"""Base class for exceptions defined here."""
|
||||
def __init__(self, msg):
|
||||
self.msg = msg
|
||||
Exception.__init__(self, msg)
|
||||
def __str__(self):
|
||||
return self.msg
|
||||
|
||||
class ValidationError(Error):
|
||||
"""Raised when a file fails to validate.
|
||||
|
||||
The filename is the .file attribute.
|
||||
"""
|
||||
def __init__(self, msg, file):
|
||||
self.msg = msg
|
||||
self.file = file
|
||||
Error.__init__(self, "ValidationError in file '%s': %s " % (file, msg))
|
||||
|
||||
class ParsingError(Error):
|
||||
"""Raised when a file cannot be parsed.
|
||||
|
||||
The filename is the .file attribute.
|
||||
"""
|
||||
def __init__(self, msg, file):
|
||||
self.msg = msg
|
||||
self.file = file
|
||||
Error.__init__(self, "ParsingError in file '%s', %s" % (file, msg))
|
||||
|
||||
class NoKeyError(Error):
|
||||
"""Raised when trying to access a nonexistant key in an INI-style file.
|
||||
|
||||
Attributes are .key, .group and .file.
|
||||
"""
|
||||
def __init__(self, key, group, file):
|
||||
Error.__init__(self, "No key '%s' in group %s of file %s" % (key, group, file))
|
||||
self.key = key
|
||||
self.group = group
|
||||
self.file = file
|
||||
|
||||
class DuplicateKeyError(Error):
|
||||
"""Raised when the same key occurs twice in an INI-style file.
|
||||
|
||||
Attributes are .key, .group and .file.
|
||||
"""
|
||||
def __init__(self, key, group, file):
|
||||
Error.__init__(self, "Duplicate key '%s' in group %s of file %s" % (key, group, file))
|
||||
self.key = key
|
||||
self.group = group
|
||||
self.file = file
|
||||
|
||||
class NoGroupError(Error):
|
||||
"""Raised when trying to access a nonexistant group in an INI-style file.
|
||||
|
||||
Attributes are .group and .file.
|
||||
"""
|
||||
def __init__(self, group, file):
|
||||
Error.__init__(self, "No group: %s in file %s" % (group, file))
|
||||
self.group = group
|
||||
self.file = file
|
||||
|
||||
class DuplicateGroupError(Error):
|
||||
"""Raised when the same key occurs twice in an INI-style file.
|
||||
|
||||
Attributes are .group and .file.
|
||||
"""
|
||||
def __init__(self, group, file):
|
||||
Error.__init__(self, "Duplicate group: %s in file %s" % (group, file))
|
||||
self.group = group
|
||||
self.file = file
|
||||
|
||||
class NoThemeError(Error):
|
||||
"""Raised when trying to access a nonexistant icon theme.
|
||||
|
||||
The name of the theme is the .theme attribute.
|
||||
"""
|
||||
def __init__(self, theme):
|
||||
Error.__init__(self, "No such icon-theme: %s" % theme)
|
||||
self.theme = theme
|
|
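Note that the other modules bind this debug flag through from-imports at load time, so flipping it only takes effect if done before IniFile/IconTheme are imported. A sketch, assuming the package imports as "xdg":

import xdg.Exceptions
xdg.Exceptions.debug = True                # duplicate groups/keys and unreadable files now raise
from xdg.DesktopEntry import DesktopEntry  # imported afterwards, so the IniFile parser it uses sees debug=True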
@ -0,0 +1,445 @@
|
|||
"""
|
||||
Complete implementation of the XDG Icon Spec
|
||||
http://standards.freedesktop.org/icon-theme-spec/
|
||||
"""
|
||||
|
||||
import os, time
|
||||
import re
|
||||
|
||||
from . import IniFile, Config, Locale
|
||||
from .IniFile import is_ascii
|
||||
from .BaseDirectory import xdg_data_dirs
|
||||
from .Exceptions import NoThemeError, debug
|
||||
|
||||
|
||||
class IconTheme(IniFile):
|
||||
"Class to parse and validate IconThemes"
|
||||
def __init__(self):
|
||||
IniFile.__init__(self)
|
||||
|
||||
def __repr__(self):
|
||||
return self.name
|
||||
|
||||
def parse(self, file):
|
||||
IniFile.parse(self, file, ["Icon Theme", "KDE Icon Theme"])
|
||||
self.dir = os.path.dirname(file)
|
||||
(nil, self.name) = os.path.split(self.dir)
|
||||
|
||||
def getDir(self):
|
||||
return self.dir
|
||||
|
||||
# Standard Keys
|
||||
def getName(self):
|
||||
return self.get('Name', locale=True)
|
||||
def getComment(self):
|
||||
return self.get('Comment', locale=True)
|
||||
def getInherits(self):
|
||||
return self.get('Inherits', list=True)
|
||||
def getDirectories(self):
|
||||
return self.get('Directories', list=True)
|
||||
def getScaledDirectories(self):
|
||||
return self.get('ScaledDirectories', list=True)
|
||||
def getHidden(self):
|
||||
return self.get('Hidden', type="boolean")
|
||||
def getExample(self):
|
||||
return self.get('Example')
|
||||
|
||||
# Per Directory Keys
|
||||
def getSize(self, directory):
|
||||
return self.get('Size', type="integer", group=directory)
|
||||
def getContext(self, directory):
|
||||
return self.get('Context', group=directory)
|
||||
def getType(self, directory):
|
||||
value = self.get('Type', group=directory)
|
||||
if value:
|
||||
return value
|
||||
else:
|
||||
return "Threshold"
|
||||
def getMaxSize(self, directory):
|
||||
value = self.get('MaxSize', type="integer", group=directory)
|
||||
if value or value == 0:
|
||||
return value
|
||||
else:
|
||||
return self.getSize(directory)
|
||||
def getMinSize(self, directory):
|
||||
value = self.get('MinSize', type="integer", group=directory)
|
||||
if value or value == 0:
|
||||
return value
|
||||
else:
|
||||
return self.getSize(directory)
|
||||
def getThreshold(self, directory):
|
||||
value = self.get('Threshold', type="integer", group=directory)
|
||||
if value or value == 0:
|
||||
return value
|
||||
else:
|
||||
return 2
|
||||
|
||||
def getScale(self, directory):
|
||||
value = self.get('Scale', type="integer", group=directory)
|
||||
return value or 1
|
||||
|
||||
# validation stuff
|
||||
def checkExtras(self):
|
||||
# header
|
||||
if self.defaultGroup == "KDE Icon Theme":
|
||||
self.warnings.append('[KDE Icon Theme]-Header is deprecated')
|
||||
|
||||
# file extension
|
||||
if self.fileExtension == ".theme":
|
||||
pass
|
||||
elif self.fileExtension == ".desktop":
|
||||
self.warnings.append('.desktop fileExtension is deprecated')
|
||||
else:
|
||||
self.warnings.append('Unknown File extension')
|
||||
|
||||
# Check required keys
|
||||
# Name
|
||||
try:
|
||||
self.name = self.content[self.defaultGroup]["Name"]
|
||||
except KeyError:
|
||||
self.errors.append("Key 'Name' is missing")
|
||||
|
||||
# Comment
|
||||
try:
|
||||
self.comment = self.content[self.defaultGroup]["Comment"]
|
||||
except KeyError:
|
||||
self.errors.append("Key 'Comment' is missing")
|
||||
|
||||
# Directories
|
||||
try:
|
||||
self.directories = self.content[self.defaultGroup]["Directories"]
|
||||
except KeyError:
|
||||
self.errors.append("Key 'Directories' is missing")
|
||||
|
||||
def checkGroup(self, group):
|
||||
# check if group header is valid
|
||||
if group == self.defaultGroup:
|
||||
try:
|
||||
self.name = self.content[group]["Name"]
|
||||
except KeyError:
|
||||
self.errors.append("Key 'Name' in Group '%s' is missing" % group)
|
||||
try:
|
||||
self.name = self.content[group]["Comment"]
|
||||
except KeyError:
|
||||
self.errors.append("Key 'Comment' in Group '%s' is missing" % group)
|
||||
elif group in self.getDirectories():
|
||||
try:
|
||||
self.type = self.content[group]["Type"]
|
||||
except KeyError:
|
||||
self.type = "Threshold"
|
||||
try:
|
||||
self.name = self.content[group]["Size"]
|
||||
except KeyError:
|
||||
self.errors.append("Key 'Size' in Group '%s' is missing" % group)
|
||||
elif not (re.match(r"^\[X-", group) and is_ascii(group)):
|
||||
self.errors.append("Invalid Group name: %s" % group)
|
||||
|
||||
def checkKey(self, key, value, group):
|
||||
# standard keys
|
||||
if group == self.defaultGroup:
|
||||
if re.match("^Name"+xdg.Locale.regex+"$", key):
|
||||
pass
|
||||
elif re.match("^Comment"+xdg.Locale.regex+"$", key):
|
||||
pass
|
||||
elif key == "Inherits":
|
||||
self.checkValue(key, value, list=True)
|
||||
elif key == "Directories":
|
||||
self.checkValue(key, value, list=True)
|
||||
elif key == "ScaledDirectories":
|
||||
self.checkValue(key, value, list=True)
|
||||
elif key == "Hidden":
|
||||
self.checkValue(key, value, type="boolean")
|
||||
elif key == "Example":
|
||||
self.checkValue(key, value)
|
||||
elif re.match("^X-[a-zA-Z0-9-]+", key):
|
||||
pass
|
||||
else:
|
||||
self.errors.append("Invalid key: %s" % key)
|
||||
elif group in self.getDirectories():
|
||||
if key == "Size":
|
||||
self.checkValue(key, value, type="integer")
|
||||
elif key == "Context":
|
||||
self.checkValue(key, value)
|
||||
elif key == "Type":
|
||||
self.checkValue(key, value)
|
||||
if value not in ["Fixed", "Scalable", "Threshold"]:
|
||||
self.errors.append("Key 'Type' must be one out of 'Fixed','Scalable','Threshold', but is %s" % value)
|
||||
elif key == "MaxSize":
|
||||
self.checkValue(key, value, type="integer")
|
||||
if self.type != "Scalable":
|
||||
self.errors.append("Key 'MaxSize' give, but Type is %s" % self.type)
|
||||
elif key == "MinSize":
|
||||
self.checkValue(key, value, type="integer")
|
||||
if self.type != "Scalable":
|
||||
self.errors.append("Key 'MinSize' give, but Type is %s" % self.type)
|
||||
elif key == "Threshold":
|
||||
self.checkValue(key, value, type="integer")
|
||||
if self.type != "Threshold":
|
||||
self.errors.append("Key 'Threshold' give, but Type is %s" % self.type)
|
||||
elif key == "Scale":
|
||||
self.checkValue(key, value, type="integer")
|
||||
elif re.match("^X-[a-zA-Z0-9-]+", key):
|
||||
pass
|
||||
else:
|
||||
self.errors.append("Invalid key: %s" % key)
|
||||
|
||||
|
||||
class IconData(IniFile):
|
||||
"Class to parse and validate IconData Files"
|
||||
def __init__(self):
|
||||
IniFile.__init__(self)
|
||||
|
||||
def __repr__(self):
|
||||
displayname = self.getDisplayName()
|
||||
if displayname:
|
||||
return "<IconData: %s>" % displayname
|
||||
else:
|
||||
return "<IconData>"
|
||||
|
||||
def parse(self, file):
|
||||
IniFile.parse(self, file, ["Icon Data"])
|
||||
|
||||
# Standard Keys
|
||||
def getDisplayName(self):
|
||||
"""Retrieve the display name from the icon data, if one is specified."""
|
||||
return self.get('DisplayName', locale=True)
|
||||
def getEmbeddedTextRectangle(self):
|
||||
"""Retrieve the embedded text rectangle from the icon data as a list of
|
||||
numbers (x0, y0, x1, y1), if it is specified."""
|
||||
return self.get('EmbeddedTextRectangle', type="integer", list=True)
|
||||
def getAttachPoints(self):
|
||||
"""Retrieve the anchor points for overlays & emblems from the icon data,
|
||||
as a list of co-ordinate pairs, if they are specified."""
|
||||
return self.get('AttachPoints', type="point", list=True)
|
||||
|
||||
# validation stuff
|
||||
def checkExtras(self):
|
||||
# file extension
|
||||
if self.fileExtension != ".icon":
|
||||
self.warnings.append('Unknown File extension')
|
||||
|
||||
def checkGroup(self, group):
|
||||
# check if group header is valid
|
||||
if not (group == self.defaultGroup \
|
||||
or (re.match(r"^\[X-", group) and is_ascii(group))):
|
||||
self.errors.append("Invalid Group name: %s" % group.encode("ascii", "replace"))
|
||||
|
||||
def checkKey(self, key, value, group):
|
||||
# standard keys
|
||||
if re.match("^DisplayName"+xdg.Locale.regex+"$", key):
|
||||
pass
|
||||
elif key == "EmbeddedTextRectangle":
|
||||
self.checkValue(key, value, type="integer", list=True)
|
||||
elif key == "AttachPoints":
|
||||
self.checkValue(key, value, type="point", list=True)
|
||||
elif re.match("^X-[a-zA-Z0-9-]+", key):
|
||||
pass
|
||||
else:
|
||||
self.errors.append("Invalid key: %s" % key)
|
||||
|
||||
|
||||
|
||||
icondirs = []
|
||||
for basedir in xdg_data_dirs:
|
||||
icondirs.append(os.path.join(basedir, "icons"))
|
||||
icondirs.append(os.path.join(basedir, "pixmaps"))
|
||||
icondirs.append(os.path.expanduser("~/.icons"))
|
||||
|
||||
# just cache variables, they give a 10x speed improvement
|
||||
themes = []
|
||||
theme_cache = {}
|
||||
dir_cache = {}
|
||||
icon_cache = {}
|
||||
|
||||
def getIconPath(iconname, size = None, theme = None, extensions = ["png", "svg", "xpm"]):
|
||||
"""Get the path to a specified icon.
|
||||
|
||||
size :
|
||||
Icon size in pixels. Defaults to ``xdg.Config.icon_size``.
|
||||
theme :
|
||||
Icon theme name. Defaults to ``xdg.Config.icon_theme``. If the icon isn't
|
||||
found in the specified theme, it will be looked up in the basic 'hicolor'
|
||||
theme.
|
||||
extensions :
|
||||
List of preferred file extensions.
|
||||
|
||||
Example::
|
||||
|
||||
>>> getIconPath("inkscape", 32)
|
||||
'/usr/share/icons/hicolor/32x32/apps/inkscape.png'
|
||||
"""
|
||||
|
||||
global themes
|
||||
|
||||
if size == None:
|
||||
size = Config.icon_size
|
||||
if theme == None:
|
||||
theme = Config.icon_theme
|
||||
|
||||
# if we have an absolute path, just return it
|
||||
if os.path.isabs(iconname):
|
||||
return iconname
|
||||
|
||||
# check if it has an extension and strip it
|
||||
if os.path.splitext(iconname)[1][1:] in extensions:
|
||||
iconname = os.path.splitext(iconname)[0]
|
||||
|
||||
# parse theme files
|
||||
if (themes == []) or (themes[0].name != theme):
|
||||
themes = list(__get_themes(theme))
|
||||
|
||||
# more caching (icon looked up in the last 5 seconds?)
|
||||
tmp = (iconname, size, theme, tuple(extensions))
|
||||
try:
|
||||
timestamp, icon = icon_cache[tmp]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if (time.time() - timestamp) >= Config.cache_time:
|
||||
del icon_cache[tmp]
|
||||
else:
|
||||
return icon
|
||||
|
||||
for thme in themes:
|
||||
icon = LookupIcon(iconname, size, thme, extensions)
|
||||
if icon:
|
||||
icon_cache[tmp] = (time.time(), icon)
|
||||
return icon
|
||||
|
||||
# cache stuff again (directories looked up in the last 5 seconds?)
|
||||
for directory in icondirs:
|
||||
if (directory not in dir_cache \
|
||||
or (int(time.time() - dir_cache[directory][1]) >= Config.cache_time \
|
||||
and dir_cache[directory][2] < os.path.getmtime(directory))) \
|
||||
and os.path.isdir(directory):
|
||||
dir_cache[directory] = (os.listdir(directory), time.time(), os.path.getmtime(directory))
|
||||
|
||||
for dir, values in dir_cache.items():
|
||||
for extension in extensions:
|
||||
try:
|
||||
if iconname + "." + extension in values[0]:
|
||||
icon = os.path.join(dir, iconname + "." + extension)
|
||||
icon_cache[tmp] = [time.time(), icon]
|
||||
return icon
|
||||
except UnicodeDecodeError as e:
|
||||
if debug:
|
||||
raise e
|
||||
else:
|
||||
pass
|
||||
|
||||
# we haven't found anything? "hicolor" is our fallback
|
||||
if theme != "hicolor":
|
||||
icon = getIconPath(iconname, size, "hicolor")
|
||||
icon_cache[tmp] = [time.time(), icon]
|
||||
return icon
|
||||
|
||||
def getIconData(path):
|
||||
"""Retrieve the data from the .icon file corresponding to the given file. If
|
||||
there is no .icon file, it returns None.
|
||||
|
||||
Example::
|
||||
|
||||
getIconData("/usr/share/icons/Tango/scalable/places/folder.svg")
|
||||
"""
|
||||
if os.path.isfile(path):
|
||||
icon_file = os.path.splitext(path)[0] + ".icon"
|
||||
if os.path.isfile(icon_file):
|
||||
data = IconData()
|
||||
data.parse(icon_file)
|
||||
return data
|
||||
|
||||
def __get_themes(themename):
|
||||
"""Generator yielding IconTheme objects for a specified theme and any themes
|
||||
from which it inherits.
|
||||
"""
|
||||
for dir in icondirs:
|
||||
theme_file = os.path.join(dir, themename, "index.theme")
|
||||
if os.path.isfile(theme_file):
|
||||
break
|
||||
theme_file = os.path.join(dir, themename, "index.desktop")
|
||||
if os.path.isfile(theme_file):
|
||||
break
|
||||
else:
|
||||
if debug:
|
||||
raise NoThemeError(themename)
|
||||
return
|
||||
|
||||
theme = IconTheme()
|
||||
theme.parse(theme_file)
|
||||
yield theme
|
||||
for subtheme in theme.getInherits():
|
||||
for t in __get_themes(subtheme):
|
||||
yield t
|
||||
|
||||
def LookupIcon(iconname, size, theme, extensions):
|
||||
# look for the cache
|
||||
if theme.name not in theme_cache:
|
||||
theme_cache[theme.name] = []
|
||||
theme_cache[theme.name].append(time.time() - (Config.cache_time + 1)) # [0] last time of lookup
|
||||
theme_cache[theme.name].append(0) # [1] mtime
|
||||
theme_cache[theme.name].append(dict()) # [2] dir: [subdir, [items]]
|
||||
|
||||
# cache stuff (directory looked up in the last 5 seconds?)
|
||||
if int(time.time() - theme_cache[theme.name][0]) >= Config.cache_time:
|
||||
theme_cache[theme.name][0] = time.time()
|
||||
for subdir in theme.getDirectories():
|
||||
for directory in icondirs:
|
||||
dir = os.path.join(directory,theme.name,subdir)
|
||||
if (dir not in theme_cache[theme.name][2] \
|
||||
or theme_cache[theme.name][1] < os.path.getmtime(os.path.join(directory,theme.name))) \
|
||||
and subdir != "" \
|
||||
and os.path.isdir(dir):
|
||||
theme_cache[theme.name][2][dir] = [subdir, os.listdir(dir)]
|
||||
theme_cache[theme.name][1] = os.path.getmtime(os.path.join(directory,theme.name))
|
||||
|
||||
for dir, values in theme_cache[theme.name][2].items():
|
||||
if DirectoryMatchesSize(values[0], size, theme):
|
||||
for extension in extensions:
|
||||
if iconname + "." + extension in values[1]:
|
||||
return os.path.join(dir, iconname + "." + extension)
|
||||
|
||||
minimal_size = 2**31
|
||||
closest_filename = ""
|
||||
for dir, values in theme_cache[theme.name][2].items():
|
||||
distance = DirectorySizeDistance(values[0], size, theme)
|
||||
if distance < minimal_size:
|
||||
for extension in extensions:
|
||||
if iconname + "." + extension in values[1]:
|
||||
closest_filename = os.path.join(dir, iconname + "." + extension)
|
||||
minimal_size = distance
|
||||
|
||||
return closest_filename
|
||||
|
||||
def DirectoryMatchesSize(subdir, iconsize, theme):
|
||||
Type = theme.getType(subdir)
|
||||
Size = theme.getSize(subdir)
|
||||
Threshold = theme.getThreshold(subdir)
|
||||
MinSize = theme.getMinSize(subdir)
|
||||
MaxSize = theme.getMaxSize(subdir)
|
||||
if Type == "Fixed":
|
||||
return Size == iconsize
|
||||
elif Type == "Scaleable":
|
||||
return MinSize <= iconsize <= MaxSize
|
||||
elif Type == "Threshold":
|
||||
return Size - Threshold <= iconsize <= Size + Threshold
|
||||
|
||||
def DirectorySizeDistance(subdir, iconsize, theme):
|
||||
Type = theme.getType(subdir)
|
||||
Size = theme.getSize(subdir)
|
||||
Threshold = theme.getThreshold(subdir)
|
||||
MinSize = theme.getMinSize(subdir)
|
||||
MaxSize = theme.getMaxSize(subdir)
|
||||
if Type == "Fixed":
|
||||
return abs(Size - iconsize)
|
||||
elif Type == "Scalable":
|
||||
if iconsize < MinSize:
|
||||
return MinSize - iconsize
|
||||
elif iconsize > MaxSize:
|
||||
return iconsize - MaxSize
|
||||
return 0
|
||||
elif Type == "Threshold":
|
||||
if iconsize < Size - Threshold:
|
||||
return MinSize - iconsize
|
||||
elif iconsize > Size + Threshold:
|
||||
return iconsize - MaxSize
|
||||
return 0
|
|
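A worked example of the size matching above: a Threshold directory with Size=48 and Threshold=2 matches any request from 46 to 50 exactly, and anything else falls through to whichever directory has the smallest DirectorySizeDistance. Lookups normally go through getIconPath; assuming the package imports as "xdg":

from xdg import IconTheme

path = IconTheme.getIconPath("folder", size=48, theme="hicolor")
if path:
    data = IconTheme.getIconData(path)    # reads a neighbouring folder.icon file, if one exists
    print(path, data)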
@ -0,0 +1,419 @@
|
|||
"""
|
||||
Base Class for DesktopEntry, IconTheme and IconData
|
||||
"""
|
||||
|
||||
import re, os, stat, io
|
||||
from .Exceptions import (ParsingError, DuplicateGroupError, NoGroupError,
|
||||
NoKeyError, DuplicateKeyError, ValidationError,
|
||||
debug)
|
||||
# import xdg.Locale
|
||||
from . import Locale
|
||||
from .util import u
|
||||
|
||||
def is_ascii(s):
|
||||
"""Return True if a string consists entirely of ASCII characters."""
|
||||
try:
|
||||
s.encode('ascii', 'strict')
|
||||
return True
|
||||
except UnicodeError:
|
||||
return False
|
||||
|
||||
class IniFile:
|
||||
defaultGroup = ''
|
||||
fileExtension = ''
|
||||
|
||||
filename = ''
|
||||
|
||||
tainted = False
|
||||
|
||||
def __init__(self, filename=None):
|
||||
self.content = dict()
|
||||
if filename:
|
||||
self.parse(filename)
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.content, other.content)
|
||||
|
||||
def parse(self, filename, headers=None):
|
||||
'''Parse an INI file.
|
||||
|
||||
headers -- list of headers the parser will try to select as a default header
|
||||
'''
|
||||
# for performance reasons
|
||||
content = self.content
|
||||
|
||||
if not os.path.isfile(filename):
|
||||
raise ParsingError("File not found", filename)
|
||||
|
||||
try:
|
||||
# The content should be UTF-8, but legacy files can have other
|
||||
# encodings, including mixed encodings in one file. We don't attempt
|
||||
# to decode them, but we silence the errors.
|
||||
fd = io.open(filename, 'r', encoding='utf-8', errors='replace')
|
||||
except IOError as e:
|
||||
if debug:
|
||||
raise e
|
||||
else:
|
||||
return
|
||||
|
||||
# parse file
|
||||
for line in fd:
|
||||
line = line.strip()
|
||||
# empty line
|
||||
if not line:
|
||||
continue
|
||||
# comment
|
||||
elif line[0] == '#':
|
||||
continue
|
||||
# new group
|
||||
elif line[0] == '[':
|
||||
currentGroup = line.lstrip("[").rstrip("]")
|
||||
if debug and self.hasGroup(currentGroup):
|
||||
raise DuplicateGroupError(currentGroup, filename)
|
||||
else:
|
||||
content[currentGroup] = {}
|
||||
# key
|
||||
else:
|
||||
try:
|
||||
key, value = line.split("=", 1)
|
||||
except ValueError:
|
||||
raise ParsingError("Invalid line: " + line, filename)
|
||||
|
||||
key = key.strip() # Spaces before/after '=' should be ignored
|
||||
try:
|
||||
if debug and self.hasKey(key, currentGroup):
|
||||
raise DuplicateKeyError(key, currentGroup, filename)
|
||||
else:
|
||||
content[currentGroup][key] = value.strip()
|
||||
except (IndexError, UnboundLocalError):
|
||||
raise ParsingError("Parsing error on key, group missing", filename)
|
||||
|
||||
fd.close()
|
||||
|
||||
self.filename = filename
|
||||
self.tainted = False
|
||||
|
||||
# check header
|
||||
if headers:
|
||||
for header in headers:
|
||||
if header in content:
|
||||
self.defaultGroup = header
|
||||
break
|
||||
else:
|
||||
raise ParsingError("[%s]-Header missing" % headers[0], filename)
|
||||
|
||||
# start stuff to access the keys
|
||||
def get(self, key, group=None, locale=False, type="string", list=False, strict=False):
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
# return key (with locale)
|
||||
if (group in self.content) and (key in self.content[group]):
|
||||
if locale:
|
||||
value = self.content[group][self.__addLocale(key, group)]
|
||||
else:
|
||||
value = self.content[group][key]
|
||||
else:
|
||||
if strict or debug:
|
||||
if group not in self.content:
|
||||
raise NoGroupError(group, self.filename)
|
||||
elif key not in self.content[group]:
|
||||
raise NoKeyError(key, group, self.filename)
|
||||
else:
|
||||
value = ""
|
||||
|
||||
if list == True:
|
||||
values = self.getList(value)
|
||||
result = []
|
||||
else:
|
||||
values = [value]
|
||||
|
||||
for value in values:
|
||||
if type == "boolean":
|
||||
value = self.__getBoolean(value)
|
||||
elif type == "integer":
|
||||
try:
|
||||
value = int(value)
|
||||
except ValueError:
|
||||
value = 0
|
||||
elif type == "numeric":
|
||||
try:
|
||||
value = float(value)
|
||||
except ValueError:
|
||||
value = 0.0
|
||||
elif type == "regex":
|
||||
value = re.compile(value)
|
||||
elif type == "point":
|
||||
x, y = value.split(",")
|
||||
value = int(x), int(y)
|
||||
|
||||
if list == True:
|
||||
result.append(value)
|
||||
else:
|
||||
result = value
|
||||
|
||||
return result
|
||||
# end stuff to access the keys
|
||||
|
||||
# start subget
|
||||
def getList(self, string):
|
||||
if re.search(r"(?<!\\)\;", string):
|
||||
list = re.split(r"(?<!\\);", string)
|
||||
elif re.search(r"(?<!\\)\|", string):
|
||||
list = re.split(r"(?<!\\)\|", string)
|
||||
elif re.search(r"(?<!\\),", string):
|
||||
list = re.split(r"(?<!\\),", string)
|
||||
else:
|
||||
list = [string]
|
||||
if list[-1] == "":
|
||||
list.pop()
|
||||
return list
|
||||
|
||||
def __getBoolean(self, boolean):
|
||||
if boolean == 1 or boolean == "true" or boolean == "True":
|
||||
return True
|
||||
elif boolean == 0 or boolean == "false" or boolean == "False":
|
||||
return False
|
||||
return False
|
||||
# end subget
|
||||
|
||||
def __addLocale(self, key, group=None):
|
||||
"add locale to key according the current lc_messages"
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
for lang in Locale.langs:
|
||||
langkey = "%s[%s]" % (key, lang)
|
||||
if langkey in self.content[group]:
|
||||
return langkey
|
||||
|
||||
return key
|
||||
|
||||
# start validation stuff
|
||||
def validate(self, report="All"):
|
||||
"""Validate the contents, raising :class:`~xdg.Exceptions.ValidationError`
|
||||
if there is anything amiss.
|
||||
|
||||
report can be 'All' / 'Warnings' / 'Errors'
|
||||
"""
|
||||
|
||||
self.warnings = []
|
||||
self.errors = []
|
||||
|
||||
# get file extension
|
||||
self.fileExtension = os.path.splitext(self.filename)[1]
|
||||
|
||||
# override this in subclasses for additional checks
|
||||
self.checkExtras()
|
||||
|
||||
# check all keys
|
||||
for group in self.content:
|
||||
self.checkGroup(group)
|
||||
for key in self.content[group]:
|
||||
self.checkKey(key, self.content[group][key], group)
|
||||
# check if value is empty
|
||||
if self.content[group][key] == "":
|
||||
self.warnings.append("Value of Key '%s' is empty" % key)
|
||||
|
||||
# raise Warnings / Errors
|
||||
msg = ""
|
||||
|
||||
if report == "All" or report == "Warnings":
|
||||
for line in self.warnings:
|
||||
msg += "\n- " + line
|
||||
|
||||
if report == "All" or report == "Errors":
|
||||
for line in self.errors:
|
||||
msg += "\n- " + line
|
||||
|
||||
if msg:
|
||||
raise ValidationError(msg, self.filename)
|
||||
|
||||
# check if group header is valid
|
||||
def checkGroup(self, group):
|
||||
pass
|
||||
|
||||
# check if key is valid
|
||||
def checkKey(self, key, value, group):
|
||||
pass
|
||||
|
||||
# check random stuff
|
||||
def checkValue(self, key, value, type="string", list=False):
|
||||
if list == True:
|
||||
values = self.getList(value)
|
||||
else:
|
||||
values = [value]
|
||||
|
||||
for value in values:
|
||||
if type == "string":
|
||||
code = self.checkString(value)
|
||||
if type == "localestring":
|
||||
continue
|
||||
elif type == "boolean":
|
||||
code = self.checkBoolean(value)
|
||||
elif type == "numeric":
|
||||
code = self.checkNumber(value)
|
||||
elif type == "integer":
|
||||
code = self.checkInteger(value)
|
||||
elif type == "regex":
|
||||
code = self.checkRegex(value)
|
||||
elif type == "point":
|
||||
code = self.checkPoint(value)
|
||||
if code == 1:
|
||||
self.errors.append("'%s' is not a valid %s" % (value, type))
|
||||
elif code == 2:
|
||||
self.warnings.append("Value of key '%s' is deprecated" % key)
|
||||
|
||||
def checkExtras(self):
|
||||
pass
|
||||
|
||||
def checkBoolean(self, value):
|
||||
# 1 or 0 : deprecated
|
||||
if (value == "1" or value == "0"):
|
||||
return 2
|
||||
# true or false: ok
|
||||
elif not (value == "true" or value == "false"):
|
||||
return 1
|
||||
|
||||
def checkNumber(self, value):
|
||||
# float() ValueError
|
||||
try:
|
||||
float(value)
|
||||
except:
|
||||
return 1
|
||||
|
||||
def checkInteger(self, value):
|
||||
# int() ValueError
|
||||
try:
|
||||
int(value)
|
||||
except:
|
||||
return 1
|
||||
|
||||
def checkPoint(self, value):
|
||||
if not re.match("^[0-9]+,[0-9]+$", value):
|
||||
return 1
|
||||
|
||||
def checkString(self, value):
|
||||
return 0 if is_ascii(value) else 1
|
||||
|
||||
def checkRegex(self, value):
|
||||
try:
|
||||
re.compile(value)
|
||||
except:
|
||||
return 1
|
||||
|
||||
# write support
|
||||
def write(self, filename=None, trusted=False):
|
||||
if not filename and not self.filename:
|
||||
raise ParsingError("File not found", "")
|
||||
|
||||
if filename:
|
||||
self.filename = filename
|
||||
else:
|
||||
filename = self.filename
|
||||
|
||||
if os.path.dirname(filename) and not os.path.isdir(os.path.dirname(filename)):
|
||||
os.makedirs(os.path.dirname(filename))
|
||||
|
||||
with io.open(filename, 'w', encoding='utf-8') as fp:
|
||||
|
||||
# An executable bit signifies that the desktop file is
|
||||
# trusted, but then the file can be executed. Add hashbang to
|
||||
# make sure that the file is opened by something that
|
||||
# understands desktop files.
|
||||
if trusted:
|
||||
fp.write(u("#!/usr/bin/env xdg-open\n"))
|
||||
|
||||
if self.defaultGroup:
|
||||
fp.write(u("[%s]\n") % self.defaultGroup)
|
||||
for (key, value) in self.content[self.defaultGroup].items():
|
||||
fp.write(u("%s=%s\n") % (key, value))
|
||||
fp.write(u("\n"))
|
||||
for (name, group) in self.content.items():
|
||||
if name != self.defaultGroup:
|
||||
fp.write(u("[%s]\n") % name)
|
||||
for (key, value) in group.items():
|
||||
fp.write(u("%s=%s\n") % (key, value))
|
||||
fp.write(u("\n"))
|
||||
|
||||
# Add executable bits to the file to show that it's trusted.
|
||||
if trusted:
|
||||
oldmode = os.stat(filename).st_mode
|
||||
mode = oldmode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
|
||||
os.chmod(filename, mode)
|
||||
|
||||
self.tainted = False
|
||||
|
||||
def set(self, key, value, group=None, locale=False):
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
if locale == True and len(Locale.langs) > 0:
|
||||
key = key + "[" + xdg.Locale.langs[0] + "]"
|
||||
|
||||
try:
|
||||
self.content[group][key] = value
|
||||
except KeyError:
|
||||
raise NoGroupError(group, self.filename)
|
||||
|
||||
self.tainted = (value == self.get(key, group))
|
||||
|
||||
def addGroup(self, group):
|
||||
if self.hasGroup(group):
|
||||
if debug:
|
||||
raise DuplicateGroupError(group, self.filename)
|
||||
else:
|
||||
self.content[group] = {}
|
||||
self.tainted = True
|
||||
|
||||
def removeGroup(self, group):
|
||||
existed = group in self.content
|
||||
if existed:
|
||||
del self.content[group]
|
||||
self.tainted = True
|
||||
else:
|
||||
if debug:
|
||||
raise NoGroupError(group, self.filename)
|
||||
return existed
|
||||
|
||||
def removeKey(self, key, group=None, locales=True):
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
try:
|
||||
if locales:
|
||||
for name in list(self.content[group]):
|
||||
if re.match("^" + key + xdg.Locale.regex + "$", name) and name != key:
|
||||
del self.content[group][name]
|
||||
value = self.content[group].pop(key)
|
||||
self.tainted = True
|
||||
return value
|
||||
except KeyError as e:
|
||||
if debug:
|
||||
if e == group:
|
||||
raise NoGroupError(group, self.filename)
|
||||
else:
|
||||
raise NoKeyError(key, group, self.filename)
|
||||
else:
|
||||
return ""
|
||||
|
||||
# misc
|
||||
def groups(self):
|
||||
return self.content.keys()
|
||||
|
||||
def hasGroup(self, group):
|
||||
return group in self.content
|
||||
|
||||
def hasKey(self, key, group=None):
|
||||
# set default group
|
||||
if not group:
|
||||
group = self.defaultGroup
|
||||
|
||||
return key in self.content[group]
|
||||
|
||||
def getFileName(self):
|
||||
return self.filename
|
|
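Every DesktopEntry/IconTheme getter funnels through the get() accessor above: list=True splits on unescaped ';', '|' or ',', and the type argument coerces each element. A small sketch with a hypothetical file and keys:

from xdg.IniFile import IniFile

ini = IniFile("example.desktop")                                    # hypothetical file
envs = ini.get("OnlyShowIn", group="Desktop Entry", list=True)     # "GNOME;KDE;" -> ["GNOME", "KDE"]
hidden = ini.get("Hidden", group="Desktop Entry", type="boolean")  # "true" -> True, otherwise False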
@ -0,0 +1,79 @@
|
|||
"""
|
||||
Helper Module for Locale settings
|
||||
|
||||
This module is based on a ROX module (LGPL):
|
||||
|
||||
http://cvs.sourceforge.net/viewcvs.py/rox/ROX-Lib2/python/rox/i18n.py?rev=1.3&view=log
|
||||
"""
|
||||
|
||||
import os
|
||||
from locale import normalize
|
||||
|
||||
regex = r"(\[([a-zA-Z]+)(_[a-zA-Z]+)?(\.[a-zA-Z0-9-]+)?(@[a-zA-Z]+)?\])?"
|
||||
|
||||
def _expand_lang(locale):
|
||||
locale = normalize(locale)
|
||||
COMPONENT_CODESET = 1 << 0
|
||||
COMPONENT_MODIFIER = 1 << 1
|
||||
COMPONENT_TERRITORY = 1 << 2
|
||||
# split up the locale into its base components
|
||||
mask = 0
|
||||
pos = locale.find('@')
|
||||
if pos >= 0:
|
||||
modifier = locale[pos:]
|
||||
locale = locale[:pos]
|
||||
mask |= COMPONENT_MODIFIER
|
||||
else:
|
||||
modifier = ''
|
||||
pos = locale.find('.')
|
||||
codeset = ''
|
||||
if pos >= 0:
|
||||
locale = locale[:pos]
|
||||
pos = locale.find('_')
|
||||
if pos >= 0:
|
||||
territory = locale[pos:]
|
||||
locale = locale[:pos]
|
||||
mask |= COMPONENT_TERRITORY
|
||||
else:
|
||||
territory = ''
|
||||
language = locale
|
||||
ret = []
|
||||
for i in range(mask+1):
|
||||
if not (i & ~mask): # if all components for this combo exist ...
|
||||
val = language
|
||||
if i & COMPONENT_TERRITORY: val += territory
|
||||
if i & COMPONENT_CODESET: val += codeset
|
||||
if i & COMPONENT_MODIFIER: val += modifier
|
||||
ret.append(val)
|
||||
ret.reverse()
|
||||
return ret
|
||||
|
||||
def expand_languages(languages=None):
|
||||
# Get some reasonable defaults for arguments that were not supplied
|
||||
if languages is None:
|
||||
languages = []
|
||||
for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
|
||||
val = os.environ.get(envar)
|
||||
if val:
|
||||
languages = val.split(':')
|
||||
break
|
||||
#if 'C' not in languages:
|
||||
# languages.append('C')
|
||||
|
||||
# now normalize and expand the languages
|
||||
nelangs = []
|
||||
for lang in languages:
|
||||
for nelang in _expand_lang(lang):
|
||||
if nelang not in nelangs:
|
||||
nelangs.append(nelang)
|
||||
return nelangs
|
||||
|
||||
def update(language=None):
|
||||
global langs
|
||||
if language:
|
||||
langs = expand_languages([language])
|
||||
else:
|
||||
langs = expand_languages()
|
||||
|
||||
langs = []
|
||||
update()
|
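expand_languages() turns LANGUAGE/LC_ALL/LC_MESSAGES/LANG into the ordered candidate list used to resolve Name[de_DE]-style keys; this version keeps territory and modifier but drops the codeset. For example, assuming the package imports as "xdg":

from xdg import Locale

print(Locale.expand_languages(["de_DE.UTF-8"]))   # -> ['de_DE', 'de']
Locale.update("de_DE.UTF-8")                      # refreshes the module-level langs list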
File diff suppressed because it is too large
|
@ -0,0 +1,541 @@
|
|||
""" CLass to edit XDG Menus """
|
||||
import os
|
||||
try:
|
||||
import xml.etree.cElementTree as etree
|
||||
except ImportError:
|
||||
import xml.etree.ElementTree as etree
|
||||
|
||||
from .Menu import Menu, MenuEntry, Layout, Separator, XMLMenuBuilder
|
||||
from .BaseDirectory import xdg_config_dirs, xdg_data_dirs
|
||||
from .Exceptions import ParsingError
|
||||
from .Config import setRootMode
|
||||
|
||||
# XML-Cleanups: Move / Exclude
|
||||
# FIXME: proper revert/delete
|
||||
# FIXME: pass AppDirs/DirectoryDirs around in the edit/move functions
|
||||
# FIXME: catch Exceptions
|
||||
# FIXME: copy functions
|
||||
# FIXME: More Layout stuff
|
||||
# FIXME: undo/redo function / remove menu...
|
||||
# FIXME: Advanced MenuEditing Stuff: LegacyDir/MergeFile
|
||||
# Complex Rules/Deleted/OnlyAllocated/AppDirs/DirectoryDirs
|
||||
|
||||
|
||||
class MenuEditor(object):
|
||||
|
||||
def __init__(self, menu=None, filename=None, root=False):
|
||||
self.menu = None
|
||||
self.filename = None
|
||||
self.tree = None
|
||||
self.parser = XMLMenuBuilder()
|
||||
self.parse(menu, filename, root)
|
||||
|
||||
# fix for creating two menus with the same name on the fly
|
||||
self.filenames = []
|
||||
|
||||
def parse(self, menu=None, filename=None, root=False):
|
||||
if root:
|
||||
setRootMode(True)
|
||||
|
||||
if isinstance(menu, Menu):
|
||||
self.menu = menu
|
||||
elif menu:
|
||||
self.menu = self.parser.parse(menu)
|
||||
else:
|
||||
self.menu = self.parser.parse()
|
||||
|
||||
if root:
|
||||
self.filename = self.menu.Filename
|
||||
elif filename:
|
||||
self.filename = filename
|
||||
else:
|
||||
self.filename = os.path.join(xdg_config_dirs[0], "menus", os.path.split(self.menu.Filename)[1])
|
||||
|
||||
try:
|
||||
self.tree = etree.parse(self.filename)
|
||||
except IOError:
|
||||
root = etree.fromstring("""
|
||||
<!DOCTYPE Menu PUBLIC "-//freedesktop//DTD Menu 1.0//EN" "http://standards.freedesktop.org/menu-spec/menu-1.0.dtd">
|
||||
<Menu>
|
||||
<Name>Applications</Name>
|
||||
<MergeFile type="parent">%s</MergeFile>
|
||||
</Menu>
|
||||
""" % self.menu.Filename)
|
||||
self.tree = etree.ElementTree(root)
|
||||
except ParsingError:
|
||||
raise ParsingError('Not a valid .menu file', self.filename)
|
||||
|
||||
#FIXME: is this needed with etree ?
|
||||
self.__remove_whitespace_nodes(self.tree)
|
||||
|
||||
def save(self):
|
||||
self.__saveEntries(self.menu)
|
||||
self.__saveMenu()
|
||||
|
||||
def createMenuEntry(self, parent, name, command=None, genericname=None, comment=None, icon=None, terminal=None, after=None, before=None):
|
||||
menuentry = MenuEntry(self.__getFileName(name, ".desktop"))
|
||||
menuentry = self.editMenuEntry(menuentry, name, genericname, comment, command, icon, terminal)
|
||||
|
||||
self.__addEntry(parent, menuentry, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menuentry
|
||||
|
||||
def createMenu(self, parent, name, genericname=None, comment=None, icon=None, after=None, before=None):
|
||||
menu = Menu()
|
||||
|
||||
menu.Parent = parent
|
||||
menu.Depth = parent.Depth + 1
|
||||
menu.Layout = parent.DefaultLayout
|
||||
menu.DefaultLayout = parent.DefaultLayout
|
||||
|
||||
menu = self.editMenu(menu, name, genericname, comment, icon)
|
||||
|
||||
self.__addEntry(parent, menu, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menu
|
||||
|
||||
def createSeparator(self, parent, after=None, before=None):
|
||||
separator = Separator(parent)
|
||||
|
||||
self.__addEntry(parent, separator, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return separator
|
||||
|
||||
def moveMenuEntry(self, menuentry, oldparent, newparent, after=None, before=None):
|
||||
self.__deleteEntry(oldparent, menuentry, after, before)
|
||||
self.__addEntry(newparent, menuentry, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menuentry
|
||||
|
||||
def moveMenu(self, menu, oldparent, newparent, after=None, before=None):
|
||||
self.__deleteEntry(oldparent, menu, after, before)
|
||||
self.__addEntry(newparent, menu, after, before)
|
||||
|
||||
root_menu = self.__getXmlMenu(self.menu.Name)
|
||||
if oldparent.getPath(True) != newparent.getPath(True):
|
||||
self.__addXmlMove(root_menu, os.path.join(oldparent.getPath(True), menu.Name), os.path.join(newparent.getPath(True), menu.Name))
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menu
|
||||
|
||||
def moveSeparator(self, separator, parent, after=None, before=None):
|
||||
self.__deleteEntry(parent, separator, after, before)
|
||||
self.__addEntry(parent, separator, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return separator
|
||||
|
||||
def copyMenuEntry(self, menuentry, oldparent, newparent, after=None, before=None):
|
||||
self.__addEntry(newparent, menuentry, after, before)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return menuentry
|
||||
|
||||
def editMenuEntry(self, menuentry, name=None, genericname=None, comment=None, command=None, icon=None, terminal=None, nodisplay=None, hidden=None):
|
||||
deskentry = menuentry.DesktopEntry
|
||||
|
||||
if name:
|
||||
if not deskentry.hasKey("Name"):
|
||||
deskentry.set("Name", name)
|
||||
deskentry.set("Name", name, locale=True)
|
||||
if comment:
|
||||
if not deskentry.hasKey("Comment"):
|
||||
deskentry.set("Comment", comment)
|
||||
deskentry.set("Comment", comment, locale=True)
|
||||
if genericname:
|
||||
if not deskentry.hasKey("GenericName"):
|
||||
deskentry.set("GenericName", genericname)
|
||||
deskentry.set("GenericName", genericname, locale=True)
|
||||
if command:
|
||||
deskentry.set("Exec", command)
|
||||
if icon:
|
||||
deskentry.set("Icon", icon)
|
||||
|
||||
if terminal:
|
||||
deskentry.set("Terminal", "true")
|
||||
elif not terminal:
|
||||
deskentry.set("Terminal", "false")
|
||||
|
||||
if nodisplay is True:
|
||||
deskentry.set("NoDisplay", "true")
|
||||
elif nodisplay is False:
|
||||
deskentry.set("NoDisplay", "false")
|
||||
|
||||
if hidden is True:
|
||||
deskentry.set("Hidden", "true")
|
||||
elif hidden is False:
|
||||
deskentry.set("Hidden", "false")
|
||||
|
||||
menuentry.updateAttributes()
|
||||
|
||||
if len(menuentry.Parents) > 0:
|
||||
self.menu.sort()
|
||||
|
||||
return menuentry
|
||||
|
||||
def editMenu(self, menu, name=None, genericname=None, comment=None, icon=None, nodisplay=None, hidden=None):
|
||||
# Hack for legacy dirs
|
||||
if isinstance(menu.Directory, MenuEntry) and menu.Directory.Filename == ".directory":
|
||||
xml_menu = self.__getXmlMenu(menu.getPath(True, True))
|
||||
self.__addXmlTextElement(xml_menu, 'Directory', menu.Name + ".directory")
|
||||
menu.Directory.setAttributes(menu.Name + ".directory")
|
||||
# Hack for New Entries
|
||||
elif not isinstance(menu.Directory, MenuEntry):
|
||||
if not name:
|
||||
name = menu.Name
|
||||
filename = self.__getFileName(name, ".directory").replace("/", "")
|
||||
if not menu.Name:
|
||||
menu.Name = filename.replace(".directory", "")
|
||||
xml_menu = self.__getXmlMenu(menu.getPath(True, True))
|
||||
self.__addXmlTextElement(xml_menu, 'Directory', filename)
|
||||
menu.Directory = MenuEntry(filename)
|
||||
|
||||
deskentry = menu.Directory.DesktopEntry
|
||||
|
||||
if name:
|
||||
if not deskentry.hasKey("Name"):
|
||||
deskentry.set("Name", name)
|
||||
deskentry.set("Name", name, locale=True)
|
||||
if genericname:
|
||||
if not deskentry.hasKey("GenericName"):
|
||||
deskentry.set("GenericName", genericname)
|
||||
deskentry.set("GenericName", genericname, locale=True)
|
||||
if comment:
|
||||
if not deskentry.hasKey("Comment"):
|
||||
deskentry.set("Comment", comment)
|
||||
deskentry.set("Comment", comment, locale=True)
|
||||
if icon:
|
||||
deskentry.set("Icon", icon)
|
||||
|
||||
if nodisplay is True:
|
||||
deskentry.set("NoDisplay", "true")
|
||||
elif nodisplay is False:
|
||||
deskentry.set("NoDisplay", "false")
|
||||
|
||||
if hidden is True:
|
||||
deskentry.set("Hidden", "true")
|
||||
elif hidden is False:
|
||||
deskentry.set("Hidden", "false")
|
||||
|
||||
menu.Directory.updateAttributes()
|
||||
|
||||
if isinstance(menu.Parent, Menu):
|
||||
self.menu.sort()
|
||||
|
||||
return menu
|
||||
|
||||
def hideMenuEntry(self, menuentry):
|
||||
self.editMenuEntry(menuentry, nodisplay=True)
|
||||
|
||||
def unhideMenuEntry(self, menuentry):
|
||||
self.editMenuEntry(menuentry, nodisplay=False, hidden=False)
|
||||
|
||||
def hideMenu(self, menu):
|
||||
self.editMenu(menu, nodisplay=True)
|
||||
|
||||
def unhideMenu(self, menu):
|
||||
self.editMenu(menu, nodisplay=False, hidden=False)
|
||||
xml_menu = self.__getXmlMenu(menu.getPath(True, True), False)
|
||||
deleted = xml_menu.findall('Deleted')
|
||||
not_deleted = xml_menu.findall('NotDeleted')
|
||||
for node in deleted + not_deleted:
|
||||
xml_menu.remove(node)
|
||||
|
||||
def deleteMenuEntry(self, menuentry):
|
||||
if self.getAction(menuentry) == "delete":
|
||||
self.__deleteFile(menuentry.DesktopEntry.filename)
|
||||
for parent in menuentry.Parents:
|
||||
self.__deleteEntry(parent, menuentry)
|
||||
self.menu.sort()
|
||||
return menuentry
|
||||
|
||||
def revertMenuEntry(self, menuentry):
|
||||
if self.getAction(menuentry) == "revert":
|
||||
self.__deleteFile(menuentry.DesktopEntry.filename)
|
||||
menuentry.Original.Parents = []
|
||||
for parent in menuentry.Parents:
|
||||
index = parent.Entries.index(menuentry)
|
||||
parent.Entries[index] = menuentry.Original
|
||||
index = parent.MenuEntries.index(menuentry)
|
||||
parent.MenuEntries[index] = menuentry.Original
|
||||
menuentry.Original.Parents.append(parent)
|
||||
self.menu.sort()
|
||||
return menuentry
|
||||
|
||||
def deleteMenu(self, menu):
|
||||
if self.getAction(menu) == "delete":
|
||||
self.__deleteFile(menu.Directory.DesktopEntry.filename)
|
||||
self.__deleteEntry(menu.Parent, menu)
|
||||
xml_menu = self.__getXmlMenu(menu.getPath(True, True))
|
||||
parent = self.__get_parent_node(xml_menu)
|
||||
parent.remove(xml_menu)
|
||||
self.menu.sort()
|
||||
return menu
|
||||
|
||||
def revertMenu(self, menu):
|
||||
if self.getAction(menu) == "revert":
|
||||
self.__deleteFile(menu.Directory.DesktopEntry.filename)
|
||||
menu.Directory = menu.Directory.Original
|
||||
self.menu.sort()
|
||||
return menu
|
||||
|
||||
def deleteSeparator(self, separator):
|
||||
self.__deleteEntry(separator.Parent, separator, after=True)
|
||||
|
||||
self.menu.sort()
|
||||
|
||||
return separator
|
||||
|
||||
""" Private Stuff """
|
||||
def getAction(self, entry):
|
||||
if isinstance(entry, Menu):
|
||||
if not isinstance(entry.Directory, MenuEntry):
|
||||
return "none"
|
||||
elif entry.Directory.getType() == "Both":
|
||||
return "revert"
|
||||
elif entry.Directory.getType() == "User" and (
|
||||
len(entry.Submenus) + len(entry.MenuEntries)
|
||||
) == 0:
|
||||
return "delete"
|
||||
|
||||
elif isinstance(entry, MenuEntry):
|
||||
if entry.getType() == "Both":
|
||||
return "revert"
|
||||
elif entry.getType() == "User":
|
||||
return "delete"
|
||||
else:
|
||||
return "none"
|
||||
|
||||
return "none"
|
||||
|
||||
def __saveEntries(self, menu):
|
||||
if not menu:
|
||||
menu = self.menu
|
||||
if isinstance(menu.Directory, MenuEntry):
|
||||
menu.Directory.save()
|
||||
for entry in menu.getEntries(hidden=True):
|
||||
if isinstance(entry, MenuEntry):
|
||||
entry.save()
|
||||
elif isinstance(entry, Menu):
|
||||
self.__saveEntries(entry)
|
||||
|
||||
def __saveMenu(self):
|
||||
if not os.path.isdir(os.path.dirname(self.filename)):
|
||||
os.makedirs(os.path.dirname(self.filename))
|
||||
self.tree.write(self.filename, encoding='utf-8')
|
||||
|
||||
def __getFileName(self, name, extension):
|
||||
postfix = 0
|
||||
while 1:
|
||||
if postfix == 0:
|
||||
filename = name + extension
|
||||
else:
|
||||
filename = name + "-" + str(postfix) + extension
|
||||
if extension == ".desktop":
|
||||
dir = "applications"
|
||||
elif extension == ".directory":
|
||||
dir = "desktop-directories"
|
||||
if not filename in self.filenames and not os.path.isfile(
|
||||
os.path.join(xdg_data_dirs[0], dir, filename)
|
||||
):
|
||||
self.filenames.append(filename)
|
||||
break
|
||||
else:
|
||||
postfix += 1
|
||||
|
||||
return filename
|
||||
|
||||
def __getXmlMenu(self, path, create=True, element=None):
|
||||
# FIXME: we should also return the menu's parent,
|
||||
# to avoid looking for it later on
|
||||
# @see Element.getiterator()
|
||||
if not element:
|
||||
element = self.tree
|
||||
|
||||
if "/" in path:
|
||||
(name, path) = path.split("/", 1)
|
||||
else:
|
||||
name = path
|
||||
path = ""
|
||||
|
||||
found = None
|
||||
for node in element.findall("Menu"):
|
||||
name_node = node.find('Name')
|
||||
if name_node.text == name:
|
||||
if path:
|
||||
found = self.__getXmlMenu(path, create, node)
|
||||
else:
|
||||
found = node
|
||||
if found:
|
||||
break
|
||||
if not found and create:
|
||||
node = self.__addXmlMenuElement(element, name)
|
||||
if path:
|
||||
found = self.__getXmlMenu(path, create, node)
|
||||
else:
|
||||
found = node
|
||||
|
||||
return found
|
||||
|
||||
def __addXmlMenuElement(self, element, name):
|
||||
menu_node = etree.SubElement(element, 'Menu')
|
||||
name_node = etree.SubElement(menu_node, 'Name')
|
||||
name_node.text = name
|
||||
return menu_node
|
||||
|
||||
def __addXmlTextElement(self, element, name, text):
|
||||
node = etree.SubElement(element, name)
|
||||
node.text = text
|
||||
return node
|
||||
|
||||
def __addXmlFilename(self, element, filename, type_="Include"):
|
||||
# remove old filenames
|
||||
includes = element.findall('Include')
|
||||
excludes = element.findall('Exclude')
|
||||
rules = includes + excludes
|
||||
for rule in rules:
|
||||
#FIXME: this finds only Rules whose FIRST child is a Filename element
|
||||
if rule[0].tag == "Filename" and rule[0].text == filename:
|
||||
element.remove(rule)
|
||||
# shouldn't it remove all occurrences, like the following:
|
||||
#filename_nodes = rule.findall('.//Filename'):
|
||||
#for fn in filename_nodes:
|
||||
#if fn.text == filename:
|
||||
##element.remove(rule)
|
||||
#parent = self.__get_parent_node(fn)
|
||||
#parent.remove(fn)
|
||||
|
||||
# add new filename
|
||||
node = etree.SubElement(element, type_)
|
||||
self.__addXmlTextElement(node, 'Filename', filename)
|
||||
return node
|
||||
|
||||
def __addXmlMove(self, element, old, new):
|
||||
node = etree.SubElement("Move", element)
|
||||
self.__addXmlTextElement(node, 'Old', old)
|
||||
self.__addXmlTextElement(node, 'New', new)
|
||||
return node
|
||||
|
||||
def __addXmlLayout(self, element, layout):
|
||||
# remove old layout
|
||||
for node in element.findall("Layout"):
|
||||
element.remove(node)
|
||||
|
||||
# add new layout
|
||||
node = etree.SubElement("Layout", element)
|
||||
for order in layout.order:
|
||||
if order[0] == "Separator":
|
||||
child = etree.SubElement("Separator", node)
|
||||
elif order[0] == "Filename":
|
||||
child = self.__addXmlTextElement(node, "Filename", order[1])
|
||||
elif order[0] == "Menuname":
|
||||
child = self.__addXmlTextElement(node, "Menuname", order[1])
|
||||
elif order[0] == "Merge":
|
||||
child = etree.SubElement("Merge", node)
|
||||
child.attrib["type"] = order[1]
|
||||
return node
|
||||
|
||||
def __addLayout(self, parent):
|
||||
layout = Layout()
|
||||
layout.order = []
|
||||
layout.show_empty = parent.Layout.show_empty
|
||||
layout.inline = parent.Layout.inline
|
||||
layout.inline_header = parent.Layout.inline_header
|
||||
layout.inline_alias = parent.Layout.inline_alias
|
||||
layout.inline_limit = parent.Layout.inline_limit
|
||||
|
||||
layout.order.append(["Merge", "menus"])
|
||||
for entry in parent.Entries:
|
||||
if isinstance(entry, Menu):
|
||||
layout.parseMenuname(entry.Name)
|
||||
elif isinstance(entry, MenuEntry):
|
||||
layout.parseFilename(entry.DesktopFileID)
|
||||
elif isinstance(entry, Separator):
|
||||
layout.parseSeparator()
|
||||
layout.order.append(["Merge", "files"])
|
||||
|
||||
parent.Layout = layout
|
||||
|
||||
return layout
|
||||
|
||||
def __addEntry(self, parent, entry, after=None, before=None):
|
||||
if after or before:
|
||||
if after:
|
||||
index = parent.Entries.index(after) + 1
|
||||
elif before:
|
||||
index = parent.Entries.index(before)
|
||||
parent.Entries.insert(index, entry)
|
||||
else:
|
||||
parent.Entries.append(entry)
|
||||
|
||||
xml_parent = self.__getXmlMenu(parent.getPath(True, True))
|
||||
|
||||
if isinstance(entry, MenuEntry):
|
||||
parent.MenuEntries.append(entry)
|
||||
entry.Parents.append(parent)
|
||||
self.__addXmlFilename(xml_parent, entry.DesktopFileID, "Include")
|
||||
elif isinstance(entry, Menu):
|
||||
parent.addSubmenu(entry)
|
||||
|
||||
if after or before:
|
||||
self.__addLayout(parent)
|
||||
self.__addXmlLayout(xml_parent, parent.Layout)
|
||||
|
||||
def __deleteEntry(self, parent, entry, after=None, before=None):
|
||||
parent.Entries.remove(entry)
|
||||
|
||||
xml_parent = self.__getXmlMenu(parent.getPath(True, True))
|
||||
|
||||
if isinstance(entry, MenuEntry):
|
||||
entry.Parents.remove(parent)
|
||||
parent.MenuEntries.remove(entry)
|
||||
self.__addXmlFilename(xml_parent, entry.DesktopFileID, "Exclude")
|
||||
elif isinstance(entry, Menu):
|
||||
parent.Submenus.remove(entry)
|
||||
|
||||
if after or before:
|
||||
self.__addLayout(parent)
|
||||
self.__addXmlLayout(xml_parent, parent.Layout)
|
||||
|
||||
def __deleteFile(self, filename):
|
||||
try:
|
||||
os.remove(filename)
|
||||
except OSError:
|
||||
pass
|
||||
try:
|
||||
self.filenames.remove(filename)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def __remove_whitespace_nodes(self, node):
|
||||
for child in node:
|
||||
text = (child.text or '').strip()
|
||||
if not text:
|
||||
child.text = ''
|
||||
tail = (child.tail or '').strip()
|
||||
if not tail:
|
||||
child.tail = ''
|
||||
if len(child):
|
||||
self.__remove_whitespace_nodes(child)
|
||||
|
||||
def __get_parent_node(self, node):
|
||||
# elements in ElementTree don't hold a reference to their parent
|
||||
for parent, child in self.__iter_parent():
|
||||
if child is node:
|
||||
return parent
|
||||
|
||||
def __iter_parent(self):
|
||||
for parent in self.tree.iter():
|
||||
for child in parent:
|
||||
yield parent, child
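A hedged sketch of how this editor class is typically driven (the import path and the menu names are assumptions; save() writes the generated .desktop/.directory entries plus the user's .menu file):
# Hedged usage sketch; the import path below is hypothetical.
from core.utils.xdg.MenuEditor import MenuEditor

editor = MenuEditor()                 # parse the default applications menu
root = editor.menu                    # root Menu object

games = editor.createMenu(root, "My Games", comment="Locally added games")
entry = editor.createMenuEntry(games, "SuperTux", command="supertux2", icon="supertux")
editor.hideMenuEntry(entry)           # sets NoDisplay=true on the entry
editor.save()                         # persist entries and the edited menu XML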
|
|
@ -0,0 +1,780 @@
|
|||
"""
|
||||
This module is based on a rox module (LGPL):
|
||||
|
||||
http://cvs.sourceforge.net/viewcvs.py/rox/ROX-Lib2/python/rox/mime.py?rev=1.21&view=log
|
||||
|
||||
This module provides access to the shared MIME database.
|
||||
|
||||
types is a dictionary of all known MIME types, indexed by the type name, e.g.
|
||||
types['application/x-python']
|
||||
|
||||
Applications can install information about MIME types by storing an
|
||||
XML file as <MIME>/packages/<application>.xml and running the
|
||||
update-mime-database command, which is provided by the freedesktop.org
|
||||
shared mime database package.
|
||||
|
||||
See http://www.freedesktop.org/standards/shared-mime-info-spec/ for
|
||||
information about the format of these files.
|
||||
|
||||
(based on version 0.13)
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
import sys
|
||||
import fnmatch
|
||||
|
||||
from . import BaseDirectory, Locale
|
||||
|
||||
from .dom import minidom, XML_NAMESPACE
|
||||
from collections import defaultdict
|
||||
|
||||
FREE_NS = 'http://www.freedesktop.org/standards/shared-mime-info'
|
||||
|
||||
types = {} # Maps MIME names to type objects
|
||||
|
||||
exts = None # Maps extensions to types
|
||||
globs = None # List of (glob, type) pairs
|
||||
literals = None # Maps literal names to types
|
||||
magic = None
|
||||
|
||||
PY3 = (sys.version_info[0] >= 3)
|
||||
|
||||
def _get_node_data(node):
|
||||
"""Get text of XML node"""
|
||||
return ''.join([n.nodeValue for n in node.childNodes]).strip()
|
||||
|
||||
def lookup(media, subtype = None):
|
||||
"""Get the MIMEtype object for the given type.
|
||||
|
||||
This remains for backwards compatibility; calling MIMEtype now does
|
||||
the same thing.
|
||||
|
||||
The name can either be passed as one part ('text/plain'), or as two
|
||||
('text', 'plain').
|
||||
"""
|
||||
return MIMEtype(media, subtype)
|
||||
|
||||
class MIMEtype(object):
|
||||
"""Class holding data about a MIME type.
|
||||
|
||||
Calling the class will return a cached instance, so there is only one
|
||||
instance for each MIME type. The name can either be passed as one part
|
||||
('text/plain'), or as two ('text', 'plain').
|
||||
"""
|
||||
def __new__(cls, media, subtype=None):
|
||||
if subtype is None and '/' in media:
|
||||
media, subtype = media.split('/', 1)
|
||||
assert '/' not in subtype
|
||||
media = media.lower()
|
||||
subtype = subtype.lower()
|
||||
|
||||
try:
|
||||
return types[(media, subtype)]
|
||||
except KeyError:
|
||||
mtype = super(MIMEtype, cls).__new__(cls)
|
||||
mtype._init(media, subtype)
|
||||
types[(media, subtype)] = mtype
|
||||
return mtype
|
||||
|
||||
# If this is done in __init__, it is automatically called again each time
|
||||
# the MIMEtype is returned by __new__, which we don't want. So we call it
|
||||
# explicitly only when we construct a new instance.
|
||||
def _init(self, media, subtype):
|
||||
self.media = media
|
||||
self.subtype = subtype
|
||||
self._comment = None
|
||||
|
||||
def _load(self):
|
||||
"Loads comment for current language. Use get_comment() instead."
|
||||
resource = os.path.join('mime', self.media, self.subtype + '.xml')
|
||||
for path in BaseDirectory.load_data_paths(resource):
|
||||
doc = minidom.parse(path)
|
||||
if doc is None:
|
||||
continue
|
||||
for comment in doc.documentElement.getElementsByTagNameNS(FREE_NS, 'comment'):
|
||||
lang = comment.getAttributeNS(XML_NAMESPACE, 'lang') or 'en'
|
||||
goodness = 1 + (lang in Locale.langs)
|
||||
if goodness > self._comment[0]:
|
||||
self._comment = (goodness, _get_node_data(comment))
|
||||
if goodness == 2: return
|
||||
|
||||
# FIXME: add get_icon method
|
||||
def get_comment(self):
|
||||
"""Returns comment for current language, loading it if needed."""
|
||||
# Should we ever reload?
|
||||
if self._comment is None:
|
||||
self._comment = (0, str(self))
|
||||
self._load()
|
||||
return self._comment[1]
|
||||
|
||||
def canonical(self):
|
||||
"""Returns the canonical MimeType object if this is an alias."""
|
||||
update_cache()
|
||||
s = str(self)
|
||||
if s in aliases:
|
||||
return lookup(aliases[s])
|
||||
return self
|
||||
|
||||
def inherits_from(self):
|
||||
"""Returns a set of Mime types which this inherits from."""
|
||||
update_cache()
|
||||
return set(lookup(t) for t in inheritance[str(self)])
|
||||
|
||||
def __str__(self):
|
||||
return self.media + '/' + self.subtype
|
||||
|
||||
def __repr__(self):
|
||||
return 'MIMEtype(%r, %r)' % (self.media, self.subtype)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.media) ^ hash(self.subtype)
|
||||
|
||||
class UnknownMagicRuleFormat(ValueError):
|
||||
pass
|
||||
|
||||
class DiscardMagicRules(Exception):
|
||||
"Raised when __NOMAGIC__ is found, and caught to discard previous rules."
|
||||
pass
|
||||
|
||||
class MagicRule:
|
||||
also = None
|
||||
|
||||
def __init__(self, start, value, mask, word, range):
|
||||
self.start = start
|
||||
self.value = value
|
||||
self.mask = mask
|
||||
self.word = word
|
||||
self.range = range
|
||||
|
||||
rule_ending_re = re.compile(br'(?:~(\d+))?(?:\+(\d+))?\n$')
|
||||
|
||||
@classmethod
|
||||
def from_file(cls, f):
|
||||
"""Read a rule from the binary magics file. Returns a 2-tuple of
|
||||
the nesting depth and the MagicRule."""
|
||||
line = f.readline()
|
||||
#print line
|
||||
|
||||
# [indent] '>'
|
||||
nest_depth, line = line.split(b'>', 1)
|
||||
nest_depth = int(nest_depth) if nest_depth else 0
|
||||
|
||||
# start-offset '='
|
||||
start, line = line.split(b'=', 1)
|
||||
start = int(start)
|
||||
|
||||
if line == b'__NOMAGIC__\n':
|
||||
raise DiscardMagicRules
|
||||
|
||||
# value length (2 bytes, big endian)
|
||||
if sys.version_info[0] >= 3:
|
||||
lenvalue = int.from_bytes(line[:2], byteorder='big')
|
||||
else:
|
||||
lenvalue = (ord(line[0])<<8)+ord(line[1])
|
||||
line = line[2:]
|
||||
|
||||
# value
|
||||
# This can contain newlines, so we may need to read more lines
|
||||
while len(line) <= lenvalue:
|
||||
line += f.readline()
|
||||
value, line = line[:lenvalue], line[lenvalue:]
|
||||
|
||||
# ['&' mask]
|
||||
if line.startswith(b'&'):
|
||||
# This can contain newlines, so we may need to read more lines
|
||||
while len(line) <= lenvalue:
|
||||
line += f.readline()
|
||||
mask, line = line[1:lenvalue+1], line[lenvalue+1:]
|
||||
else:
|
||||
mask = None
|
||||
|
||||
# ['~' word-size] ['+' range-length]
|
||||
ending = cls.rule_ending_re.match(line)
|
||||
if not ending:
|
||||
# Per the spec, this will be caught and ignored, to allow
|
||||
# for future extensions.
|
||||
raise UnknownMagicRuleFormat(repr(line))
|
||||
|
||||
word, range = ending.groups()
|
||||
word = int(word) if (word is not None) else 1
|
||||
range = int(range) if (range is not None) else 1
|
||||
|
||||
return nest_depth, cls(start, value, mask, word, range)
|
||||
|
||||
def maxlen(self):
|
||||
l = self.start + len(self.value) + self.range
|
||||
if self.also:
|
||||
return max(l, self.also.maxlen())
|
||||
return l
|
||||
|
||||
def match(self, buffer):
|
||||
if self.match0(buffer):
|
||||
if self.also:
|
||||
return self.also.match(buffer)
|
||||
return True
|
||||
|
||||
def match0(self, buffer):
|
||||
l=len(buffer)
|
||||
lenvalue = len(self.value)
|
||||
for o in range(self.range):
|
||||
s=self.start+o
|
||||
e=s+lenvalue
|
||||
if l<e:
|
||||
return False
|
||||
if self.mask:
|
||||
test=''
|
||||
for i in range(lenvalue):
|
||||
if PY3:
|
||||
c = buffer[s+i] & self.mask[i]
|
||||
else:
|
||||
c = ord(buffer[s+i]) & ord(self.mask[i])
|
||||
test += chr(c)
|
||||
else:
|
||||
test = buffer[s:e]
|
||||
|
||||
if test==self.value:
|
||||
return True
|
||||
|
||||
def __repr__(self):
|
||||
return 'MagicRule(start=%r, value=%r, mask=%r, word=%r, range=%r)' %(
|
||||
self.start,
|
||||
self.value,
|
||||
self.mask,
|
||||
self.word,
|
||||
self.range)
|
||||
|
||||
|
||||
class MagicMatchAny(object):
|
||||
"""Match any of a set of magic rules.
|
||||
|
||||
This has a similar interface to MagicRule objects (i.e. its match() and
|
||||
maxlen() methods), to allow for duck typing.
|
||||
"""
|
||||
def __init__(self, rules):
|
||||
self.rules = rules
|
||||
|
||||
def match(self, buffer):
|
||||
return any(r.match(buffer) for r in self.rules)
|
||||
|
||||
def maxlen(self):
|
||||
return max(r.maxlen() for r in self.rules)
|
||||
|
||||
@classmethod
|
||||
def from_file(cls, f):
|
||||
"""Read a set of rules from the binary magic file."""
|
||||
c=f.read(1)
|
||||
f.seek(-1, 1)
|
||||
depths_rules = []
|
||||
while c and c != b'[':
|
||||
try:
|
||||
depths_rules.append(MagicRule.from_file(f))
|
||||
except UnknownMagicRuleFormat:
|
||||
# Ignored to allow for extensions to the rule format.
|
||||
pass
|
||||
c=f.read(1)
|
||||
if c:
|
||||
f.seek(-1, 1)
|
||||
|
||||
# Build the rule tree
|
||||
tree = [] # (rule, [(subrule,[subsubrule,...]), ...])
|
||||
insert_points = {0:tree}
|
||||
for depth, rule in depths_rules:
|
||||
subrules = []
|
||||
insert_points[depth].append((rule, subrules))
|
||||
insert_points[depth+1] = subrules
|
||||
|
||||
return cls.from_rule_tree(tree)
|
||||
|
||||
@classmethod
|
||||
def from_rule_tree(cls, tree):
|
||||
"""From a nested list of (rule, subrules) pairs, build a MagicMatchAny
|
||||
instance, recursing down the tree.
|
||||
|
||||
Where there's only one top-level rule, this is returned directly,
|
||||
to simplify the nested structure. Returns None if no rules were read.
|
||||
"""
|
||||
rules = []
|
||||
for rule, subrules in tree:
|
||||
if subrules:
|
||||
rule.also = cls.from_rule_tree(subrules)
|
||||
rules.append(rule)
|
||||
|
||||
if len(rules)==0:
|
||||
return None
|
||||
if len(rules)==1:
|
||||
return rules[0]
|
||||
return cls(rules)
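For orientation, a sketch of the on-disk layout these readers consume; one section of an update-mime-database "magic" cache, with illustrative bytes only:
# Illustrative only: one section as parsed by MagicDB.merge_file below.
example_section = (
    b"[50:image/png]\n"              # section heading: priority 50, type image/png
    b">0=\x00\x08\x89PNG\r\n\x1a\n"  # depth omitted (0), offset 0, 8-byte PNG signature
    b"\n"                            # rule terminator (no mask / word-size / range)
)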
|
||||
|
||||
class MagicDB:
|
||||
def __init__(self):
|
||||
self.bytype = defaultdict(list) # mimetype -> [(priority, rule), ...]
|
||||
|
||||
def merge_file(self, fname):
|
||||
"""Read a magic binary file, and add its rules to this MagicDB."""
|
||||
with open(fname, 'rb') as f:
|
||||
line = f.readline()
|
||||
if line != b'MIME-Magic\0\n':
|
||||
raise IOError('Not a MIME magic file')
|
||||
|
||||
while True:
|
||||
shead = f.readline().decode('ascii')
|
||||
#print(shead)
|
||||
if not shead:
|
||||
break
|
||||
if shead[0] != '[' or shead[-2:] != ']\n':
|
||||
raise ValueError('Malformed section heading', shead)
|
||||
pri, tname = shead[1:-2].split(':')
|
||||
#print shead[1:-2]
|
||||
pri = int(pri)
|
||||
mtype = lookup(tname)
|
||||
try:
|
||||
rule = MagicMatchAny.from_file(f)
|
||||
except DiscardMagicRules:
|
||||
self.bytype.pop(mtype, None)
|
||||
rule = MagicMatchAny.from_file(f)
|
||||
if rule is None:
|
||||
continue
|
||||
#print rule
|
||||
|
||||
self.bytype[mtype].append((pri, rule))
|
||||
|
||||
def finalise(self):
|
||||
"""Prepare the MagicDB for matching.
|
||||
|
||||
This should be called after all rules have been merged into it.
|
||||
"""
|
||||
maxlen = 0
|
||||
self.alltypes = [] # (priority, mimetype, rule)
|
||||
|
||||
for mtype, rules in self.bytype.items():
|
||||
for pri, rule in rules:
|
||||
self.alltypes.append((pri, mtype, rule))
|
||||
maxlen = max(maxlen, rule.maxlen())
|
||||
|
||||
self.maxlen = maxlen # Number of bytes to read from files
|
||||
self.alltypes.sort(key=lambda x: x[0], reverse=True)
|
||||
|
||||
def match_data(self, data, max_pri=100, min_pri=0, possible=None):
|
||||
"""Do magic sniffing on some bytes.
|
||||
|
||||
max_pri & min_pri can be used to specify the maximum & minimum priority
|
||||
rules to look for. possible can be a list of mimetypes to check, or None
|
||||
(the default) to check all mimetypes until one matches.
|
||||
|
||||
Returns the MIMEtype found, or None if no entries match.
|
||||
"""
|
||||
if possible is not None:
|
||||
types = []
|
||||
for mt in possible:
|
||||
for pri, rule in self.bytype[mt]:
|
||||
types.append((pri, mt, rule))
|
||||
types.sort(key=lambda x: x[0])
|
||||
else:
|
||||
types = self.alltypes
|
||||
|
||||
for priority, mimetype, rule in types:
|
||||
#print priority, max_pri, min_pri
|
||||
if priority > max_pri:
|
||||
continue
|
||||
if priority < min_pri:
|
||||
break
|
||||
|
||||
if rule.match(data):
|
||||
return mimetype
|
||||
|
||||
def match(self, path, max_pri=100, min_pri=0, possible=None):
|
||||
"""Read data from the file and do magic sniffing on it.
|
||||
|
||||
max_pri & min_pri can be used to specify the maximum & minimum priority
|
||||
rules to look for. possible can be a list of mimetypes to check, or None
|
||||
(the default) to check all mimetypes until one matches.
|
||||
|
||||
Returns the MIMEtype found, or None if no entries match. Raises IOError
|
||||
if the file can't be opened.
|
||||
"""
|
||||
with open(path, 'rb') as f:
|
||||
buf = f.read(self.maxlen)
|
||||
return self.match_data(buf, max_pri, min_pri, possible)
|
||||
|
||||
def __repr__(self):
|
||||
return '<MagicDB (%d types)>' % len(self.alltypes)
|
||||
|
||||
class GlobDB(object):
|
||||
def __init__(self):
|
||||
"""Prepare the GlobDB. It can't actually be used until .finalise() is
|
||||
called, but merge_file() can be used to add data before that.
|
||||
"""
|
||||
# Maps mimetype to {(weight, glob, flags), ...}
|
||||
self.allglobs = defaultdict(set)
|
||||
|
||||
def merge_file(self, path):
|
||||
"""Loads name matching information from a globs2 file."""#
|
||||
allglobs = self.allglobs
|
||||
with open(path) as f:
|
||||
for line in f:
|
||||
if line.startswith('#'): continue # Comment
|
||||
|
||||
fields = line[:-1].split(':')
|
||||
weight, type_name, pattern = fields[:3]
|
||||
weight = int(weight)
|
||||
mtype = lookup(type_name)
|
||||
if len(fields) > 3:
|
||||
flags = fields[3].split(',')
|
||||
else:
|
||||
flags = ()
|
||||
|
||||
if pattern == '__NOGLOBS__':
|
||||
# This signals to discard any previous globs
|
||||
allglobs.pop(mtype, None)
|
||||
continue
|
||||
|
||||
allglobs[mtype].add((weight, pattern, tuple(flags)))
|
||||
|
||||
def finalise(self):
|
||||
"""Prepare the GlobDB for matching.
|
||||
|
||||
This should be called after all files have been merged into it.
|
||||
"""
|
||||
self.exts = defaultdict(list) # Maps extensions to [(type, weight),...]
|
||||
self.cased_exts = defaultdict(list)
|
||||
self.globs = [] # List of (regex, type, weight) triplets
|
||||
self.literals = {} # Maps literal names to (type, weight)
|
||||
self.cased_literals = {}
|
||||
|
||||
for mtype, globs in self.allglobs.items():
|
||||
mtype = mtype.canonical()
|
||||
for weight, pattern, flags in globs:
|
||||
|
||||
cased = 'cs' in flags
|
||||
|
||||
if pattern.startswith('*.'):
|
||||
# *.foo -- extension pattern
|
||||
rest = pattern[2:]
|
||||
if not ('*' in rest or '[' in rest or '?' in rest):
|
||||
if cased:
|
||||
self.cased_exts[rest].append((mtype, weight))
|
||||
else:
|
||||
self.exts[rest.lower()].append((mtype, weight))
|
||||
continue
|
||||
|
||||
if ('*' in pattern or '[' in pattern or '?' in pattern):
|
||||
# Translate the glob pattern to a regex & compile it
|
||||
re_flags = 0 if cased else re.I
|
||||
pattern = re.compile(fnmatch.translate(pattern), flags=re_flags)
|
||||
self.globs.append((pattern, mtype, weight))
|
||||
else:
|
||||
# No wildcards - literal pattern
|
||||
if cased:
|
||||
self.cased_literals[pattern] = (mtype, weight)
|
||||
else:
|
||||
self.literals[pattern.lower()] = (mtype, weight)
|
||||
|
||||
# Sort globs by weight & length
|
||||
self.globs.sort(reverse=True, key=lambda x: (x[2], len(x[0].pattern)) )
|
||||
|
||||
def first_match(self, path):
|
||||
"""Return the first match found for a given path, or None if no match
|
||||
is found."""
|
||||
try:
|
||||
return next(self._match_path(path))[0]
|
||||
except StopIteration:
|
||||
return None
|
||||
|
||||
def all_matches(self, path):
|
||||
"""Return a list of (MIMEtype, glob weight) pairs for the path."""
|
||||
return list(self._match_path(path))
|
||||
|
||||
def _match_path(self, path):
|
||||
"""Yields pairs of (mimetype, glob weight)."""
|
||||
leaf = os.path.basename(path)
|
||||
|
||||
# Literals (no wildcards)
|
||||
if leaf in self.cased_literals:
|
||||
yield self.cased_literals[leaf]
|
||||
|
||||
lleaf = leaf.lower()
|
||||
if lleaf in self.literals:
|
||||
yield self.literals[lleaf]
|
||||
|
||||
# Extensions
|
||||
ext = leaf
|
||||
while 1:
|
||||
p = ext.find('.')
|
||||
if p < 0: break
|
||||
ext = ext[p + 1:]
|
||||
if ext in self.cased_exts:
|
||||
for res in self.cased_exts[ext]:
|
||||
yield res
|
||||
ext = lleaf
|
||||
while 1:
|
||||
p = ext.find('.')
|
||||
if p < 0: break
|
||||
ext = ext[p+1:]
|
||||
if ext in self.exts:
|
||||
for res in self.exts[ext]:
|
||||
yield res
|
||||
|
||||
# Other globs
|
||||
for (regex, mime_type, weight) in self.globs:
|
||||
if regex.match(leaf):
|
||||
yield (mime_type, weight)
|
||||
|
||||
# Some well-known types
|
||||
text = lookup('text', 'plain')
|
||||
octet_stream = lookup('application', 'octet-stream')
|
||||
inode_block = lookup('inode', 'blockdevice')
|
||||
inode_char = lookup('inode', 'chardevice')
|
||||
inode_dir = lookup('inode', 'directory')
|
||||
inode_fifo = lookup('inode', 'fifo')
|
||||
inode_socket = lookup('inode', 'socket')
|
||||
inode_symlink = lookup('inode', 'symlink')
|
||||
inode_door = lookup('inode', 'door')
|
||||
app_exe = lookup('application', 'executable')
|
||||
|
||||
_cache_uptodate = False
|
||||
|
||||
def _cache_database():
|
||||
global globs, magic, aliases, inheritance, _cache_uptodate
|
||||
|
||||
_cache_uptodate = True
|
||||
|
||||
aliases = {} # Maps alias Mime types to canonical names
|
||||
inheritance = defaultdict(set) # Maps to sets of parent mime types.
|
||||
|
||||
# Load aliases
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'aliases')):
|
||||
with open(path, 'r') as f:
|
||||
for line in f:
|
||||
alias, canonical = line.strip().split(None, 1)
|
||||
aliases[alias] = canonical
|
||||
|
||||
# Load filename patterns (globs)
|
||||
globs = GlobDB()
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'globs2')):
|
||||
globs.merge_file(path)
|
||||
globs.finalise()
|
||||
|
||||
# Load magic sniffing data
|
||||
magic = MagicDB()
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'magic')):
|
||||
magic.merge_file(path)
|
||||
magic.finalise()
|
||||
|
||||
# Load subclasses
|
||||
for path in BaseDirectory.load_data_paths(os.path.join('mime', 'subclasses')):
|
||||
with open(path, 'r') as f:
|
||||
for line in f:
|
||||
sub, parent = line.strip().split(None, 1)
|
||||
inheritance[sub].add(parent)
|
||||
|
||||
def update_cache():
|
||||
if not _cache_uptodate:
|
||||
_cache_database()
|
||||
|
||||
def get_type_by_name(path):
|
||||
"""Returns type of file by its name, or None if not known"""
|
||||
update_cache()
|
||||
return globs.first_match(path)
|
||||
|
||||
def get_type_by_contents(path, max_pri=100, min_pri=0):
|
||||
"""Returns type of file by its contents, or None if not known"""
|
||||
update_cache()
|
||||
|
||||
return magic.match(path, max_pri, min_pri)
|
||||
|
||||
def get_type_by_data(data, max_pri=100, min_pri=0):
|
||||
"""Returns type of the data, which should be bytes."""
|
||||
update_cache()
|
||||
|
||||
return magic.match_data(data, max_pri, min_pri)
|
||||
|
||||
def _get_type_by_stat(st_mode):
|
||||
"""Match special filesystem objects to Mimetypes."""
|
||||
if stat.S_ISDIR(st_mode): return inode_dir
|
||||
elif stat.S_ISCHR(st_mode): return inode_char
|
||||
elif stat.S_ISBLK(st_mode): return inode_block
|
||||
elif stat.S_ISFIFO(st_mode): return inode_fifo
|
||||
elif stat.S_ISLNK(st_mode): return inode_symlink
|
||||
elif stat.S_ISSOCK(st_mode): return inode_socket
|
||||
return inode_door
|
||||
|
||||
def get_type(path, follow=True, name_pri=100):
|
||||
"""Returns type of file indicated by path.
|
||||
|
||||
This function is *deprecated* - :func:`get_type2` is more accurate.
|
||||
|
||||
:param path: pathname to check (need not exist)
|
||||
:param follow: when reading file, follow symbolic links
|
||||
:param name_pri: Priority to do name matches. 100=override magic
|
||||
|
||||
This tries to use the contents of the file, and falls back to the name. It
|
||||
can also handle special filesystem objects like directories and sockets.
|
||||
"""
|
||||
update_cache()
|
||||
|
||||
try:
|
||||
if follow:
|
||||
st = os.stat(path)
|
||||
else:
|
||||
st = os.lstat(path)
|
||||
except:
|
||||
t = get_type_by_name(path)
|
||||
return t or text
|
||||
|
||||
if stat.S_ISREG(st.st_mode):
|
||||
# Regular file
|
||||
t = get_type_by_contents(path, min_pri=name_pri)
|
||||
if not t: t = get_type_by_name(path)
|
||||
if not t: t = get_type_by_contents(path, max_pri=name_pri)
|
||||
if t is None:
|
||||
if stat.S_IMODE(st.st_mode) & 0o111:
|
||||
return app_exe
|
||||
else:
|
||||
return text
|
||||
return t
|
||||
else:
|
||||
return _get_type_by_stat(st.st_mode)
|
||||
|
||||
def get_type2(path, follow=True):
|
||||
"""Find the MIMEtype of a file using the XDG recommended checking order.
|
||||
|
||||
This first checks the filename, then uses file contents if the name doesn't
|
||||
give an unambiguous MIMEtype. It can also handle special filesystem objects
|
||||
like directories and sockets.
|
||||
|
||||
:param path: file path to examine (need not exist)
|
||||
:param follow: whether to follow symlinks
|
||||
|
||||
:rtype: :class:`MIMEtype`
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
update_cache()
|
||||
|
||||
try:
|
||||
st = os.stat(path) if follow else os.lstat(path)
|
||||
except OSError:
|
||||
return get_type_by_name(path) or octet_stream
|
||||
|
||||
if not stat.S_ISREG(st.st_mode):
|
||||
# Special filesystem objects
|
||||
return _get_type_by_stat(st.st_mode)
|
||||
|
||||
mtypes = sorted(globs.all_matches(path), key=(lambda x: x[1]), reverse=True)
|
||||
if mtypes:
|
||||
max_weight = mtypes[0][1]
|
||||
i = 1
|
||||
for mt, w in mtypes[1:]:
|
||||
if w < max_weight:
|
||||
break
|
||||
i += 1
|
||||
mtypes = mtypes[:i]
|
||||
if len(mtypes) == 1:
|
||||
return mtypes[0][0]
|
||||
|
||||
possible = [mt for mt,w in mtypes]
|
||||
else:
|
||||
possible = None # Try all magic matches
|
||||
|
||||
try:
|
||||
t = magic.match(path, possible=possible)
|
||||
except IOError:
|
||||
t = None
|
||||
|
||||
if t:
|
||||
return t
|
||||
elif mtypes:
|
||||
return mtypes[0][0]
|
||||
elif stat.S_IMODE(st.st_mode) & 0o111:
|
||||
return app_exe
|
||||
else:
|
||||
return text if is_text_file(path) else octet_stream
|
||||
|
||||
def is_text_file(path):
|
||||
"""Guess whether a file contains text or binary data.
|
||||
|
||||
Heuristic: binary if the first 32 bytes include ASCII control characters.
|
||||
This rule may change in future versions.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
try:
|
||||
f = open(path, 'rb')
|
||||
except IOError:
|
||||
return False
|
||||
|
||||
with f:
|
||||
return _is_text(f.read(32))
|
||||
|
||||
if PY3:
|
||||
def _is_text(data):
|
||||
return not any(b <= 0x8 or 0xe <= b < 0x20 or b == 0x7f for b in data)
|
||||
else:
|
||||
def _is_text(data):
|
||||
return not any(b <= '\x08' or '\x0e' <= b < '\x20' or b == '\x7f' \
|
||||
for b in data)
|
||||
|
||||
_mime2ext_cache = None
|
||||
_mime2ext_cache_uptodate = False
|
||||
|
||||
def get_extensions(mimetype):
|
||||
"""Retrieve the set of filename extensions matching a given MIMEtype.
|
||||
|
||||
Extensions are returned without a leading dot, e.g. 'py'. If no extensions
|
||||
are registered for the MIMEtype, returns an empty set.
|
||||
|
||||
The extensions are stored in a cache the first time this is called.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
"""
|
||||
global _mime2ext_cache, _mime2ext_cache_uptodate
|
||||
update_cache()
|
||||
if not _mime2ext_cache_uptodate:
|
||||
_mime2ext_cache = defaultdict(set)
|
||||
for ext, mtypes in globs.exts.items():
|
||||
for mtype, prio in mtypes:
|
||||
_mime2ext_cache[mtype].add(ext)
|
||||
_mime2ext_cache_uptodate = True
|
||||
|
||||
return _mime2ext_cache[mimetype]
|
||||
|
||||
|
||||
def install_mime_info(application, package_file):
|
||||
"""Copy 'package_file' as ``~/.local/share/mime/packages/<application>.xml.``
|
||||
If package_file is None, install ``<app_dir>/<application>.xml``.
|
||||
If already installed, does nothing. May overwrite an existing
|
||||
file with the same name (if the contents are different)"""
|
||||
application += '.xml'
|
||||
|
||||
new_data = open(package_file).read()
|
||||
|
||||
# See if the file is already installed
|
||||
package_dir = os.path.join('mime', 'packages')
|
||||
resource = os.path.join(package_dir, application)
|
||||
for x in BaseDirectory.load_data_paths(resource):
|
||||
try:
|
||||
old_data = open(x).read()
|
||||
except:
|
||||
continue
|
||||
if old_data == new_data:
|
||||
return # Already installed
|
||||
|
||||
global _cache_uptodate
|
||||
_cache_uptodate = False
|
||||
|
||||
# Not already installed; add a new copy
|
||||
# Create the directory structure...
|
||||
new_file = os.path.join(BaseDirectory.save_data_path(package_dir), application)
|
||||
|
||||
# Write the file...
|
||||
open(new_file, 'w').write(new_data)
|
||||
|
||||
# Update the database...
|
||||
command = 'update-mime-database'
|
||||
if os.spawnlp(os.P_WAIT, command, command, BaseDirectory.save_data_path('mime')):
|
||||
os.unlink(new_file)
|
||||
raise Exception("The '%s' command returned an error code!\n" \
|
||||
"Make sure you have the freedesktop.org shared MIME package:\n" \
|
||||
"http://standards.freedesktop.org/shared-mime-info/" % command)
|
|
@ -0,0 +1,181 @@
|
|||
"""
|
||||
Implementation of the XDG Recent File Storage Specification
|
||||
http://standards.freedesktop.org/recent-file-spec
|
||||
"""
|
||||
|
||||
import xml.dom.minidom, xml.sax.saxutils
|
||||
import os, time, fcntl
|
||||
from .Exceptions import ParsingError
|
||||
|
||||
class RecentFiles:
|
||||
def __init__(self):
|
||||
self.RecentFiles = []
|
||||
self.filename = ""
|
||||
|
||||
def parse(self, filename=None):
|
||||
"""Parse a list of recently used files.
|
||||
|
||||
filename defaults to ``~/.recently-used``.
|
||||
"""
|
||||
if not filename:
|
||||
filename = os.path.join(os.getenv("HOME"), ".recently-used")
|
||||
|
||||
try:
|
||||
doc = xml.dom.minidom.parse(filename)
|
||||
except IOError:
|
||||
raise ParsingError('File not found', filename)
|
||||
except xml.parsers.expat.ExpatError:
|
||||
raise ParsingError('Not a valid .recently-used file', filename)
|
||||
|
||||
self.filename = filename
|
||||
|
||||
for child in doc.childNodes:
|
||||
if child.nodeType == xml.dom.Node.ELEMENT_NODE:
|
||||
if child.tagName == "RecentFiles":
|
||||
for recent in child.childNodes:
|
||||
if recent.nodeType == xml.dom.Node.ELEMENT_NODE:
|
||||
if recent.tagName == "RecentItem":
|
||||
self.__parseRecentItem(recent)
|
||||
|
||||
self.sort()
|
||||
|
||||
def __parseRecentItem(self, item):
|
||||
recent = RecentFile()
|
||||
self.RecentFiles.append(recent)
|
||||
|
||||
for attribute in item.childNodes:
|
||||
if attribute.nodeType == xml.dom.Node.ELEMENT_NODE:
|
||||
if attribute.tagName == "URI":
|
||||
recent.URI = attribute.childNodes[0].nodeValue
|
||||
elif attribute.tagName == "Mime-Type":
|
||||
recent.MimeType = attribute.childNodes[0].nodeValue
|
||||
elif attribute.tagName == "Timestamp":
|
||||
recent.Timestamp = int(attribute.childNodes[0].nodeValue)
|
||||
elif attribute.tagName == "Private":
|
||||
recent.Private = True
|
||||
elif attribute.tagName == "Groups":
|
||||
|
||||
for group in attribute.childNodes:
|
||||
if group.nodeType == xml.dom.Node.ELEMENT_NODE:
|
||||
if group.tagName == "Group":
|
||||
recent.Groups.append(group.childNodes[0].nodeValue)
|
||||
|
||||
def write(self, filename=None):
|
||||
"""Write the list of recently used files to disk.
|
||||
|
||||
If the instance is already associated with a file, filename can be
|
||||
omitted to save it there again.
|
||||
"""
|
||||
if not filename and not self.filename:
|
||||
raise ParsingError('File not found', filename)
|
||||
elif not filename:
|
||||
filename = self.filename
|
||||
|
||||
f = open(filename, "w")
|
||||
fcntl.lockf(f, fcntl.LOCK_EX)
|
||||
f.write('<?xml version="1.0"?>\n')
|
||||
f.write("<RecentFiles>\n")
|
||||
|
||||
for r in self.RecentFiles:
|
||||
f.write(" <RecentItem>\n")
|
||||
f.write(" <URI>%s</URI>\n" % xml.sax.saxutils.escape(r.URI))
|
||||
f.write(" <Mime-Type>%s</Mime-Type>\n" % r.MimeType)
|
||||
f.write(" <Timestamp>%s</Timestamp>\n" % r.Timestamp)
|
||||
if r.Private == True:
|
||||
f.write(" <Private/>\n")
|
||||
if len(r.Groups) > 0:
|
||||
f.write(" <Groups>\n")
|
||||
for group in r.Groups:
|
||||
f.write(" <Group>%s</Group>\n" % group)
|
||||
f.write(" </Groups>\n")
|
||||
f.write(" </RecentItem>\n")
|
||||
|
||||
f.write("</RecentFiles>\n")
|
||||
fcntl.lockf(f, fcntl.LOCK_UN)
|
||||
f.close()
|
||||
|
||||
def getFiles(self, mimetypes=None, groups=None, limit=0):
|
||||
"""Get a list of recently used files.
|
||||
|
||||
The parameters can be used to filter by mime types, by group, or to
|
||||
limit the number of items returned. By default, the entire list is
|
||||
returned, except for items marked private.
|
||||
"""
|
||||
tmp = []
|
||||
i = 0
|
||||
for item in self.RecentFiles:
|
||||
if groups:
|
||||
for group in groups:
|
||||
if group in item.Groups:
|
||||
tmp.append(item)
|
||||
i += 1
|
||||
elif mimetypes:
|
||||
for mimetype in mimetypes:
|
||||
if mimetype == item.MimeType:
|
||||
tmp.append(item)
|
||||
i += 1
|
||||
else:
|
||||
if item.Private == False:
|
||||
tmp.append(item)
|
||||
i += 1
|
||||
if limit != 0 and i == limit:
|
||||
break
|
||||
|
||||
return tmp
|
||||
|
||||
def addFile(self, item, mimetype, groups=None, private=False):
|
||||
"""Add a recently used file.
|
||||
|
||||
item should be the URI of the file, typically starting with ``file:///``.
|
||||
"""
|
||||
# check if entry already there
|
||||
if item in self.RecentFiles:
|
||||
index = self.RecentFiles.index(item)
|
||||
recent = self.RecentFiles[index]
|
||||
else:
|
||||
# delete if more than 500 files
|
||||
if len(self.RecentFiles) == 500:
|
||||
self.RecentFiles.pop()
|
||||
# add entry
|
||||
recent = RecentFile()
|
||||
self.RecentFiles.append(recent)
|
||||
|
||||
recent.URI = item
|
||||
recent.MimeType = mimetype
|
||||
recent.Timestamp = int(time.time())
|
||||
recent.Private = private
|
||||
if groups:
|
||||
recent.Groups = groups
|
||||
|
||||
self.sort()
|
||||
|
||||
def deleteFile(self, item):
|
||||
"""Remove a recently used file, by URI, from the list.
|
||||
"""
|
||||
if item in self.RecentFiles:
|
||||
self.RecentFiles.remove(item)
|
||||
|
||||
def sort(self):
|
||||
self.RecentFiles.sort()
|
||||
self.RecentFiles.reverse()
|
||||
|
||||
|
||||
class RecentFile:
|
||||
def __init__(self):
|
||||
self.URI = ""
|
||||
self.MimeType = ""
|
||||
self.Timestamp = ""
|
||||
self.Private = False
|
||||
self.Groups = []
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.Timestamp, other.Timestamp)
|
||||
|
||||
def __lt__ (self, other):
|
||||
return self.Timestamp < other.Timestamp
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.URI == str(other)
|
||||
|
||||
def __str__(self):
|
||||
return self.URI
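A hedged sketch of how the list above is read, extended and written back (the URI and paths are illustrative, and the import path is an assumption):
# Hedged usage sketch; the import path below is hypothetical.
import os
from core.utils.xdg.RecentFiles import RecentFiles

recent = RecentFiles()
if os.path.exists(os.path.expanduser("~/.recently-used")):
    recent.parse()                                        # read the existing list

recent.addFile("file:///home/user/notes.txt", "text/plain", groups=["webfm"])
for item in recent.getFiles(mimetypes=["text/plain"], limit=10):
    print(item.URI, item.Timestamp)

recent.write(os.path.expanduser("~/.recently-used"))      # rewrite the list on disk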
|
|
@ -0,0 +1,3 @@
|
|||
__all__ = [ "BaseDirectory", "DesktopEntry", "Menu", "Exceptions", "IniFile", "IconTheme", "Locale", "Config", "Mime", "RecentFiles", "MenuEditor" ]
|
||||
|
||||
__version__ = "0.26"
|
|
@ -0,0 +1,75 @@
|
|||
import sys
|
||||
|
||||
PY3 = sys.version_info[0] >= 3
|
||||
|
||||
if PY3:
|
||||
def u(s):
|
||||
return s
|
||||
else:
|
||||
# Unicode-like literals
|
||||
def u(s):
|
||||
return s.decode('utf-8')
|
||||
|
||||
try:
|
||||
# which() is available from Python 3.3
|
||||
from shutil import which
|
||||
except ImportError:
|
||||
import os
|
||||
# This is a copy of which() from Python 3.3
|
||||
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
|
||||
"""Given a command, mode, and a PATH string, return the path which
|
||||
conforms to the given mode on the PATH, or None if there is no such
|
||||
file.
|
||||
|
||||
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
|
||||
of os.environ.get("PATH"), or can be overridden with a custom search
|
||||
path.
|
||||
|
||||
"""
|
||||
# Check that a given file can be accessed with the correct mode.
|
||||
# Additionally check that `file` is not a directory, as on Windows
|
||||
# directories pass the os.access check.
|
||||
def _access_check(fn, mode):
|
||||
return (os.path.exists(fn) and os.access(fn, mode)
|
||||
and not os.path.isdir(fn))
|
||||
|
||||
# If we're given a path with a directory part, look it up directly rather
|
||||
# than referring to PATH directories. This includes checking relative to the
|
||||
# current directory, e.g. ./script
|
||||
if os.path.dirname(cmd):
|
||||
if _access_check(cmd, mode):
|
||||
return cmd
|
||||
return None
|
||||
|
||||
path = (path or os.environ.get("PATH", os.defpath)).split(os.pathsep)
|
||||
|
||||
if sys.platform == "win32":
|
||||
# The current directory takes precedence on Windows.
|
||||
if not os.curdir in path:
|
||||
path.insert(0, os.curdir)
|
||||
|
||||
# PATHEXT is necessary to check on Windows.
|
||||
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
|
||||
# See if the given file matches any of the expected path extensions.
|
||||
# This will allow us to short circuit when given "python.exe".
|
||||
# If it does match, only test that one, otherwise we have to try
|
||||
# others.
|
||||
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
|
||||
files = [cmd]
|
||||
else:
|
||||
files = [cmd + ext for ext in pathext]
|
||||
else:
|
||||
# On other platforms you don't have things like PATHEXT to tell you
|
||||
# what file suffixes are executable, so just pass on cmd as-is.
|
||||
files = [cmd]
|
||||
|
||||
seen = set()
|
||||
for dir in path:
|
||||
normdir = os.path.normcase(dir)
|
||||
if not normdir in seen:
|
||||
seen.add(normdir)
|
||||
for thefile in files:
|
||||
name = os.path.join(dir, thefile)
|
||||
if _access_check(name, mode):
|
||||
return name
|
||||
return None
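A small sketch of the fallback above in use; the command name is only an example, and `which` is assumed to be imported from this compatibility module:
# Uses shutil.which on Python 3.3+, the copy above otherwise.
tool = which("update-mime-database")
if tool is None:
    print("update-mime-database not found on PATH")
else:
    print("found:", tool)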
|
|
@ -0,0 +1,64 @@
|
|||
# Python imports
|
||||
import os
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
|
||||
|
||||
|
||||
|
||||
class Path:
|
||||
def get_home(self) -> str:
|
||||
return os.path.expanduser("~") + self.subpath
|
||||
|
||||
def get_path(self) -> str:
|
||||
return f"/{'/'.join(self.path)}" if self.path else f"/{''.join(self.path)}"
|
||||
|
||||
def get_path_list(self) -> list:
|
||||
return self.path
|
||||
|
||||
def push_to_path(self, dir: str):
|
||||
self.path.append(dir)
|
||||
self.load_directory()
|
||||
|
||||
def pop_from_path(self) -> None:
|
||||
try:
|
||||
self.path.pop()
|
||||
|
||||
if not self.go_past_home:
|
||||
if self.get_home() not in self.get_path():
|
||||
self.set_to_home()
|
||||
|
||||
self.load_directory()
|
||||
except Exception as e:
|
||||
pass
|
||||
|
||||
def set_path(self, path: str) -> bool:
|
||||
if path == self.get_path():
|
||||
return False
|
||||
|
||||
if os.path.isdir(path):
|
||||
self.path = list( filter(None, path.replace("\\", "/").split('/')) )
|
||||
self.load_directory()
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def set_path_with_sub_path(self, sub_path: str) -> bool:
|
||||
path = os.path.join(self.get_home(), sub_path)
|
||||
if path == self.get_path():
|
||||
return False
|
||||
|
||||
if os.path.isdir(path):
|
||||
self.path = list( filter(None, path.replace("\\", "/").split('/')) )
|
||||
self.load_directory()
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def set_to_home(self) -> None:
|
||||
home = os.path.expanduser("~") + self.subpath
|
||||
path = list( filter(None, home.replace("\\", "/").split('/')) )
|
||||
self.path = path
|
||||
self.load_directory()
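Path is a mixin: it assumes the consuming class (Tab, defined in the next file) supplies subpath, go_past_home and load_directory(). A hedged sketch of the navigation flow:
# Hedged sketch; Tab composes Settings, FileHandler, Launcher, Icon and Path.
tab = Tab()
tab.set_to_home()                 # path -> the user's home (plus any configured subpath)
tab.push_to_path("Downloads")     # descend one level and reload the listing
print(tab.get_path())             # e.g. /home/user/Downloads
tab.pop_from_path()               # back up; clamped to home unless go_past_home is set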
|
|
@ -0,0 +1,288 @@
|
|||
# Python imports
|
||||
import os
|
||||
import hashlib
|
||||
import re
|
||||
from os import listdir
|
||||
from os.path import isdir
|
||||
from os.path import isfile
|
||||
from os.path import join
|
||||
from random import randint
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
from .utils.settings import Settings
|
||||
from .utils.launcher import Launcher
|
||||
from .utils.filehandler import FileHandler
|
||||
from .icons.icon import Icon
|
||||
from .path import Path
|
||||
|
||||
|
||||
try:
|
||||
get_file_size("/")
|
||||
except Exception as e:
|
||||
import os
|
||||
|
||||
def sizeof_fmt_def(num, suffix="B"):
|
||||
for unit in ["", "K", "M", "G", "T", "Pi", "Ei", "Zi"]:
|
||||
if abs(num) < 1024.0:
|
||||
return f"{num:3.1f} {unit}{suffix}"
|
||||
num /= 1024.0
|
||||
return f"{num:.1f} Yi{suffix}"
|
||||
|
||||
def _get_file_size(file):
|
||||
return "4K" if isdir(file) else sizeof_fmt_def(os.path.getsize(file))
|
||||
|
||||
get_file_size = _get_file_size
|
||||
|
||||
|
||||
|
||||
class Tab(Settings, FileHandler, Launcher, Icon, Path):
|
||||
def __init__(self):
|
||||
self.logger = None
|
||||
self._id_length: int = 10
|
||||
|
||||
self._id: str = ""
|
||||
self._wid: str = None
|
||||
self.error_message: str = None
|
||||
self._dir_watcher = None
|
||||
self._hide_hidden: bool = self.HIDE_HIDDEN_FILES
|
||||
self._files: list = []
|
||||
self._dirs: list = []
|
||||
self._vids: list = []
|
||||
self._images: list = []
|
||||
self._desktop: list = []
|
||||
self._ungrouped: list = []
|
||||
self._hidden: list = []
|
||||
|
||||
self._generate_id()
|
||||
self.set_to_home()
|
||||
|
||||
def load_directory(self) -> None:
|
||||
path = self.get_path()
|
||||
self._dirs = []
|
||||
self._vids = []
|
||||
self._images = []
|
||||
self._desktop = []
|
||||
self._ungrouped = []
|
||||
self._hidden = []
|
||||
self._files = []
|
||||
|
||||
if not isdir(path):
|
||||
self._set_error_message("Path can not be accessed.")
|
||||
self.set_to_home()
|
||||
return ""
|
||||
|
||||
for f in listdir(path):
|
||||
file = join(path, f)
|
||||
if self._hide_hidden:
|
||||
if f.startswith('.'):
|
||||
self._hidden.append(f)
|
||||
continue
|
||||
|
||||
if isfile(file):
|
||||
lowerName = file.lower()
|
||||
if lowerName.endswith(self.fvideos):
|
||||
self._vids.append(f)
|
||||
elif lowerName.endswith(self.fimages):
|
||||
self._images.append(f)
|
||||
elif lowerName.endswith((".desktop",)):
|
||||
self._desktop.append(f)
|
||||
else:
|
||||
self._ungrouped.append(f)
|
||||
else:
|
||||
self._dirs.append(f)
|
||||
|
||||
self._dirs.sort(key=self._natural_keys)
|
||||
self._vids.sort(key=self._natural_keys)
|
||||
self._images.sort(key=self._natural_keys)
|
||||
self._desktop.sort(key=self._natural_keys)
|
||||
self._ungrouped.sort(key=self._natural_keys)
|
||||
|
||||
self._files = self._dirs + self._vids + self._images + self._desktop + self._ungrouped
|
||||
|
||||
def is_folder_locked(self, hash):
|
||||
if self.lock_folder:
|
||||
path_parts = self.get_path().split('/')
|
||||
file = self.get_path_part_from_hash(hash)
|
||||
|
||||
# Ensure children folders are locked too.
|
||||
lockedFolderInPath = False
|
||||
for folder in self.locked_folders:
|
||||
if folder in path_parts:
|
||||
lockedFolderInPath = True
|
||||
break
|
||||
|
||||
return (file in self.locked_folders or lockedFolderInPath)
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def get_not_hidden_count(self) -> int:
|
||||
return len(self._files) + \
|
||||
len(self._dirs) + \
|
||||
len(self._vids) + \
|
||||
len(self._images) + \
|
||||
len(self._desktop) + \
|
||||
len(self._ungrouped)
|
||||
|
||||
def get_hidden_count(self) -> int:
|
||||
return len(self._hidden)
|
||||
|
||||
def get_files_count(self) -> int:
|
||||
return len(self._files)
|
||||
|
||||
def get_path_part_from_hash(self, hash: str) -> str:
|
||||
files = self.get_files()
|
||||
file = None
|
||||
|
||||
for f in files:
|
||||
if hash == f[1]:
|
||||
file = f[0]
|
||||
break
|
||||
|
||||
return file
|
||||
|
||||
def get_files_formatted(self) -> dict:
|
||||
files = self._hash_set(self._files)
|
||||
dirs = self._hash_set(self._dirs)
|
||||
videos = self.get_videos()
|
||||
images = self._hash_set(self._images)
|
||||
desktops = self._hash_set(self._desktop)
|
||||
ungrouped = self._hash_set(self._ungrouped)
|
||||
hidden = self._hash_set(self._hidden)
|
||||
|
||||
return {
|
||||
'path_head': self.get_path(),
|
||||
'list': {
|
||||
'files': files,
|
||||
'dirs': dirs,
|
||||
'videos': videos,
|
||||
'images': images,
|
||||
'desktops': desktops,
|
||||
'ungrouped': ungrouped,
|
||||
'hidden': hidden
|
||||
}
|
||||
}
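# Shape of the payload returned above (values are placeholders); every entry in the
# nested lists is a [name, 18-char sha256 prefix, human-readable size] triple
# produced by _hash_set():
#
#   {
#       "path_head": "/home/user/LazyShare/Videos",
#       "list": {
#           "files": [["clip1.mp4", "a1b2c3d4e5f6a7b8c9", "1.4 GB"], ...],
#           "dirs": [...], "videos": [...], "images": [...],
#           "desktops": [...], "ungrouped": [...], "hidden": [...]
#       }
#   }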
|
||||
|
||||
def get_video_icons(self) -> list:
|
||||
data = []
|
||||
dir = self.get_current_directory()
|
||||
for file in self._vids:
|
||||
img_hash, hash_img_path = self.create_video_thumbnail(full_path=f"{dir}/{file}", returnHashInstead=True)
|
||||
data.append([img_hash, hash_img_path])
|
||||
|
||||
return data
|
||||
|
||||
def get_pixbuf_icon_str_combo(self):
|
||||
data = []
|
||||
dir = self.get_current_directory()
|
||||
for file in self._files:
|
||||
icon = self.create_icon(dir, file).get_pixbuf()
|
||||
data.append([icon, file])
|
||||
|
||||
return data
|
||||
|
||||
def get_gtk_icon_str_combo(self) -> list:
|
||||
data = []
|
||||
dir = self.get_current_directory()
|
||||
for file in self._files:
|
||||
icon = self.create_icon(dir, file)
|
||||
data.append([icon, file[0]])
|
||||
|
||||
return data
|
||||
|
||||
def get_current_directory(self) -> str:
|
||||
return self.get_path()
|
||||
|
||||
def get_current_sub_path(self) -> str:
|
||||
path = self.get_path()
|
||||
home = f"{self.get_home()}/"
|
||||
return path.replace(home, "")
|
||||
|
||||
def get_end_of_path(self) -> str:
|
||||
parts = self.get_current_directory().split("/")
|
||||
size = len(parts)
|
||||
return parts[size - 1]
|
||||
|
||||
|
||||
def set_hiding_hidden(self, state: bool) -> None:
|
||||
self._hide_hidden = state
|
||||
|
||||
def is_hiding_hidden(self) -> bool:
|
||||
return self._hide_hidden
|
||||
|
||||
def get_dot_dots(self) -> list:
|
||||
return self._hash_set(['.', '..'])
|
||||
|
||||
def get_files(self) -> list:
|
||||
return self._hash_set(self._files)
|
||||
|
||||
def get_dirs(self) -> list:
|
||||
return self._hash_set(self._dirs)
|
||||
|
||||
def get_videos(self) -> list:
|
||||
return self._hash_set(self._vids)
|
||||
|
||||
def get_images(self) -> list:
|
||||
return self._hash_set(self._images)
|
||||
|
||||
def get_desktops(self) -> list:
|
||||
return self._hash_set(self._desktop)
|
||||
|
||||
def get_ungrouped(self) -> list:
|
||||
return self._hash_set(self._ungrouped)
|
||||
|
||||
def get_hidden(self) -> list:
|
||||
return self._hash_set(self._hidden)
|
||||
|
||||
def get_id(self) -> str:
|
||||
return self._id
|
||||
|
||||
def set_wid(self, _wid: str) -> None:
|
||||
self._wid = _wid
|
||||
|
||||
def get_wid(self) -> str:
|
||||
return self._wid
|
||||
|
||||
def set_dir_watcher(self, watcher):
|
||||
self._dir_watcher = watcher
|
||||
|
||||
def get_dir_watcher(self):
|
||||
return self._dir_watcher
|
||||
|
||||
def get_error_message(self):
|
||||
return self.error_message
|
||||
|
||||
def unset_error_message(self):
|
||||
self.error_message = None
|
||||
|
||||
def _atoi(self, text):
|
||||
return int(text) if text.isdigit() else text
|
||||
|
||||
def _natural_keys(self, text):
|
||||
return [ self._atoi(c) for c in re.split(r'(\d+)', text) ]
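# Worked example of the natural sort key: digit runs are compared numerically,
# so "clip10" no longer sorts before "clip2" (file names hypothetical):
#
#   self._natural_keys("clip10.mp4")  ->  ['clip', 10, '.mp', 4, '']
#   sorted(["clip10.mp4", "clip2.mp4"], key=self._natural_keys)
#       ->  ["clip2.mp4", "clip10.mp4"]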
|
||||
|
||||
def _hash_text(self, text) -> str:
|
||||
return hashlib.sha256(str.encode(text)).hexdigest()[:18]
|
||||
|
||||
def _hash_set(self, arry: list) -> list:
|
||||
path = self.get_current_directory()
|
||||
data = []
|
||||
for arr in arry:
|
||||
file = f"{path}/{arr}"
|
||||
size = get_file_size(file)
|
||||
data.append([arr, self._hash_text(arr), size])
|
||||
return data
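# Illustrative entry produced for one file (hash and size values are hypothetical):
#
#   hashlib.sha256("movie.mkv".encode()).hexdigest()[:18]  ->  "0123456789abcdef01"
#   entry == ["movie.mkv", "0123456789abcdef01", "1.4 GB"]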
|
||||
|
||||
|
||||
def _random_with_N_digits(self, n: int) -> int:
|
||||
range_start = 10**(n-1)
|
||||
range_end = (10**n)-1
|
||||
return randint(range_start, range_end)
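# Worked example: with n == 10 the id is drawn from randint(10**9, 10**10 - 1),
# i.e. randint(1_000_000_000, 9_999_999_999), so it always has exactly ten digits.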
|
||||
|
||||
def _generate_id(self) -> str:
|
||||
self._id = str(self._random_with_N_digits(self._id_length))
|
||||
|
||||
def _set_error_message(self, text: str):
|
||||
self.error_message = text
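# Illustrative usage sketch for the class above (assumes a readable
# ~/.config/webfm/settings.json; variable names are hypothetical):
#
#   tab = Tab()                          # lands on the configured home subpath
#   tab.push_to_path("Videos")           # descend one level and reload listings
#   payload = tab.get_files_formatted()  # dict consumed by the web UI
#   payload["path_head"], len(payload["list"]["videos"])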
|
|
@ -0,0 +1,3 @@
|
|||
"""
|
||||
Utils module
|
||||
"""
|
|
@ -0,0 +1,87 @@
|
|||
# Python imports
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
|
||||
|
||||
|
||||
|
||||
class FileHandler:
|
||||
def create_file(self, nFile, type):
|
||||
try:
|
||||
if type == "dir":
|
||||
os.mkdir(nFile)
|
||||
elif type == "file":
|
||||
open(nFile, 'a').close()
|
||||
except Exception as e:
|
||||
print("An error occured creating the file/dir:")
|
||||
print(repr(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def update_file(self, oFile, nFile):
|
||||
try:
|
||||
print(f"Renaming: {oFile} --> {nFile}")
|
||||
os.rename(oFile, nFile)
|
||||
except Exception as e:
|
||||
print("An error occured renaming the file:")
|
||||
print(repr(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def delete_file(self, toDeleteFile):
|
||||
try:
|
||||
print(f"Deleting: {toDeleteFile}")
|
||||
if os.path.exists(toDeleteFile):
|
||||
if os.path.isfile(toDeleteFile):
|
||||
os.remove(toDeleteFile)
|
||||
elif os.path.isdir(toDeleteFile):
|
||||
shutil.rmtree(toDeleteFile)
|
||||
else:
|
||||
print("An error occured deleting the file:")
|
||||
return False
|
||||
else:
|
||||
print("The folder/file does not exist")
|
||||
return False
|
||||
except Exception as e:
|
||||
print("An error occured deleting the file:")
|
||||
print(repr(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def move_file(self, fFile, tFile):
|
||||
try:
|
||||
print(f"Moving: {fFile} --> {tFile}")
|
||||
if os.path.exists(fFile) and not os.path.exists(tFile):
|
||||
if not tFile.endswith("/"):
|
||||
tFile += "/"
|
||||
|
||||
shutil.move(fFile, tFile)
|
||||
else:
|
||||
print("The folder/file does not exist")
|
||||
return False
|
||||
except Exception as e:
|
||||
print("An error occured moving the file:")
|
||||
print(repr(e))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def copy_file(self, fFile, tFile, symlinks=False, ignore=None):
|
||||
try:
|
||||
if os.path.isdir(fFile):
|
||||
shutil.copytree(fFile, tFile, symlinks, ignore)
|
||||
else:
|
||||
shutil.copy2(fFile, tFile)
|
||||
except Exception as e:
|
||||
print("An error occured copying the file:")
|
||||
print(repr(e))
|
||||
return False
|
||||
|
||||
return True
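# Illustrative usage sketch for FileHandler (paths are hypothetical); every method
# prints the error and returns False on failure, True otherwise:
#
#   fh = FileHandler()
#   fh.create_file("/tmp/webfm-demo", "dir")                  # os.mkdir
#   fh.create_file("/tmp/webfm-demo/notes.txt", "file")       # touch via open(..., 'a')
#   fh.update_file("/tmp/webfm-demo/notes.txt",
#                  "/tmp/webfm-demo/notes.md")                 # os.rename
#   fh.copy_file("/tmp/webfm-demo/notes.md", "/tmp/webfm-demo/notes.bak")
#   fh.delete_file("/tmp/webfm-demo")                          # shutil.rmtree for dirs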
|
|
@ -1,13 +1,15 @@
|
|||
# System import
|
||||
import os, subprocess, threading
|
||||
|
||||
# Python imports
|
||||
import os
|
||||
import subprocess
|
||||
import shlex
|
||||
|
||||
# Lib imports
|
||||
|
||||
|
||||
# Application imports
|
||||
|
||||
|
||||
|
||||
|
||||
class Launcher:
|
||||
def open_file_locally(self, file):
|
||||
lowerName = file.lower()
|
||||
|
@ -26,37 +28,41 @@ class Launcher:
|
|||
command = [self.music_app, file]
|
||||
elif lowerName.endswith(self.foffice):
|
||||
command = [self.office_app, file]
|
||||
elif lowerName.endswith(self.fcode):
|
||||
command = [self.code_app, file]
|
||||
elif lowerName.endswith(self.ftext):
|
||||
command = [self.text_app, file]
|
||||
elif lowerName.endswith(self.fpdf):
|
||||
command = [self.pdf_app, file]
|
||||
else:
|
||||
elif lowerName.endswith("placeholder-until-i-can-get-a-use-pref-fm-flag"):
|
||||
command = [self.file_manager_app, file]
|
||||
else:
|
||||
command = ["xdg-open", file]
|
||||
|
||||
self.logger.debug(command)
|
||||
self.execute(command)
|
||||
|
||||
|
||||
def execute(self, command, start_dir=os.getenv("HOME"), use_shell=False):
|
||||
self.logger.debug(command)
|
||||
subprocess.Popen(command, cwd=start_dir, shell=use_shell, start_new_session=True, stdout=None, stderr=None, close_fds=True)
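# Illustrative sketch: since Launcher is mixed into Tab, a tab can hand any argv
# list to execute(); the child is detached (start_new_session=True) and uses $HOME
# as its working directory unless start_dir is given (paths below are hypothetical):
#
#   tab.execute(["xdg-open", "/home/user/Pictures/cat.png"])
#   tab.execute(["mpv", "movie.mkv"], start_dir="/home/user/Videos")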
|
||||
|
||||
# TODO: Return stdout and stderr in handlers along with the subprocess instead of sinking them to null
|
||||
def execute_and_return_thread_handler(self, command, start_dir=os.getenv("HOME"), use_shell=False):
|
||||
DEVNULL = open(os.devnull, 'w')
|
||||
subprocess.Popen(command, start_new_session=True, stdout=DEVNULL, stderr=DEVNULL, close_fds=True)
|
||||
|
||||
|
||||
def create_stream(self, hash, file):
|
||||
# ffmpeg -re -stream_loop -1 -i "<video file path>" -f rtsp -rtsp_transport udp rtsp://localhost:8554/live.stream
|
||||
command = ["ffmpeg", "-re", "-stream_loop", "-1", "-i", file, "-f", "rtsp", "-rtsp_transport", "udp", "rtsp://www.webfm.com:8554/{hash}.stream",]
|
||||
try:
|
||||
proc = subprocess.Popen(command)
|
||||
except Exception as e:
|
||||
self.logger.debug("Failed to start the ffmpeg stream:")
|
||||
self.logger.debug(e)
|
||||
return False
|
||||
return subprocess.Popen(command, cwd=start_dir, shell=use_shell, start_new_session=False, stdout=DEVNULL, stderr=DEVNULL, close_fds=False)
|
||||
|
||||
@threaded
|
||||
def app_chooser_exec(self, app_info, uris):
|
||||
app_info.launch_uris_async(uris)
|
||||
|
||||
def remux_video(self, hash, file):
|
||||
remux_vid_pth = f"{self.REMUX_FOLDER}/{hash}.mp4"
|
||||
remux_vid_pth = self.REMUX_FOLDER + "/" + hash + ".mp4"
|
||||
self.logger.debug(remux_vid_pth)
|
||||
|
||||
if not os.path.isfile(remux_vid_pth):
|
||||
self.check_remux_space()
|
||||
|
||||
command = ["ffmpeg", "-i", file, "-hide_banner", "-movflags", "+faststart", "-sws_flags", "fast_bilinear",]
|
||||
command = ["ffmpeg", "-i", file, "-hide_banner", "-movflags", "+faststart"]
|
||||
if file.endswith("mkv"):
|
||||
command += ["-codec", "copy", "-strict", "-2"]
|
||||
if file.endswith("avi"):
|
||||
|
@ -77,15 +83,6 @@ class Launcher:
|
|||
|
||||
return True
|
||||
|
||||
|
||||
def generate_video_thumbnail(self, fullPath, hashImgPth):
|
||||
try:
|
||||
proc = subprocess.Popen([self.FFMPG_THUMBNLR, "-t", "65%", "-s", "300", "-c", "jpg", "-i", fullPath, "-o", hashImgPth])
|
||||
proc.wait()
|
||||
except Exception as e:
|
||||
self.logger.debug(repr(e))
|
||||
|
||||
|
||||
def check_remux_space(self):
|
||||
limit = self.remux_folder_max_disk_usage
|
||||
try:
|
|
@ -0,0 +1,86 @@
|
|||
# Python imports
|
||||
import json
|
||||
import os
|
||||
from os import path
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
|
||||
|
||||
|
||||
|
||||
class Settings:
|
||||
logger = None
|
||||
USR_WEBFM = "/usr/share/webfm"
|
||||
USER_HOME = path.expanduser('~')
|
||||
CONFIG_PATH = f"{USER_HOME}/.config/webfm"
|
||||
CONFIG_FILE = f"{CONFIG_PATH}/settings.json"
|
||||
HIDE_HIDDEN_FILES = True
|
||||
|
||||
GTK_ORIENTATION = 1 # HORIZONTAL (0) VERTICAL (1)
|
||||
DEFAULT_ICONS = f"{CONFIG_PATH}/icons"
|
||||
DEFAULT_ICON = f"{DEFAULT_ICONS}/text.png"
|
||||
FFMPG_THUMBNLR = f"{CONFIG_PATH}/ffmpegthumbnailer" # Thumbnail generator binary
|
||||
BLENDER_THUMBNLR = f"{CONFIG_PATH}/blender-thumbnailer" # Blender thumbnail generator binary
|
||||
REMUX_FOLDER = f"{USER_HOME}/.remuxs" # Remuxed files folder
|
||||
|
||||
ICON_DIRS = ["/usr/share/icons", f"{USER_HOME}/.icons" "/usr/share/pixmaps"]
|
||||
BASE_THUMBS_PTH = f"{USER_HOME}/.thumbnails" # Used for thumbnail generation
|
||||
ABS_THUMBS_PTH = f"{BASE_THUMBS_PTH}/normal" # Used for thumbnail generation
|
||||
STEAM_ICONS_PTH = f"{BASE_THUMBS_PTH}/steam_icons"
|
||||
|
||||
# Dir structure check
|
||||
if not path.isdir(REMUX_FOLDER):
|
||||
os.mkdir(REMUX_FOLDER)
|
||||
|
||||
if not path.isdir(BASE_THUMBS_PTH):
|
||||
os.mkdir(BASE_THUMBS_PTH)
|
||||
|
||||
if not path.isdir(ABS_THUMBS_PTH):
|
||||
os.mkdir(ABS_THUMBS_PTH)
|
||||
|
||||
if not path.isdir(STEAM_ICONS_PTH):
|
||||
os.mkdir(STEAM_ICONS_PTH)
|
||||
|
||||
if not os.path.exists(DEFAULT_ICONS):
|
||||
DEFAULT_ICONS = f"{USR_WEBFM}/icons"
|
||||
DEFAULT_ICON = f"{DEFAULT_ICONS}/text.png"
|
||||
|
||||
with open(CONFIG_FILE) as f:
|
||||
settings = json.load(f)
|
||||
config = settings["config"]
|
||||
|
||||
subpath = config["base_of_home"]
|
||||
STEAM_CDN_URL = config["steam_cdn_url"]
|
||||
FFMPG_THUMBNLR = FFMPG_THUMBNLR if config["thumbnailer_path"] == "" else config["thumbnailer_path"]
|
||||
BLENDER_THUMBNLR = BLENDER_THUMBNLR if config["blender_thumbnailer_path"] == "" else config["blender_thumbnailer_path"]
|
||||
HIDE_HIDDEN_FILES = True if config["hide_hidden_files"] == "true" else False
|
||||
go_past_home = True if config["go_past_home"] == "true" else False
|
||||
lock_folder = True if config["lock_folder"] == "true" else False
|
||||
locked_folders = config["locked_folders"].split("::::")
|
||||
mplayer_options = config["mplayer_options"].split()
|
||||
music_app = config["music_app"]
|
||||
media_app = config["media_app"]
|
||||
image_app = config["image_app"]
|
||||
office_app = config["office_app"]
|
||||
pdf_app = config["pdf_app"]
|
||||
code_app = config["code_app"]
|
||||
text_app = config["text_app"]
|
||||
terminal_app = config["terminal_app"]
|
||||
container_icon_wh = config["container_icon_wh"]
|
||||
video_icon_wh = config["video_icon_wh"]
|
||||
sys_icon_wh = config["sys_icon_wh"]
|
||||
file_manager_app = config["file_manager_app"]
|
||||
remux_folder_max_disk_usage = config["remux_folder_max_disk_usage"]
|
||||
|
||||
# Filters
|
||||
filters = settings["filters"]
|
||||
fmeshs = tuple(filters["meshs"])
|
||||
fcode = tuple(filters["code"])
|
||||
fvideos = tuple(filters["videos"])
|
||||
foffice = tuple(filters["office"])
|
||||
fimages = tuple(filters["images"])
|
||||
ftext = tuple(filters["text"])
|
||||
fmusic = tuple(filters["music"])
|
||||
fpdf = tuple(filters["pdf"])
|
|
@ -1,46 +0,0 @@
|
|||
# Python imports
|
||||
import os
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
|
||||
|
||||
class Path:
|
||||
def get_path(self):
|
||||
return "/" + "/".join(self.path)
|
||||
|
||||
def get_path_list(self):
|
||||
return self.path
|
||||
|
||||
def push_to_path(self, dir):
|
||||
self.path.append(dir)
|
||||
self.load_directory()
|
||||
|
||||
def pop_from_path(self):
|
||||
self.path.pop()
|
||||
|
||||
if not self.go_past_home:
|
||||
if self.get_home() not in self.get_path():
|
||||
self.set_to_home()
|
||||
|
||||
self.load_directory()
|
||||
|
||||
|
||||
def set_path(self, path):
|
||||
self.path = list( filter(None, path.replace("\\", "/").split('/')) )
|
||||
self.load_directory()
|
||||
|
||||
def set_path_with_sub_path(self, sub_path):
|
||||
path = os.path.join(self.get_home(), sub_path)
|
||||
self.path = list( filter(None, path.replace("\\", "/").split('/')) )
|
||||
self.load_directory()
|
||||
|
||||
def set_to_home(self):
|
||||
home = os.path.expanduser("~") + self.subpath
|
||||
path = list( filter(None, home.replace("\\", "/").split('/')) )
|
||||
self.path = path
|
||||
self.load_directory()
|
||||
|
||||
def get_home(self):
|
||||
return os.path.expanduser("~") + self.subpath
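# Illustrative navigation sketch: the path is kept as a list of segments, so moving
# around is just pushing and popping (directory names hypothetical):
#
#   tab.set_to_home()            # e.g. ["home", "user", "LazyShare"]
#   tab.push_to_path("Videos")   # -> /home/user/LazyShare/Videos, listings reloaded
#   tab.pop_from_path()          # back up one level; snaps back to home when
#                                # go_past_home is false and home was left behind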
|
|
@ -1,199 +0,0 @@
|
|||
# Python imports
|
||||
import hashlib, os, re
|
||||
from os import listdir
|
||||
from os.path import isdir, isfile, join
|
||||
|
||||
|
||||
# Lib imports
|
||||
|
||||
|
||||
# Application imports
|
||||
from .utils import Settings, Launcher
|
||||
from . import Path
|
||||
|
||||
class View(Settings, Launcher, Path):
|
||||
def __init__(self):
|
||||
self.files = []
|
||||
self.dirs = []
|
||||
self.vids = []
|
||||
self.images = []
|
||||
self.desktop = []
|
||||
self.ungrouped = []
|
||||
self.error_message = None
|
||||
|
||||
self.set_to_home()
|
||||
|
||||
def load_directory(self):
|
||||
path = self.get_path()
|
||||
self.dirs = []
|
||||
self.vids = []
|
||||
self.images = []
|
||||
self.desktop = []
|
||||
self.ungrouped = []
|
||||
self.files = []
|
||||
|
||||
if not isdir(path):
|
||||
self._set_error_message("Path can not be accessed.")
|
||||
return ""
|
||||
|
||||
for f in listdir(path):
|
||||
file = join(path, f)
|
||||
if self.HIDE_HIDDEN_FILES:
|
||||
if f.startswith('.'):
|
||||
continue
|
||||
|
||||
if isfile(file):
|
||||
lowerName = file.lower()
|
||||
if lowerName.endswith(self.fvideos):
|
||||
self.vids.append(f)
|
||||
elif lowerName.endswith(self.fimages):
|
||||
self.images.append(f)
|
||||
elif lowerName.endswith((".desktop",)):
|
||||
self.desktop.append(f)
|
||||
else:
|
||||
self.ungrouped.append(f)
|
||||
else:
|
||||
self.dirs.append(f)
|
||||
|
||||
self.dirs.sort(key=self._natural_keys)
|
||||
self.vids.sort(key=self._natural_keys)
|
||||
self.images.sort(key=self._natural_keys)
|
||||
self.desktop.sort(key=self._natural_keys)
|
||||
self.ungrouped.sort(key=self._natural_keys)
|
||||
|
||||
self.files = self.dirs + self.vids + self.images + self.desktop + self.ungrouped
|
||||
|
||||
def hashText(self, text):
|
||||
return hashlib.sha256(str.encode(text)).hexdigest()[:18]
|
||||
|
||||
def hashSet(self, arry):
|
||||
path = self.get_path()
|
||||
data = []
|
||||
for arr in arry:
|
||||
file = f"{path}/{arr}"
|
||||
size = "4K" if isdir(file) else self.sizeof_fmt(os.path.getsize(file))
|
||||
data.append([arr, self.hashText(arr), size])
|
||||
return data
|
||||
|
||||
def get_path_part_from_hash(self, hash):
|
||||
files = self.get_files()
|
||||
file = None
|
||||
|
||||
for f in files:
|
||||
if hash == f[1]:
|
||||
file = f[0]
|
||||
break
|
||||
|
||||
return file
|
||||
|
||||
def get_files_formatted(self):
|
||||
files = self.hashSet(self.files),
|
||||
dirs = self.hashSet(self.dirs),
|
||||
videos = self.get_videos(),
|
||||
images = self.hashSet(self.images),
|
||||
desktops = self.hashSet(self.desktop),
|
||||
ungrouped = self.hashSet(self.ungrouped)
|
||||
|
||||
return {
|
||||
'path_head': self.get_path(),
|
||||
'list': {
|
||||
'files': files,
|
||||
'dirs': dirs,
|
||||
'videos': videos,
|
||||
'images': images,
|
||||
'desktops': desktops,
|
||||
'ungrouped': ungrouped
|
||||
}
|
||||
}
|
||||
|
||||
def is_folder_locked(self, hash):
|
||||
if self.lock_folder:
|
||||
path_parts = self.get_path().split('/')
|
||||
file = self.get_path_part_from_hash(hash)
|
||||
|
||||
# Ensure children folders are locked too.
|
||||
lockedFolderInPath = False
|
||||
for folder in self.locked_folders:
|
||||
if folder in path_parts:
|
||||
lockedFolderInPath = True
|
||||
break
|
||||
|
||||
return (file in self.locked_folders or lockedFolderInPath)
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def _set_error_message(self, text):
|
||||
self.error_message = text
|
||||
|
||||
def unset_error_message(self):
|
||||
self.error_message = None
|
||||
|
||||
def get_error_message(self):
|
||||
return self.error_message
|
||||
|
||||
def get_current_directory(self):
|
||||
return self.get_path()
|
||||
|
||||
def get_current_sub_path(self):
|
||||
path = self.get_path()
|
||||
home = self.get_home() + "/"
|
||||
return path.replace(home, "")
|
||||
|
||||
def get_dot_dots(self):
|
||||
return self.hashSet(['.', '..'])
|
||||
|
||||
def get_files(self):
|
||||
return self.hashSet(self.files)
|
||||
|
||||
def get_dirs(self):
|
||||
return self.hashSet(self.dirs)
|
||||
|
||||
def get_videos(self):
|
||||
videos_set = self.hashSet(self.vids)
|
||||
current_directory = self.get_current_directory()
|
||||
for video in videos_set:
|
||||
hashImgPth = join(self.ABS_THUMBS_PTH, video[1]) + ".jpg"
|
||||
if not os.path.exists(hashImgPth) :
|
||||
fullPath = join(current_directory, video[0])
|
||||
self.logger.debug(f"Hash Path: {hashImgPth}\nFile Path: {fullPath}")
|
||||
self.generate_video_thumbnail(fullPath, hashImgPth)
|
||||
|
||||
return videos_set
|
||||
|
||||
def get_images(self):
|
||||
return self.hashSet(self.images)
|
||||
|
||||
def get_desktops(self):
|
||||
return self.hashSet(self.desktop)
|
||||
|
||||
def get_ungrouped(self):
|
||||
return self.hashSet(self.ungrouped)
|
||||
|
||||
def sizeof_fmt(self, num, suffix="B"):
|
||||
for unit in ["", "K", "M", "G", "T", "Pi", "Ei", "Zi"]:
|
||||
if abs(num) < 1024.0:
|
||||
return f"{num:3.1f} {unit}{suffix}"
|
||||
num /= 1024.0
|
||||
return f"{num:.1f} Yi{suffix}"
|
||||
|
||||
def get_dir_size(self, sdir):
|
||||
"""Get the size of a directory. Based on code found online."""
|
||||
size = os.path.getsize(sdir)
|
||||
|
||||
for item in listdir(sdir):
|
||||
item = join(sdir, item)
|
||||
|
||||
if isfile(item):
|
||||
size = size + os.path.getsize(item)
|
||||
elif isdir(item):
|
||||
size = size + self.get_dir_size(item)
|
||||
|
||||
return size
|
||||
|
||||
|
||||
def _atoi(self, text):
|
||||
return int(text) if text.isdigit() else text
|
||||
|
||||
def _natural_keys(self, text):
|
||||
return [ self._atoi(c) for c in re.split(r'(\d+)', text) ]
|
|
@ -1,4 +0,0 @@
|
|||
from .utils import *
|
||||
|
||||
from .Path import Path
|
||||
from .View import View
|
|
@ -1,94 +0,0 @@
|
|||
# System import
|
||||
import json
|
||||
import os
|
||||
from os import path
|
||||
|
||||
# Lib imports
|
||||
|
||||
|
||||
# Application imports
|
||||
|
||||
|
||||
|
||||
class Settings:
|
||||
logger = None
|
||||
|
||||
USER_HOME = path.expanduser('~')
|
||||
CONFIG_PATH = f"{USER_HOME}/.config/webfm"
|
||||
CONFIG_FILE = f"{CONFIG_PATH}/settings.json"
|
||||
HIDE_HIDDEN_FILES = True
|
||||
|
||||
GTK_ORIENTATION = 1 # HORIZONTAL (0) VERTICAL (1)
|
||||
DEFAULT_ICONS = f"{CONFIG_PATH}/icons"
|
||||
DEFAULT_ICON = f"{DEFAULT_ICONS}/text.png"
|
||||
FFMPG_THUMBNLR = f"{CONFIG_PATH}/ffmpegthumbnailer" # Thumbnail generator binary
|
||||
REMUX_FOLDER = f"{USER_HOME}/.remuxs" # Remuxed files folder
|
||||
|
||||
STEAM_BASE_URL = "https://steamcdn-a.akamaihd.net/steam/apps/"
|
||||
ICON_DIRS = ["/usr/share/pixmaps", "/usr/share/icons", f"{USER_HOME}/.icons" ,]
|
||||
BASE_THUMBS_PTH = f"{USER_HOME}/.thumbnails" # Used for thumbnail generation
|
||||
ABS_THUMBS_PTH = f"{BASE_THUMBS_PTH}/normal" # Used for thumbnail generation
|
||||
STEAM_ICONS_PTH = f"{BASE_THUMBS_PTH}/steam_icons"
|
||||
CONTAINER_ICON_WH = [128, 128]
|
||||
VIDEO_ICON_WH = [128, 64]
|
||||
SYS_ICON_WH = [56, 56]
|
||||
|
||||
# CONTAINER_ICON_WH = [128, 128]
|
||||
# VIDEO_ICON_WH = [96, 48]
|
||||
# SYS_ICON_WH = [96, 96]
|
||||
|
||||
subpath = ""
|
||||
go_past_home = None
|
||||
lock_folder = None
|
||||
locked_folders = None
|
||||
mplayer_options = None
|
||||
music_app = None
|
||||
media_app = None
|
||||
image_app = None
|
||||
office_app = None
|
||||
pdf_app = None
|
||||
text_app = None
|
||||
file_manager_app = None
|
||||
remux_folder_max_disk_usage = None
|
||||
|
||||
if path.isfile(CONFIG_FILE):
|
||||
with open(CONFIG_FILE) as infile:
|
||||
settings = json.load(infile)["settings"]
|
||||
|
||||
subpath = settings["base_of_home"]
|
||||
HIDE_HIDDEN_FILES = True if settings["hide_hidden_files"] == "true" else False
|
||||
FFMPG_THUMBNLR = FFMPG_THUMBNLR if settings["thumbnailer_path"] == "" else settings["thumbnailer_path"]
|
||||
go_past_home = True if settings["go_past_home"] == "true" else False
|
||||
lock_folder = True if settings["lock_folder"] == "true" else False
|
||||
locked_folders = settings["locked_folders"].split("::::")
|
||||
mplayer_options = settings["mplayer_options"].split()
|
||||
music_app = settings["music_app"]
|
||||
media_app = settings["media_app"]
|
||||
image_app = settings["image_app"]
|
||||
office_app = settings["office_app"]
|
||||
pdf_app = settings["pdf_app"]
|
||||
text_app = settings["text_app"]
|
||||
file_manager_app = settings["file_manager_app"]
|
||||
remux_folder_max_disk_usage = settings["remux_folder_max_disk_usage"]
|
||||
|
||||
# Filters
|
||||
fvideos = ('.mkv', '.avi', '.flv', '.mov', '.m4v', '.mpg', '.wmv', '.mpeg', '.mp4', '.webm')
|
||||
foffice = ('.doc', '.docx', '.xls', '.xlsx', '.xlt', '.xltx', '.xlm', '.ppt', 'pptx', '.pps', '.ppsx', '.odt', '.rtf')
|
||||
fimages = ('.png', '.jpg', '.jpeg', '.gif', '.ico', '.tga', '.webp')
|
||||
ftext = ('.txt', '.text', '.sh', '.cfg', '.conf')
|
||||
fmusic = ('.psf', '.mp3', '.ogg', '.flac', '.m4a')
|
||||
fpdf = ('.pdf')
|
||||
|
||||
|
||||
# Dir structure check
|
||||
if path.isdir(REMUX_FOLDER) == False:
|
||||
os.mkdir(REMUX_FOLDER)
|
||||
|
||||
if path.isdir(BASE_THUMBS_PTH) == False:
|
||||
os.mkdir(BASE_THUMBS_PTH)
|
||||
|
||||
if path.isdir(ABS_THUMBS_PTH) == False:
|
||||
os.mkdir(ABS_THUMBS_PTH)
|
||||
|
||||
if path.isdir(STEAM_ICONS_PTH) == False:
|
||||
os.mkdir(STEAM_ICONS_PTH)
|
|
@ -1,2 +0,0 @@
|
|||
from .Settings import Settings
|
||||
from .Launcher import Launcher
|
Binary file not shown.
|
@ -1,281 +0,0 @@
|
|||
|
||||
###############################################
|
||||
# General parameters
|
||||
|
||||
# Sets the verbosity of the program; available values are "error", "warn", "info", "debug".
|
||||
logLevel: info
|
||||
# Destinations of log messages; available values are "stdout", "file" and "syslog".
|
||||
logDestinations: [stdout]
|
||||
# If "file" is in logDestinations, this is the file which will receive the logs.
|
||||
logFile: rtsp-simple-server.log
|
||||
|
||||
# Timeout of read operations.
|
||||
readTimeout: 10s
|
||||
# Timeout of write operations.
|
||||
writeTimeout: 10s
|
||||
# Number of read buffers.
|
||||
# A higher number allows a wider throughput, a lower number allows to save RAM.
|
||||
readBufferCount: 2048
|
||||
|
||||
# HTTP URL to perform external authentication.
|
||||
# Every time a user wants to authenticate, the server calls this URL
|
||||
# with the POST method and a body containing:
|
||||
# {
|
||||
# "ip": "ip",
|
||||
# "user": "user",
|
||||
# "password": "password",
|
||||
# "path": "path",
|
||||
# "action": "read|publish"
|
||||
# "query": "url's raw query"
|
||||
# }
|
||||
# If the response code is 20x, authentication is accepted, otherwise
|
||||
# it is discarded.
|
||||
externalAuthenticationURL:
|
||||
|
||||
# Enable the HTTP API.
|
||||
api: no
|
||||
# Address of the API listener.
|
||||
apiAddress: 127.0.0.1:9997
|
||||
|
||||
# Enable Prometheus-compatible metrics.
|
||||
metrics: no
|
||||
# Address of the metrics listener.
|
||||
metricsAddress: 127.0.0.1:9998
|
||||
|
||||
# Enable pprof-compatible endpoint to monitor performances.
|
||||
pprof: no
|
||||
# Address of the pprof listener.
|
||||
pprofAddress: 127.0.0.1:9999
|
||||
|
||||
# Command to run when a client connects to the server.
|
||||
# This is terminated with SIGINT when a client disconnects from the server.
|
||||
# The following environment variables are available:
|
||||
# * RTSP_PORT: server port
|
||||
runOnConnect:
|
||||
# Restart the command if it exits suddenly.
|
||||
runOnConnectRestart: no
|
||||
|
||||
###############################################
|
||||
# RTSP parameters
|
||||
|
||||
# Disable support for the RTSP protocol.
|
||||
rtspDisable: no
|
||||
# List of enabled RTSP transport protocols.
|
||||
# UDP is the most performant, but doesn't work when there's a NAT/firewall between
|
||||
# server and clients, and doesn't support encryption.
|
||||
# UDP-multicast allows to save bandwidth when clients are all in the same LAN.
|
||||
# TCP is the most versatile, and does support encryption.
|
||||
# The handshake is always performed with TCP.
|
||||
protocols: [udp, multicast, tcp]
|
||||
# Encrypt handshake and TCP streams with TLS (RTSPS).
|
||||
# Available values are "no", "strict", "optional".
|
||||
encryption: "no"
|
||||
# Address of the TCP/RTSP listener. This is needed only when encryption is "no" or "optional".
|
||||
rtspAddress: :8554
|
||||
# Address of the TCP/TLS/RTSPS listener. This is needed only when encryption is "strict" or "optional".
|
||||
rtspsAddress: :8322
|
||||
# Address of the UDP/RTP listener. This is needed only when "udp" is in protocols.
|
||||
rtpAddress: :8000
|
||||
# Address of the UDP/RTCP listener. This is needed only when "udp" is in protocols.
|
||||
rtcpAddress: :8001
|
||||
# IP range of all UDP-multicast listeners. This is needed only when "multicast" is in protocols.
|
||||
multicastIPRange: 224.1.0.0/16
|
||||
# Port of all UDP-multicast/RTP listeners. This is needed only when "multicast" is in protocols.
|
||||
multicastRTPPort: 8002
|
||||
# Port of all UDP-multicast/RTCP listeners. This is needed only when "multicast" is in protocols.
|
||||
multicastRTCPPort: 8003
|
||||
# Path to the server key. This is needed only when encryption is "strict" or "optional".
|
||||
# This can be generated with:
|
||||
# openssl genrsa -out server.key 2048
|
||||
# openssl req -new -x509 -sha256 -key server.key -out server.crt -days 3650
|
||||
serverKey: server.key
|
||||
# Path to the server certificate. This is needed only when encryption is "strict" or "optional".
|
||||
serverCert: server.crt
|
||||
# Authentication methods.
|
||||
authMethods: [basic, digest]
|
||||
|
||||
###############################################
|
||||
# RTMP parameters
|
||||
|
||||
# Disable support for the RTMP protocol.
|
||||
rtmpDisable: no
|
||||
# Address of the RTMP listener.
|
||||
rtmpAddress: :1935
|
||||
|
||||
###############################################
|
||||
# HLS parameters
|
||||
|
||||
# Disable support for the HLS protocol.
|
||||
hlsDisable: no
|
||||
# Address of the HLS listener.
|
||||
hlsAddress: :8888
|
||||
# By default, HLS is generated only when requested by a user.
|
||||
# This option allows to generate it always, avoiding the delay between request and generation.
|
||||
hlsAlwaysRemux: no
|
||||
# Variant of the HLS protocol to use. Available options are:
|
||||
# * mpegts - uses MPEG-TS segments, for maximum compatibility.
|
||||
# * fmp4 - uses fragmented MP4 segments, more efficient.
|
||||
# * lowLatency - uses Low-Latency HLS.
|
||||
hlsVariant: mpegts
|
||||
# Number of HLS segments to keep on the server.
|
||||
# Segments allow to seek through the stream.
|
||||
# Their number doesn't influence latency.
|
||||
hlsSegmentCount: 7
|
||||
# Minimum duration of each segment.
|
||||
# A player usually puts 3 segments in a buffer before reproducing the stream.
|
||||
# The final segment duration is also influenced by the interval between IDR frames,
|
||||
# since the server changes the duration in order to include at least one IDR frame
|
||||
# in each segment.
|
||||
hlsSegmentDuration: 1s
|
||||
# Minimum duration of each part.
|
||||
# A player usually puts 3 parts in a buffer before reproducing the stream.
|
||||
# Parts are used in Low-Latency HLS in place of segments.
|
||||
# Part duration is influenced by the distance between video/audio samples
|
||||
# and is adjusted in order to produce segments with a similar duration.
|
||||
hlsPartDuration: 200ms
|
||||
# Maximum size of each segment.
|
||||
# This prevents RAM exhaustion.
|
||||
hlsSegmentMaxSize: 50M
|
||||
# Value of the Access-Control-Allow-Origin header provided in every HTTP response.
|
||||
# This allows to play the HLS stream from an external website.
|
||||
hlsAllowOrigin: '*'
|
||||
# Enable TLS/HTTPS on the HLS server.
|
||||
# This is required for Low-Latency HLS.
|
||||
hlsEncryption: no
|
||||
# Path to the server key. This is needed only when encryption is yes.
|
||||
# This can be generated with:
|
||||
# openssl genrsa -out server.key 2048
|
||||
# openssl req -new -x509 -sha256 -key server.key -out server.crt -days 3650
|
||||
hlsServerKey: server.key
|
||||
# Path to the server certificate.
|
||||
hlsServerCert: server.crt
|
||||
|
||||
###############################################
|
||||
# Path parameters
|
||||
|
||||
# These settings are path-dependent, and the map key is the name of the path.
|
||||
# It's possible to use regular expressions by using a tilde as prefix.
|
||||
# For example, "~^(test1|test2)$" will match both "test1" and "test2".
|
||||
# For example, "~^prefix" will match all paths that start with "prefix".
|
||||
# The settings under the path "all" are applied to all paths that do not match
|
||||
# another entry.
|
||||
paths:
|
||||
all:
|
||||
# Source of the stream. This can be:
|
||||
# * publisher -> the stream is published by a RTSP or RTMP client
|
||||
# * rtsp://existing-url -> the stream is pulled from another RTSP server / camera
|
||||
# * rtsps://existing-url -> the stream is pulled from another RTSP server / camera with RTSPS
|
||||
# * rtmp://existing-url -> the stream is pulled from another RTMP server
|
||||
# * http://existing-url/stream.m3u8 -> the stream is pulled from another HLS server
|
||||
# * https://existing-url/stream.m3u8 -> the stream is pulled from another HLS server with HTTPS
|
||||
# * redirect -> the stream is provided by another path or server
|
||||
source: publisher
|
||||
|
||||
# If the source is an RTSP or RTSPS URL, this is the protocol that will be used to
|
||||
# pull the stream. available values are "automatic", "udp", "multicast", "tcp".
|
||||
sourceProtocol: automatic
|
||||
|
||||
# If the source is an RTSP or RTSPS URL, this allows to support sources that
|
||||
# don't provide server ports or use random server ports. This is a security issue
|
||||
# and must be used only when interacting with sources that require it.
|
||||
sourceAnyPortEnable: no
|
||||
|
||||
# If the source is a RTSPS or HTTPS URL, and the source certificate is self-signed
|
||||
# or invalid, you can provide the fingerprint of the certificate in order to
|
||||
# validate it anyway. It can be obtained by running:
|
||||
# openssl s_client -connect source_ip:source_port </dev/null 2>/dev/null | sed -n '/BEGIN/,/END/p' > server.crt
|
||||
# openssl x509 -in server.crt -noout -fingerprint -sha256 | cut -d "=" -f2 | tr -d ':'
|
||||
sourceFingerprint:
|
||||
|
||||
# If the source is an RTSP or RTMP URL, it will be pulled only when at least
|
||||
# one reader is connected, saving bandwidth.
|
||||
sourceOnDemand: no
|
||||
# If sourceOnDemand is "yes", readers will be put on hold until the source is
|
||||
# ready or until this amount of time has passed.
|
||||
sourceOnDemandStartTimeout: 10s
|
||||
# If sourceOnDemand is "yes", the source will be closed when there are no
|
||||
# readers connected and this amount of time has passed.
|
||||
sourceOnDemandCloseAfter: 10s
|
||||
|
||||
# If the source is "redirect", this is the RTSP URL which clients will be
|
||||
# redirected to.
|
||||
sourceRedirect:
|
||||
|
||||
# If the source is "publisher" and a client is publishing, do not allow another
|
||||
# client to disconnect the former and publish in its place.
|
||||
disablePublisherOverride: no
|
||||
|
||||
# If the source is "publisher" and no one is publishing, redirect readers to this
|
||||
# path. It can be a relative path (i.e. /otherstream) or an absolute RTSP URL.
|
||||
fallback:
|
||||
|
||||
# Username required to publish.
|
||||
# SHA256-hashed values can be inserted with the "sha256:" prefix.
|
||||
publishUser:
|
||||
# Password required to publish.
|
||||
# SHA256-hashed values can be inserted with the "sha256:" prefix.
|
||||
publishPass:
|
||||
# IPs or networks (x.x.x.x/24) allowed to publish.
|
||||
publishIPs: []
|
||||
|
||||
# Username required to read.
|
||||
# SHA256-hashed values can be inserted with the "sha256:" prefix.
|
||||
readUser:
|
||||
# password required to read.
|
||||
# SHA256-hashed values can be inserted with the "sha256:" prefix.
|
||||
readPass:
|
||||
# IPs or networks (x.x.x.x/24) allowed to read.
|
||||
readIPs: []
|
||||
|
||||
# Command to run when this path is initialized.
|
||||
# This can be used to publish a stream and keep it always opened.
|
||||
# This is terminated with SIGINT when the program closes.
|
||||
# The following environment variables are available:
|
||||
# * RTSP_PATH: path name
|
||||
# * RTSP_PORT: server port
|
||||
# * G1, G2, ...: regular expression groups, if path name is
|
||||
# a regular expression.
|
||||
runOnInit:
|
||||
# Restart the command if it exits suddenly.
|
||||
runOnInitRestart: no
|
||||
|
||||
# Command to run when this path is requested.
|
||||
# This can be used to publish a stream on demand.
|
||||
# This is terminated with SIGINT when the path is not requested anymore.
|
||||
# The following environment variables are available:
|
||||
# * RTSP_PATH: path name
|
||||
# * RTSP_PORT: server port
|
||||
# * G1, G2, ...: regular expression groups, if path name is
|
||||
# a regular expression.
|
||||
runOnDemand:
|
||||
# Restart the command if it exits suddenly.
|
||||
runOnDemandRestart: no
|
||||
# Readers will be put on hold until the runOnDemand command starts publishing
|
||||
# or until this amount of time has passed.
|
||||
runOnDemandStartTimeout: 10s
|
||||
# The command will be closed when there are no
|
||||
# readers connected and this amount of time has passed.
|
||||
runOnDemandCloseAfter: 10s
|
||||
|
||||
# Command to run when the stream is ready to be read, whether it is
|
||||
# published by a client or pulled from a server / camera.
|
||||
# This is terminated with SIGINT when the stream is not ready anymore.
|
||||
# The following environment variables are available:
|
||||
# * RTSP_PATH: path name
|
||||
# * RTSP_PORT: server port
|
||||
# * G1, G2, ...: regular expression groups, if path name is
|
||||
# a regular expression.
|
||||
runOnReady:
|
||||
# Restart the command if it exits suddenly.
|
||||
runOnReadyRestart: no
|
||||
|
||||
# Command to run when a client starts reading.
|
||||
# This is terminated with SIGINT when a client stops reading.
|
||||
# The following environment variables are available:
|
||||
# * RTSP_PATH: path name
|
||||
# * RTSP_PORT: server port
|
||||
# * G1, G2, ...: regular expression groups, if path name is
|
||||
# a regular expression.
|
||||
runOnRead:
|
||||
# Restart the command if it exits suddenly.
|
||||
runOnReadRestart: no
|
|
@ -0,0 +1,93 @@
|
|||
# Python imports
|
||||
from random import randint
|
||||
|
||||
# Lib imports
|
||||
|
||||
# Application imports
|
||||
from .tabs.tab import Tab
|
||||
|
||||
|
||||
|
||||
|
||||
class Window:
|
||||
def __init__(self):
|
||||
self._id_length: int = 10
|
||||
self._id: str = ""
|
||||
self._name: str = ""
|
||||
self._nickname:str = ""
|
||||
self._isHidden: bool = False
|
||||
self._active_tab: int = 0
|
||||
self._tabs: list = []
|
||||
|
||||
self._generate_id()
|
||||
self._set_name()
|
||||
|
||||
|
||||
def create_tab(self) -> Tab:
|
||||
tab = Tab()
|
||||
self._tabs.append(tab)
|
||||
return tab
|
||||
|
||||
def pop_tab(self) -> None:
|
||||
self._tabs.pop()
|
||||
|
||||
def delete_tab_by_id(self, tid: str):
|
||||
for tab in self._tabs:
|
||||
if tab.get_id() == tid:
|
||||
self._tabs.remove(tab)
|
||||
break
|
||||
|
||||
|
||||
def get_tab_by_id(self, tid: str) -> Tab:
|
||||
for tab in self._tabs:
|
||||
if tab.get_id() == tid:
|
||||
return tab
|
||||
|
||||
def get_tab_by_index(self, index) -> Tab:
|
||||
return self._tabs[index]
|
||||
|
||||
def get_tabs_count(self) -> int:
|
||||
return len(self._tabs)
|
||||
|
||||
def get_all_tabs(self) -> list:
|
||||
return self._tabs
|
||||
|
||||
def get_id(self) -> str:
|
||||
return self._id
|
||||
|
||||
def get_name(self) -> str:
|
||||
return self._name
|
||||
|
||||
def get_nickname(self) -> str:
|
||||
return self._nickname
|
||||
|
||||
def is_hidden(self) -> bool:
|
||||
return self._isHidden
|
||||
|
||||
def list_files_from_tabs(self) -> None:
|
||||
for tab in self._tabs:
|
||||
print(tab.get_files())
|
||||
|
||||
def set_active_tab(self, index: int):
|
||||
self._active_tab = index
|
||||
|
||||
def get_active_tab(self) -> Tab:
|
||||
return self._tabs[self._active_tab]
|
||||
|
||||
def set_nickname(self, nickname):
|
||||
self._nickname = f"{nickname}"
|
||||
|
||||
def set_is_hidden(self, state):
|
||||
self._isHidden = f"{state}"
|
||||
|
||||
def _set_name(self):
|
||||
self._name = "window_" + self.get_id()
|
||||
|
||||
|
||||
def _random_with_N_digits(self, n):
|
||||
range_start = 10**(n-1)
|
||||
range_end = (10**n)-1
|
||||
return randint(range_start, range_end)
|
||||
|
||||
def _generate_id(self):
|
||||
self._id = str(self._random_with_N_digits(self._id_length))
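# Illustrative usage sketch for the class above (variable names hypothetical):
#
#   win = Window()            # gets a 10-digit id and the name "window_<id>"
#   tab = win.create_tab()    # new Tab appended to the window's tab list
#   win.set_active_tab(0)
#   win.get_active_tab() is tab      # True while this is the only tab
#   win.delete_tab_by_id(tab.get_id())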
|
|
@ -15,7 +15,7 @@ function main() {
|
|||
TIMEOUT=120
|
||||
|
||||
# <module>:<app> IE <file>:<flask app variable>
|
||||
gunicorn wsgi:app -p app.pid -b unix:/tmp/app.sock \
|
||||
gunicorn wsgi:app -p app.pid -b unix:/tmp/apps/webfm.sock \
|
||||
-k eventlet \
|
||||
-w $WORKER_COUNT \
|
||||
--timeout $TIMEOUT \
|
||||
|
|
Binary file not shown.
|
@ -1,19 +1,37 @@
|
|||
{
|
||||
"settings": {
|
||||
"config": {
|
||||
"base_of_home": "/LazyShare",
|
||||
"hide_hidden_files": "true",
|
||||
"thumbnailer_path": "ffmpegthumbnailer",
|
||||
"hide_hidden_files": "true",
|
||||
"blender_thumbnailer_path": "",
|
||||
"go_past_home": "false",
|
||||
"lock_folder": "true",
|
||||
"locked_folders": "Synced Backup::::venv::::flasks::::Encrypted Vault::::Cryptomator",
|
||||
"locked_folders": "venv::::flasks",
|
||||
"mplayer_options": "-quiet -really-quiet -xy 1600 -geometry 50%:50%",
|
||||
"music_app": "/opt/deadbeef/bin/deadbeef",
|
||||
"music_app": "deadbeef",
|
||||
"media_app": "mpv",
|
||||
"image_app": "mirage",
|
||||
"image_app": "mirage2",
|
||||
"office_app": "libreoffice",
|
||||
"pdf_app": "evince",
|
||||
"text_app": "leafpad",
|
||||
"file_manager_app": "spacefm",
|
||||
"code_app": "atom",
|
||||
"text_app": "mousepad",
|
||||
"terminal_app": "terminator",
|
||||
"container_icon_wh": [128, 128],
|
||||
"video_icon_wh": [128, 64],
|
||||
"sys_icon_wh": [56, 56],
|
||||
"file_manager_app": "solarfm",
|
||||
"steam_cdn_url": "https://steamcdn-a.akamaihd.net/steam/apps/",
|
||||
"remux_folder_max_disk_usage": "8589934592"
|
||||
}
|
||||
},
|
||||
"filters": {
|
||||
"meshs": [".dae", ".fbx", ".gltf", ".obj", ".stl"],
|
||||
"code": [".cpp", ".css", ".c", ".go", ".html", ".htm", ".java", ".js", ".json", ".lua", ".md", ".py", ".rs", ".toml", ".xml", ".pom"],
|
||||
"videos": [".mkv", ".mp4", ".webm", ".avi", ".mov", ".m4v", ".mpg", ".mpeg", ".wmv", ".flv"],
|
||||
"office": [".doc", ".docx", ".xls", ".xlsx", ".xlt", ".xltx", ".xlm", ".ppt", ".pptx", ".pps", ".ppsx", ".odt", ".rtf"],
|
||||
"images": [".png", ".jpg", ".jpeg", ".gif", ".ico", ".tga", ".webp"],
|
||||
"text": [".txt", ".text", ".sh", ".cfg", ".conf", ".log"],
|
||||
"music": [".psf", ".mp3", ".ogg", ".flac", ".m4a"],
|
||||
"pdf": [".pdf"]
|
||||
|
||||
}
|
||||
}
|
||||
|
|