Merge Stable Changes to Master #9
@@ -70,4 +70,5 @@ class GrepSearchMixin:
             grep_result = jdata[key]
             widget = GrepPreviewWidget(key, sub_keys, grep_result)
-            self._grep_list.add(widget)
+            GLib.idle_add(self._grep_list.add, widget)
+            # self._grep_list.add(widget)
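
Gtk widgets are not thread-safe, so a search worker running off the main thread should not call `self._grep_list.add(widget)` directly; `GLib.idle_add` queues the call to run on the GTK main loop instead, which is what this hunk switches to. A minimal, self-contained sketch of the pattern (the window, list box, and label here are made up for illustration, not taken from SolarFM):

```python
import threading
import time
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk, GLib

def add_result(listbox, text):
    # Runs on the GTK main loop: safe to create and attach widgets here.
    listbox.add(Gtk.Label(label=text))
    listbox.show_all()
    return False          # one-shot idle callback

def worker(listbox):
    # Simulates slow search results arriving on a background thread.
    for i in range(3):
        time.sleep(0.5)
        GLib.idle_add(add_result, listbox, f"result {i}")

win     = Gtk.Window(title="idle_add sketch")   # hypothetical demo window
listbox = Gtk.ListBox()
win.add(listbox)
win.connect("destroy", Gtk.main_quit)
win.show_all()

threading.Thread(target=worker, args=(listbox,), daemon=True).start()
Gtk.main()
```

The worker never touches the widget tree itself; it only schedules callables that the main loop runs between events.
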
@@ -2,7 +2,7 @@

 # Python imports
-import os, traceback, argparse, json, base64
+import os, traceback, argparse, threading, json, base64, time

 from setproctitle import setproctitle
 from multiprocessing.connection import Client

@@ -16,11 +16,20 @@ from multiprocessing.connection import Client
 _ipc_address = f'/tmp/solarfm-search_grep-ipc.sock'
 _ipc_authkey = b'' + bytes(f'solarfm-search_grep-ipc', 'utf-8')

-filter = (".mkv", ".mp4", ".webm", ".avi", ".mov", ".m4v", ".mpg", ".mpeg", ".wmv", ".flv") + \
-         (".png", ".jpg", ".jpeg", ".gif", ".ico", ".tga", ".webp") + \
-         (".psf", ".mp3", ".ogg", ".flac", ".m4a")
+filter = (".cpp", ".css", ".c", ".go", ".html", ".htm", ".java", ".js", ".json", ".lua", ".md", ".py", ".rs", ".toml", ".xml", ".pom") + \
+         (".txt", ".text", ".sh", ".cfg", ".conf", ".log")

-file_result_set = []
+# NOTE: Threads WILL NOT die with parent's destruction.
+def threaded(fn):
+    def wrapper(*args, **kwargs):
+        threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start()
+    return wrapper
+
+# NOTE: Threads WILL die with parent's destruction.
+def daemon_threaded(fn):
+    def wrapper(*args, **kwargs):
+        threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start()
+    return wrapper


 def send_ipc_message(message) -> None:
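
The two decorators added here wrap a function so that calling it spawns a `threading.Thread` and returns immediately: `threaded` uses a non-daemon thread that can outlive the main thread, while `daemon_threaded` uses a daemon thread that is killed when the process exits. Note that the wrapper returns `None`, so decorated calls are fire-and-forget. A small usage sketch (the `background_scan` task is hypothetical):

```python
import threading
import time

# NOTE: Threads WILL NOT die with parent's destruction.
def threaded(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=False).start()
    return wrapper

# NOTE: Threads WILL die with parent's destruction.
def daemon_threaded(fn):
    def wrapper(*args, **kwargs):
        threading.Thread(target=fn, args=args, kwargs=kwargs, daemon=True).start()
    return wrapper

@daemon_threaded
def background_scan(path):
    # Hypothetical long-running task; if the process exits before the sleep
    # finishes, a daemon thread is simply killed -- that is the point.
    time.sleep(1)
    print(f"finished scanning {path}")

background_scan("/tmp")   # returns immediately; the work runs on a daemon thread
print("main thread keeps going")
time.sleep(2)             # keep the process alive long enough to see the output
```
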
@@ -42,7 +51,7 @@ def file_search(path, query):
            if os.path.isdir(target):
                file_search(target, query)
            else:
-               if query.lower() in file.lower():
+               if query in file.lower():
                    data = f"SEARCH|{json.dumps([target, file])}"
                    send_ipc_message(data)
    except Exception as e:
@@ -52,21 +61,40 @@ def file_search(path, query):
 def _search_for_string(file, query):
     b64_file = base64.urlsafe_b64encode(file.encode('utf-8')).decode('utf-8')
     grep_result_set = {}
+    padding = 15

     with open(file, 'r') as fp:
-        for i, line in enumerate(fp):
-            if query in line:
-                b64_line = base64.urlsafe_b64encode(line.encode('utf-8')).decode('utf-8')
-                if f"{b64_file}" in grep_result_set.keys():
-                    grep_result_set[f"{b64_file}"][f"{i+1}"] = b64_line
-                else:
-                    grep_result_set[f"{b64_file}"] = {}
-                    grep_result_set[f"{b64_file}"] = {f"{i+1}": b64_line}
+        # NOTE: I know there's an issue if there's a very large file with content all on one line will lower and dupe it.
+        # And, yes, it will only return one instance from the file.
+        for i, raw in enumerate(fp):
+            line = None
+            llower = raw.lower()
+            if not query in llower:
+                continue
+
+            if len(raw) > 72:
+                start  = 0
+                end    = len(raw) - 1
+                index  = llower.index(query)
+                sindex = llower.index(query) - 15 if index >= 15 else abs(start - index) - index
+                eindex = sindex + 15 if end > (index + 15) else abs(index - end) + index
+                line   = raw[sindex:eindex]
+            else:
+                line = raw
+
+            b64_line = base64.urlsafe_b64encode(line.encode('utf-8')).decode('utf-8')
+            if f"{b64_file}" in grep_result_set.keys():
+                grep_result_set[f"{b64_file}"][f"{i+1}"] = b64_line
+            else:
+                grep_result_set[f"{b64_file}"] = {}
+                grep_result_set[f"{b64_file}"] = {f"{i+1}": b64_line}

     data = f"GREP|{json.dumps(grep_result_set)}"
     send_ipc_message(data)


+@daemon_threaded
+def _search_for_string_threaded(file, query):
+    _search_for_string(file, query)
+
+
 def grep_search(path, query):
     try:
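
The rewritten loop lowercases each line once, skips non-matching lines early, and for lines longer than 72 characters keeps only a short slice around the first match before base64-encoding it. As the NOTE comments concede, only the first occurrence per line is reported. The `padding = 15` variable is declared but the offsets below hard-code 15, and `eindex = sindex + 15` yields roughly a 15-character slice overall rather than 15 characters on each side of the match. For comparison, a sketch of a symmetric, clamped window (hypothetical helper, not the PR's code):

```python
def match_window(raw: str, query: str, padding: int = 15) -> str:
    """Return up to `padding` characters of context on each side of the
    first (case-insensitive) occurrence of `query` in `raw`."""
    lowered = raw.lower()
    index   = lowered.find(query.lower())
    if index == -1:
        return raw
    start = max(index - padding, 0)
    end   = min(index + len(query) + padding, len(raw))
    return raw[start:end]

print(match_window("some very long line with a needle buried in it", "needle"))
# -> "ng line with a needle buried in it"
```
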
@@ -75,18 +103,23 @@ def grep_search(path, query):
             if os.path.isdir(target):
                 grep_search(target, query)
             else:
-                if not target.lower().endswith(filter):
-                    _search_for_string(target, query)
+                if target.lower().endswith(filter):
+                    size = os.path.getsize(target)
+                    if size < 5000:
+                        _search_for_string(target, query)
+                    else:
+                        _search_for_string_threaded(target, query)

     except Exception as e:
         print("Couldn't traverse to path. Might be permissions related...")
         traceback.print_exc()


 def search(args):
     if args.type == "file_search":
-        file_search(args.dir, args.query)
+        file_search(args.dir, args.query.lower())

     if args.type == "grep_search":
-        grep_search(args.dir, args.query)
+        grep_search(args.dir, args.query.lower())


 if __name__ == "__main__":
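
With the condition flipped from a media-extension blacklist (`if not ... endswith(filter)`) to a text-extension whitelist, `grep_search` now only greps known text-like files, and it dispatches by size: files under 5000 bytes are scanned inline, larger ones on a daemon thread. The same dispatch pattern in isolation (names and threshold here are illustrative, not the PR's):

```python
import os

TEXT_EXTS      = (".py", ".md", ".txt")    # illustrative whitelist
SIZE_THRESHOLD = 5000                      # bytes, mirroring the hunk above

def scan(path, small_handler, large_handler):
    for name in os.listdir(path):
        target = os.path.join(path, name)
        if os.path.isdir(target):
            scan(target, small_handler, large_handler)
        elif target.lower().endswith(TEXT_EXTS):
            # str.endswith accepts a tuple, so one call covers the whole whitelist.
            if os.path.getsize(target) < SIZE_THRESHOLD:
                small_handler(target)      # cheap: scan on the calling thread
            else:
                large_handler(target)      # expensive: hand off, e.g. to a daemon thread

# e.g. scan("/tmp/project", print, print)
```
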
@@ -10,37 +10,35 @@ from gi.repository import Gtk


 class GrepPreviewWidget(Gtk.Box):
-    def __init__(self, _path, sub_keys, data):
+    def __init__(self, _path, sub_keys, _data):
         super(GrepPreviewWidget, self).__init__()
         self.set_orientation(Gtk.Orientation.VERTICAL)
         self.line_color = "#e0cc64"

-        path   = base64.urlsafe_b64decode(_path.encode('utf-8')).decode('utf-8')
+        path   = self.decode_str(_path)
         _label = '/'.join( path.split("/")[-3:] )
         title  = Gtk.LinkButton.new_with_label(uri=f"file://{path}", label=_label)

+        text_view = Gtk.TextView()
+        text_view.set_editable(False)
+        text_view.set_wrap_mode(Gtk.WrapMode.NONE)
+        buffer = text_view.get_buffer()
+
+        for i, key in enumerate(sub_keys):
+            line_num = self.make_utf8_line_num(self.line_color, key)
+            text     = f"\t\t{ self.decode_str(_data[key]) }"
+
+            itr = buffer.get_end_iter()
+            buffer.insert_markup(itr, line_num, len(line_num))
+            itr = buffer.get_end_iter()
+            buffer.insert(itr, text, length=len(text))
+
         self.add(title)
-        for key in sub_keys:
-            line_num = key
-            text     = base64.urlsafe_b64decode(data[key].encode('utf-8')).decode('utf-8')
-
-            box          = Gtk.Box()
-            number_label = Gtk.Label()
-            text_view    = Gtk.Label(label=text[:-1])
-            label_text   = f"<span foreground='{self.line_color}'>{line_num}</span>"
-
-            number_label.set_markup(label_text)
-            number_label.set_margin_left(15)
-            number_label.set_margin_right(5)
-            number_label.set_margin_top(5)
-            number_label.set_margin_bottom(5)
-            text_view.set_margin_top(5)
-            text_view.set_margin_bottom(5)
-            text_view.set_line_wrap(True)
-
-            box.add(number_label)
-            box.add(text_view)
-            self.add(box)
-
+        self.add(text_view)
         self.show_all()
+
+    def decode_str(self, target):
+        return base64.urlsafe_b64decode(target.encode('utf-8')).decode('utf-8')
+
+    def make_utf8_line_num(self, color, target):
+        return bytes(f"\n<span foreground='{color}'>{target}</span>", "utf-8").decode("utf-8")
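
The preview widget now builds one read-only `Gtk.TextView` instead of a `Gtk.Box` row per line: the line number goes in as Pango markup via `TextBuffer.insert_markup`, followed by the decoded line text via a plain `insert`. A standalone sketch of that buffer-building step, assuming a GTK3/PyGObject environment and made-up sample data:

```python
import base64
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk

# Hypothetical sample: line numbers mapped to base64-encoded line text,
# mimicking the structure the widget receives.
sample = {"12": base64.urlsafe_b64encode(b"def main():").decode("utf-8"),
          "13": base64.urlsafe_b64encode(b"    run()").decode("utf-8")}

text_view = Gtk.TextView()
text_view.set_editable(False)
buffer = text_view.get_buffer()

for line_num, b64_text in sample.items():
    markup = f"\n<span foreground='#e0cc64'>{line_num}</span>"
    text   = "\t\t" + base64.urlsafe_b64decode(b64_text.encode("utf-8")).decode("utf-8")

    itr = buffer.get_end_iter()
    buffer.insert_markup(itr, markup, len(markup))   # colored line number via Pango markup
    itr = buffer.get_end_iter()
    buffer.insert(itr, text, len(text))              # the decoded source line
```

A single buffer also means the containing list only receives one child per file instead of one row per matching line.
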