Made words completer run async on load and update; swapped out tabs to use Gtk.Notebook

2026-02-16 17:11:30 -06:00
parent b922415f98
commit 6c42ff7c7d
8 changed files with 132 additions and 163 deletions
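The async change in this commit follows a hand-off pattern: the change handler reads the buffer text inside a coroutine driven by asyncio.run(), and every mutation of the per-buffer word cache is pushed back to the GTK main loop with GLib.idle_add(). The snippet below is a minimal, hypothetical sketch of that pattern, not this repository's code: WordCache, on_text_changed, and the regex word scan are illustrative stand-ins, and PyGObject with a running GTK main loop is assumed.

# Hypothetical sketch of the asyncio.run() + GLib.idle_add() hand-off pattern
# used by this commit; names are illustrative, not the project's API.
import asyncio
import re

from gi.repository import GLib


class WordCache:
    def __init__(self):
        # One set of known completion words per text buffer,
        # mirroring self.matchers in the diff below.
        self.matchers = {}

    def on_text_changed(self, buffer):
        # Called from the editor's change event; `buffer` is expected to be a
        # Gtk.TextBuffer. asyncio.run() drives the coroutine to completion.
        asyncio.run(self._handle_change(buffer))

    async def _handle_change(self, buffer):
        start_itr = buffer.get_start_iter()
        end_itr = buffer.get_end_iter()
        data = buffer.get_text(start_itr, end_itr, False)

        if not data:
            # Defer the cache reset to the GTK main loop.
            GLib.idle_add(self.load_empty_set, buffer)
            return

        # Stand-in for the provider's get_all_words() helper.
        new_words = set(re.findall(r"\w+", data))
        GLib.idle_add(self.load_into_set, buffer, new_words)

    def load_empty_set(self, buffer):
        self.matchers[buffer] = set()
        return False  # one-shot idle callback

    def load_into_set(self, buffer, new_words):
        self.matchers.setdefault(buffer, set()).update(new_words)
        return False  # one-shot idle callback

GLib.idle_add() keeps re-invoking a callback while it returns True; returning False (or None, as the cache helpers in the diff do implicitly) makes each scheduled update a one-shot call on the main loop.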


@@ -1,5 +1,5 @@
 # Python imports
-from os import path
+import asyncio

 # Lib imports
 import gi
@@ -23,10 +23,11 @@ class ProviderResponseCache(ProviderResponseCacheBase):
     def process_file_load(self, event: Code_Event_Types.AddedNewFileEvent):
-        self.load_as_new_set(event.file.buffer)
+        buffer = event.file.buffer
+        asyncio.run( self._handle_change(buffer) )

     def process_file_close(self, event: Code_Event_Types.RemovedFileEvent):
-        self.matchers[event.file.buffer] = []
+        self.matchers[event.file.buffer] = set()
         del self.matchers[event.file.buffer]

     def process_file_save(self, event: Code_Event_Types.SavedFileEvent):
@@ -34,9 +35,23 @@ class ProviderResponseCache(ProviderResponseCacheBase):
     def process_file_change(self, event: Code_Event_Types.TextChangedEvent):
         buffer = event.file.buffer

         # if self.get_if_in_matched_word_set(buffer): return
-        self.load_as_new_set(buffer)
+        asyncio.run( self._handle_change(buffer) )
+
+    async def _handle_change(self, buffer):
+        start_itr = buffer.get_start_iter()
+        end_itr = buffer.get_end_iter()
+        data = buffer.get_text(start_itr, end_itr, False)
+
+        if not data:
+            GLib.idle_add(self.load_empty_set, buffer)
+            return
+
+        if not buffer in self.matchers:
+            GLib.idle_add(self.load_as_new_set, buffer, data)
+            return
+
+        new_words = self.get_all_words(data)
+        GLib.idle_add(self.load_into_set, buffer, new_words)

     def filter(self, word: str) -> list[dict]:
         response: list[dict] = []
@@ -67,44 +82,15 @@ class ProviderResponseCache(ProviderResponseCacheBase):
         return response

-    def load_as_new_set(self, buffer):
-        start_itr = buffer.get_start_iter()
-        end_itr = buffer.get_end_iter()
-        data = buffer.get_text(start_itr, end_itr, False)
+    def load_empty_set(self, buffer):
+        self.matchers[buffer] = set()

-        if not data:
-            self.matchers[buffer] = set()
-            return
+    def load_into_set(self, buffer, new_words):
+        self.matchers[buffer].update(new_words)

+    def load_as_new_set(self, buffer, data):
         self.matchers[buffer] = self.get_all_words(data)

-    def get_if_in_matched_word_set(self, buffer):
-        was_found = False
-        if not buffer in self.matchers: return was_found
-
-        insert_itr = buffer.get_iter_at_mark( buffer.get_insert() )
-        end_itr = insert_itr.copy()
-        start_itr = end_itr.copy()
-
-        if not start_itr.starts_word():
-            start_itr.backward_word_start()
-
-        if not end_itr.ends_word():
-            end_itr.forward_word_end()
-
-        word = buffer.get_text(start_itr, end_itr, False)
-        for _word in self.matchers[buffer]:
-            if not _word.startswith(word): continue
-            was_found = True
-
-        if was_found: return was_found
-        self.matchers[buffer].add(word)
-
-        return was_found

     def get_all_words(self, data: str):
         words = set()