Added TODOs; made explicit that LSP Manager pre-launches if autoload is true/unset; added WIP tree_sitter plugin

This commit is contained in:
2026-03-22 17:56:48 -05:00
parent 13908d7ba7
commit c821f30880
16 changed files with 1092 additions and 0 deletions

View File

@@ -4,6 +4,8 @@ ___
1. Add TreeSitter 1. Add TreeSitter
1. Add Collapsable code blocks 1. Add Collapsable code blocks
1. Add Terminal plugin 1. Add Terminal plugin
1. Add <Ctrl>mouse scroll to zoom text in/out
1. Add event to emit on file open so plugins can try to open/handle the file
1. Add Plugin to <Shift\><Ctrl\>| and <Ctrl\>| to split views up, down, left, right 1. Add Plugin to <Shift\><Ctrl\>| and <Ctrl\>| to split views up, down, left, right
1. Add <Ctrl\>i to **lsp_manager** to list who implements xyz 1. Add <Ctrl\>i to **lsp_manager** to list who implements xyz

View File

@@ -0,0 +1,3 @@
"""
Plugin Module
"""

View File

@@ -0,0 +1,3 @@
"""
Plugin Package
"""

View File

@@ -0,0 +1,39 @@
# Demo / smoke-test module for the bundled tree-sitter language pack.
# Fixes: `source` was referenced by the second process() call without ever
# being assigned (NameError); backslash continuation replaced with parens;
# loop body re-indented.
from .libs.tree_sitter_language_pack import (
    init,
    download,
    get_language,
    get_parser,
    available_languages,
    process,
    ProcessConfig,
)

# Optional: Pre-download specific languages for offline use
# init(["python", "javascript", "rust"])

# Get a language (auto-downloads if not cached)
language = get_language("python")

# Get a pre-configured parser (auto-downloads if needed)
parser = get_parser("python")
tree = parser.parse(b"def hello(): pass")
print(tree.root_node)

# List all available languages
for lang in available_languages():
    print(lang)

# Extract file intelligence (auto-downloads language if needed)
source = "def hello(): pass"
result = process(
    source,
    ProcessConfig(language="python")
)
print(f"Functions: {len(result['structure'])}")

# Pre-download languages for offline use
download(["python", "javascript"])

# With chunking (`source` must be defined before this call)
result = process(
    source,
    ProcessConfig(
        language="python",
        chunk_max_size=1000,
        comments=True
    )
)
print(f"Chunks: {len(result['chunks'])}")

View File

@@ -0,0 +1,3 @@
"""
Libs Module

Container for third-party libraries bundled with the plugin (the
tree-sitter language pack lives under this package).
"""

View File

@@ -0,0 +1,71 @@
"""Python bindings to the Tree-sitter parsing library."""
from typing import Protocol as _Protocol
# Re-export the public API of the compiled C extension module.
from ._binding import (
Language,
LogType,
LookaheadIterator,
Node,
Parser,
Point,
Query,
QueryCursor,
QueryError,
Range,
Tree,
TreeCursor,
LANGUAGE_VERSION,
MIN_COMPATIBLE_LANGUAGE_VERSION,
)
# Attach docstrings to extension types that cannot carry them natively.
LogType.__doc__ = "The type of a log message."
Point.__doc__ = "A position in a multi-line text document, in terms of rows and columns."
Point.row.__doc__ = "The zero-based row of the document."
Point.column.__doc__ = "The zero-based column of the document."
class QueryPredicate(_Protocol):
    """A custom query predicate that runs on a pattern."""

    def __call__(self, predicate, args, pattern_index, captures):
        """
        Parameters
        ----------
        predicate : str
            The name of the predicate.
        args : list[tuple[str, typing.Literal['capture', 'string']]]
            The arguments to the predicate.
        pattern_index : int
            The index of the pattern within the query.
        captures : dict[str, list[Node]]
            The captures contained in the pattern.

        Returns
        -------
        ``True`` if the predicate matches, ``False`` otherwise.

        Tip
        ---
        You don't need to create an actual class, just a function with this signature.
        """
# Explicit public API of the tree_sitter package.
__all__ = [
"Language",
"LogType",
"LookaheadIterator",
"Node",
"Parser",
"Point",
"Query",
"QueryCursor",
"QueryError",
"QueryPredicate",
"Range",
"Tree",
"TreeCursor",
"LANGUAGE_VERSION",
"MIN_COMPATIBLE_LANGUAGE_VERSION",
]

View File

@@ -0,0 +1,416 @@
from enum import IntEnum
from collections.abc import ByteString, Callable, Iterator, Sequence
from typing import Annotated, Any, Final, Literal, NamedTuple, Protocol, Self, final, overload
from typing_extensions import deprecated
class _SupportsFileno(Protocol):
    """Structural type: any object exposing a ``fileno()`` file descriptor."""

    def fileno(self) -> int: ...
class Point(NamedTuple):
    """A zero-based (row, column) position in a multi-line document."""

    row: int
    column: int
class LogType(IntEnum):
    """Category of a parser log message (lexing vs. parsing)."""

    # Stub declarations only — real values come from the compiled binding.
    PARSE: int
    LEX: int
@final
class Language:
    """A Tree-sitter grammar, wrapping an opaque ``TSLanguage`` pointer."""

    @overload
    @deprecated("int argument support is deprecated")
    def __init__(self, ptr: Annotated[int, "TSLanguage *"], /) -> None: ...
    @overload
    def __init__(self, ptr: Annotated[object, "TSLanguage *"], /) -> None: ...
    @property
    def name(self) -> str | None: ...
    @property
    def abi_version(self) -> int: ...
    @property
    def semantic_version(self) -> tuple[int, int, int] | None: ...
    @deprecated("Use abi_version instead")
    @property
    def version(self) -> int: ...
    @property
    def node_kind_count(self) -> int: ...
    @property
    def parse_state_count(self) -> int: ...
    @property
    def field_count(self) -> int: ...
    @property
    def supertypes(self) -> tuple[int, ...]: ...
    def subtypes(self, supertype: int, /) -> tuple[int, ...]: ...
    def node_kind_for_id(self, id: int, /) -> str | None: ...
    def id_for_node_kind(self, kind: str, named: bool, /) -> int | None: ...
    def node_kind_is_named(self, id: int, /) -> bool: ...
    def node_kind_is_visible(self, id: int, /) -> bool: ...
    def node_kind_is_supertype(self, id: int, /) -> bool: ...
    def field_name_for_id(self, field_id: int, /) -> str | None: ...
    def field_id_for_name(self, name: str, /) -> int | None: ...
    def next_state(self, state: int, id: int, /) -> int: ...
    def lookahead_iterator(self, state: int, /) -> LookaheadIterator | None: ...
    @deprecated("Use the Query() constructor instead")
    def query(self, source: str, /) -> Query: ...
    def copy(self) -> Language: ...
    def __repr__(self) -> str: ...
    def __eq__(self, other: Any, /) -> bool: ...
    def __ne__(self, other: Any, /) -> bool: ...
    def __hash__(self) -> int: ...
    def __copy__(self) -> Language: ...
@final
class Node:
    """A single node within a syntax tree."""

    @property
    def id(self) -> int: ...
    @property
    def kind_id(self) -> int: ...
    @property
    def grammar_id(self) -> int: ...
    @property
    def grammar_name(self) -> str: ...
    @property
    def type(self) -> str: ...
    @property
    def is_named(self) -> bool: ...
    @property
    def is_extra(self) -> bool: ...
    @property
    def has_changes(self) -> bool: ...
    @property
    def has_error(self) -> bool: ...
    @property
    def is_error(self) -> bool: ...
    @property
    def parse_state(self) -> int: ...
    @property
    def next_parse_state(self) -> int: ...
    @property
    def is_missing(self) -> bool: ...
    @property
    def start_byte(self) -> int: ...
    @property
    def end_byte(self) -> int: ...
    @property
    def byte_range(self) -> tuple[int, int]: ...
    @property
    def range(self) -> Range: ...
    @property
    def start_point(self) -> Point: ...
    @property
    def end_point(self) -> Point: ...
    @property
    def children(self) -> list[Node]: ...
    @property
    def child_count(self) -> int: ...
    @property
    def named_children(self) -> list[Node]: ...
    @property
    def named_child_count(self) -> int: ...
    @property
    def parent(self) -> Node | None: ...
    @property
    def next_sibling(self) -> Node | None: ...
    @property
    def prev_sibling(self) -> Node | None: ...
    @property
    def next_named_sibling(self) -> Node | None: ...
    @property
    def prev_named_sibling(self) -> Node | None: ...
    @property
    def descendant_count(self) -> int: ...
    @property
    def text(self) -> bytes | None: ...
    def walk(self) -> TreeCursor: ...
    def edit(
        self,
        start_byte: int,
        old_end_byte: int,
        new_end_byte: int,
        start_point: Point | tuple[int, int],
        old_end_point: Point | tuple[int, int],
        new_end_point: Point | tuple[int, int],
    ) -> None: ...
    def child(self, index: int, /) -> Node | None: ...
    def named_child(self, index: int, /) -> Node | None: ...
    def first_child_for_byte(self, byte: int, /) -> Node | None: ...
    def first_named_child_for_byte(self, byte: int, /) -> Node | None: ...
    def child_by_field_id(self, id: int, /) -> Node | None: ...
    def child_by_field_name(self, name: str, /) -> Node | None: ...
    def child_with_descendant(self, descendant: Node, /) -> Node | None: ...
    def children_by_field_id(self, id: int, /) -> list[Node]: ...
    def children_by_field_name(self, name: str, /) -> list[Node]: ...
    def field_name_for_child(self, child_index: int, /) -> str | None: ...
    def field_name_for_named_child(self, child_index: int, /) -> str | None: ...
    def descendant_for_byte_range(
        self,
        start_byte: int,
        end_byte: int,
        /,
    ) -> Node | None: ...
    def named_descendant_for_byte_range(
        self,
        start_byte: int,
        end_byte: int,
        /,
    ) -> Node | None: ...
    def descendant_for_point_range(
        self,
        start_point: Point | tuple[int, int],
        end_point: Point | tuple[int, int],
        /,
    ) -> Node | None: ...
    def named_descendant_for_point_range(
        self,
        start_point: Point | tuple[int, int],
        end_point: Point | tuple[int, int],
        /,
    ) -> Node | None: ...
    def __repr__(self) -> str: ...
    def __str__(self) -> str: ...
    def __eq__(self, other: Any, /) -> bool: ...
    def __ne__(self, other: Any, /) -> bool: ...
    def __hash__(self) -> int: ...
@final
class Tree:
    """A parsed syntax tree for a source file."""

    @property
    def root_node(self) -> Node: ...
    @property
    def included_ranges(self) -> list[Range]: ...
    @property
    def language(self) -> Language: ...
    def root_node_with_offset(
        self,
        offset_bytes: int,
        offset_extent: Point | tuple[int, int],
        /,
    ) -> Node | None: ...
    def copy(self) -> Tree: ...
    def edit(
        self,
        start_byte: int,
        old_end_byte: int,
        new_end_byte: int,
        start_point: Point | tuple[int, int],
        old_end_point: Point | tuple[int, int],
        new_end_point: Point | tuple[int, int],
    ) -> None: ...
    def walk(self) -> TreeCursor: ...
    def changed_ranges(self, new_tree: Tree, /) -> list[Range]: ...
    def print_dot_graph(self, file: _SupportsFileno, /) -> None: ...
    def __copy__(self) -> Tree: ...
@final
class TreeCursor:
    """A stateful cursor for walking a syntax tree efficiently."""

    @property
    def node(self) -> Node | None: ...
    @property
    def field_id(self) -> int | None: ...
    @property
    def field_name(self) -> str | None: ...
    @property
    def depth(self) -> int: ...
    @property
    def descendant_index(self) -> int: ...
    def copy(self) -> TreeCursor: ...
    def reset(self, node: Node, /) -> None: ...
    def reset_to(self, cursor: TreeCursor, /) -> None: ...
    def goto_first_child(self) -> bool: ...
    def goto_last_child(self) -> bool: ...
    def goto_parent(self) -> bool: ...
    def goto_next_sibling(self) -> bool: ...
    def goto_previous_sibling(self) -> bool: ...
    def goto_descendant(self, index: int, /) -> None: ...
    def goto_first_child_for_byte(self, byte: int, /) -> int | None: ...
    def goto_first_child_for_point(self, point: Point | tuple[int, int], /) -> int | None: ...
    def __copy__(self) -> TreeCursor: ...
@final
class Parser:
    """Produces a ``Tree`` from source text using a configured ``Language``."""

    @overload
    def __init__(
        self,
        language: Language | None = None,
        *,
        included_ranges: Sequence[Range] | None = None,
        logger: Callable[[LogType, str], None] | None = None,
    ) -> None: ...
    @deprecated("timeout_micros is deprecated")
    @overload
    def __init__(
        self,
        language: Language | None = None,
        *,
        included_ranges: Sequence[Range] | None = None,
        timeout_micros: int | None = None,
        logger: Callable[[LogType, str], None] | None = None,
    ) -> None: ...
    @property
    def language(self) -> Language | None: ...
    @language.setter
    def language(self, language: Language) -> None: ...
    @language.deleter
    def language(self) -> None: ...
    @property
    def included_ranges(self) -> list[Range]: ...
    @included_ranges.setter
    def included_ranges(self, ranges: Sequence[Range]) -> None: ...
    @included_ranges.deleter
    def included_ranges(self) -> None: ...
    @deprecated("Use the progress_callback in parse()")
    @property
    def timeout_micros(self) -> int: ...
    @deprecated("Use the progress_callback in parse()")
    @timeout_micros.setter
    def timeout_micros(self, timeout: int) -> None: ...
    @deprecated("Use the progress_callback in parse()")
    @timeout_micros.deleter
    def timeout_micros(self) -> None: ...
    @property
    def logger(self) -> Callable[[LogType, str], None] | None: ...
    @logger.setter
    def logger(self, logger: Callable[[LogType, str], None]) -> None: ...
    @logger.deleter
    def logger(self) -> None: ...
    @overload
    def parse(
        self,
        source: ByteString,
        /,
        old_tree: Tree | None = None,
        encoding: Literal["utf8", "utf16", "utf16le", "utf16be"] = "utf8",
    ) -> Tree: ...
    @overload
    def parse(
        self,
        read_callback: Callable[[int, Point], ByteString | None],
        /,
        old_tree: Tree | None = None,
        encoding: Literal["utf8", "utf16", "utf16le", "utf16be"] = "utf8",
        progress_callback: Callable[[int, bool], bool] | None = None,
    ) -> Tree: ...
    def reset(self) -> None: ...
    def print_dot_graphs(self, file: _SupportsFileno | None, /) -> None: ...
# Raised for malformed query source (mirrors upstream tree_sitter.QueryError).
class QueryError(ValueError): ...

class QueryPredicate(Protocol):
    """Callable protocol for custom query predicates run on a pattern."""

    def __call__(
        self,
        predicate: str,
        args: list[tuple[str, Literal["capture", "string"]]],
        pattern_index: int,
        captures: dict[str, list[Node]],
    ) -> bool: ...
@final
class Query:
    """A compiled set of patterns that match nodes in a syntax tree."""

    def __new__(cls, language: Language, source: str, /) -> Self: ...
    def pattern_count(self) -> int: ...
    def capture_count(self) -> int: ...
    def string_count(self) -> int: ...
    def start_byte_for_pattern(self, index: int, /) -> int: ...
    def end_byte_for_pattern(self, index: int, /) -> int: ...
    def is_pattern_rooted(self, index: int, /) -> bool: ...
    def is_pattern_non_local(self, index: int, /) -> bool: ...
    def is_pattern_guaranteed_at_step(self, index: int, /) -> bool: ...
    def capture_name(self, index: int, /) -> str: ...
    def capture_quantifier(
        self,
        pattern_index: int,
        capture_index: int,
        /
    ) -> Literal["", "?", "*", "+"]: ...
    def string_value(self, index: int, /) -> str: ...
    def disable_capture(self, name: str, /) -> None: ...
    def disable_pattern(self, index: int, /) -> None: ...
    def pattern_settings(self, index: int, /) -> dict[str, str | None]: ...
    def pattern_assertions(self, index: int, /) -> dict[str, tuple[str | None, bool]]: ...
@final
class QueryCursor:
    """Executes a ``Query`` against a node, yielding matches and captures."""

    @overload
    def __init__(self, query: Query, *, match_limit: int = 0xFFFFFFFF) -> None: ...
    @deprecated("timeout_micros is deprecated")
    @overload
    def __init__(
        self,
        query: Query,
        *,
        match_limit: int = 0xFFFFFFFF,
        timeout_micros: int = 0
    ) -> None: ...
    @property
    def match_limit(self) -> int: ...
    @match_limit.setter
    def match_limit(self, limit: int) -> None: ...
    @match_limit.deleter
    def match_limit(self) -> None: ...
    @deprecated("Use the progress_callback in matches() or captures()")
    @property
    def timeout_micros(self) -> int: ...
    @deprecated("Use the progress_callback in matches() or captures()")
    @timeout_micros.setter
    def timeout_micros(self, timeout: int) -> None: ...
    @property
    def did_exceed_match_limit(self) -> bool: ...
    def set_max_start_depth(self, depth: int, /) -> None: ...
    def set_byte_range(self, start: int, end: int, /) -> None: ...
    def set_point_range(
        self,
        start: Point | tuple[int, int],
        end: Point | tuple[int, int],
        /,
    ) -> None: ...
    def captures(
        self,
        node: Node,
        predicate: QueryPredicate | None = None,
        progress_callback: Callable[[int], bool] | None = None,
        /,
    ) -> dict[str, list[Node]]: ...
    def matches(
        self,
        node: Node,
        predicate: QueryPredicate | None = None,
        progress_callback: Callable[[int], bool] | None = None,
        /,
    ) -> list[tuple[int, dict[str, list[Node]]]]: ...
@final
class LookaheadIterator(Iterator[tuple[int, str]]):
    """Iterates (symbol id, symbol name) pairs valid in a given parse state."""

    @property
    def language(self) -> Language: ...
    @property
    def current_symbol(self) -> int: ...
    @property
    def current_symbol_name(self) -> str: ...
    def reset(self, state: int, /, language: Language | None = None) -> bool: ...
    def names(self) -> list[str]: ...
    def symbols(self) -> list[int]: ...
    def __next__(self) -> tuple[int, str]: ...
@final
class Range:
    """A span of source, as both byte offsets and (row, column) points."""

    def __init__(
        self,
        start_point: Point | tuple[int, int],
        end_point: Point | tuple[int, int],
        start_byte: int,
        end_byte: int,
    ) -> None: ...
    @property
    def start_point(self) -> Point: ...
    @property
    def end_point(self) -> Point: ...
    @property
    def start_byte(self) -> int: ...
    @property
    def end_byte(self) -> int: ...
    def __eq__(self, other: Any, /) -> bool: ...
    def __ne__(self, other: Any, /) -> bool: ...
    def __repr__(self) -> str: ...
    def __hash__(self) -> int: ...
# ABI version numbers exposed by the compiled binding.
LANGUAGE_VERSION: Final[int]
MIN_COMPATIBLE_LANGUAGE_VERSION: Final[int]

View File

@@ -0,0 +1,54 @@
# Public facade for the tree-sitter language pack: re-exports the API of the
# compiled `_native` extension module.
from typing import TypeAlias
from tree_sitter_language_pack._native import (
DownloadError,
LanguageNotFoundError,
ParseError,
ProcessConfig,
QueryError,
TreeHandle,
available_languages,
cache_dir,
clean_cache,
configure,
download,
download_all,
downloaded_languages,
get_binding,
get_language,
get_parser,
has_language,
init,
language_count,
manifest_languages,
parse_string,
process,
)
# NOTE(review): `_native` declares SupportedLanguage as a Literal of every
# bundled grammar name; here it is widened to a plain `str` alias — confirm
# this loosening is intentional.
SupportedLanguage: TypeAlias = str
# Explicit public API.
__all__ = [
"DownloadError",
"LanguageNotFoundError",
"ParseError",
"ProcessConfig",
"QueryError",
"SupportedLanguage",
"TreeHandle",
"available_languages",
"cache_dir",
"clean_cache",
"configure",
"download",
"download_all",
"downloaded_languages",
"get_binding",
"get_language",
"get_parser",
"has_language",
"init",
"language_count",
"manifest_languages",
"parse_string",
"process",
]

View File

@@ -0,0 +1,276 @@
from typing import Literal, TypeAlias
from tree_sitter import Language, Parser
# Raised when a requested grammar name is not available in the pack.
class LanguageNotFoundError(ValueError): ...
# Raised when fetching a grammar fails (presumably network/IO — confirm).
class DownloadError(RuntimeError): ...
# Closed set of grammar names shipped with the language pack. Keep in sync
# with the manifest returned by manifest_languages().
SupportedLanguage: TypeAlias = Literal[
"actionscript",
"ada",
"agda",
"apex",
"arduino",
"asm",
"astro",
"bash",
"batch",
"bazel",
"beancount",
"bibtex",
"bicep",
"bitbake",
"bsl",
"c",
"cairo",
"capnp",
"chatito",
"clarity",
"clojure",
"cmake",
"cobol",
"comment",
"commonlisp",
"cpon",
"cpp",
"css",
"csv",
"cuda",
"d",
"dart",
"diff",
"dockerfile",
"doxygen",
"dtd",
"elisp",
"elixir",
"elm",
"erlang",
"fennel",
"firrtl",
"fish",
"fortran",
"fsharp",
"fsharp_signature",
"func",
"gdscript",
"gitattributes",
"gitcommit",
"gitignore",
"gleam",
"glsl",
"gn",
"go",
"gomod",
"gosum",
"gradle",
"graphql",
"groovy",
"gstlaunch",
"hack",
"hare",
"haskell",
"haxe",
"hcl",
"heex",
"hlsl",
"html",
"hyprlang",
"ignorefile",
"ini",
"ispc",
"janet",
"java",
"javascript",
"jsdoc",
"json",
"jsonnet",
"julia",
"kconfig",
"kdl",
"kotlin",
"latex",
"linkerscript",
"lisp",
"llvm",
"lua",
"luadoc",
"luap",
"luau",
"magik",
"make",
"makefile",
"markdown",
"markdown_inline",
"matlab",
"mermaid",
"meson",
"netlinx",
"nim",
"ninja",
"nix",
"nqc",
"objc",
"ocaml",
"ocaml_interface",
"odin",
"org",
"pascal",
"pem",
"perl",
"pgn",
"php",
"pkl",
"po",
"pony",
"powershell",
"printf",
"prisma",
"properties",
"proto",
"psv",
"puppet",
"purescript",
"pymanifest",
"python",
"qmldir",
"qmljs",
"query",
"r",
"racket",
"re2c",
"readline",
"rego",
"requirements",
"ron",
"rst",
"ruby",
"rust",
"scala",
"scheme",
"scss",
"shell",
"smali",
"smithy",
"solidity",
"sparql",
"sql",
"squirrel",
"starlark",
"svelte",
"swift",
"tablegen",
"tcl",
"terraform",
"test",
"thrift",
"toml",
"tsv",
"tsx",
"twig",
"typescript",
"typst",
"udev",
"ungrammar",
"uxntal",
"v",
"verilog",
"vhdl",
"vim",
"vue",
"wast",
"wat",
"wgsl",
"xcompose",
"xml",
"yuck",
"zig",
]
# Raised when parsing source text fails.
class ParseError(RuntimeError): ...
# Raised for malformed query source passed to run_query().
class QueryError(ValueError): ...
class ProcessConfig:
language: str
structure: bool
imports: bool
exports: bool
comments: bool
docstrings: bool
symbols: bool
diagnostics: bool
chunk_max_size: int | None
def __init__(
self,
language: str,
*,
structure: bool = True,
imports: bool = True,
exports: bool = True,
comments: bool = True,
docstrings: bool = True,
symbols: bool = True,
diagnostics: bool = True,
chunk_max_size: int | None = None,
) -> None: ...
@staticmethod
def all(language: str) -> ProcessConfig: ...
@staticmethod
def minimal(language: str) -> ProcessConfig: ...
class TreeHandle:
    """Handle to a parsed tree exposing read-only inspection helpers."""

    def root_node_type(self) -> str: ...
    def root_child_count(self) -> int: ...
    def contains_node_type(self, node_type: str) -> bool: ...
    def has_error_nodes(self) -> bool: ...
    def to_sexp(self) -> str: ...
    def error_count(self) -> int: ...
    def root_node_info(self) -> dict[str, object]: ...
    def find_nodes_by_type(self, node_type: str) -> list[dict[str, object]]: ...
    def named_children_info(self) -> list[dict[str, object]]: ...
    def extract_text(self, start_byte: int, end_byte: int) -> str: ...
    def run_query(self, language: str, query_source: str) -> list[dict[str, object]]: ...
# Explicit public API of the _native extension stub.
__all__ = [
"DownloadError",
"LanguageNotFoundError",
"ParseError",
"ProcessConfig",
"QueryError",
"SupportedLanguage",
"TreeHandle",
"available_languages",
"cache_dir",
"clean_cache",
"configure",
"download",
"download_all",
"downloaded_languages",
"get_binding",
"get_language",
"get_parser",
"has_language",
"init",
"language_count",
"manifest_languages",
"parse_string",
"process",
]
# --- Language access -------------------------------------------------------
def get_binding(name: SupportedLanguage) -> object: ...
def get_language(name: SupportedLanguage) -> Language: ...
def get_parser(name: SupportedLanguage) -> Parser: ...
# --- Introspection ---------------------------------------------------------
def available_languages() -> list[str]: ...
def has_language(name: str) -> bool: ...
def language_count() -> int: ...
# --- Parsing / processing --------------------------------------------------
def parse_string(language: str, source: str) -> TreeHandle: ...
def process(source: str, config: ProcessConfig) -> dict[str, object]: ...
# --- Setup / cache management ----------------------------------------------
# NOTE(review): examples elsewhere in this commit call init() with a list of
# language names, but the signature here takes a dict — confirm which is right.
def init(config: dict[str, object]) -> None: ...
def configure(*, cache_dir: str | None = None) -> None: ...
def download(names: list[str]) -> int: ...
def download_all() -> int: ...
def manifest_languages() -> list[str]: ...
def downloaded_languages() -> list[str]: ...
def clean_cache() -> None: ...
def cache_dir() -> str: ...

View File

@@ -0,0 +1,9 @@
{
"name": "Tree-sitter",
"author": "ITDominator",
"version": "0.0.1",
"support": "",
"pre_launch": true,
"autoload": false,
"requests": {}
}

View File

@@ -0,0 +1,17 @@
# Builds a single shared library from locally cloned grammar repositories.
# NOTE(review): `Language.build_library` was removed from py-tree-sitter in
# 0.22+; this script appears to require an older tree_sitter release —
# confirm the pinned version before use. Also note the relative import:
# this file must be executed in package context, not as a plain script.
from .tree_sitter import Language
Language.build_library(
"my-languages.so",
[
"tree-sitter-python",
"tree-sitter-javascript",
"tree-sitter-html",
"tree-sitter-css",
"tree-sitter-json",
"tree-sitter-java",
"tree-sitter-c",
"tree-sitter-cpp",
"tree-sitter-go",
"tree-sitter-gdscript",
],
)

View File

@@ -0,0 +1,121 @@
#!/bin/bash
# . CONFIG.sh
# set -o xtrace ## To debug scripts
# set -o errexit ## To exit on error
# set -o nounset ## To exit if a variable is referenced but not set
# Legacy build path: generates a compile.py that links cloned grammar repos
# into one shared library via the (removed in 0.22+) Language.build_library.
function old_build() {
    touch __init__.py

    # Fix: "tree-sitter-go" was missing a trailing comma, so Python's
    # implicit string concatenation produced the bogus entry
    # "tree-sitter-gotree-sitter-gdscript".
    cat <<'EOF' > compile.py
from tree_sitter import Language

Language.build_library(
    "my-languages.so",
    [
        "tree-sitter-python",
        "tree-sitter-javascript",
        "tree-sitter-html",
        "tree-sitter-css",
        "tree-sitter-json",
        "tree-sitter-java",
        "tree-sitter-c",
        "tree-sitter-cpp",
        "tree-sitter-go",
        "tree-sitter-gdscript",
    ],
)
EOF

    python compile.py
    cd ..
}
# Current build path: generates a smoke-test script for the language pack.
function build() {
    touch __init__.py

    # Fix: the generated script referenced `source` without defining it
    # (NameError at the chunking call); it is now assigned before first use.
    cat <<'EOF' > compile.py
from tree_sitter_language_pack import init, download, get_language, get_parser, available_languages
from tree_sitter_language_pack import process, ProcessConfig

# Optional: Pre-download specific languages for offline use
# init(["python", "javascript", "rust"])

# Get a language (auto-downloads if not cached)
language = get_language("python")

# Get a pre-configured parser (auto-downloads if needed)
parser = get_parser("python")
tree = parser.parse(b"def hello(): pass")
print(tree.root_node)

# List all available languages
for lang in available_languages():
    print(lang)

# Extract file intelligence (auto-downloads language if needed)
source = "def hello(): pass"
result = process(
    source,
    ProcessConfig(
        language = "python"
    )
)
print(f"Functions: {len(result['structure'])}")

# Pre-download languages for offline use
download(["python", "javascript"])

# With chunking
result = process(
    source,
    ProcessConfig(
        language = "python",
        chunk_max_size = 1000,
        comments = True
    )
)
print(f"Chunks: {len(result['chunks'])}")
EOF

    python compile.py
    cd ..
}
# Shallow-clone each grammar repository (same set and order as before).
function clone() {
    local repo
    local repos=(
        tree-sitter-python
        tree-sitter-javascript
        tree-sitter-html
        tree-sitter-css
        tree-sitter-json
        tree-sitter-java
        tree-sitter-c
        tree-sitter-cpp
        tree-sitter-go
    )

    for repo in "${repos[@]}"; do
        git clone --depth 1 "https://github.com/tree-sitter/${repo}"
    done

    # git clone --depth 1 https://github.com/godotengine/tree-sitter-gdscript
}
# Vendors the language-pack dependency into the current directory (-t .).
function setup() {
# pip install tree-sitter -t .
pip install tree-sitter-language-pack -t .
# pip install tree_sitter_languages -t . # Old unmaintained library
}
# Entry point: run from the script's own directory so relative paths resolve.
function main() {
    # Fail fast if we cannot reach the expected directories (errexit is off).
    cd "$(dirname "$0")" || exit 1
    echo "Working Dir: $(pwd)"

    mkdir -p build
    cd build || exit 1

    # clone "$@"
    setup "$@"
    build "$@"
}
main "$@";

View File

@@ -0,0 +1,77 @@
# Python imports
import sys
import os

BASE_DIR = os.path.dirname(os.path.realpath(__file__))
LIBS_DIR = f"{BASE_DIR}/libs"

# Make the bundled third-party libraries importable before anything below
# tries to import them. (LIBS_DIR is already a str; the redundant str() call
# was removed, and the if-suite re-indented.)
if LIBS_DIR not in sys.path:
    sys.path.insert(0, LIBS_DIR)

# Lib imports

# Application imports
from libs.event_factory import Event_Factory, Code_Event_Types
from plugins.plugin_types import PluginCode

import tree_sitter_language_pack as tslp
class Plugin(PluginCode):
    """Tree-sitter plugin: on text changes, parses the event's buffer with the
    bundled language pack and attaches the result to the file object."""

    def __init__(self):
        super().__init__()

    def _controller_message(self, event: Code_Event_Types.CodeEvent):
        """Handle controller events; re-parses the buffer on text changes."""
        if not isinstance(event, Code_Event_Types.TextChangedEvent):
            return

        if not tslp.has_language(event.file.ftype):
            try:
                tslp.download([event.file.ftype])
            except Exception:
                # Bug fix: `logger` was undefined here, so a failed download
                # raised NameError instead of being reported.
                import logging
                logging.getLogger(__name__).info(
                    f"Tree Sitter Language Pack:\nCouldn't download -> '{event.file.ftype}' language type..."
                )
                return

        # NOTE(review): this re-parses the whole buffer on every text change;
        # consider debouncing if it proves expensive on large files.
        buffer = event.file.buffer
        start_itr, end_itr = buffer.get_bounds()
        text = buffer.get_text(start_itr, end_itr, True)

        result = tslp.process(
            text,
            tslp.ProcessConfig(language=event.file.ftype)
        )
        event.file.tree_sitter_meta = result

    def load(self):
        # Point the language pack's cache at the plugin's own directory.
        tslp.configure(
            cache_dir=f"{BASE_DIR}/cache/tree-sitter-language-pack/v1.0.0/libs"
        )

    def unload(self):
        ...

    def run(self):
        # Deferred: optionally pre-download a default language set here, e.g.
        # tslp.download(["python", "java", "go", "c", "cpp",
        #                "javascript", "html", "css", "json"])  # "gdscript"
        ...

View File

@@ -3,6 +3,7 @@
"author": "ITDominator", "author": "ITDominator",
"version": "0.0.1", "version": "0.0.1",
"support": "", "support": "",
"pre_launch": true,
"autoload": false, "autoload": false,
"requests": {} "requests": {}
} }