Start working on the harness
This commit is contained in:
parent
7c4705714e
commit
4f8aef3f89
1 changed file with 125 additions and 27 deletions
152
harness.py
152
harness.py
|
|
@ -1,4 +1,9 @@
|
|||
import bisect
|
||||
import enum
|
||||
import select
|
||||
import sys
|
||||
import termios
|
||||
import tty
|
||||
import typing
|
||||
|
||||
import grammar
|
||||
|
|
@ -88,46 +93,139 @@ def parse(table, tokens, trace=None):
|
|||
)
|
||||
|
||||
|
||||
def harness(lexer_func, grammar_func, start_rule, source_path):
    """One-shot driver: build the parse table, report its size, and lex *source_path*.

    NOTE(review): reconstructed from a diff rendering whose indentation was
    lost — ``CharColor`` is kept nested here to match the literal line order;
    confirm against the real file whether it belongs at module level.
    """
    # generator = parser.GenerateLR1
    generator = parser.GenerateLALR
    # https://en.wikipedia.org/wiki/ANSI_escape_code
    # https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797

    trace = None
    # trace = trace_state

    table = grammar_func().build_table(start=start_rule, generator=generator)
    print(f"{len(table)} states")

    class CharColor(enum.IntEnum):
        # ANSI SGR foreground color codes; auto() continues from the
        # explicitly numbered anchors (30 for normal, 90 for bright).
        CHAR_COLOR_DEFAULT = 0
        CHAR_COLOR_BLACK = 30
        CHAR_COLOR_RED = enum.auto()
        CHAR_COLOR_GREEN = enum.auto()
        CHAR_COLOR_YELLOW = enum.auto()
        CHAR_COLOR_BLUE = enum.auto()
        CHAR_COLOR_MAGENTA = enum.auto()
        CHAR_COLOR_CYAN = enum.auto()
        CHAR_COLOR_WHITE = enum.auto()  # Really light gray
        CHAR_COLOR_BRIGHT_BLACK = 90  # Really dark gray
        CHAR_COLOR_BRIGHT_RED = enum.auto()
        CHAR_COLOR_BRIGHT_GREEN = enum.auto()
        CHAR_COLOR_BRIGHT_YELLOW = enum.auto()
        CHAR_COLOR_BRIGHT_BLUE = enum.auto()
        CHAR_COLOR_BRIGHT_MAGENTA = enum.auto()
        CHAR_COLOR_BRIGHT_CYAN = enum.auto()
        CHAR_COLOR_BRIGHT_WHITE = enum.auto()

    average_entries = sum(len(row) for row in table) / len(table)
    max_entries = max(len(row) for row in table)
    print(f"{average_entries} average, {max_entries} max")

    if source_path:
        with open(source_path, "r", encoding="utf-8") as f:
            src = f.read()
        tokens = lexer_func(src)
|
||||
def ESC(x: bytes) -> bytes:
    """Return *x* prefixed with the terminal escape byte (0x1b)."""
    return b"\x1b" + x
|
||||
|
||||
|
||||
def CSI(x: bytes) -> bytes:
    """Return a Control Sequence Introducer sequence: ESC '[' followed by *x*."""
    payload = b"[" + x
    return ESC(payload)
|
||||
|
||||
|
||||
# Home the cursor (CSI H) and reset all character attributes (CSI 0m);
# written at the start of every redraw. Does not erase the screen contents.
CLEAR = CSI(b"H") + CSI(b"0m")
|
||||
|
||||
|
||||
def enter_alt_screen():
    """Switch the terminal to the alternate screen buffer (CSI ?1049h)."""
    seq = CSI(b"?1049h")
    sys.stdout.buffer.write(seq)
|
||||
|
||||
|
||||
def leave_alt_screen():
    """Restore the terminal's normal screen buffer (CSI ?1049l)."""
    seq = CSI(b"?1049l")
    sys.stdout.buffer.write(seq)
|
||||
|
||||
|
||||
class Harness:
|
||||
source: str | None
|
||||
|
||||
def __init__(self, lexer_func, grammar_func, start_rule, source_path):
|
||||
# self.generator = parser.GenerateLR1
|
||||
self.generator = parser.GenerateLALR
|
||||
self.lexer_func = lexer_func
|
||||
self.grammar_func = grammar_func
|
||||
self.start_rule = start_rule
|
||||
self.source_path = source_path
|
||||
|
||||
self.source = None
|
||||
self.table = None
|
||||
self.tokens = None
|
||||
self.tree = None
|
||||
self.errors = None
|
||||
|
||||
def run(self):
|
||||
while True:
|
||||
i, _, _ = select.select([sys.stdin], [], [], 1)
|
||||
if i:
|
||||
k = sys.stdin.read(1)
|
||||
print(f"Key {k}\r")
|
||||
return
|
||||
|
||||
self.update()
|
||||
|
||||
def update(self):
|
||||
if self.table is None:
|
||||
self.table = self.grammar_func().build_table(
|
||||
start=self.start_rule, generator=self.generator
|
||||
)
|
||||
|
||||
if self.tokens is None:
|
||||
with open(self.source_path, "r", encoding="utf-8") as f:
|
||||
self.source = f.read()
|
||||
self.tokens = self.lexer_func(self.source)
|
||||
|
||||
# print(f"{tokens.lines}")
|
||||
# tokens.dump(end=5)
|
||||
(_, errors) = parse(table, tokens, trace=trace)
|
||||
if len(errors) > 0:
|
||||
print(f"{len(errors)} errors:")
|
||||
for error in errors:
|
||||
print(f" {error}")
|
||||
if self.tree is None and self.errors is None:
|
||||
(tree, errors) = parse(self.table, self.tokens, trace=None)
|
||||
self.tree = tree
|
||||
self.errors = errors
|
||||
|
||||
sys.stdout.buffer.write(CLEAR)
|
||||
rows, cols = termios.tcgetwinsize(sys.stdout.fileno())
|
||||
|
||||
average_entries = sum(len(row) for row in self.table) / len(self.table)
|
||||
max_entries = max(len(row) for row in self.table)
|
||||
print(f"{len(self.table)} states - {average_entries} average, {max_entries} max\r")
|
||||
|
||||
if self.tree is not None:
|
||||
lines = []
|
||||
self.format_node(lines, self.tree)
|
||||
for line in lines[: rows - 2]:
|
||||
print(line[:cols] + "\r")
|
||||
|
||||
sys.stdout.flush()
|
||||
sys.stdout.buffer.flush()
|
||||
|
||||
def format_node(self, lines, node, indent=0):
|
||||
"""Print out an indented concrete syntax tree, from parse()."""
|
||||
lines.append((" " * indent) + node[0])
|
||||
for child in node[1]:
|
||||
self.format_node(lines, child, indent + 2)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Fix: removed the stale pre-refactor `harness(...)` invocation (diff
    # residue) that duplicated the table build before the Harness-based UI
    # started, and the redundant local `import sys` (imported at file top).
    source_path = None
    if len(sys.argv) == 2:
        source_path = sys.argv[1]

    # Raw mode + alternate screen for the interactive display; the
    # try/finally guarantees the terminal is restored on any exit path.
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)
    try:
        tty.setraw(fd)
        enter_alt_screen()

        h = Harness(
            lexer_func=grammar.FineTokens,
            grammar_func=grammar.FineGrammar,
            start_rule="file",
            source_path=source_path,
        )
        h.run()

    finally:
        leave_alt_screen()
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)

    # print(parser_faster.format_table(gen, table))
    # print()
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue