+> (yield) operator and nice debug output!

This commit is contained in:
Xander Mckay 2024-11-26 15:37:08 -05:00
parent 440bd2f153
commit 0ebe183cfe
4 changed files with 55 additions and 9 deletions

25
examples/mini.it Normal file
View file

@ -0,0 +1,25 @@
from textual.app import App, ComposeResult
from textual.widgets import Header, Footer, TextArea


class Mini(App):
    """A minimal Textual text-editor app: a text area with open/save key bindings."""

    # (key, action-name, description) triples; each action-name maps to an
    # action_<name> method below.
    BINDINGS = [("ctrl+o", "open", "Open"), ("ctrl+s", "save", "Save"), ("ctrl+alt+s", "save_as", "Save as")]

    def compose(self) -> ComposeResult:
        """Create child widgets for the app."""
        # `+>` is this dialect's yield operator — the transpiler rewrites it
        # to Python's `yield`, so compose() yields each widget in turn.
        +> Header()
        +> TextArea()
        +> Footer()

    def action_save(self) -> None:
        """An action to save."""
        # NOTE(review): stub — the binding is wired but saving is not implemented yet.
        pass

    def action_open(self) -> None: pass  # stub: open action not implemented yet

    def action_save_as(self) -> None: pass  # stub: save-as action not implemented yet


if __name__ == "__main__":
    app = Mini()
    app.run()

View file

@ -4,6 +4,13 @@ def fibonacci(x: int) -> list[int]:
for i in range(1, x): for i in range(1, x):
start.append <| start[i] + start[i - 1] start.append <| start[i] + start[i - 1]
return start return start
def a():
yield 1
+> 2
def b():
+>> a()
+>> a()
b() |> list |> print
a = 12 |> fibonacci a = 12 |> fibonacci
b = a :: a :: a :: a b = a :: a :: a :: a
c = b :: b :: b :: b c = b :: b :: b :: b
@ -24,7 +31,7 @@ print(d3d2(x=2)(y=4))
a = 1 a = 1
a++ a++
a |> print ?? 11 |> print print(None ?? a) # 2
'''a''' '''a'''
/* /*
very bad code that is very bad code that is

View file

@ -31,21 +31,26 @@ class lazy_typegetter:
def translate(file: io.StringIO): def translate(file: io.StringIO, debug: int = 0):
def infix(name: str): def infix(name: str):
yield tokenize.OP, ">>" yield tokenize.OP, ">>"
yield tokenize.NAME, name yield tokenize.NAME, name
yield tokenize.OP, "<<" yield tokenize.OP, "<<"
filedata = PREFIX + file.read() def dprint(data: str, prio: int):
print(filedata) if debug > prio:
return
print(data)
fdata = file.read()
filedata = PREFIX + fdata
dprint(f'---START FILE---\nname: {file.name}, len: {len(fdata)}\n' + filedata + '\n---END FILE---\n', 4)
patched_file = io.StringIO(filedata) patched_file = io.StringIO(filedata)
skip_token = 0 skip_token = 0
tokens = lazy_typegetter(list(tokenize.generate_tokens(patched_file.readline))) # Precalculate tokens tokens = lazy_typegetter(list(tokenize.generate_tokens(patched_file.readline))) # Precalculate tokens
dprint(f'---START DEBOUT---\nname: {file.name}', 4)
for n, i in enumerate(tokens): for n, i in enumerate(tokens):
type, name = i type, name = i
print(type, name) dprint(str(type) + " " + name, 4)
try: try:
next_type, next_name = tokens[n + 1] next_type, next_name = tokens[n + 1]
except IndexError: except IndexError:
@ -55,6 +60,7 @@ def translate(file: io.StringIO):
skip_token -= 1 skip_token -= 1
continue continue
if type == tokenize.OP and next_type == tokenize.OP: if type == tokenize.OP and next_type == tokenize.OP:
dprint(f"OP {name} {next_name} {tokens[n + 2][1]}", 5)
# Most likely special operation # Most likely special operation
if name == "|" and next_name == ">": # left pipe if name == "|" and next_name == ">": # left pipe
yield from infix("_INTERNAL_lpipe") yield from infix("_INTERNAL_lpipe")
@ -95,6 +101,14 @@ def translate(file: io.StringIO):
elif name == '*' and next_name == "/": elif name == '*' and next_name == "/":
yield tokenize.OP, "'''" yield tokenize.OP, "'''"
skip_token+=1 skip_token+=1
elif name == '+' and next_name == ">":
yield tokenize.NAME, "yield"
skip_token+=1
elif name == '+' and next_name == ">>":
yield tokenize.NAME, "yield"
yield tokenize.NAME, "from"
skip_token+=2
else: else:
yield type,name yield type,name
@ -119,7 +133,7 @@ def translate(file: io.StringIO):
# yield type, name # yield type, name
else: else:
yield type,name yield type,name
dprint(f'---END DEBOUT---', 4)
def transpile(input_path: Path, verbose: int) -> None: def transpile(input_path: Path, verbose: int) -> None:
dir = Path('dist') dir = Path('dist')
@ -129,10 +143,10 @@ def transpile(input_path: Path, verbose: int) -> None:
path = (dir / i.relative_to(input_path)).with_suffix('.py') path = (dir / i.relative_to(input_path)).with_suffix('.py')
path.parent.mkdir(parents=True, exist_ok=True) path.parent.mkdir(parents=True, exist_ok=True)
with i.open() as f: with i.open() as f:
path.write_text(tokenize.untokenize(translate(f))) path.write_text(tokenize.untokenize(translate(f, verbose)))
else: else:
with input_path.open() as f: with input_path.open() as f:
input_path.with_suffix('.py').write_text(tokenize.untokenize(translate(f))) input_path.with_suffix('.py').write_text(tokenize.untokenize(translate(f, verbose)))
app = typer.Typer() app = typer.Typer()
@app.command('t') @app.command('t')