ithon/ithon.py

import tokenize, io
PREFIX = """
# ITHON START
class _INTERNAL_Token:
__slots__ = ('action',)
def __init__(self, action):
self.action = action
def __rrshift__(self, lhs):
return _INTERNAL_Operation(self.action, lhs)
class _INTERNAL_Operation:
__slots__ = ('action', 'lhs')
def __init__(self, action, lhs):
self.action = action
self.lhs = lhs
def __lshift__(self, rhs):
return self.action(self.lhs, rhs)
class _INTERNAL_LazyIterable:
__slots__ = ('x','y')
def __init__(self, x, y) -> None:
self.x = iter(x)
self.y = iter(y)
def __iter__(self):
yield from self.x
yield from self.y
_INTERNAL_lazymerge = _INTERNAL_Token(lambda lhs, rhs: _INTERNAL_LazyIterable(lhs, rhs))
_INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs))
_INTERNAL_rpipe = _INTERNAL_Token(lambda lhs, rhs: lhs(rhs))
_INTERNAL_lspipe = _INTERNAL_Token(lambda lhs, rhs: rhs(*lhs))
_INTERNAL_rspipe = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs))
# ITHON END
"""
# Wraps the precomputed token list but hands back only (type, string) pairs,
# the 2-tuple form that tokenize.untokenize accepts.
class lazy_typegetter:
    class lazy_typegetter_iter:
        def __init__(self, typegetter) -> None:
            self.typegetter = typegetter
            self.index = 0
        def __next__(self):
            try:
                val: tuple[int, str] = self.typegetter[self.index]
            except IndexError:
                raise StopIteration
            self.index += 1
            return val
    def __init__(self, tokens: list) -> None:
        self.tokens = tokens
    def __getitem__(self, item: int):
        type, name, _, _, _ = self.tokens[item]
        return type, name
    def __iter__(self):
        return self.lazy_typegetter_iter(self)
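# Hypothetical illustration of what lazy_typegetter exposes:
#   toks = lazy_typegetter(list(tokenize.generate_tokens(io.StringIO("x = 1").readline)))
#   toks[0]   # -> (tokenize.NAME, 'x'); positions and the source line are dropped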
def translate(file: io.StringIO):
    patched_file = io.StringIO(PREFIX + file.read())
    skip_token = 0
    tokens = lazy_typegetter(list(tokenize.generate_tokens(patched_file.readline)))  # Precalculate tokens
    for n, i in enumerate(tokens):
        type, name = i
        try:
            next_type, next_name = tokens[n + 1]
        except IndexError:
            next_type, next_name = (None, None)
        print(type, name)  # debug: dump the incoming token stream
        if skip_token:
            skip_token -= 1
            continue
        if type == tokenize.OP and next_type == tokenize.OP:
            # Most likely special operation
            if name == "|" and next_name == ">":  # left pipe
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_lpipe"
                yield tokenize.OP, "<<"
                skip_token += 1
            elif name == "<" and next_name == "|":  # right pipe
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_rpipe"
                yield tokenize.OP, "<<"
                skip_token += 1
            elif name == ":" and next_name == ":":  # lazy merge
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_lazymerge"
                yield tokenize.OP, "<<"
                skip_token += 1
            elif name == "+" and next_name == "+":  # increment
                yield tokenize.OP, "+="
                yield tokenize.NUMBER, "1"
                skip_token += 1
            elif name == "-" and next_name == "-":  # decrement
                yield tokenize.OP, "-="
                yield tokenize.NUMBER, "1"
                skip_token += 1
            elif name == "-" and next_name == ">":  # ->, todo
                # Note: a real annotation arrow arrives as the single OP token "->",
                # so this two-token branch is effectively a placeholder.
                skip_token += 1
            elif name == ")" and next_name == "=":  # short functions
                yield type, name
                yield tokenize.OP, ":"
                yield tokenize.NAME, "return"
                skip_token += 1
            elif name == "|" and next_name == "*" and tokens[n + 2][1] == ">":  # |*>
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_lspipe"
                yield tokenize.OP, "<<"
                skip_token += 2
            elif name == "<" and next_name == "*" and tokens[n + 2][1] == "|":  # <*|
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_rspipe"
                yield tokenize.OP, "<<"
                skip_token += 2
            else:
                yield type, name
        elif type == tokenize.OP:
            if name == "<>":  # barry is flufl
                yield tokenize.OP, "!="
            else:
                yield type, name
        else:
            yield type, name
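# Summary of the rewrites performed above:
#   a |> f             ->  a >> _INTERNAL_lpipe << f       i.e. f(a)
#   f <| a             ->  f >> _INTERNAL_rpipe << a       i.e. f(a)
#   a |*> f            ->  a >> _INTERNAL_lspipe << f      i.e. f(*a)
#   f <*| a            ->  f >> _INTERNAL_rspipe << a      i.e. f(*a)
#   a :: b             ->  a >> _INTERNAL_lazymerge << b   i.e. a lazily chained iterable
#   x++ / x--          ->  x += 1 / x -= 1
#   def f(...) = expr  ->  def f(...): return expr
#   a <> b             ->  a != b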
script = """
def fibonacci(x: int) -> list[int]:
start = [0,1]
for i in range(1, x):
start.append <| start[i] + start[i - 1]
return start
a = 12 |> fibonacci
b = a :: a
print <| [i for i in b]
print <*| ('a', 'b', 'c')
"""
code = tokenize.untokenize(translate(io.StringIO(script)))
print(code)
exec(code)
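# Running this module prints the token stream (the debug print in translate), then the
# untokenized source, and finally the executed script's own output: roughly, the first
# thirteen Fibonacci numbers repeated twice by the lazy merge, followed by "a b c" from
# the spread pipe into print.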