Initial commit

Xanderplayz16 2024-09-15 15:14:27 -04:00
parent ae1b80cd4d
commit 1e11bad079

ithon.py (new file, 155 lines)

@@ -0,0 +1,155 @@
import tokenize, io
PREFIX = """
# ITHON START
import sys as _INTERNAL_SYS
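# Runtime helpers injected in front of every translated script. The two
# classes below back the '::' lazy-merge operator: they chain a pair of
# iterables without materializing either one.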
class _INTERNAL_LazyIterable:
    __slots__ = ('x','y')
    def __init__(self, x, y) -> None:
        self.x = iter(x)
        self.y = iter(y)
    def __iter__(self):
        return _INTERNAL_LazyIterator(self)
class _INTERNAL_LazyIterator:
    __slots__ = ('iterable',)
    def __init__(self, iterable) -> None:
        self.iterable = iterable
    def __next__(self):
        try:
            return next(self.iterable.x)
        except StopIteration:
            # First iterable exhausted: continue with the second; its
            # StopIteration ends the merged stream.
            return next(self.iterable.y)
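# The carrier classes below implement the rewritten operators: 'a OP b'
# is emitted as 'a >> carrier << b', so __rrshift__ captures the left
# operand and __lshift__ consumes the right one.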
class _INTERNAL_LAZYMERGE:
    def __init__(self, val):
        self.val = val
    def __rrshift__(self, other):
        return _INTERNAL_LAZYMERGE(other)
    def __lshift__(self, other):
        return _INTERNAL_LazyIterable(self.val, other)
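# Pipes: 'x |> f' calls f(x), and 'f <| x' does the same from the other
# side. Piping into a list appends the value instead of calling it.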
class _INTERNAL_LPIPE:
    def __init__(self, val):
        self.val = val
    def __rrshift__(self, other):
        return _INTERNAL_LPIPE(other)
    def __lshift__(self, other):
        if isinstance(other, list):
            other.append(self.val)
        else:
            return other(self.val)
class _INTERNAL_RPIPE:
    def __init__(self, action):
        self.action = action
    def __rrshift__(self, other):
        return _INTERNAL_RPIPE(other)
    def __lshift__(self, other):
        if isinstance(self.action, list):
            self.action.append(other)
        else:
            return self.action(other)
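# Shared sentinel instances; each rewritten operator replaces them with
# a fresh carrier via __rrshift__, so no state leaks between uses.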
_INTERNAL_lpipe = _INTERNAL_LPIPE(None)
_INTERNAL_rpipe = _INTERNAL_RPIPE(None)
_INTERNAL_lazymerge = _INTERNAL_LAZYMERGE(None)
# ITHON END
"""
class lazy_typegetter:
    class lazy_typegetter_iter:
        def __init__(self, typegetter) -> None:
            self.typegetter = typegetter
            self.index = 0
        def __next__(self):
            try:
                val: tuple[int, str] = self.typegetter[self.index]
            except IndexError:
                raise StopIteration
            self.index += 1
            return val
    def __init__(self, tokens: list) -> None:
        self.tokens = tokens
    def __getitem__(self, item: int):
        type, name, _, _, _ = self.tokens[item]
        return type, name
    def __iter__(self):
        return self.lazy_typegetter_iter(self)
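# translate() walks the tokens with one token of lookahead, rewriting the
# two-token custom operators into expressions over the carriers defined in
# PREFIX (e.g. 'x |> f' becomes 'x >> _INTERNAL_lpipe << f'); skip_token
# drops lookahead tokens that a rewrite has already consumed.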
def translate(file: io.StringIO):
    patched_file = io.StringIO(PREFIX + file.read())
    skip_token = 0
    tokens = lazy_typegetter(list(tokenize.generate_tokens(patched_file.readline)))  # Precalculate tokens
    for n, i in enumerate(tokens):
        type, name = i
        try:
            next_type, next_name = tokens[n + 1]
        except IndexError:
            next_type, next_name = (None, None)
        # print(type, name)  # noisy per-token debug output
        if skip_token:
            skip_token -= 1
            continue
        if type == tokenize.OP and next_type == tokenize.OP:
            # Most likely a special operation
            if name == "|" and next_name == ">":  # left pipe
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_lpipe"
                yield tokenize.OP, "<<"
                skip_token += 1
            elif name == "<" and next_name == "|":  # right pipe
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_rpipe"
                yield tokenize.OP, "<<"
                skip_token += 1
            elif name == ":" and next_name == ":":  # lazy merge (caveat: also matches '::' inside slices like a[1::2])
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_lazymerge"
                yield tokenize.OP, "<<"
                skip_token += 1
            elif name == "+" and next_name == "+":  # increment
                yield tokenize.OP, "+="
                yield tokenize.NUMBER, "1"
                skip_token += 1
            elif name == "-" and next_name == "-":  # decrement
                yield tokenize.OP, "-="
                yield tokenize.NUMBER, "1"
                skip_token += 1
            elif name == "-" and next_name == ">":  # drop a '->' split across two tokens ('->' normally arrives as a single OP)
                skip_token += 1
            elif name == ")" and next_name == "=":  # short functions: 'def f(x) = expr' becomes 'def f(x): return expr' (caveat: also matches '(a, b) = ...')
                yield type, name
                yield tokenize.OP, ":"
                yield tokenize.NAME, "return"
                skip_token += 1
            elif name == "<" and next_name == ">":  # barry is flufl, when the tokenizer splits '<>' into two tokens
                yield tokenize.OP, "!="
                skip_token += 1
            else:
                yield type, name
        elif type == tokenize.OP:
            if name == "<>":  # barry is flufl, when '<>' arrives as a single token
                yield tokenize.OP, "!="
            else:
                yield type, name
        else:
            yield type, name
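# Demo: '<|' appends each new term into the list, '|>' threads the result
# through fibonacci, len, and print. Note that 65536*16 asks for 1,048,576
# terms, so expect substantial runtime and memory.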
script = """
def fibonacci(x):
    start = [0,1]
    for i in range(1, x):
        start <| start[i] + start[i - 1]
    return start
65536*16 |> fibonacci |> len |> print
"""
code = tokenize.untokenize(translate(io.StringIO(script)))
print(code)
exec(code)
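# A second, smaller check (illustrative, not part of the demo above; the
# name 'extra' and the sample values are arbitrary): exercises '::' lazy
# merge, '++' increment, and the '|>' pipe together.
extra = """
n = 0
n++
for v in [1, 2] :: [3, 4]:
    n += v
n |> print
"""
exec(tokenize.untokenize(translate(io.StringIO(extra))))  # expected to print 11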