Improve some stuff
parent 1e11bad079
commit f903f9a309
1 changed file with 45 additions and 48 deletions
ithon.py | 93
@@ -2,58 +2,42 @@ import tokenize, io
 
 PREFIX = """
 # ITHON START
 import sys as _INTERNAL_SYS
-class _INTERNAL_Token:
-    __slots__ = ('action',)
-
-    def __init__(self, action):
-        self.action = action
-
-    def __rrshift__(self, lhs):
-        return _INTERNAL_Operation(self.action, lhs)
-
-class _INTERNAL_Operation:
-    __slots__ = ('action', 'lhs')
-
-    def __init__(self, action, lhs):
-        self.action = action
-        self.lhs = lhs
-
-    def __lshift__(self, rhs):
-        return self.action(self.lhs, rhs)
-
 class _INTERNAL_LazyIterable:
     __slots__ = ('x','y')
     def __init__(self, x, y) -> None:
         self.x = iter(x)
         self.y = iter(y)
     def __iter__(self):
-        yield from self.x
-        yield from self.y
+        return _INTERNAL_LazyIterator(self)
+class _INTERNAL_LazyIterator:
+    __slots__ = ('iterable',)
+    def __init__(self, iterable) -> None:
+        self.iterable = iterable
+    def __next__(self):
+        try:
+            return next(self.iterable.x)
+        except StopIteration:
+            try:
+                return next(self.iterable.y)
+            except StopIteration:
+                raise StopIteration
+class _INTERNAL_LAZYMERGE:
+    def __init__(self, val):
+        self.val = val
+    def __rrshift__(self, other):
+        return _INTERNAL_LAZYMERGE(other)
+    def __lshift__(self, other):
+        return _INTERNAL_LazyIterable(self.val, other)
+class _INTERNAL_LPIPE:
+    def __init__(self, val):
+        self.val = val
+    def __rrshift__(self, other):
+        return _INTERNAL_LPIPE(other)
+    def __lshift__(self, other):
+        if isinstance(other, list):
+            other.append(self.val)
+        else:
+            return other(self.val)
+class _INTERNAL_RPIPE:
+    def __init__(self, action):
+        self.action = action
+    def __rrshift__(self, other):
+        return _INTERNAL_RPIPE(other)
+    def __lshift__(self, other):
+        if isinstance(self.action, list):
+            self.action.append(other)
+        else:
+            return self.action(other)
 
-_INTERNAL_lazymerge = _INTERNAL_Token(lambda lhs, rhs: _INTERNAL_LazyIterable(lhs, rhs))
-
-_INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs))
-_INTERNAL_rpipe = _INTERNAL_Token(lambda lhs, rhs: lhs(rhs))
-_INTERNAL_lspipe = _INTERNAL_Token(lambda lhs, rhs: rhs(*lhs))
-_INTERNAL_rspipe = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs))
+_INTERNAL_lpipe = _INTERNAL_LPIPE(None)
+_INTERNAL_rpipe = _INTERNAL_RPIPE(None)
 
+_INTERNAL_lazymerge = _INTERNAL_LAZYMERGE(None)
 
 # ITHON END
 """
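Note: Python has no user-defined operators, so the translator rewrites each pipe into a `>>`/`<<` sandwich around one of the `_INTERNAL_*` singletons: the built-in `int.__rshift__` defers, so `__rrshift__` captures the left operand first, and `__lshift__` then receives the right operand. A minimal standalone sketch of that dispatch pattern (illustrative only, not code from this commit; `Pipe` is a placeholder name):

    # Sketch of the `x >> token << f` dispatch the new PREFIX classes rely on.
    class Pipe:
        def __init__(self, val=None):
            self.val = val
        def __rrshift__(self, lhs):    # handles `lhs >> Pipe()`: capture lhs
            return Pipe(lhs)
        def __lshift__(self, rhs):     # handles `... << rhs`: apply rhs
            return rhs(self.val)

    print(21 >> Pipe() << (lambda x: x * 2))  # prints 42

Because `>>` and `<<` share one precedence level and associate left to right, `x >> Pipe() << f` groups as `(x >> Pipe()) << f`, which is exactly `f(x)`.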
@ -121,7 +105,7 @@ def translate(file: io.StringIO):
|
|||
yield tokenize.OP, "-="
|
||||
yield tokenize.NUMBER, "1"
|
||||
skip_token+=1
|
||||
elif name == "-" and next_name == ">": #
|
||||
elif name == "-" and next_name == ">": # ->, todo
|
||||
skip_token+=1
|
||||
elif name == ")" and next_name == "=": # short functions
|
||||
yield type, name
|
||||
|
@ -129,6 +113,16 @@ def translate(file: io.StringIO):
|
|||
yield tokenize.NAME, "return"
|
||||
|
||||
skip_token += 1
|
||||
elif name == "|" and next_name == "*" and tokens[n + 2][1] == ">": #|*>
|
||||
yield tokenize.OP, ">>"
|
||||
yield tokenize.NAME, "_INTERNAL_lspipe"
|
||||
yield tokenize.OP, "<<"
|
||||
skip_token+=2
|
||||
elif name == "<" and next_name == "*" and tokens[n + 2][1] == "|": #<*|
|
||||
yield tokenize.OP, ">>"
|
||||
yield tokenize.NAME, "_INTERNAL_rspipe"
|
||||
yield tokenize.OP, "<<"
|
||||
skip_token+=2
|
||||
else:
|
||||
yield type,name
|
||||
|
||||
|
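Note: each new branch splices three tokens (`>>`, an `_INTERNAL_*` name, `<<`) in place of a two- or three-character pseudo-operator, then skips the consumed tokens. A hedged, simplified sketch of the same rewriting idea using only the stdlib tokenize API (`rewrite` and `_pipe` are placeholder names, not the commit's; spacing in the output differs because untokenize runs in compatibility mode):

    import io, tokenize

    def rewrite(src: str) -> str:
        # Replace the two-token sequence `|` `>` with `>> _pipe <<`.
        toks = list(tokenize.generate_tokens(io.StringIO(src).readline))
        out, skip = [], 0
        for n, tok in enumerate(toks):
            if skip:
                skip -= 1
                continue
            if tok.string == "|" and n + 1 < len(toks) and toks[n + 1].string == ">":
                out += [(tokenize.OP, ">>"), (tokenize.NAME, "_pipe"), (tokenize.OP, "<<")]
                skip = 1  # consume the trailing `>`
            else:
                out.append((tok.type, tok.string))
        return tokenize.untokenize(out)

    print(rewrite("1 |> print"))  # roughly: 1 >>_pipe <<print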
@ -143,12 +137,15 @@ def translate(file: io.StringIO):
|
|||
|
||||
script = """
|
||||
|
||||
def fibonacci(x):
|
||||
def fibonacci(x: int) -> list[int]:
|
||||
start = [0,1]
|
||||
for i in range(1, x):
|
||||
start <| start[i] + start[i - 1]
|
||||
start.append <| start[i] + start[i - 1]
|
||||
return start
|
||||
65536*16 |> fibonacci |> len |> print
|
||||
a = 12 |> fibonacci
|
||||
b = a :: a
|
||||
print <| [i for i in b]
|
||||
print <*| ('a', 'b', 'c')
|
||||
"""
|
||||
code = tokenize.untokenize(translate(io.StringIO(script)))
|
||||
print(code)
|
||||
|
|
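Note: the demo only untokenizes and prints the translated source; it never executes it. If the PREFIX classes were in scope, the new `b = a :: a` line would behave like this hedged sketch (illustrative only, with a stand-in list instead of the `12 |> fibonacci` result):

    # Assuming the new PREFIX classes above are in scope:
    a = [0, 1, 1]                        # stand-in for `12 |> fibonacci`
    b = a >> _INTERNAL_lazymerge << a    # what `b = a :: a` translates to
    print([i for i in b])                # [0, 1, 1, 0, 1, 1]

The merge is lazy: `_INTERNAL_LazyIterable` only calls `iter()` on its operands, and `_INTERNAL_LazyIterator` pulls from the second iterator once the first raises StopIteration.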