Improve some stuff
This commit is contained in:
parent 1e11bad079
commit f903f9a309

1 changed file with 45 additions and 48 deletions

ithon.py (93 changed lines: +45 −48)
@@ -2,58 +2,42 @@ import tokenize, io
 
 PREFIX = """
 # ITHON START
-import sys as _INTERNAL_SYS
+class _INTERNAL_Token:
+    __slots__ = ('action',)
+
+    def __init__(self, action):
+        self.action = action
+
+    def __rrshift__(self, lhs):
+        return _INTERNAL_Operation(self.action, lhs)
+
+class _INTERNAL_Operation:
+    __slots__ = ('action', 'lhs')
+
+    def __init__(self, action, lhs):
+        self.action = action
+        self.lhs = lhs
+
+    def __lshift__(self, rhs):
+        return self.action(self.lhs, rhs)
+
 class _INTERNAL_LazyIterable:
     __slots__ = ('x','y')
     def __init__(self, x, y) -> None:
         self.x = iter(x)
         self.y = iter(y)
     def __iter__(self):
-        return _INTERNAL_LazyIterator(self)
-class _INTERNAL_LazyIterator:
-    __slots__ = ('iterable',)
-    def __init__(self, iterable) -> None:
-        self.iterable = iterable
-    def __next__(self):
-        try:
-            return next(self.iterable.x)
-        except StopIteration:
-            try:
-                return next(self.iterable.y)
-            except StopIteration:
-                raise StopIteration
-class _INTERNAL_LAZYMERGE:
-    def __init__(self, val):
-        self.val = val
-    def __rrshift__(self, other):
-        return _INTERNAL_LAZYMERGE(other)
-    def __lshift__(self, other):
-        return _INTERNAL_LazyIterable(self.val, other)
-class _INTERNAL_LPIPE:
-    def __init__(self, val):
-        self.val = val
-    def __rrshift__(self, other):
-        return _INTERNAL_LPIPE(other)
-    def __lshift__(self, other):
-        if isinstance(other, list):
-            other.append(self.val)
-        else:
-            return other(self.val)
-class _INTERNAL_RPIPE:
-    def __init__(self, action):
-        self.action = action
-    def __rrshift__(self, other):
-        return _INTERNAL_RPIPE(other)
-    def __lshift__(self, other):
-        if isinstance(self.action, list):
-            self.action.append(other)
-        else:
-            return self.action(other)
-
-_INTERNAL_lpipe = _INTERNAL_LPIPE(None)
-_INTERNAL_rpipe = _INTERNAL_RPIPE(None)
-
-_INTERNAL_lazymerge = _INTERNAL_LAZYMERGE(None)
-
+        yield from self.x
+        yield from self.y
+
+_INTERNAL_lazymerge = _INTERNAL_Token(lambda lhs, rhs: _INTERNAL_LazyIterable(lhs, rhs))
+
+_INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs))
+_INTERNAL_rpipe = _INTERNAL_Token(lambda lhs, rhs: lhs(rhs))
+_INTERNAL_lspipe = _INTERNAL_Token(lambda lhs, rhs: rhs(*lhs))
+_INTERNAL_rspipe = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs))
+
 # ITHON END
 """
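A quick sanity check on the new scheme: every operator is now a single _INTERNAL_Token whose action runs once both operands have arrived, so the translator only has to emit `>> token <<` between them. A minimal standalone sketch, reusing the same classes as the new PREFIX above:

    class _INTERNAL_Token:
        __slots__ = ('action',)
        def __init__(self, action):
            self.action = action
        def __rrshift__(self, lhs):
            # `lhs >> token` lands here, since builtin operands don't
            # implement >> for _INTERNAL_Token and defer to the reflected form
            return _INTERNAL_Operation(self.action, lhs)

    class _INTERNAL_Operation:
        __slots__ = ('action', 'lhs')
        def __init__(self, action, lhs):
            self.action = action
            self.lhs = lhs
        def __lshift__(self, rhs):
            # `... << rhs` applies the stored action to both operands
            return self.action(self.lhs, rhs)

    _INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs))

    12 >> _INTERNAL_lpipe << print                                # `12 |> print`: prints 12
    [1, 2] >> _INTERNAL_lpipe << sum >> _INTERNAL_lpipe << print  # chained `|>`: prints 3

Chaining works because `>>` and `<<` share precedence and associate left to right, so each pipe stage fully reduces before feeding the next one.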
@@ -121,7 +105,7 @@ def translate(file: io.StringIO):
             yield tokenize.OP, "-="
             yield tokenize.NUMBER, "1"
             skip_token+=1
-        elif name == "-" and next_name == ">": #
+        elif name == "-" and next_name == ">": # ->, todo
             skip_token+=1
         elif name == ")" and next_name == "=": # short functions
             yield type, name
@@ -129,6 +113,16 @@ def translate(file: io.StringIO):
             yield tokenize.NAME, "return"
 
             skip_token += 1
+        elif name == "|" and next_name == "*" and tokens[n + 2][1] == ">": #|*>
+            yield tokenize.OP, ">>"
+            yield tokenize.NAME, "_INTERNAL_lspipe"
+            yield tokenize.OP, "<<"
+            skip_token+=2
+        elif name == "<" and next_name == "*" and tokens[n + 2][1] == "|": #<*|
+            yield tokenize.OP, ">>"
+            yield tokenize.NAME, "_INTERNAL_rspipe"
+            yield tokenize.OP, "<<"
+            skip_token+=2
         else:
             yield type,name
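The `skip_token+=2` is there because `|*>` and `<*|` each span three tokens: the match consumes the current token plus two lookaheads. A hypothetical mini-translator handling only `|*>` (same enumerate-with-lookahead shape as translate(), not the actual function) illustrates the rewrite:

    import tokenize, io

    def rewrite_lspipe(source: str):
        tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))
        skip = 0
        for n, tok in enumerate(tokens):
            if skip:
                skip -= 1
                continue
            name = tok.string
            next_name = tokens[n + 1].string if n + 1 < len(tokens) else ""
            if name == "|" and next_name == "*" and n + 2 < len(tokens) \
                    and tokens[n + 2].string == ">":
                yield tokenize.OP, ">>"
                yield tokenize.NAME, "_INTERNAL_lspipe"
                yield tokenize.OP, "<<"
                skip = 2  # swallow the `*` and `>` tokens we just matched
            else:
                yield tok.type, tok.string

    print(tokenize.untokenize(rewrite_lspipe("args |*> print\n")))
    # roughly: args >> _INTERNAL_lspipe << print, i.e. print(*args)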
@@ -143,12 +137,15 @@ def translate(file: io.StringIO):
 
 script = """
 
-def fibonacci(x):
+def fibonacci(x: int) -> list[int]:
     start = [0,1]
     for i in range(1, x):
-        start <| start[i] + start[i - 1]
+        start.append <| start[i] + start[i - 1]
     return start
-65536*16 |> fibonacci |> len |> print
+a = 12 |> fibonacci
+b = a :: a
+print <| [i for i in b]
+print <*| ('a', 'b', 'c')
 """
 code = tokenize.untokenize(translate(io.StringIO(script)))
 print(code)
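For reference, a sketch of what the updated demo script is expected to do, written as the plain Python it should desugar to (assuming every operator rewrites as above):

    def fibonacci(x: int) -> list[int]:
        start = [0, 1]
        for i in range(1, x):
            start.append(start[i] + start[i - 1])  # start.append <| ...
        return start

    a = fibonacci(12)         # a = 12 |> fibonacci
    b = list(a) + list(a)     # b = a :: a  -- but the real merge is lazy
    print([i for i in b])     # print <| [i for i in b]
    print(*('a', 'b', 'c'))   # print <*| ('a', 'b', 'c')  ->  a b c

One caveat worth noting: _INTERNAL_LazyIterable stores iter(x) and iter(y), so the real `b = a :: a` is single-pass; iterating it a second time yields nothing.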