From 6f8b5a4e96df0a9864dcc1a7d3f77841c2c5a817 Mon Sep 17 00:00:00 2001 From: Xander Mckay Date: Thu, 28 Nov 2024 18:17:24 -0500 Subject: [PATCH] Argument bugfixes, and minification! --- debug.txt | 1703 ++++++++++++++++++++++ examples/test.it | 2 +- ithon/__main__.py | 50 +- pyproject.toml | 3 + std/__pycache__/ipathlib.cpython-312.pyc | Bin 1379 -> 1379 bytes uv.lock | 23 +- 6 files changed, 1768 insertions(+), 13 deletions(-) create mode 100644 debug.txt diff --git a/debug.txt b/debug.txt new file mode 100644 index 0000000..2de1a4a --- /dev/null +++ b/debug.txt @@ -0,0 +1,1703 @@ +---START FILE--- +name: examples/test.it, len: 752 +# ITHON START +import typing as _INTERNAL_typing +import types as _INTERNAL_types +import inspect as _INTERNAL_inspect +import sys as _INTERNAL_sys +class _INTERNAL_Token: + __slots__ = ('action',) + + def __init__(self, action): + self.action = action + + def __rrshift__(self, lhs): + return _INTERNAL_Operation(self.action, lhs) + +class _INTERNAL_Operation: + __slots__ = ('action', 'lhs') + + def __init__(self, action, lhs): + self.action = action + self.lhs = lhs + + def __lshift__(self, rhs): + return self.action(self.lhs, rhs) + +class _INTERNAL_LazyIterable: + __slots__ = ('x','y') + def __init__(self, x, y) -> None: + self.x = iter(x) + self.y = iter(y) + def __iter__(self): + yield from self.x + yield from self.y + +def curry(f): + def wrapper(*args, **kwds) -> _INTERNAL_typing.Any | _INTERNAL_typing.Callable: + signature = _INTERNAL_inspect.signature(f) + ba = signature.bind_partial(*args, **kwds) + if len(ba.arguments) == len(signature.parameters): + return f(*args, **kwds) + else: + def _partial(*other_args, **other_kwds): + combined_args = args + other_args + combined_kwargs = dict(**kwds, **other_kwds) + return curry(f)(*combined_args, **combined_kwargs) + return _partial + return wrapper + +def compose(*funcs): + def _composed(arg): + val = arg + for i in funcs: + val = i(val) + return val + return _composed + +def _INTERNAL_add_fakeimport(name: str, code: str): # TODO: make this use sys.meta_path + module = _INTERNAL_types.ModuleType(name) + parent = '.'.join(name.split('.')[:-1]) if '.'.join(name.split('.')[:-1]) else name + if not parent == name and parent not in _INTERNAL_sys.modules: + _INTERNAL_sys.modules[parent] = _INTERNAL_types.ModuleType(parent) + globals = {'__package__': parent} + module.__dict__.update(globals) + exec(code, module.__dict__) + _INTERNAL_sys.modules[name] = module + +_INTERNAL_add_fakeimport('sentinels', '''import sys as _sys +from threading import Lock as _Lock + + +__all__ = ['Sentinel'] + + +# Design and implementation decisions: +# +# The first implementations created a dedicated class for each instance. +# However, once it was decided to use Sentinel for type signatures, there +# was no longer a need for a dedicated class for each sentinel value on order +# to enable strict type signatures. Since class objects consume a relatively +# large amount of memory, the implementation was changed to avoid this. +# +# With this change, the mechanism used for unpickling/copying objects needed +# to be changed too, since we could no longer count on each dedicated class +# simply returning its singleton instance as before. __reduce__ can return +# a string, upon which an attribute with that name is looked up in the module +# and returned. 
However, that would have meant that pickling/copying support +# would depend on the "name" argument being exactly the name of the variable +# used in the module, and simply wouldn't work for sentinels created in +# functions/methods. Instead, a registry for sentinels was added, where all +# sentinel objects are stored keyed by their name + module name. This is used +# to look up existing sentinels both during normal object creation and during +# copying/unpickling. + + +class Sentinel: + """Create a unique sentinel object. + + *name* should be the fully-qualified name of the variable to which the + return value shall be assigned. + + *repr*, if supplied, will be used for the repr of the sentinel object. + If not provided, "" will be used (with any leading class names + removed). + + *module_name*, if supplied, will be used instead of inspecting the call + stack to find the name of the module from which + """ + _name: str + _repr: str + _module_name: str + + def __new__( + cls, + name: str, + repr: str | None = None, + module_name: str | None = None, + ): + name = str(name) + repr = str(repr) if repr else f'<{name.split(".")[-1]}>' + if not module_name: + parent_frame = _get_parent_frame() + module_name = ( + parent_frame.f_globals.get('__name__', '__main__') + if parent_frame is not None + else __name__ + ) + + # Include the class's module and fully qualified name in the + # registry key to support sub-classing. + registry_key = _sys.intern( + f'{cls.__module__}-{cls.__qualname__}-{module_name}-{name}' + ) + sentinel = _registry.get(registry_key, None) + if sentinel is not None: + return sentinel + sentinel = super().__new__(cls) + sentinel._name = name + sentinel._repr = repr + sentinel._module_name = module_name + with _lock: + return _registry.setdefault(registry_key, sentinel) + + def __repr__(self): + return self._repr + + def __reduce__(self): + return ( + self.__class__, + ( + self._name, + self._repr, + self._module_name, + ), + ) + + +_lock = _Lock() +_registry: dict[str, Sentinel] = {} + + +# The following implementation attempts to support Python +# implementations which don't support sys._getframe(2), such as +# Jython and IronPython. +# +# For reference, see the implementation of namedtuple: +# https://github.com/python/cpython/blob/67444902a0f10419a557d0a2d3b8675c31b075a9/Lib/collections/__init__.py#L503 +def _get_parent_frame(): + """Return the frame object for the caller's parent stack frame.""" + try: + # Two frames up = the parent of the function which called this. + return _sys._getframe(2) + except (AttributeError, ValueError): + global _get_parent_frame + def _get_parent_frame(): + """Return the frame object for the caller's parent stack frame.""" + try: + raise Exception + except Exception: + try: + return _sys.exc_info()[2].tb_frame.f_back.f_back + except Exception: + global _get_parent_frame + def _get_parent_frame(): + """Return the frame object for the caller's parent stack frame.""" + return None + return _get_parent_frame() + return _get_parent_frame()''') +_INTERNAL_add_fakeimport('ipathlib', '''import pathlib +from typing import Self +""" +Pathlib without all the PAINlib. +""" + +class Path(pathlib.Path): + def listdir(self: Self) -> list[Self]: + return list(self.iterdir()) + def remove(self: Self, missing_ok: bool = True) -> None: + """Remove this file or link. 
If the path is a directory, use rmdir() instead.""" + self.unlink(missing_ok=missing_ok) + def rmtree(self: Self): + if self.is_file(): + self.remove() + else: + for child in self.iterdir(): + child.rmtree() + self.rmdir() + +PurePath = pathlib.PurePath''') +_INTERNAL_lazymerge = _INTERNAL_Token(lambda lhs, rhs: _INTERNAL_LazyIterable(lhs, rhs)) + +_INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs)) +_INTERNAL_rpipe = _INTERNAL_Token(lambda lhs, rhs: lhs(rhs)) +_INTERNAL_lspipe = _INTERNAL_Token(lambda lhs, rhs: rhs(*lhs)) +_INTERNAL_rspipe = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs)) + +_INTERNAL_nonereplace = _INTERNAL_Token(lambda lhs, rhs: lhs if lhs != None else rhs) + +_INTERNAL_lto = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs)) + +# If you write in other programming languages, this is very, very useful. +null = None +nil = None +void = None + +# ITHON END + +def fibonacci(x: int) -> list[int]: + start = [0,1] + for i in range(1, x): + start.append <| start[i] + start[i - 1] + return start +def a(): + yield 1 + +> 2 +def b(): + yield from a() + +>> a() +b() |> list |> print +a = 12 |> fibonacci +b = a :: a :: a :: a +c = b :: b :: b :: b +print <| [i for i in c] +print <*| ('a', 'b', 'c') +d = lambda x: x * 2 +#d2 = λ x: x * 2 + +d3d = curry <| (lambda x, y: x**2 + y**2) +print(d3d(2,4)) +print(d3d(2)(4)) +print(d3d(x=2)(y=4)) +@curry +def d3d2(x,y) = x**2 + y**2 +print(d3d2(2,4)) +print(d3d2(2)(4)) +print(d3d2(x=2)(y=4)) + +a = 1 +a++ +print(None ?? a) # 2 +'''a''' +/* +very bad code that is +commented out for a very +good reason +*/ +a++ /* something */ # something + +dtwice = compose(d,d) +print(dtwice(2)) +---END FILE--- + +---START DEBOUT--- +name: examples/test.it +64 # ITHON START +65 + +1 import +1 typing +1 as +1 _INTERNAL_typing +4 + +1 import +1 types +1 as +1 _INTERNAL_types +4 + +1 import +1 inspect +1 as +1 _INTERNAL_inspect +4 + +1 import +1 sys +1 as +1 _INTERNAL_sys +4 + +1 class +1 _INTERNAL_Token +55 : +4 + +5 +1 __slots__ +55 = +OP = ( 'action' +55 ( +3 'action' +55 , +OP , ) + +55 ) +4 + +65 + +1 def +1 __init__ +55 ( +1 self +55 , +1 action +55 ) +OP ) : + +55 : +4 + +5 +1 self +55 . +1 action +55 = +1 action +4 + +65 + +6 +1 def +1 __rrshift__ +55 ( +1 self +55 , +1 lhs +55 ) +OP ) : + +55 : +4 + +5 +1 return +1 _INTERNAL_Operation +55 ( +1 self +55 . +1 action +55 , +1 lhs +55 ) +4 + +65 + +6 +6 +1 class +1 _INTERNAL_Operation +55 : +4 + +5 +1 __slots__ +55 = +OP = ( 'action' +55 ( +3 'action' +55 , +3 'lhs' +55 ) +4 + +65 + +1 def +1 __init__ +55 ( +1 self +55 , +1 action +55 , +1 lhs +55 ) +OP ) : + +55 : +4 + +5 +1 self +55 . +1 action +55 = +1 action +4 + +1 self +55 . +1 lhs +55 = +1 lhs +4 + +65 + +6 +1 def +1 __lshift__ +55 ( +1 self +55 , +1 rhs +55 ) +OP ) : + +55 : +4 + +5 +1 return +1 self +55 . +1 action +55 ( +1 self +55 . +1 lhs +55 , +1 rhs +55 ) +4 + +65 + +6 +6 +1 class +1 _INTERNAL_LazyIterable +55 : +4 + +5 +1 __slots__ +55 = +OP = ( 'x' +55 ( +3 'x' +55 , +3 'y' +55 ) +4 + +1 def +1 __init__ +55 ( +1 self +55 , +1 x +55 , +1 y +55 ) +OP ) -> None +55 -> +1 None +55 : +4 + +5 +1 self +55 . +1 x +55 = +1 iter +55 ( +1 x +55 ) +4 + +1 self +55 . +1 y +55 = +1 iter +55 ( +1 y +55 ) +4 + +6 +1 def +1 __iter__ +55 ( +1 self +55 ) +OP ) : + +55 : +4 + +5 +1 yield +1 from +1 self +55 . +1 x +4 + +1 yield +1 from +1 self +55 . 
+1 y +4 + +65 + +6 +6 +1 def +1 curry +55 ( +1 f +55 ) +OP ) : + +55 : +4 + +5 +1 def +1 wrapper +55 ( +OP ( * args +55 * +1 args +55 , +OP , ** kwds +55 ** +1 kwds +55 ) +OP ) -> _INTERNAL_typing +55 -> +1 _INTERNAL_typing +55 . +1 Any +55 | +1 _INTERNAL_typing +55 . +1 Callable +55 : +4 + +5 +1 signature +55 = +1 _INTERNAL_inspect +55 . +1 signature +55 ( +1 f +55 ) +4 + +1 ba +55 = +1 signature +55 . +1 bind_partial +55 ( +OP ( * args +55 * +1 args +55 , +OP , ** kwds +55 ** +1 kwds +55 ) +4 + +1 if +1 len +55 ( +1 ba +55 . +1 arguments +55 ) +OP ) == len +55 == +1 len +55 ( +1 signature +55 . +1 parameters +55 ) +OP ) : + +55 : +4 + +5 +1 return +1 f +55 ( +OP ( * args +55 * +1 args +55 , +OP , ** kwds +55 ** +1 kwds +55 ) +4 + +6 +1 else +55 : +4 + +5 +1 def +1 _partial +55 ( +OP ( * other_args +55 * +1 other_args +55 , +OP , ** other_kwds +55 ** +1 other_kwds +55 ) +OP ) : + +55 : +4 + +5 +1 combined_args +55 = +1 args +55 + +1 other_args +4 + +1 combined_kwargs +55 = +1 dict +55 ( +OP ( ** kwds +55 ** +1 kwds +55 , +OP , ** other_kwds +55 ** +1 other_kwds +55 ) +4 + +1 return +1 curry +55 ( +1 f +55 ) +OP ) ( * +55 ( +OP ( * combined_args +55 * +1 combined_args +55 , +OP , ** combined_kwargs +55 ** +1 combined_kwargs +55 ) +4 + +6 +1 return +1 _partial +4 + +6 +6 +1 return +1 wrapper +4 + +65 + +6 +1 def +1 compose +55 ( +OP ( * funcs +55 * +1 funcs +55 ) +OP ) : + +55 : +4 + +5 +1 def +1 _composed +55 ( +1 arg +55 ) +OP ) : + +55 : +4 + +5 +1 val +55 = +1 arg +4 + +1 for +1 i +1 in +1 funcs +55 : +4 + +5 +1 val +55 = +1 i +55 ( +1 val +55 ) +4 + +6 +1 return +1 val +4 + +6 +1 return +1 _composed +4 + +65 + +6 +1 def +1 _INTERNAL_add_fakeimport +55 ( +1 name +55 : +1 str +55 , +1 code +55 : +1 str +55 ) +OP ) : # TODO: make this use sys.meta_path +55 : +64 # TODO: make this use sys.meta_path +4 + +5 +1 module +55 = +1 _INTERNAL_types +55 . +1 ModuleType +55 ( +1 name +55 ) +4 + +1 parent +55 = +3 '.' +55 . +1 join +55 ( +1 name +55 . +1 split +55 ( +3 '.' +55 ) +OP ) [ : +55 [ +OP [ : - +55 : +OP : - 1 +55 - +2 1 +55 ] +OP ] ) if +55 ) +1 if +3 '.' +55 . +1 join +55 ( +1 name +55 . +1 split +55 ( +3 '.' +55 ) +OP ) [ : +55 [ +OP [ : - +55 : +OP : - 1 +55 - +2 1 +55 ] +OP ] ) else +55 ) +1 else +1 name +4 + +1 if +1 not +1 parent +55 == +1 name +1 and +1 parent +1 not +1 in +1 _INTERNAL_sys +55 . +1 modules +55 : +4 + +5 +1 _INTERNAL_sys +55 . +1 modules +55 [ +1 parent +55 ] +OP ] = _INTERNAL_types +55 = +1 _INTERNAL_types +55 . +1 ModuleType +55 ( +1 parent +55 ) +4 + +6 +1 globals +55 = +OP = { '__package__' +55 { +3 '__package__' +55 : +1 parent +55 } +4 + +1 module +55 . +1 __dict__ +55 . +1 update +55 ( +1 globals +55 ) +4 + +1 exec +55 ( +1 code +55 , +1 module +55 . +1 __dict__ +55 ) +4 + +1 _INTERNAL_sys +55 . +1 modules +55 [ +1 name +55 ] +OP ] = module +55 = +1 module +4 + +65 + +6 +1 _INTERNAL_add_fakeimport +55 ( +3 'sentinels' +55 , +3 '''import sys as _sys +from threading import Lock as _Lock + + +__all__ = ['Sentinel'] + + +# Design and implementation decisions: +# +# The first implementations created a dedicated class for each instance. +# However, once it was decided to use Sentinel for type signatures, there +# was no longer a need for a dedicated class for each sentinel value on order +# to enable strict type signatures. Since class objects consume a relatively +# large amount of memory, the implementation was changed to avoid this. 
+# +# With this change, the mechanism used for unpickling/copying objects needed +# to be changed too, since we could no longer count on each dedicated class +# simply returning its singleton instance as before. __reduce__ can return +# a string, upon which an attribute with that name is looked up in the module +# and returned. However, that would have meant that pickling/copying support +# would depend on the "name" argument being exactly the name of the variable +# used in the module, and simply wouldn't work for sentinels created in +# functions/methods. Instead, a registry for sentinels was added, where all +# sentinel objects are stored keyed by their name + module name. This is used +# to look up existing sentinels both during normal object creation and during +# copying/unpickling. + + +class Sentinel: + """Create a unique sentinel object. + + *name* should be the fully-qualified name of the variable to which the + return value shall be assigned. + + *repr*, if supplied, will be used for the repr of the sentinel object. + If not provided, "" will be used (with any leading class names + removed). + + *module_name*, if supplied, will be used instead of inspecting the call + stack to find the name of the module from which + """ + _name: str + _repr: str + _module_name: str + + def __new__( + cls, + name: str, + repr: str | None = None, + module_name: str | None = None, + ): + name = str(name) + repr = str(repr) if repr else f'<{name.split(".")[-1]}>' + if not module_name: + parent_frame = _get_parent_frame() + module_name = ( + parent_frame.f_globals.get('__name__', '__main__') + if parent_frame is not None + else __name__ + ) + + # Include the class's module and fully qualified name in the + # registry key to support sub-classing. + registry_key = _sys.intern( + f'{cls.__module__}-{cls.__qualname__}-{module_name}-{name}' + ) + sentinel = _registry.get(registry_key, None) + if sentinel is not None: + return sentinel + sentinel = super().__new__(cls) + sentinel._name = name + sentinel._repr = repr + sentinel._module_name = module_name + with _lock: + return _registry.setdefault(registry_key, sentinel) + + def __repr__(self): + return self._repr + + def __reduce__(self): + return ( + self.__class__, + ( + self._name, + self._repr, + self._module_name, + ), + ) + + +_lock = _Lock() +_registry: dict[str, Sentinel] = {} + + +# The following implementation attempts to support Python +# implementations which don't support sys._getframe(2), such as +# Jython and IronPython. +# +# For reference, see the implementation of namedtuple: +# https://github.com/python/cpython/blob/67444902a0f10419a557d0a2d3b8675c31b075a9/Lib/collections/__init__.py#L503 +def _get_parent_frame(): + """Return the frame object for the caller's parent stack frame.""" + try: + # Two frames up = the parent of the function which called this. 
+ return _sys._getframe(2) + except (AttributeError, ValueError): + global _get_parent_frame + def _get_parent_frame(): + """Return the frame object for the caller's parent stack frame.""" + try: + raise Exception + except Exception: + try: + return _sys.exc_info()[2].tb_frame.f_back.f_back + except Exception: + global _get_parent_frame + def _get_parent_frame(): + """Return the frame object for the caller's parent stack frame.""" + return None + return _get_parent_frame() + return _get_parent_frame()'''[1, 2] +[0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144] +a b c +20 +20 +20 +20 +20 +20 +2 +8 + +55 ) +4 + +1 _INTERNAL_add_fakeimport +55 ( +3 'ipathlib' +55 , +3 '''import pathlib +from typing import Self +""" +Pathlib without all the PAINlib. +""" + +class Path(pathlib.Path): + def listdir(self: Self) -> list[Self]: + return list(self.iterdir()) + def remove(self: Self, missing_ok: bool = True) -> None: + """Remove this file or link. If the path is a directory, use rmdir() instead.""" + self.unlink(missing_ok=missing_ok) + def rmtree(self: Self): + if self.is_file(): + self.remove() + else: + for child in self.iterdir(): + child.rmtree() + self.rmdir() + +PurePath = pathlib.PurePath''' +55 ) +4 + +1 _INTERNAL_lazymerge +55 = +1 _INTERNAL_Token +55 ( +1 lambda +1 lhs +55 , +1 rhs +55 : +1 _INTERNAL_LazyIterable +55 ( +1 lhs +55 , +1 rhs +55 ) +OP ) ) + +55 ) +4 + +65 + +1 _INTERNAL_lpipe +55 = +1 _INTERNAL_Token +55 ( +1 lambda +1 lhs +55 , +1 rhs +55 : +1 rhs +55 ( +1 lhs +55 ) +OP ) ) + +55 ) +4 + +1 _INTERNAL_rpipe +55 = +1 _INTERNAL_Token +55 ( +1 lambda +1 lhs +55 , +1 rhs +55 : +1 lhs +55 ( +1 rhs +55 ) +OP ) ) + +55 ) +4 + +1 _INTERNAL_lspipe +55 = +1 _INTERNAL_Token +55 ( +1 lambda +1 lhs +55 , +1 rhs +55 : +1 rhs +55 ( +OP ( * lhs +55 * +1 lhs +55 ) +OP ) ) + +55 ) +4 + +1 _INTERNAL_rspipe +55 = +1 _INTERNAL_Token +55 ( +1 lambda +1 lhs +55 , +1 rhs +55 : +1 lhs +55 ( +OP ( * rhs +55 * +1 rhs +55 ) +OP ) ) + +55 ) +4 + +65 + +1 _INTERNAL_nonereplace +55 = +1 _INTERNAL_Token +55 ( +1 lambda +1 lhs +55 , +1 rhs +55 : +1 lhs +1 if +1 lhs +55 != +1 None +1 else +1 rhs +55 ) +4 + +65 + +1 _INTERNAL_lto +55 = +1 _INTERNAL_Token +55 ( +1 lambda +1 lhs +55 , +1 rhs +55 : +1 lhs +55 ( +OP ( * rhs +55 * +1 rhs +55 ) +OP ) ) + +55 ) +4 + +65 + +64 # If you write in other programming languages, this is very, very useful. +65 + +1 null +55 = +1 None +4 + +1 nil +55 = +1 None +4 + +1 void +55 = +1 None +4 + +65 + +64 # ITHON END +65 + +65 + +1 def +1 fibonacci +55 ( +1 x +55 : +1 int +55 ) +OP ) -> list +55 -> +1 list +55 [ +1 int +55 ] +OP ] : + +55 : +4 + +5 +1 start +55 = +OP = [ 0 +55 [ +2 0 +55 , +2 1 +55 ] +4 + +1 for +1 i +1 in +1 range +55 ( +2 1 +55 , +1 x +55 ) +OP ) : + +55 : +4 + +5 +1 start +55 . 
+1 append +55 < +OP < | start +55 | +1 start +55 [ +1 i +55 ] +OP ] + start +55 + +1 start +55 [ +1 i +55 - +2 1 +55 ] +4 + +6 +1 return +1 start +4 + +6 +1 def +1 a +55 ( +OP ( ) : +55 ) +OP ) : + +55 : +4 + +5 +1 yield +2 1 +4 + +55 + +OP + > 2 +55 > +2 2 +4 + +6 +1 def +1 b +55 ( +OP ( ) : +55 ) +OP ) : + +55 : +4 + +5 +1 yield +1 from +1 a +55 ( +OP ( ) + +55 ) +4 + +55 + +OP + >> a +55 >> +1 a +55 ( +OP ( ) + +55 ) +4 + +6 +1 b +55 ( +OP ( ) | +55 ) +OP ) | > +55 | +OP | > list +55 > +1 list +55 | +OP | > print +55 > +1 print +4 + +1 a +55 = +2 12 +55 | +OP | > fibonacci +55 > +1 fibonacci +4 + +1 b +55 = +1 a +55 : +OP : : a +55 : +1 a +55 : +OP : : a +55 : +1 a +55 : +OP : : a +55 : +1 a +4 + +1 c +55 = +1 b +55 : +OP : : b +55 : +1 b +55 : +OP : : b +55 : +1 b +55 : +OP : : b +55 : +1 b +4 + +1 print +55 < +OP < | [ +55 | +55 [ +1 i +1 for +1 i +1 in +1 c +55 ] +4 + +1 print +55 < +OP < * | +55 * +55 | +55 ( +3 'a' +55 , +3 'b' +55 , +3 'c' +55 ) +4 + +1 d +55 = +1 lambda +1 x +55 : +1 x +55 * +2 2 +4 + +64 #d2 = λ x: x * 2 +65 + +65 + +1 d3d +55 = +1 curry +55 < +OP < | ( +55 | +55 ( +1 lambda +1 x +55 , +1 y +55 : +1 x +55 ** +2 2 +55 + +1 y +55 ** +2 2 +55 ) +4 + +1 print +55 ( +1 d3d +55 ( +2 2 +55 , +2 4 +55 ) +OP ) ) + +55 ) +4 + +1 print +55 ( +1 d3d +55 ( +2 2 +55 ) +OP ) ( 4 +55 ( +2 4 +55 ) +OP ) ) + +55 ) +4 + +1 print +55 ( +1 d3d +55 ( +1 x +55 = +2 2 +55 ) +OP ) ( y +55 ( +1 y +55 = +2 4 +55 ) +OP ) ) + +55 ) +4 + +55 @ +1 curry +4 + +1 def +1 d3d2 +55 ( +1 x +55 , +1 y +55 ) +OP ) = x +55 = +1 x +55 ** +2 2 +55 + +1 y +55 ** +2 2 +4 + +1 print +55 ( +1 d3d2 +55 ( +2 2 +55 , +2 4 +55 ) +OP ) ) + +55 ) +4 + +1 print +55 ( +1 d3d2 +55 ( +2 2 +55 ) +OP ) ( 4 +55 ( +2 4 +55 ) +OP ) ) + +55 ) +4 + +1 print +55 ( +1 d3d2 +55 ( +1 x +55 = +2 2 +55 ) +OP ) ( y +55 ( +1 y +55 = +2 4 +55 ) +OP ) ) + +55 ) +4 + +65 + +1 a +55 = +2 1 +4 + +1 a +55 + +OP + + + +55 + +4 + +1 print +55 ( +1 None +55 ? +OP ? ? a +55 ? 
+1 a +55 ) +64 # 2 +4 + +3 '''a''' +4 + +55 / +OP / * + +55 * +4 + +1 very +1 bad +1 code +1 that +1 is +4 + +1 commented +1 out +1 for +1 a +1 very +4 + +1 good +1 reason +4 + +55 * +OP * / + +55 / +4 + +1 a +55 + +OP + + / +55 + +55 / +OP / * something +55 * +1 something +55 * +OP * / # something +55 / +64 # something +4 + +65 + +1 dtwice +55 = +1 compose +55 ( +1 d +55 , +1 d +55 ) +4 + +1 print +55 ( +1 dtwice +55 ( +2 2 +55 ) +OP ) ) +55 ) +4 +0 +---END DEBOUT--- diff --git a/examples/test.it b/examples/test.it index 76f4734..fa5460e 100644 --- a/examples/test.it +++ b/examples/test.it @@ -8,7 +8,7 @@ def a(): yield 1 +> 2 def b(): - +>> a() + yield from a() +>> a() b() |> list |> print a = 12 |> fibonacci diff --git a/ithon/__main__.py b/ithon/__main__.py index 873f71a..3bcb3bc 100644 --- a/ithon/__main__.py +++ b/ithon/__main__.py @@ -1,9 +1,21 @@ -import tokenize, io, typer, typing, os, sys, pathlib, time +import tokenize, io, typer, typing, os, sys, pathlib, traceback + +try: + import python_minifier as minifier +except ImportError as e: + traceback.print_exception(e) + minifier_avail = False +else: + minifier_avail = True + from std.ipathlib import Path def patch_std(prefix: str): for i in (Path(__file__).parent.parent / 'std').iterdir(): if i.is_dir(): continue - prefix = prefix.replace(f"std'{i.name}'", "'''" + i.read_text().replace("'''", "\\'''") + "'''") + module = i.read_text() + prefix = prefix.replace(f"std'{i.name}'", "'''" + module.replace("'''", "\\'''") + "'''") + + return prefix PREFIX = patch_std((Path(__file__).parent / 'prefix.py').read_text()) @@ -37,12 +49,12 @@ def translate(file: io.StringIO, debug: int = 0): yield tokenize.NAME, name yield tokenize.OP, "<<" def dprint(data: str, prio: int): - if debug > prio: + if debug < prio: return print(data) fdata = file.read() filedata = PREFIX + fdata - dprint(f'---START FILE---\nname: {file.name}, len: {len(fdata)}\n' + filedata + '\n---END FILE---\n', 4) + dprint(f'---START FILE---\nname: {file.name}, len: {len(fdata)}\n' + filedata + '\n---END FILE---\n', 5) patched_file = io.StringIO(filedata) skip_token = 0 @@ -135,7 +147,7 @@ def translate(file: io.StringIO, debug: int = 0): yield type,name dprint(f'---END DEBOUT---', 4) -def transpile(input_path: Path, verbose: int) -> None: +def transpile(input_path: Path, verbosity: int, minify: bool) -> None: dir = Path('dist') if input_path.is_dir(): for i in input_path.glob('*'): @@ -143,22 +155,38 @@ def transpile(input_path: Path, verbose: int) -> None: path = (dir / i.relative_to(input_path)).with_suffix('.py') path.parent.mkdir(parents=True, exist_ok=True) with i.open() as f: - path.write_text(tokenize.untokenize(translate(f, verbose))) + transpiled = tokenize.untokenize(translate(f, verbosity)) + if minify: + if not minifier_avail: + print('ERROR: Minifier not available.') + return + transpiled = minifier.minify(transpiled) + path.write_text(transpiled) else: with input_path.open() as f: - input_path.with_suffix('.py').write_text(tokenize.untokenize(translate(f, verbose))) + transpiled = tokenize.untokenize(translate(f, verbosity)) + if minify: + if not minifier_avail: + print('ERROR: Minifier not available.') + return + transpiled = minifier.minify(transpiled) + input_path.with_suffix('.py').write_text(transpiled) app = typer.Typer() + +verbosity_arg = typing.Annotated[int, typer.Option('--verbosity', '-v')] +minify_arg = typing.Annotated[bool, typer.Option('--minify', '-m')] + @app.command('t') @app.command('ts') @app.command('transpile') -def 
transpile_cmd(input_path: pathlib.Path, verbose: int = 0) -> None: - transpile(input_path, verbose) +def transpile_cmd(input_path: pathlib.Path, verbosity: verbosity_arg = 0, minify: minify_arg = False) -> None: + transpile(input_path, verbosity, minify) @app.command('r') @app.command('run') -def run_cmd(input_path: pathlib.Path, verbose: int = 0) -> None: +def run_cmd(input_path: pathlib.Path, verbosity: verbosity_arg = 0, minify: minify_arg = False) -> None: input_path = Path(input_path) - transpile(input_path, verbose) + transpile(input_path, verbosity, minify) if input_path.is_dir(): os.system(f'{sys.executable} -m dist') Path('dist').rmtree() diff --git a/pyproject.toml b/pyproject.toml index ef652c5..747c053 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,3 +5,6 @@ description = "Add your description here" readme = "README.md" requires-python = ">=3.12" dependencies = ['typer'] +[project.optional-dependencies] +minifier = ['python-minifier'] +all = ['python-minifier'] \ No newline at end of file diff --git a/std/__pycache__/ipathlib.cpython-312.pyc b/std/__pycache__/ipathlib.cpython-312.pyc index c680d03671207dfa137bb4fe0882dbc73eb0c880..dc6bac792f6ae90e36d5d6f0d33e0b18705aac0a 100644 GIT binary patch delta 26 gcmaFN^_YwMG%qg~0}%Kzuy5q%V`aQGS&G#Y09Eh>&j0`b delta 26 gcmaFN^_YwMG%qg~0}!;=scq!uV`cm`S&G#Y0AR8P#Q*>R diff --git a/uv.lock b/uv.lock index b54d9d5..9bf215e 100644 --- a/uv.lock +++ b/uv.lock @@ -30,8 +30,20 @@ dependencies = [ { name = "typer" }, ] +[package.optional-dependencies] +all = [ + { name = "python-minifier" }, +] +minifier = [ + { name = "python-minifier" }, +] + [package.metadata] -requires-dist = [{ name = "typer" }] +requires-dist = [ + { name = "python-minifier", marker = "extra == 'all'" }, + { name = "python-minifier", marker = "extra == 'minifier'" }, + { name = "typer" }, +] [[package]] name = "markdown-it-py" @@ -63,6 +75,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, ] +[[package]] +name = "python-minifier" +version = "2.11.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/39/63/403fb2d6394b3e455e046d91f64b96072803aaf119027a26e716ed94d63c/python_minifier-2.11.3.tar.gz", hash = "sha256:489133b91212ec9658a7b64d243eb9eb67d7e53faf2ac5166a33301c61b3dcab", size = 64438 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/32/61d20860d18afb81cb7258bb02d4eaf4b09170383c2374514f6aef384fa9/python_minifier-2.11.3-py3-none-any.whl", hash = "sha256:37e10e9e318be701eecb48764942426be73ae9f562d75bea4e29c5f66945ce97", size = 56172 }, +] + [[package]] name = "rich" version = "13.9.4"
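
For reference, a minimal standalone sketch of the optional-minifier fallback this patch wires into ithon/__main__.py: python_minifier is imported up front, a flag records whether it is available, and minification only runs when the new --minify/-m option is set and the library is actually installed (declared under the new 'minifier' / 'all' extras in pyproject.toml). The import block and the minifier.minify() call mirror the diff above; the maybe_minify() helper name is illustrative only and does not exist in the patch.

    # Optional-dependency fallback, as added to ithon/__main__.py in this patch.
    import traceback

    try:
        import python_minifier as minifier
    except ImportError as e:
        traceback.print_exception(e)  # report why the minifier is unavailable
        minifier_avail = False
    else:
        minifier_avail = True

    def maybe_minify(source: str, minify: bool) -> str | None:
        """Return minified source, the untouched source, or None when
        minification was requested but python-minifier is not installed."""
        if not minify:
            return source
        if not minifier_avail:
            print('ERROR: Minifier not available.')
            return None
        return minifier.minify(source)

Assuming the extra is installed (e.g. pip install -e '.[minifier]' or uv sync --extra minifier) and the CLI is run as python -m ithon, the new options apply to both the transpile and run commands, e.g. python -m ithon transpile examples/test.it -v 5 -m; with the corrected dprint comparison, a verbosity of 5 or higher emits the ---START FILE--- and ---DEBOUT--- dumps captured in debug.txt above.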