From 5316cff2a27e0e60255fb77eae865055f186256d Mon Sep 17 00:00:00 2001 From: Xander Mckay Date: Sat, 30 Nov 2024 10:29:08 -0500 Subject: [PATCH] Typechecker stdlib module :shipit: --- debug.txt | 1703 --------------------------------------- examples/typechecker.it | 4 + ithon/__main__.py | 14 +- ithon/prefix.py | 1 + std/typecheck.py | 714 ++++++++++++++++ test_.py | 9 + 6 files changed, 736 insertions(+), 1709 deletions(-) delete mode 100644 debug.txt create mode 100644 examples/typechecker.it create mode 100644 std/typecheck.py create mode 100644 test_.py diff --git a/debug.txt b/debug.txt deleted file mode 100644 index 2de1a4a..0000000 --- a/debug.txt +++ /dev/null @@ -1,1703 +0,0 @@ ----START FILE--- -name: examples/test.it, len: 752 -# ITHON START -import typing as _INTERNAL_typing -import types as _INTERNAL_types -import inspect as _INTERNAL_inspect -import sys as _INTERNAL_sys -class _INTERNAL_Token: - __slots__ = ('action',) - - def __init__(self, action): - self.action = action - - def __rrshift__(self, lhs): - return _INTERNAL_Operation(self.action, lhs) - -class _INTERNAL_Operation: - __slots__ = ('action', 'lhs') - - def __init__(self, action, lhs): - self.action = action - self.lhs = lhs - - def __lshift__(self, rhs): - return self.action(self.lhs, rhs) - -class _INTERNAL_LazyIterable: - __slots__ = ('x','y') - def __init__(self, x, y) -> None: - self.x = iter(x) - self.y = iter(y) - def __iter__(self): - yield from self.x - yield from self.y - -def curry(f): - def wrapper(*args, **kwds) -> _INTERNAL_typing.Any | _INTERNAL_typing.Callable: - signature = _INTERNAL_inspect.signature(f) - ba = signature.bind_partial(*args, **kwds) - if len(ba.arguments) == len(signature.parameters): - return f(*args, **kwds) - else: - def _partial(*other_args, **other_kwds): - combined_args = args + other_args - combined_kwargs = dict(**kwds, **other_kwds) - return curry(f)(*combined_args, **combined_kwargs) - return _partial - return wrapper - -def compose(*funcs): - def _composed(arg): - val = arg - for i in funcs: - val = i(val) - return val - return _composed - -def _INTERNAL_add_fakeimport(name: str, code: str): # TODO: make this use sys.meta_path - module = _INTERNAL_types.ModuleType(name) - parent = '.'.join(name.split('.')[:-1]) if '.'.join(name.split('.')[:-1]) else name - if not parent == name and parent not in _INTERNAL_sys.modules: - _INTERNAL_sys.modules[parent] = _INTERNAL_types.ModuleType(parent) - globals = {'__package__': parent} - module.__dict__.update(globals) - exec(code, module.__dict__) - _INTERNAL_sys.modules[name] = module - -_INTERNAL_add_fakeimport('sentinels', '''import sys as _sys -from threading import Lock as _Lock - - -__all__ = ['Sentinel'] - - -# Design and implementation decisions: -# -# The first implementations created a dedicated class for each instance. -# However, once it was decided to use Sentinel for type signatures, there -# was no longer a need for a dedicated class for each sentinel value on order -# to enable strict type signatures. Since class objects consume a relatively -# large amount of memory, the implementation was changed to avoid this. -# -# With this change, the mechanism used for unpickling/copying objects needed -# to be changed too, since we could no longer count on each dedicated class -# simply returning its singleton instance as before. __reduce__ can return -# a string, upon which an attribute with that name is looked up in the module -# and returned. 
However, that would have meant that pickling/copying support -# would depend on the "name" argument being exactly the name of the variable -# used in the module, and simply wouldn't work for sentinels created in -# functions/methods. Instead, a registry for sentinels was added, where all -# sentinel objects are stored keyed by their name + module name. This is used -# to look up existing sentinels both during normal object creation and during -# copying/unpickling. - - -class Sentinel: - """Create a unique sentinel object. - - *name* should be the fully-qualified name of the variable to which the - return value shall be assigned. - - *repr*, if supplied, will be used for the repr of the sentinel object. - If not provided, "" will be used (with any leading class names - removed). - - *module_name*, if supplied, will be used instead of inspecting the call - stack to find the name of the module from which - """ - _name: str - _repr: str - _module_name: str - - def __new__( - cls, - name: str, - repr: str | None = None, - module_name: str | None = None, - ): - name = str(name) - repr = str(repr) if repr else f'<{name.split(".")[-1]}>' - if not module_name: - parent_frame = _get_parent_frame() - module_name = ( - parent_frame.f_globals.get('__name__', '__main__') - if parent_frame is not None - else __name__ - ) - - # Include the class's module and fully qualified name in the - # registry key to support sub-classing. - registry_key = _sys.intern( - f'{cls.__module__}-{cls.__qualname__}-{module_name}-{name}' - ) - sentinel = _registry.get(registry_key, None) - if sentinel is not None: - return sentinel - sentinel = super().__new__(cls) - sentinel._name = name - sentinel._repr = repr - sentinel._module_name = module_name - with _lock: - return _registry.setdefault(registry_key, sentinel) - - def __repr__(self): - return self._repr - - def __reduce__(self): - return ( - self.__class__, - ( - self._name, - self._repr, - self._module_name, - ), - ) - - -_lock = _Lock() -_registry: dict[str, Sentinel] = {} - - -# The following implementation attempts to support Python -# implementations which don't support sys._getframe(2), such as -# Jython and IronPython. -# -# For reference, see the implementation of namedtuple: -# https://github.com/python/cpython/blob/67444902a0f10419a557d0a2d3b8675c31b075a9/Lib/collections/__init__.py#L503 -def _get_parent_frame(): - """Return the frame object for the caller's parent stack frame.""" - try: - # Two frames up = the parent of the function which called this. - return _sys._getframe(2) - except (AttributeError, ValueError): - global _get_parent_frame - def _get_parent_frame(): - """Return the frame object for the caller's parent stack frame.""" - try: - raise Exception - except Exception: - try: - return _sys.exc_info()[2].tb_frame.f_back.f_back - except Exception: - global _get_parent_frame - def _get_parent_frame(): - """Return the frame object for the caller's parent stack frame.""" - return None - return _get_parent_frame() - return _get_parent_frame()''') -_INTERNAL_add_fakeimport('ipathlib', '''import pathlib -from typing import Self -""" -Pathlib without all the PAINlib. -""" - -class Path(pathlib.Path): - def listdir(self: Self) -> list[Self]: - return list(self.iterdir()) - def remove(self: Self, missing_ok: bool = True) -> None: - """Remove this file or link. 
If the path is a directory, use rmdir() instead.""" - self.unlink(missing_ok=missing_ok) - def rmtree(self: Self): - if self.is_file(): - self.remove() - else: - for child in self.iterdir(): - child.rmtree() - self.rmdir() - -PurePath = pathlib.PurePath''') -_INTERNAL_lazymerge = _INTERNAL_Token(lambda lhs, rhs: _INTERNAL_LazyIterable(lhs, rhs)) - -_INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs)) -_INTERNAL_rpipe = _INTERNAL_Token(lambda lhs, rhs: lhs(rhs)) -_INTERNAL_lspipe = _INTERNAL_Token(lambda lhs, rhs: rhs(*lhs)) -_INTERNAL_rspipe = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs)) - -_INTERNAL_nonereplace = _INTERNAL_Token(lambda lhs, rhs: lhs if lhs != None else rhs) - -_INTERNAL_lto = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs)) - -# If you write in other programming languages, this is very, very useful. -null = None -nil = None -void = None - -# ITHON END - -def fibonacci(x: int) -> list[int]: - start = [0,1] - for i in range(1, x): - start.append <| start[i] + start[i - 1] - return start -def a(): - yield 1 - +> 2 -def b(): - yield from a() - +>> a() -b() |> list |> print -a = 12 |> fibonacci -b = a :: a :: a :: a -c = b :: b :: b :: b -print <| [i for i in c] -print <*| ('a', 'b', 'c') -d = lambda x: x * 2 -#d2 = λ x: x * 2 - -d3d = curry <| (lambda x, y: x**2 + y**2) -print(d3d(2,4)) -print(d3d(2)(4)) -print(d3d(x=2)(y=4)) -@curry -def d3d2(x,y) = x**2 + y**2 -print(d3d2(2,4)) -print(d3d2(2)(4)) -print(d3d2(x=2)(y=4)) - -a = 1 -a++ -print(None ?? a) # 2 -'''a''' -/* -very bad code that is -commented out for a very -good reason -*/ -a++ /* something */ # something - -dtwice = compose(d,d) -print(dtwice(2)) ----END FILE--- - ----START DEBOUT--- -name: examples/test.it -64 # ITHON START -65 - -1 import -1 typing -1 as -1 _INTERNAL_typing -4 - -1 import -1 types -1 as -1 _INTERNAL_types -4 - -1 import -1 inspect -1 as -1 _INTERNAL_inspect -4 - -1 import -1 sys -1 as -1 _INTERNAL_sys -4 - -1 class -1 _INTERNAL_Token -55 : -4 - -5 -1 __slots__ -55 = -OP = ( 'action' -55 ( -3 'action' -55 , -OP , ) - -55 ) -4 - -65 - -1 def -1 __init__ -55 ( -1 self -55 , -1 action -55 ) -OP ) : - -55 : -4 - -5 -1 self -55 . -1 action -55 = -1 action -4 - -65 - -6 -1 def -1 __rrshift__ -55 ( -1 self -55 , -1 lhs -55 ) -OP ) : - -55 : -4 - -5 -1 return -1 _INTERNAL_Operation -55 ( -1 self -55 . -1 action -55 , -1 lhs -55 ) -4 - -65 - -6 -6 -1 class -1 _INTERNAL_Operation -55 : -4 - -5 -1 __slots__ -55 = -OP = ( 'action' -55 ( -3 'action' -55 , -3 'lhs' -55 ) -4 - -65 - -1 def -1 __init__ -55 ( -1 self -55 , -1 action -55 , -1 lhs -55 ) -OP ) : - -55 : -4 - -5 -1 self -55 . -1 action -55 = -1 action -4 - -1 self -55 . -1 lhs -55 = -1 lhs -4 - -65 - -6 -1 def -1 __lshift__ -55 ( -1 self -55 , -1 rhs -55 ) -OP ) : - -55 : -4 - -5 -1 return -1 self -55 . -1 action -55 ( -1 self -55 . -1 lhs -55 , -1 rhs -55 ) -4 - -65 - -6 -6 -1 class -1 _INTERNAL_LazyIterable -55 : -4 - -5 -1 __slots__ -55 = -OP = ( 'x' -55 ( -3 'x' -55 , -3 'y' -55 ) -4 - -1 def -1 __init__ -55 ( -1 self -55 , -1 x -55 , -1 y -55 ) -OP ) -> None -55 -> -1 None -55 : -4 - -5 -1 self -55 . -1 x -55 = -1 iter -55 ( -1 x -55 ) -4 - -1 self -55 . -1 y -55 = -1 iter -55 ( -1 y -55 ) -4 - -6 -1 def -1 __iter__ -55 ( -1 self -55 ) -OP ) : - -55 : -4 - -5 -1 yield -1 from -1 self -55 . -1 x -4 - -1 yield -1 from -1 self -55 . 
-1 y -4 - -65 - -6 -6 -1 def -1 curry -55 ( -1 f -55 ) -OP ) : - -55 : -4 - -5 -1 def -1 wrapper -55 ( -OP ( * args -55 * -1 args -55 , -OP , ** kwds -55 ** -1 kwds -55 ) -OP ) -> _INTERNAL_typing -55 -> -1 _INTERNAL_typing -55 . -1 Any -55 | -1 _INTERNAL_typing -55 . -1 Callable -55 : -4 - -5 -1 signature -55 = -1 _INTERNAL_inspect -55 . -1 signature -55 ( -1 f -55 ) -4 - -1 ba -55 = -1 signature -55 . -1 bind_partial -55 ( -OP ( * args -55 * -1 args -55 , -OP , ** kwds -55 ** -1 kwds -55 ) -4 - -1 if -1 len -55 ( -1 ba -55 . -1 arguments -55 ) -OP ) == len -55 == -1 len -55 ( -1 signature -55 . -1 parameters -55 ) -OP ) : - -55 : -4 - -5 -1 return -1 f -55 ( -OP ( * args -55 * -1 args -55 , -OP , ** kwds -55 ** -1 kwds -55 ) -4 - -6 -1 else -55 : -4 - -5 -1 def -1 _partial -55 ( -OP ( * other_args -55 * -1 other_args -55 , -OP , ** other_kwds -55 ** -1 other_kwds -55 ) -OP ) : - -55 : -4 - -5 -1 combined_args -55 = -1 args -55 + -1 other_args -4 - -1 combined_kwargs -55 = -1 dict -55 ( -OP ( ** kwds -55 ** -1 kwds -55 , -OP , ** other_kwds -55 ** -1 other_kwds -55 ) -4 - -1 return -1 curry -55 ( -1 f -55 ) -OP ) ( * -55 ( -OP ( * combined_args -55 * -1 combined_args -55 , -OP , ** combined_kwargs -55 ** -1 combined_kwargs -55 ) -4 - -6 -1 return -1 _partial -4 - -6 -6 -1 return -1 wrapper -4 - -65 - -6 -1 def -1 compose -55 ( -OP ( * funcs -55 * -1 funcs -55 ) -OP ) : - -55 : -4 - -5 -1 def -1 _composed -55 ( -1 arg -55 ) -OP ) : - -55 : -4 - -5 -1 val -55 = -1 arg -4 - -1 for -1 i -1 in -1 funcs -55 : -4 - -5 -1 val -55 = -1 i -55 ( -1 val -55 ) -4 - -6 -1 return -1 val -4 - -6 -1 return -1 _composed -4 - -65 - -6 -1 def -1 _INTERNAL_add_fakeimport -55 ( -1 name -55 : -1 str -55 , -1 code -55 : -1 str -55 ) -OP ) : # TODO: make this use sys.meta_path -55 : -64 # TODO: make this use sys.meta_path -4 - -5 -1 module -55 = -1 _INTERNAL_types -55 . -1 ModuleType -55 ( -1 name -55 ) -4 - -1 parent -55 = -3 '.' -55 . -1 join -55 ( -1 name -55 . -1 split -55 ( -3 '.' -55 ) -OP ) [ : -55 [ -OP [ : - -55 : -OP : - 1 -55 - -2 1 -55 ] -OP ] ) if -55 ) -1 if -3 '.' -55 . -1 join -55 ( -1 name -55 . -1 split -55 ( -3 '.' -55 ) -OP ) [ : -55 [ -OP [ : - -55 : -OP : - 1 -55 - -2 1 -55 ] -OP ] ) else -55 ) -1 else -1 name -4 - -1 if -1 not -1 parent -55 == -1 name -1 and -1 parent -1 not -1 in -1 _INTERNAL_sys -55 . -1 modules -55 : -4 - -5 -1 _INTERNAL_sys -55 . -1 modules -55 [ -1 parent -55 ] -OP ] = _INTERNAL_types -55 = -1 _INTERNAL_types -55 . -1 ModuleType -55 ( -1 parent -55 ) -4 - -6 -1 globals -55 = -OP = { '__package__' -55 { -3 '__package__' -55 : -1 parent -55 } -4 - -1 module -55 . -1 __dict__ -55 . -1 update -55 ( -1 globals -55 ) -4 - -1 exec -55 ( -1 code -55 , -1 module -55 . -1 __dict__ -55 ) -4 - -1 _INTERNAL_sys -55 . -1 modules -55 [ -1 name -55 ] -OP ] = module -55 = -1 module -4 - -65 - -6 -1 _INTERNAL_add_fakeimport -55 ( -3 'sentinels' -55 , -3 '''import sys as _sys -from threading import Lock as _Lock - - -__all__ = ['Sentinel'] - - -# Design and implementation decisions: -# -# The first implementations created a dedicated class for each instance. -# However, once it was decided to use Sentinel for type signatures, there -# was no longer a need for a dedicated class for each sentinel value on order -# to enable strict type signatures. Since class objects consume a relatively -# large amount of memory, the implementation was changed to avoid this. 
-# -# With this change, the mechanism used for unpickling/copying objects needed -# to be changed too, since we could no longer count on each dedicated class -# simply returning its singleton instance as before. __reduce__ can return -# a string, upon which an attribute with that name is looked up in the module -# and returned. However, that would have meant that pickling/copying support -# would depend on the "name" argument being exactly the name of the variable -# used in the module, and simply wouldn't work for sentinels created in -# functions/methods. Instead, a registry for sentinels was added, where all -# sentinel objects are stored keyed by their name + module name. This is used -# to look up existing sentinels both during normal object creation and during -# copying/unpickling. - - -class Sentinel: - """Create a unique sentinel object. - - *name* should be the fully-qualified name of the variable to which the - return value shall be assigned. - - *repr*, if supplied, will be used for the repr of the sentinel object. - If not provided, "" will be used (with any leading class names - removed). - - *module_name*, if supplied, will be used instead of inspecting the call - stack to find the name of the module from which - """ - _name: str - _repr: str - _module_name: str - - def __new__( - cls, - name: str, - repr: str | None = None, - module_name: str | None = None, - ): - name = str(name) - repr = str(repr) if repr else f'<{name.split(".")[-1]}>' - if not module_name: - parent_frame = _get_parent_frame() - module_name = ( - parent_frame.f_globals.get('__name__', '__main__') - if parent_frame is not None - else __name__ - ) - - # Include the class's module and fully qualified name in the - # registry key to support sub-classing. - registry_key = _sys.intern( - f'{cls.__module__}-{cls.__qualname__}-{module_name}-{name}' - ) - sentinel = _registry.get(registry_key, None) - if sentinel is not None: - return sentinel - sentinel = super().__new__(cls) - sentinel._name = name - sentinel._repr = repr - sentinel._module_name = module_name - with _lock: - return _registry.setdefault(registry_key, sentinel) - - def __repr__(self): - return self._repr - - def __reduce__(self): - return ( - self.__class__, - ( - self._name, - self._repr, - self._module_name, - ), - ) - - -_lock = _Lock() -_registry: dict[str, Sentinel] = {} - - -# The following implementation attempts to support Python -# implementations which don't support sys._getframe(2), such as -# Jython and IronPython. -# -# For reference, see the implementation of namedtuple: -# https://github.com/python/cpython/blob/67444902a0f10419a557d0a2d3b8675c31b075a9/Lib/collections/__init__.py#L503 -def _get_parent_frame(): - """Return the frame object for the caller's parent stack frame.""" - try: - # Two frames up = the parent of the function which called this. 
- return _sys._getframe(2) - except (AttributeError, ValueError): - global _get_parent_frame - def _get_parent_frame(): - """Return the frame object for the caller's parent stack frame.""" - try: - raise Exception - except Exception: - try: - return _sys.exc_info()[2].tb_frame.f_back.f_back - except Exception: - global _get_parent_frame - def _get_parent_frame(): - """Return the frame object for the caller's parent stack frame.""" - return None - return _get_parent_frame() - return _get_parent_frame()'''[1, 2] -[0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144] -a b c -20 -20 -20 -20 -20 -20 -2 -8 - -55 ) -4 - -1 _INTERNAL_add_fakeimport -55 ( -3 'ipathlib' -55 , -3 '''import pathlib -from typing import Self -""" -Pathlib without all the PAINlib. -""" - -class Path(pathlib.Path): - def listdir(self: Self) -> list[Self]: - return list(self.iterdir()) - def remove(self: Self, missing_ok: bool = True) -> None: - """Remove this file or link. If the path is a directory, use rmdir() instead.""" - self.unlink(missing_ok=missing_ok) - def rmtree(self: Self): - if self.is_file(): - self.remove() - else: - for child in self.iterdir(): - child.rmtree() - self.rmdir() - -PurePath = pathlib.PurePath''' -55 ) -4 - -1 _INTERNAL_lazymerge -55 = -1 _INTERNAL_Token -55 ( -1 lambda -1 lhs -55 , -1 rhs -55 : -1 _INTERNAL_LazyIterable -55 ( -1 lhs -55 , -1 rhs -55 ) -OP ) ) - -55 ) -4 - -65 - -1 _INTERNAL_lpipe -55 = -1 _INTERNAL_Token -55 ( -1 lambda -1 lhs -55 , -1 rhs -55 : -1 rhs -55 ( -1 lhs -55 ) -OP ) ) - -55 ) -4 - -1 _INTERNAL_rpipe -55 = -1 _INTERNAL_Token -55 ( -1 lambda -1 lhs -55 , -1 rhs -55 : -1 lhs -55 ( -1 rhs -55 ) -OP ) ) - -55 ) -4 - -1 _INTERNAL_lspipe -55 = -1 _INTERNAL_Token -55 ( -1 lambda -1 lhs -55 , -1 rhs -55 : -1 rhs -55 ( -OP ( * lhs -55 * -1 lhs -55 ) -OP ) ) - -55 ) -4 - -1 _INTERNAL_rspipe -55 = -1 _INTERNAL_Token -55 ( -1 lambda -1 lhs -55 , -1 rhs -55 : -1 lhs -55 ( -OP ( * rhs -55 * -1 rhs -55 ) -OP ) ) - -55 ) -4 - -65 - -1 _INTERNAL_nonereplace -55 = -1 _INTERNAL_Token -55 ( -1 lambda -1 lhs -55 , -1 rhs -55 : -1 lhs -1 if -1 lhs -55 != -1 None -1 else -1 rhs -55 ) -4 - -65 - -1 _INTERNAL_lto -55 = -1 _INTERNAL_Token -55 ( -1 lambda -1 lhs -55 , -1 rhs -55 : -1 lhs -55 ( -OP ( * rhs -55 * -1 rhs -55 ) -OP ) ) - -55 ) -4 - -65 - -64 # If you write in other programming languages, this is very, very useful. -65 - -1 null -55 = -1 None -4 - -1 nil -55 = -1 None -4 - -1 void -55 = -1 None -4 - -65 - -64 # ITHON END -65 - -65 - -1 def -1 fibonacci -55 ( -1 x -55 : -1 int -55 ) -OP ) -> list -55 -> -1 list -55 [ -1 int -55 ] -OP ] : - -55 : -4 - -5 -1 start -55 = -OP = [ 0 -55 [ -2 0 -55 , -2 1 -55 ] -4 - -1 for -1 i -1 in -1 range -55 ( -2 1 -55 , -1 x -55 ) -OP ) : - -55 : -4 - -5 -1 start -55 . 
-1 append -55 < -OP < | start -55 | -1 start -55 [ -1 i -55 ] -OP ] + start -55 + -1 start -55 [ -1 i -55 - -2 1 -55 ] -4 - -6 -1 return -1 start -4 - -6 -1 def -1 a -55 ( -OP ( ) : -55 ) -OP ) : - -55 : -4 - -5 -1 yield -2 1 -4 - -55 + -OP + > 2 -55 > -2 2 -4 - -6 -1 def -1 b -55 ( -OP ( ) : -55 ) -OP ) : - -55 : -4 - -5 -1 yield -1 from -1 a -55 ( -OP ( ) - -55 ) -4 - -55 + -OP + >> a -55 >> -1 a -55 ( -OP ( ) - -55 ) -4 - -6 -1 b -55 ( -OP ( ) | -55 ) -OP ) | > -55 | -OP | > list -55 > -1 list -55 | -OP | > print -55 > -1 print -4 - -1 a -55 = -2 12 -55 | -OP | > fibonacci -55 > -1 fibonacci -4 - -1 b -55 = -1 a -55 : -OP : : a -55 : -1 a -55 : -OP : : a -55 : -1 a -55 : -OP : : a -55 : -1 a -4 - -1 c -55 = -1 b -55 : -OP : : b -55 : -1 b -55 : -OP : : b -55 : -1 b -55 : -OP : : b -55 : -1 b -4 - -1 print -55 < -OP < | [ -55 | -55 [ -1 i -1 for -1 i -1 in -1 c -55 ] -4 - -1 print -55 < -OP < * | -55 * -55 | -55 ( -3 'a' -55 , -3 'b' -55 , -3 'c' -55 ) -4 - -1 d -55 = -1 lambda -1 x -55 : -1 x -55 * -2 2 -4 - -64 #d2 = λ x: x * 2 -65 - -65 - -1 d3d -55 = -1 curry -55 < -OP < | ( -55 | -55 ( -1 lambda -1 x -55 , -1 y -55 : -1 x -55 ** -2 2 -55 + -1 y -55 ** -2 2 -55 ) -4 - -1 print -55 ( -1 d3d -55 ( -2 2 -55 , -2 4 -55 ) -OP ) ) - -55 ) -4 - -1 print -55 ( -1 d3d -55 ( -2 2 -55 ) -OP ) ( 4 -55 ( -2 4 -55 ) -OP ) ) - -55 ) -4 - -1 print -55 ( -1 d3d -55 ( -1 x -55 = -2 2 -55 ) -OP ) ( y -55 ( -1 y -55 = -2 4 -55 ) -OP ) ) - -55 ) -4 - -55 @ -1 curry -4 - -1 def -1 d3d2 -55 ( -1 x -55 , -1 y -55 ) -OP ) = x -55 = -1 x -55 ** -2 2 -55 + -1 y -55 ** -2 2 -4 - -1 print -55 ( -1 d3d2 -55 ( -2 2 -55 , -2 4 -55 ) -OP ) ) - -55 ) -4 - -1 print -55 ( -1 d3d2 -55 ( -2 2 -55 ) -OP ) ( 4 -55 ( -2 4 -55 ) -OP ) ) - -55 ) -4 - -1 print -55 ( -1 d3d2 -55 ( -1 x -55 = -2 2 -55 ) -OP ) ( y -55 ( -1 y -55 = -2 4 -55 ) -OP ) ) - -55 ) -4 - -65 - -1 a -55 = -2 1 -4 - -1 a -55 + -OP + + - -55 + -4 - -1 print -55 ( -1 None -55 ? -OP ? ? a -55 ? 
-1 a -55 ) -64 # 2 -4 - -3 '''a''' -4 - -55 / -OP / * - -55 * -4 - -1 very -1 bad -1 code -1 that -1 is -4 - -1 commented -1 out -1 for -1 a -1 very -4 - -1 good -1 reason -4 - -55 * -OP * / - -55 / -4 - -1 a -55 + -OP + + / -55 + -55 / -OP / * something -55 * -1 something -55 * -OP * / # something -55 / -64 # something -4 - -65 - -1 dtwice -55 = -1 compose -55 ( -1 d -55 , -1 d -55 ) -4 - -1 print -55 ( -1 dtwice -55 ( -2 2 -55 ) -OP ) ) -55 ) -4 -0 ----END DEBOUT--- diff --git a/examples/typechecker.it b/examples/typechecker.it new file mode 100644 index 0000000..a2ea2c3 --- /dev/null +++ b/examples/typechecker.it @@ -0,0 +1,4 @@ +import typecheck as tc +tc.checktype('hello', int | str) |> print +tc.checktype(0xdeadbeef, int | str) |> print +tc.checktype(['hello', 0xdeadbeef], int | str) |> print \ No newline at end of file diff --git a/ithon/__main__.py b/ithon/__main__.py index 3bcb3bc..628872d 100644 --- a/ithon/__main__.py +++ b/ithon/__main__.py @@ -43,7 +43,7 @@ class lazy_typegetter: -def translate(file: io.StringIO, debug: int = 0): +def translate(file: io.StringIO, debug: int = 0, bcp: bool = False): def infix(name: str): yield tokenize.OP, ">>" yield tokenize.NAME, name @@ -147,7 +147,7 @@ def translate(file: io.StringIO, debug: int = 0): yield type,name dprint(f'---END DEBOUT---', 4) -def transpile(input_path: Path, verbosity: int, minify: bool) -> None: +def transpile(input_path: Path, verbosity: int, minify: bool, bcp: bool) -> None: dir = Path('dist') if input_path.is_dir(): for i in input_path.glob('*'): @@ -176,17 +176,19 @@ app = typer.Typer() verbosity_arg = typing.Annotated[int, typer.Option('--verbosity', '-v')] minify_arg = typing.Annotated[bool, typer.Option('--minify', '-m')] +bcp_arg = typing.Annotated[bool, typer.Option('--build-custom-prefix', '-B')] + @app.command('t') @app.command('ts') @app.command('transpile') -def transpile_cmd(input_path: pathlib.Path, verbosity: verbosity_arg = 0, minify: minify_arg = False) -> None: - transpile(input_path, verbosity, minify) +def transpile_cmd(input_path: pathlib.Path, verbosity: verbosity_arg = 0, minify: minify_arg = False, bcp: bcp_arg = False) -> None: + transpile(input_path, verbosity, minify, bcp) @app.command('r') @app.command('run') -def run_cmd(input_path: pathlib.Path, verbosity: verbosity_arg = 0, minify: minify_arg = False) -> None: +def run_cmd(input_path: pathlib.Path, verbosity: verbosity_arg = 0, minify: minify_arg = False, bcp: bcp_arg = False) -> None: input_path = Path(input_path) - transpile(input_path, verbosity, minify) + transpile(input_path, verbosity, minify, bcp) if input_path.is_dir(): os.system(f'{sys.executable} -m dist') Path('dist').rmtree() diff --git a/ithon/prefix.py b/ithon/prefix.py index 80b9f86..84e5303 100644 --- a/ithon/prefix.py +++ b/ithon/prefix.py @@ -65,6 +65,7 @@ def _INTERNAL_add_fakeimport(name: str, code: str): # TODO: make this use sys.me _INTERNAL_add_fakeimport('sentinels', std'sentinels.py') _INTERNAL_add_fakeimport('ipathlib', std'ipathlib.py') +_INTERNAL_add_fakeimport('typecheck', std'typecheck.py') _INTERNAL_lazymerge = _INTERNAL_Token(lambda lhs, rhs: _INTERNAL_LazyIterable(lhs, rhs)) _INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs)) diff --git a/std/typecheck.py b/std/typecheck.py new file mode 100644 index 0000000..65bbff2 --- /dev/null +++ b/std/typecheck.py @@ -0,0 +1,714 @@ +from typing import ( + _GenericAlias, + _TypedDictMeta, + TYPE_CHECKING, + Any, + Callable, + Dict, + ForwardRef, + FrozenSet, + List, + Literal, + Mapping, + MutableMapping, + 
MutableSequence, + NamedTuple, + Never, + NewType, + NoReturn, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeAliasType, + TypeGuard, + TypeVar, + Union, + _eval_type as eval_type, + _type_repr as type_repr, + cast, + get_args, + get_origin, + get_type_hints, + overload +) +from collections.abc import Callable as CCallable, Mapping as CMapping, MutableMapping as CMutableMapping, MutableSequence as CMutableSequence, Sequence as CSequence +from inspect import Parameter +from types import UnionType, ModuleType, GenericAlias +import functools, math, inspect, sys, importlib, re, builtins + + + +_T = TypeVar("_T") +_F = TypeVar("_F") +_SimpleTypeVar = TypeVar("_SimpleTypeVar") +_SimpleTypeVarCo = TypeVar("_SimpleTypeVarCo", covariant=True) + + +_MISSING = object() + +_IMPORTABLE_TYPE_EXPRESSION_RE = re.compile(r"^((?:[a-zA-Z0-9_]+\.)+)(.*)$") +_UNIMPORTABLE_TYPE_EXPRESSION_RE = re.compile(r"^[a-zA-Z0-9_]+(\[.*\])?$") +_BUILTINS_MODULE: ModuleType = builtins +_EXTRA_ADVISE_IF_MOD_IS_BUILTINS = ( + " Try altering the type argument to be a string " + "reference (surrounded with quotes) instead, " + "if not already done." +) + +class UnresolvedForwardRefError(TypeError):... + +class UnresolvableTypeError(TypeError):... + +class ValidationError(TypeError):... + +class TypeNotSupportedError(TypeError):... + +class _TrycastOptions(NamedTuple): + strict: bool + eval: bool + funcname: str + +class _LazyStr(str): + def __init__(self, value_func: Callable[[], str], /) -> None: + self._value_func = value_func + self._value = None # type: Optional[str] + + def __str__(self) -> str: + if self._value is None: + self._value = self._value_func() + return self._value + +def _substitute(tp: object, substitutions: Dict[object, object]) -> object: + if isinstance(tp, GenericAlias): # ex: tuple[T1, T2] + return GenericAlias( # type: ignore[reportCallIssue] # pyright + tp.__origin__, tuple([_substitute(a, substitutions) for a in tp.__args__]) + ) + if isinstance(tp, TypeVar): # type: ignore[wrong-arg-types] # pytype + return substitutions.get(tp, tp) + return tp + +def _inspect_signature(value): + return inspect.signature( + value, + # Don't auto-unwrap decorated functions + follow_wrapped=False, + # Don't need annotation information + eval_str=False, + ) + +def _is_typed_dict(tp: object) -> bool: + return isinstance(tp, _TypedDictMeta) + + + +def _is_newtype(tp: object) -> bool: + return isinstance(tp, NewType) + +def _is_simple_typevar(T: object, covariant: bool = False) -> bool: + return ( + isinstance(T, TypeVar) # type: ignore[wrong-arg-types] # pytype + and T.__constraints__ == () # type: ignore[attribute-error] # pytype + and T.__covariant__ == covariant # type: ignore[attribute-error] # pytype + and T.__contravariant__ is False # type: ignore[attribute-error] # pytype + and T.__constraints__ == () # type: ignore[attribute-error] # pytype + ) + +def _checkcast_listlike( + tp: object, + value: object, + listlike_type: Type, + options: _TrycastOptions, + *, + covariant_t: bool = False, + t_ellipsis: bool = False, +) -> "Optional[ValidationError]": + if isinstance(value, listlike_type): + T_ = get_args(tp) + + if len(T_) == 0: # Python 3.9+ + (T,) = (_SimpleTypeVarCo if covariant_t else _SimpleTypeVar,) + + else: + if t_ellipsis: + if len(T_) == 2 and T_[1] is Ellipsis: + (T, _) = T_ + else: + return ValidationError(tp, value) + else: + (T,) = T_ + + if _is_simple_typevar(T, covariant=covariant_t): + pass + else: + for i, x in enumerate(value): # type: ignore[attribute-error] # pytype + e = 
_checkcast_inner(T, x, options) + if e is not None: + return ValidationError( + tp, + value, + _causes=[e._with_prefix(_LazyStr(lambda: f"At index {i}"))], + ) + + return None + else: + return ValidationError(tp, value) + + +def _checkcast_dictlike( + tp: object, + value: object, + dictlike_type: Type, + options: _TrycastOptions, + *, + covariant_v: bool = False, +) -> "Optional[ValidationError]": + if isinstance(value, dictlike_type): + K_V = get_args(tp) + + if len(K_V) == 0: # Python 3.9+ + (K, V) = ( + _SimpleTypeVar, + _SimpleTypeVarCo if covariant_v else _SimpleTypeVar, + ) + else: + (K, V) = K_V + + if _is_simple_typevar(K) and _is_simple_typevar(V, covariant=covariant_v): + pass + else: + for k, v in value.items(): # type: ignore[attribute-error] # pytype + e = _checkcast_inner(K, k, options) + if e is not None: + return ValidationError( + tp, + value, + _causes=[e._with_prefix(_LazyStr(lambda: f"Key {k!r}"))], + ) + e = _checkcast_inner(V, v, options) + if e is not None: + return ValidationError( + tp, + value, + _causes=[e._with_prefix(_LazyStr(lambda: f"At key {k!r}"))], + ) + return None + else: + return ValidationError(tp, value) + +def _type_check(arg: object, msg: str): + """Returns the argument if it appears to be a type. + Raises TypeError if the argument is a known non-type. + + As a special case, accepts None and returns type(None) instead. + Also wraps strings into ForwardRef instances. + """ + arg = _type_convert(arg, module=None) + # Recognize *common* non-types. (This check is not exhaustive.) + if isinstance(arg, (dict, list, int, tuple)): + raise TypeError(f"{msg} Got {arg!r:.100}.") + return arg + +# Python 3.10's typing._type_convert() +def _type_convert(arg, module=None): + """For converting None to type(None), and strings to ForwardRef.""" + if arg is None: + return type(None) + if isinstance(arg, str): + return ForwardRef(arg, module=module) + return arg + + +@overload +def checktype( + value: object, tp: str, /, *, eval: Literal[False] +) -> NoReturn: ... # pragma: no cover + + +@overload +def checktype( + value: object, tp: str, /, *, eval: bool = True +) -> bool: ... # pragma: no cover + + +@overload +def checktype(value: object, tp: Type[_T], /, *, eval: bool = True) -> TypeGuard[_T]: # type: ignore[invalid-annotation] # pytype + ... # pragma: no cover + + +@overload +def checktype( + value: object, tp: object, /, *, eval: bool = True +) -> bool: ... # pragma: no cover + + +def checktype(value, tp, /, *, eval=True): + """ + Returns whether `value` is in the shape of `tp` + (as accepted by a Python typechecker conforming to PEP 484 "Type Hints"). + + This method logically performs an operation similar to: + + return isinstance(value, tp) + + except that it supports many more types than `isinstance`, including: + * List[T] + * Dict[K, V] + * Optional[T] + * Union[T1, T2, ...] + * Literal[...] + * T extends TypedDict + + See trycast.trycast(..., strict=True) for information about parameters, + raised exceptions, and other details. 
+ """ + e = _checkcast_outer( + tp, value, _TrycastOptions(strict=True, eval=eval, funcname="isassignable") + ) + result = e is None + if isinstance(tp, type): + return cast( # type: ignore[invalid-annotation] # pytype + TypeGuard[_T], # type: ignore[not-indexable] # pytype + result, + ) + else: + return result # type: ignore[bad-return-type] # pytype + +def _checkcast_outer( + tp: object, value: object, options: _TrycastOptions +) -> "Optional[ValidationError]": + if isinstance(tp, str): + if options.eval: # == options.eval (for pytype) + tp = eval_type_str(tp) # does use eval() + else: + raise UnresolvableTypeError( + f"Could not resolve type {tp!r}: " + f"Type appears to be a string reference " + f"and {options.funcname}() was called with eval=False, " + f"disabling eval of string type references." + ) + else: + try: + # TODO: Eliminate format operation done by f-string + # from the hot path of _checkcast_outer() + tp = _type_check( # type: ignore[16] # pyre + tp, + f"{options.funcname}() requires a type as its first argument.", + ) + except TypeError: + if isinstance(tp, tuple) and len(tp) >= 1 and isinstance(tp[0], type): + raise TypeError( + f"{options.funcname} does not support checking against a tuple of types. " + "Try checking against a Union[T1, T2, ...] instead." + ) + else: + raise + try: + return _checkcast_inner(tp, value, options) # type: ignore[bad-return-type] # pytype + except UnresolvedForwardRefError: + if options.eval: + advise = ( + "Try altering the first type argument to be a string " + "reference (surrounded with quotes) instead." + ) + else: + advise = ( + f"{options.funcname}() cannot resolve string type references " + "because it was called with eval=False." + ) + raise UnresolvedForwardRefError( + f"{options.funcname} does not support checking against type form {tp!r} " + "which contains a string-based forward reference. " + f"{advise}" + ) + +def _checkcast_inner( + tp: object, value: object, options: _TrycastOptions +) -> "Optional[ValidationError]": + """ + Raises: + * TypeNotSupportedError + * UnresolvedForwardRefError + """ + if tp is int: + # Also accept bools as valid int values + if isinstance(value, int): + return None + else: + return ValidationError(tp, value) + + if tp is float: + # Also accept ints and bools as valid float values + if isinstance(value, float) or isinstance(value, int): + return None + else: + return ValidationError(tp, value) + + if tp is complex: + # Also accept floats, ints, and bools as valid complex values + if ( + isinstance(value, complex) + or isinstance(value, float) + or isinstance(value, int) + ): + return None + else: + return ValidationError(tp, value) + + type_origin = get_origin(tp) + + if type_origin is list or type_origin is List: # List, List[T] + return _checkcast_listlike(tp, value, list, options) + + if type_origin is set or type_origin is Set: # Set, Set[T] + return _checkcast_listlike(tp, value, set, options) + + if type_origin is frozenset or type_origin is FrozenSet: # FrozenSet, FrozenSet[T] + return _checkcast_listlike(tp, value, frozenset, options, covariant_t=True) + + if type_origin is tuple or type_origin is Tuple: + if isinstance(value, tuple): + type_args = get_args(tp) + + if len(type_args) == 0 or ( + len(type_args) == 2 and type_args[1] is Ellipsis + ): # Tuple, Tuple[T, ...] 
+ + return _checkcast_listlike( + tp, + value, + tuple, + options, + covariant_t=True, + t_ellipsis=True, + ) + else: # Tuple[Ts] + if len(value) != len(type_args): + return ValidationError(tp, value) + + for i, T, t in zip(range(len(type_args)), type_args, value): + e = _checkcast_inner(T, t, options) + if e is not None: + return ValidationError( + tp, + value, + _causes=[e._with_prefix(_LazyStr(lambda: f"At index {i}"))], + ) + + return None + else: + return ValidationError(tp, value) + + if type_origin is Sequence or type_origin is CSequence: # Sequence, Sequence[T] + return _checkcast_listlike(tp, value, CSequence, options, covariant_t=True) + + if ( + type_origin is MutableSequence or type_origin is CMutableSequence + ): # MutableSequence, MutableSequence[T] + return _checkcast_listlike(tp, value, CMutableSequence, options) + + if type_origin is dict or type_origin is Dict: # Dict, Dict[K, V] + return _checkcast_dictlike(tp, value, dict, options) + + if type_origin is Mapping or type_origin is CMapping: # Mapping, Mapping[K, V] + return _checkcast_dictlike(tp, value, CMapping, options, covariant_v=True) + + if ( + type_origin is MutableMapping or type_origin is CMutableMapping + ): # MutableMapping, MutableMapping[K, V] + return _checkcast_dictlike(tp, value, CMutableMapping, options) + + if ( + type_origin is Union or type_origin is UnionType + ): # Union[T1, T2, ...], Optional[T] + causes = [] + for T in get_args(tp): + e = _checkcast_inner(T, value, options) + if e is not None: + causes.append(e) + else: + return None + return ValidationError(tp, value, causes) + + if type_origin is Literal: # Literal[...] + for literal in get_args(tp): + if value == literal: + return None + return ValidationError(tp, value) + + if type_origin is CCallable: + callable_args = get_args(tp) + if callable_args == (): + # Callable + if callable(value): + return None + else: + return ValidationError(tp, value) + else: + assert len(callable_args) == 2 + (param_types, return_type) = callable_args + + if return_type is not Any: + # Callable[..., T] + raise TypeNotSupportedError( + f"{options.funcname} cannot reliably determine whether value is " + f"a {type_repr(tp)} because " + f"callables at runtime do not always have a " + f"declared return type. " + f"Consider using {options.funcname}(Callable, value) instead." + ) + + if param_types is Ellipsis: + # Callable[..., Any] + return _checkcast_inner(Callable, value, options) + + assert isinstance(param_types, list) + for param_type in param_types: + if param_type is not Any: + raise TypeNotSupportedError( + f"{options.funcname} cannot reliably determine whether value is " + f"a {type_repr(tp)} because " + f"callables at runtime do not always have " + f"declared parameter types. " + f"Consider using {options.funcname}(" + f"Callable[{','.join('Any' * len(param_types))}, Any], value) " + f"instead." + ) + + # Callable[[Any * N], Any] + if callable(value): + try: + sig = _inspect_signature(value) + except TypeError: + # Not a callable + return ValidationError(tp, value) + except ValueError as f: + # Unable to introspect signature for value. + # It might be a built-in function that lacks signature support. + # Assume conservatively that value does NOT match the requested type. 
+ e = ValidationError(tp, value) + e.__cause__ = f + return e + else: + sig_min_param_count = 0 # type: float + sig_max_param_count = 0 # type: float + for expected_param in sig.parameters.values(): + if ( + expected_param.kind == Parameter.POSITIONAL_ONLY + or expected_param.kind == Parameter.POSITIONAL_OR_KEYWORD + ): + if expected_param.default is Parameter.empty: + sig_min_param_count += 1 + sig_max_param_count += 1 + elif expected_param.kind == Parameter.VAR_POSITIONAL: + sig_max_param_count = math.inf + + if sig_min_param_count <= len(param_types) <= sig_max_param_count: + return None + else: + return ValidationError(tp, value) + else: + return ValidationError(tp, value) + + if isinstance(type_origin, TypeAliasType): # type: ignore[16] # pyre + if len(type_origin.__type_params__) > 0: + substitutions = dict( + zip( + type_origin.__type_params__, + get_args(tp) + ((Any,) * len(type_origin.__type_params__)), + ) + ) # type: Dict[object, object] + new_tp = _substitute(tp.__value__, substitutions) # type: ignore[attr-defined] # mypy + else: + new_tp = tp.__value__ # type: ignore[attr-defined] # mypy + return _checkcast_inner(new_tp, value, options) # type: ignore[16] # pyre + + if isinstance(tp, _GenericAlias): # type: ignore[16] # pyre + raise TypeNotSupportedError( + f"{options.funcname} does not know how to recognize generic type " + f"{type_repr(type_origin)}." + ) + + if _is_typed_dict(tp): # T extends TypedDict + if isinstance(value, Mapping): + if options.eval: + resolved_annotations = get_type_hints( # does use eval() + tp # type: ignore[arg-type] # mypy + ) # resolve ForwardRefs in tp.__annotations__ + else: + resolved_annotations = tp.__annotations__ # type: ignore[attribute-error] # pytype + + try: + # {typing in Python 3.9+, typing_extensions}.TypedDict + required_keys = tp.__required_keys__ # type: ignore[attr-defined, attribute-error] # mypy, pytype + except AttributeError: + # {typing in Python 3.8, mypy_extensions}.TypedDict + if options.strict: + if sys.version_info[:2] >= (3, 9): + advise = "Suggest use a typing.TypedDict instead." + else: + advise = "Suggest use a typing_extensions.TypedDict instead." + advise2 = f"Or use {options.funcname}(..., strict=False)." + raise TypeNotSupportedError( + f"{options.funcname} cannot determine which keys are required " + f"and which are potentially-missing for the " + f"specified kind of TypedDict. 
{advise} {advise2}" + ) + else: + if tp.__total__: # type: ignore[attr-defined, attribute-error] # mypy, pytype + required_keys = resolved_annotations.keys() + else: + required_keys = frozenset() + + for k, v in value.items(): # type: ignore[attribute-error] # pytype + V = resolved_annotations.get(k, _MISSING) + if V is not _MISSING: + e = _checkcast_inner(V, v, options) + if e is not None: + return ValidationError( + tp, + value, + _causes=[e._with_prefix(_LazyStr(lambda: f"At key {k!r}"))], + ) + + for k in required_keys: + if k not in value: # type: ignore[unsupported-operands] # pytype + return ValidationError( + tp, + value, + _causes=[ + ValidationError._from_message( + _LazyStr(lambda: f"Required key {k!r} is missing") + ) + ], + ) + return None + else: + return ValidationError(tp, value) + + if _is_newtype(tp): + if options.strict: + supertype_repr = type_repr(tp.__supertype__) # type: ignore[attr-defined, attribute-error] # mypy, pytype + tp_name_repr = repr(tp.__name__) # type: ignore[attr-defined] # mypy + raise TypeNotSupportedError( + f"{options.funcname} cannot reliably determine whether value is " + f"a NewType({tp_name_repr}, {supertype_repr}) because " + f"NewType wrappers are erased at runtime " + f"and are indistinguishable from their supertype. " + f"Consider using {options.funcname}(..., strict=False) to treat " + f"NewType({tp_name_repr}, {supertype_repr}) " + f"like {supertype_repr}." + ) + else: + supertype = tp.__supertype__ # type: ignore[attr-defined, attribute-error] # mypy, pytype + return _checkcast_inner(supertype, value, options) + + if isinstance(tp, TypeVar): # type: ignore[wrong-arg-types] # pytype + raise TypeNotSupportedError( + f"{options.funcname} cannot reliably determine whether value matches a TypeVar." + ) + + if tp is Any: + return None + + if tp is Never or tp is NoReturn: + return ValidationError(tp, value) + + if isinstance(tp, TypeAliasType): # type: ignore[16] # pyre + if len(tp.__type_params__) > 0: # type: ignore[16] # pyre + substitutions = dict( + zip(tp.__type_params__, ((Any,) * len(tp.__type_params__))) + ) + new_tp = _substitute(tp.__value__, substitutions) + else: + new_tp = tp.__value__ + return _checkcast_inner(new_tp, value, options) # type: ignore[16] # pyre + + if isinstance(tp, ForwardRef): + raise UnresolvedForwardRefError() + + if isinstance(value, tp): # type: ignore[arg-type, wrong-arg-types] # mypy, pytype + return None + else: + return ValidationError(tp, value) + +@functools.lru_cache() +def eval_type_str(tp: str, /) -> object: + """ + Resolves a string-reference to a type that can be imported, + such as `'typing.List'`. + + This function does internally cache lookups that have been made in + the past to improve performance. If you need to clear this cache + you can call: + + eval_type_str.cache_clear() + + Note that this function's implementation uses eval() internally. + + Raises: + * UnresolvableTypeError -- + If the specified string-reference could not be resolved to a type. + """ + if not isinstance(tp, str): # pragma: no cover + raise ValueError() + + # Determine which module to lookup the type from + mod: ModuleType + module_name: str + member_expr: str + m = _IMPORTABLE_TYPE_EXPRESSION_RE.fullmatch(tp) + if m is not None: + (module_name_dot, member_expr) = m.groups() + module_name = module_name_dot[:-1] + try: + mod = importlib.import_module(module_name) + except Exception: + raise UnresolvableTypeError( + f"Could not resolve type {tp!r}: " f"Could not import {module_name!r}." 
+ ) + else: + m = _UNIMPORTABLE_TYPE_EXPRESSION_RE.fullmatch(tp) + if m is not None: + mod = _BUILTINS_MODULE + module_name = _BUILTINS_MODULE.__name__ + member_expr = tp + else: + raise UnresolvableTypeError( + f"Could not resolve type {tp!r}: " + f"{tp!r} does not appear to be a valid type." + ) + + # Lookup the type from a module + try: + member = eval(member_expr, mod.__dict__, None) + except Exception: + raise UnresolvableTypeError( + f"Could not resolve type {tp!r}: " + f"Could not eval {member_expr!r} inside module {module_name!r}." + f"{_EXTRA_ADVISE_IF_MOD_IS_BUILTINS if mod is _BUILTINS_MODULE else ''}" + ) + + # Interpret an imported str as a TypeAlias + if isinstance(member, str): + member = ForwardRef(member, is_argument=False) + + # Resolve any ForwardRef instances inside the type + try: + member = eval_type(member, mod.__dict__, None) # type: ignore[16] # pyre + except Exception: + raise UnresolvableTypeError( + f"Could not resolve type {tp!r}: " + f"Could not eval type {member!r} inside module {module_name!r}." + f"{_EXTRA_ADVISE_IF_MOD_IS_BUILTINS if mod is _BUILTINS_MODULE else ''}" + ) + + # 1. Ensure the object is actually a type + # 2. As a special case, interpret None as type(None) + try: + member = _type_check(member, f"Could not resolve type {tp!r}: ") # type: ignore[16] # pyre + except TypeError as e: + raise UnresolvableTypeError(str(e)) + return member + diff --git a/test_.py b/test_.py new file mode 100644 index 0000000..1db12f7 --- /dev/null +++ b/test_.py @@ -0,0 +1,9 @@ +import std.typecheck as tc +import typing +OptionalString = str | None +print(tc.checktype("wow", OptionalString)) +print(tc.checktype(None, OptionalString)) +print(tc.checktype(0xDEADBEEF, OptionalString)) +@tc.check_args +def a(a:int,b:str,c:typing.Any):... +a(1,"",1223) \ No newline at end of file
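
For context, a minimal usage sketch of the checktype() API added in std/typecheck.py, mirroring examples/typechecker.it and test_.py above. It assumes the module is importable from plain Python as std.typecheck (as test_.py does); the list[str] and 'typing.List[int]' cases are extra illustrations not taken from the patch, and the expected results in the comments are inferred from the implementation above rather than from recorded output.

import std.typecheck as tc

# PEP 604 unions: each member is tried in turn by _checkcast_inner().
print(tc.checktype('hello', int | str))                 # expected: True  (str matches the union)
print(tc.checktype(0xdeadbeef, int | str))              # expected: True  (int matches the union)
print(tc.checktype(['hello', 0xdeadbeef], int | str))   # expected: False (a list matches neither member)

# Parameterized containers are validated element by element via _checkcast_listlike(),
# something plain isinstance() cannot do.
print(tc.checktype(['hello', 'world'], list[str]))      # expected: True

# String type references are resolved through eval_type_str(); with eval=False,
# a string reference raises UnresolvableTypeError instead of being evaluated.
print(tc.checktype([1, 2, 3], 'typing.List[int]'))      # expected: True

Note that string forward references are only evaluated when eval=True (the default); _checkcast_outer() raises UnresolvableTypeError for string inputs when eval=False.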