Refactoring, Stdlib, everything!

parent 90cc4ff466
commit b9801cad59

9 changed files with 1901 additions and 85 deletions
.directory  (new file, +2)
@@ -0,0 +1,2 @@
+[Desktop Entry]
+Icon=pythonbackend
ithon/__main__.py
@@ -1,53 +1,11 @@
-import tokenize, io
-
-PREFIX = """
-import typing as _INTERNAL_typing
-# ITHON START
-class _INTERNAL_Token:
-    __slots__ = ('action',)
-
-    def __init__(self, action):
-        self.action = action
-
-    def __rrshift__(self, lhs):
-        return _INTERNAL_Operation(self.action, lhs)
-
-class _INTERNAL_Operation:
-    __slots__ = ('action', 'lhs')
-
-    def __init__(self, action, lhs):
-        self.action = action
-        self.lhs = lhs
-
-    def __lshift__(self, rhs):
-        return self.action(self.lhs, rhs)
-
-class _INTERNAL_LazyIterable:
-    __slots__ = ('x','y')
-    def __init__(self, x, y) -> None:
-        self.x = iter(x)
-        self.y = iter(y)
-    def __iter__(self):
-        yield from self.x
-        yield from self.y
-
-_INTERNAL_lazymerge = _INTERNAL_Token(lambda lhs, rhs: _INTERNAL_LazyIterable(lhs, rhs))
-
-_INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs))
-_INTERNAL_rpipe = _INTERNAL_Token(lambda lhs, rhs: lhs(rhs))
-_INTERNAL_lspipe = _INTERNAL_Token(lambda lhs, rhs: rhs(*lhs))
-_INTERNAL_rspipe = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs))
-
-# If you write in other programming languages, this is useful.
-null = None
-nil = None
-void = None
-
-type Maybe[T] = T | None
-
-# ITHON END
-"""
-
-
+import tokenize, io, typer, typing, os
+from pathlib import Path
+
+def patch_std(prefix: str):
+    for i in (Path(__file__).parent.parent / 'std').iterdir():
+        prefix = prefix.replace(f"std'{i.name}'", "'''" + i.read_text().replace("'''", "\\'''") + "'''")
+    return prefix
+
+PREFIX = patch_std((Path(__file__).parent / 'prefix.py').read_text())
 class lazy_typegetter:
     class lazy_typegetter_iter:
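Review note: patch_std inlines the contents of every file under std/ into the prefix source, replacing each std'<filename>' placeholder with the file's text wrapped in triple quotes. A minimal sketch of that substitution (the strings below are illustrative stand-ins, not the real std/sentinels.py):

prefix_src = "_INTERNAL_add_fakeimport('sentinels', std'sentinels.py')"
stdlib_code = "__all__ = ['Sentinel']"  # stand-in for the real file contents
patched = prefix_src.replace("std'sentinels.py'",
                             "'''" + stdlib_code.replace("'''", "\\'''") + "'''")
print(patched)
# _INTERNAL_add_fakeimport('sentinels', '''__all__ = ['Sentinel']''')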
@@ -71,37 +29,40 @@ class lazy_typegetter:
         return self.lazy_typegetter_iter(self)
 
 
 def translate(file: io.StringIO):
-    patched_file = io.StringIO(PREFIX + file.read())
+    def infix(name: str):
+        yield tokenize.OP, ">>"
+        yield tokenize.NAME, name
+        yield tokenize.OP, "<<"
+
+    filedata = PREFIX + file.read()
+    print(filedata)
+    patched_file = io.StringIO(filedata)
 
     skip_token = 0
     tokens = lazy_typegetter(list(tokenize.generate_tokens(patched_file.readline))) # Precalculate tokens
 
     for n, i in enumerate(tokens):
         type, name = i
+        print(type, name)
         try:
             next_type, next_name = tokens[n + 1]
         except IndexError:
             next_type, next_name = (None, None)
-        print(type, name)
 
         if skip_token:
             skip_token -= 1
             continue
         if type == tokenize.OP and next_type == tokenize.OP:
             # Most likely special operation
             if name == "|" and next_name == ">": # left pipe
-                yield tokenize.OP, ">>"
-                yield tokenize.NAME, "_INTERNAL_lpipe"
-                yield tokenize.OP, "<<"
+                yield from infix("_INTERNAL_lpipe")
                 skip_token+=1
             elif name == "<" and next_name == "|": # right pipe
-                yield tokenize.OP, ">>"
-                yield tokenize.NAME, "_INTERNAL_rpipe"
-                yield tokenize.OP, "<<"
+                yield from infix("_INTERNAL_rpipe")
                 skip_token+=1
             elif name == ":" and next_name == ":": # lazy merge
-                yield tokenize.OP, ">>"
-                yield tokenize.NAME, "_INTERNAL_lazymerge"
-                yield tokenize.OP, "<<"
+                yield from infix("_INTERNAL_lazymerge")
                 skip_token+=1
             elif name == "+" and next_name == "+": # increment
                 yield tokenize.OP, "+="
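Review note: infix() factors out the repeated three-token emission `>> NAME <<`, which is what makes the custom operators work at runtime: `>>` dispatches to _INTERNAL_Token.__rrshift__ and `<<` to _INTERNAL_Operation.__lshift__. A standalone sketch of the mechanism with simplified names:

class Token:
    def __init__(self, action): self.action = action
    def __rrshift__(self, lhs): return Operation(self.action, lhs)

class Operation:
    def __init__(self, action, lhs): self.action, self.lhs = action, lhs
    def __lshift__(self, rhs): return self.action(self.lhs, rhs)

lpipe = Token(lambda lhs, rhs: rhs(lhs))
assert (12 >> lpipe << (lambda x: x + 1)) == 13  # i.e. `12 |> f` after translation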
@@ -111,24 +72,28 @@ def translate(file: io.StringIO):
                 yield tokenize.OP, "-="
                 yield tokenize.NUMBER, "1"
                 skip_token+=1
-            elif name == "-" and next_name == ">": # ->, todo
-                skip_token+=1
             elif name == ")" and next_name == "=": # short functions
                 yield type, name
                 yield tokenize.OP, ":"
                 yield tokenize.NAME, "return"
 
                 skip_token += 1
+            elif name == "?" and next_name == "?":
+                yield from infix("_INTERNAL_nonereplace")
+                skip_token+=1
             elif name == "|" and next_name == "*" and tokens[n + 2][1] == ">": #|*>
-                yield tokenize.OP, ">>"
-                yield tokenize.NAME, "_INTERNAL_lspipe"
-                yield tokenize.OP, "<<"
+                yield from infix("_INTERNAL_lspipe")
                 skip_token+=2
             elif name == "<" and next_name == "*" and tokens[n + 2][1] == "|": #<*|
-                yield tokenize.OP, ">>"
-                yield tokenize.NAME, "_INTERNAL_rspipe"
-                yield tokenize.OP, "<<"
+                yield from infix("_INTERNAL_rspipe")
                 skip_token+=2
+            elif name == '/' and next_name == "*":
+                yield tokenize.NEWLINE, '\n'
+                yield tokenize.OP, "'''"
+                skip_token+=1
+            elif name == '*' and next_name == "/":
+                yield tokenize.OP, "'''"
+                skip_token+=1
             else:
                 yield type,name
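Review note: two new pieces of sugar land here. `x ?? y` is rewritten to `x >>_INTERNAL_nonereplace<< y` (evaluate to x unless it is None), and `/* ... */` is rewritten into a `''' ... '''` string expression, giving C-style block comments. A quick check of the ?? semantics exactly as prefix.py defines them:

nonereplace = lambda lhs, rhs: lhs if lhs != None else rhs  # prefix.py uses != None; `is not None` would be the idiomatic test
assert nonereplace(None, 11) == 11
assert nonereplace(5, 11) == 5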
@@ -140,6 +105,12 @@ def translate(file: io.StringIO):
                 pass
             else:
                 yield type, name
+        #elif type == tokenize.NAME and next_name == ':':
+        #    # Custom control flow statement
+        #    if name == 'local':
+        #        yield
+        #    else:
+        #        yield type, name
         #elif type == tokenize.NAME:
         #    if name == "λ":
         #        yield tokenize.NAME, 'lambda'
@@ -148,21 +119,12 @@ def translate(file: io.StringIO):
         else:
             yield type,name
 
-script = """
-
-def fibonacci(x: int) -> list[int]:
-    start = [0,1]
-    for i in range(1, x):
-        start.append <| start[i] + start[i - 1]
-    return start
-a = 12 |> fibonacci
-b = a :: a :: a :: a
-c = b :: b :: b :: b
-print <| [i for i in c]
-print <*| ('a', 'b', 'c')
-d = lambda x: x * 2
-#d2 = λ x: x * 2
-"""
-code = tokenize.untokenize(translate(io.StringIO(script)))
-print(code)
-exec(code)
+app = typer.Typer()
+@app.command('t')
+@app.command('ts')
+@app.command('transpile')
+def transpile(input_path: str, debug: int = 0):
+    with Path(input_path).open() as f:
+        Path(Path(input_path).stem + '.py').write_text(tokenize.untokenize(translate(f)))
+
+app()
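Review note: the inline test script is gone; transpilation is now driven through a typer CLI, exercised against the new test.it below. The invocation shape is an assumption from the package layout implied by the pycache entry:

# Any of the three registered aliases should work:
#   python -m ithon t test.it
#   python -m ithon ts test.it
#   python -m ithon transpile test.it
# Each reads the input and writes <stem>.py (prefix + translated tokens) beside it.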
BIN  ithon/__pycache__/__main__.cpython-312.pyc  (new file)
Binary file not shown.
ithon/prefix.py  (new file, +75)
@@ -0,0 +1,75 @@
+# ITHON START
+import typing as _INTERNAL_typing
+import types as _INTERNAL_types
+import inspect as _INTERNAL_inspect
+import sys as _INTERNAL_sys
+class _INTERNAL_Token:
+    __slots__ = ('action',)
+
+    def __init__(self, action):
+        self.action = action
+
+    def __rrshift__(self, lhs):
+        return _INTERNAL_Operation(self.action, lhs)
+
+class _INTERNAL_Operation:
+    __slots__ = ('action', 'lhs')
+
+    def __init__(self, action, lhs):
+        self.action = action
+        self.lhs = lhs
+
+    def __lshift__(self, rhs):
+        return self.action(self.lhs, rhs)
+
+class _INTERNAL_LazyIterable:
+    __slots__ = ('x','y')
+    def __init__(self, x, y) -> None:
+        self.x = iter(x)
+        self.y = iter(y)
+    def __iter__(self):
+        yield from self.x
+        yield from self.y
+
+def curry(f):
+    def wrapper(*args, **kwds) -> _INTERNAL_typing.Any | _INTERNAL_typing.Callable:
+        signature = _INTERNAL_inspect.signature(f)
+        ba = signature.bind_partial(*args, **kwds)
+        if len(ba.arguments) == len(signature.parameters):
+            return f(*args, **kwds)
+        else:
+            def _partial(*other_args, **other_kwds):
+                combined_args = args + other_args
+                combined_kwargs = dict(**kwds, **other_kwds)
+                return curry(f)(*combined_args, **combined_kwargs)
+            return _partial
+    return wrapper
+
+def _INTERNAL_add_fakeimport(name: str, code: str): # TODO: make this use sys.meta_path
+    module = _INTERNAL_types.ModuleType(name)
+    parent = '.'.join(name.split('.')[:-1]) if '.'.join(name.split('.')[:-1]) else name
+    if not parent == name and parent not in _INTERNAL_sys.modules:
+        _INTERNAL_sys.modules[parent] = _INTERNAL_types.ModuleType(parent)
+    globals = {'__package__': parent}
+    module.__dict__.update(globals)
+    exec(code, module.__dict__)
+    _INTERNAL_sys.modules[name] = module
+
+_INTERNAL_add_fakeimport('sentinels', std'sentinels.py')
+_INTERNAL_lazymerge = _INTERNAL_Token(lambda lhs, rhs: _INTERNAL_LazyIterable(lhs, rhs))
+
+_INTERNAL_lpipe = _INTERNAL_Token(lambda lhs, rhs: rhs(lhs))
+_INTERNAL_rpipe = _INTERNAL_Token(lambda lhs, rhs: lhs(rhs))
+_INTERNAL_lspipe = _INTERNAL_Token(lambda lhs, rhs: rhs(*lhs))
+_INTERNAL_rspipe = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs))
+
+_INTERNAL_nonereplace = _INTERNAL_Token(lambda lhs, rhs: lhs if lhs != None else rhs)
+
+_INTERNAL_lto = _INTERNAL_Token(lambda lhs, rhs: lhs(*rhs))
+
+# If you write in other programming languages, this is very, very useful.
+null = None
+nil = None
+void = None
+
+# ITHON END
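Review note: curry() keeps returning partials until the wrapped function's signature is fully bound, then calls through. A minimal standalone check of the same logic (non-prefixed names used here for readability):

import inspect

def curry(f):
    def wrapper(*args, **kwds):
        sig = inspect.signature(f)
        if len(sig.bind_partial(*args, **kwds).arguments) == len(sig.parameters):
            return f(*args, **kwds)  # fully bound: call through
        return lambda *a, **k: curry(f)(*args, *a, **{**kwds, **k})  # accumulate
    return wrapper

hyp = curry(lambda x, y: x**2 + y**2)
assert hyp(2, 4) == hyp(2)(4) == hyp(x=2)(y=4) == 20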
std/sentinels.py  (new file, +121)
@@ -0,0 +1,121 @@
+import sys as _sys
+from threading import Lock as _Lock
+
+
+__all__ = ['Sentinel']
+
+
+# Design and implementation decisions:
+#
+# The first implementations created a dedicated class for each instance.
+# However, once it was decided to use Sentinel for type signatures, there
+# was no longer a need for a dedicated class for each sentinel value in order
+# to enable strict type signatures. Since class objects consume a relatively
+# large amount of memory, the implementation was changed to avoid this.
+#
+# With this change, the mechanism used for unpickling/copying objects needed
+# to be changed too, since we could no longer count on each dedicated class
+# simply returning its singleton instance as before. __reduce__ can return
+# a string, upon which an attribute with that name is looked up in the module
+# and returned. However, that would have meant that pickling/copying support
+# would depend on the "name" argument being exactly the name of the variable
+# used in the module, and simply wouldn't work for sentinels created in
+# functions/methods. Instead, a registry for sentinels was added, where all
+# sentinel objects are stored keyed by their name + module name. This is used
+# to look up existing sentinels both during normal object creation and during
+# copying/unpickling.
+
+
+class Sentinel:
+    """Create a unique sentinel object.
+
+    *name* should be the fully-qualified name of the variable to which the
+    return value shall be assigned.
+
+    *repr*, if supplied, will be used for the repr of the sentinel object.
+    If not provided, "<name>" will be used (with any leading class names
+    removed).
+
+    *module_name*, if supplied, will be used instead of inspecting the call
+    stack to find the name of the module from which Sentinel() was called.
+    """
+    _name: str
+    _repr: str
+    _module_name: str
+
+    def __new__(
+        cls,
+        name: str,
+        repr: str | None = None,
+        module_name: str | None = None,
+    ):
+        name = str(name)
+        repr = str(repr) if repr else f'<{name.split(".")[-1]}>'
+        if not module_name:
+            parent_frame = _get_parent_frame()
+            module_name = (
+                parent_frame.f_globals.get('__name__', '__main__')
+                if parent_frame is not None
+                else __name__
+            )
+
+        # Include the class's module and fully qualified name in the
+        # registry key to support sub-classing.
+        registry_key = _sys.intern(
+            f'{cls.__module__}-{cls.__qualname__}-{module_name}-{name}'
+        )
+        sentinel = _registry.get(registry_key, None)
+        if sentinel is not None:
+            return sentinel
+        sentinel = super().__new__(cls)
+        sentinel._name = name
+        sentinel._repr = repr
+        sentinel._module_name = module_name
+        with _lock:
+            return _registry.setdefault(registry_key, sentinel)
+
+    def __repr__(self):
+        return self._repr
+
+    def __reduce__(self):
+        return (
+            self.__class__,
+            (
+                self._name,
+                self._repr,
+                self._module_name,
+            ),
+        )
+
+
+_lock = _Lock()
+_registry: dict[str, Sentinel] = {}
+
+
+# The following implementation attempts to support Python
+# implementations which don't support sys._getframe(2), such as
+# Jython and IronPython.
+#
+# For reference, see the implementation of namedtuple:
+# https://github.com/python/cpython/blob/67444902a0f10419a557d0a2d3b8675c31b075a9/Lib/collections/__init__.py#L503
+def _get_parent_frame():
+    """Return the frame object for the caller's parent stack frame."""
+    try:
+        # Two frames up = the parent of the function which called this.
+        return _sys._getframe(2)
+    except (AttributeError, ValueError):
+        global _get_parent_frame
+        def _get_parent_frame():
+            """Return the frame object for the caller's parent stack frame."""
+            try:
+                raise Exception
+            except Exception:
+                try:
+                    return _sys.exc_info()[2].tb_frame.f_back.f_back
+                except Exception:
+                    global _get_parent_frame
+                    def _get_parent_frame():
+                        """Return the frame object for the caller's parent stack frame."""
+                        return None
+                    return _get_parent_frame()
+        return _get_parent_frame()
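Review note: inside transpiled code, `import sentinels` resolves through the fake import registered by the prefix, so the module needs no file on disk. A hedged usage sketch of the Sentinel API defined above:

from sentinels import Sentinel

MISSING = Sentinel('MISSING')

def get(mapping, key, default=MISSING):
    # MISSING distinguishes "no default supplied" from an explicit None default
    if default is MISSING:
        return mapping[key]
    return mapping.get(key, default)

assert Sentinel('MISSING') is MISSING  # same module + name -> same registry entry
assert repr(MISSING) == '<MISSING>'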
test.it  (new file, +34)
@@ -0,0 +1,34 @@
+
+def fibonacci(x: int) -> list[int]:
+    start = [0,1]
+    for i in range(1, x):
+        start.append <| start[i] + start[i - 1]
+    return start
+a = 12 |> fibonacci
+b = a :: a :: a :: a
+c = b :: b :: b :: b
+print <| [i for i in c]
+print <*| ('a', 'b', 'c')
+d = lambda x: x * 2
+#d2 = λ x: x * 2
+
+d3d = curry <| (lambda x, y: x**2 + y**2)
+print(d3d(2,4))
+print(d3d(2)(4))
+print(d3d(x=2)(y=4))
+@curry
+def d3d2(x,y) = x**2 + y**2
+print(d3d2(2,4))
+print(d3d2(2)(4))
+print(d3d2(x=2)(y=4))
+
+a = 1
+a++
+a |> print ?? 11 |> print
+'''a'''
+/*
+very bad code that is
+commented out for a very
+good reason
+*/
+a++ /* something */ # something
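Review note: test.py below is the transpiled output of this file, checked in for reference. A condensed before/after mapping drawn from the two files (tokenize.untokenize is responsible for the odd spacing):

# test.it                       test.py
# a = 12 |> fibonacci       ->  a =12 >>_INTERNAL_lpipe <<fibonacci
# start.append <| expr      ->  start .append >>_INTERNAL_rpipe <<expr
# b = a :: a                ->  b =a >>_INTERNAL_lazymerge <<a
# a++                       ->  a +=1
# def d3d2(x,y) = expr      ->  def d3d2 (x ,y ):return expr
# x ?? y                    ->  x >>_INTERNAL_nonereplace <<y
# /* ... */                 ->  ''' ... '''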
test.py  (new file, +233)
@@ -0,0 +1,233 @@
+# ITHON START
+import typing as _INTERNAL_typing
+import types as _INTERNAL_types
+import inspect as _INTERNAL_inspect
+import sys as _INTERNAL_sys
+class _INTERNAL_Token :
+    __slots__ =('action',)
+
+    def __init__ (self ,action ):
+        self .action =action
+
+    def __rrshift__ (self ,lhs ):
+        return _INTERNAL_Operation (self .action ,lhs )
+
+class _INTERNAL_Operation :
+    __slots__ =('action','lhs')
+
+    def __init__ (self ,action ,lhs ):
+        self .action =action
+        self .lhs =lhs
+
+    def __lshift__ (self ,rhs ):
+        return self .action (self .lhs ,rhs )
+
+class _INTERNAL_LazyIterable :
+    __slots__ =('x','y')
+    def __init__ (self ,x ,y )->None :
+        self .x =iter (x )
+        self .y =iter (y )
+    def __iter__ (self ):
+        yield from self .x
+        yield from self .y
+
+def curry (f ):
+    def wrapper (*args ,**kwds )->_INTERNAL_typing .Any |_INTERNAL_typing .Callable :
+        signature =_INTERNAL_inspect .signature (f )
+        ba =signature .bind_partial (*args ,**kwds )
+        if len (ba .arguments )==len (signature .parameters ):
+            return f (*args ,**kwds )
+        else :
+            def _partial (*other_args ,**other_kwds ):
+                combined_args =args +other_args
+                combined_kwargs =dict (**kwds ,**other_kwds )
+                return curry (f )(*combined_args ,**combined_kwargs )
+            return _partial
+    return wrapper
+
+def _INTERNAL_add_fakeimport (name :str ,code :str ):# TODO: make this use sys.meta_path
+    module =_INTERNAL_types .ModuleType (name )
+    parent ='.'.join (name .split ('.')[:-1 ])if '.'.join (name .split ('.')[:-1 ])else name
+    if not parent ==name and parent not in _INTERNAL_sys .modules :
+        _INTERNAL_sys .modules [parent ]=_INTERNAL_types .ModuleType (parent )
+    globals ={'__package__':parent }
+    module .__dict__ .update (globals )
+    exec (code ,module .__dict__ )
+    _INTERNAL_sys .modules [name ]=module
+
+_INTERNAL_add_fakeimport ('sentinels','''import sys as _sys
+from threading import Lock as _Lock
+
+
+__all__ = ['Sentinel']
+
+
+# Design and implementation decisions:
+#
+# The first implementations created a dedicated class for each instance.
+# However, once it was decided to use Sentinel for type signatures, there
+# was no longer a need for a dedicated class for each sentinel value in order
+# to enable strict type signatures. Since class objects consume a relatively
+# large amount of memory, the implementation was changed to avoid this.
+#
+# With this change, the mechanism used for unpickling/copying objects needed
+# to be changed too, since we could no longer count on each dedicated class
+# simply returning its singleton instance as before. __reduce__ can return
+# a string, upon which an attribute with that name is looked up in the module
+# and returned. However, that would have meant that pickling/copying support
+# would depend on the "name" argument being exactly the name of the variable
+# used in the module, and simply wouldn't work for sentinels created in
+# functions/methods. Instead, a registry for sentinels was added, where all
+# sentinel objects are stored keyed by their name + module name. This is used
+# to look up existing sentinels both during normal object creation and during
+# copying/unpickling.
+
+
+class Sentinel:
+    """Create a unique sentinel object.
+
+    *name* should be the fully-qualified name of the variable to which the
+    return value shall be assigned.
+
+    *repr*, if supplied, will be used for the repr of the sentinel object.
+    If not provided, "<name>" will be used (with any leading class names
+    removed).
+
+    *module_name*, if supplied, will be used instead of inspecting the call
+    stack to find the name of the module from which Sentinel() was called.
+    """
+    _name: str
+    _repr: str
+    _module_name: str
+
+    def __new__(
+        cls,
+        name: str,
+        repr: str | None = None,
+        module_name: str | None = None,
+    ):
+        name = str(name)
+        repr = str(repr) if repr else f'<{name.split(".")[-1]}>'
+        if not module_name:
+            parent_frame = _get_parent_frame()
+            module_name = (
+                parent_frame.f_globals.get('__name__', '__main__')
+                if parent_frame is not None
+                else __name__
+            )
+
+        # Include the class's module and fully qualified name in the
+        # registry key to support sub-classing.
+        registry_key = _sys.intern(
+            f'{cls.__module__}-{cls.__qualname__}-{module_name}-{name}'
+        )
+        sentinel = _registry.get(registry_key, None)
+        if sentinel is not None:
+            return sentinel
+        sentinel = super().__new__(cls)
+        sentinel._name = name
+        sentinel._repr = repr
+        sentinel._module_name = module_name
+        with _lock:
+            return _registry.setdefault(registry_key, sentinel)
+
+    def __repr__(self):
+        return self._repr
+
+    def __reduce__(self):
+        return (
+            self.__class__,
+            (
+                self._name,
+                self._repr,
+                self._module_name,
+            ),
+        )
+
+
+_lock = _Lock()
+_registry: dict[str, Sentinel] = {}
+
+
+# The following implementation attempts to support Python
+# implementations which don't support sys._getframe(2), such as
+# Jython and IronPython.
+#
+# The version added to the stdlib may simply return sys._getframe(2),
+# without the fallbacks.
+#
+# For reference, see the implementation of namedtuple:
+# https://github.com/python/cpython/blob/67444902a0f10419a557d0a2d3b8675c31b075a9/Lib/collections/__init__.py#L503
+def _get_parent_frame():
+    """Return the frame object for the caller's parent stack frame."""
+    try:
+        # Two frames up = the parent of the function which called this.
+        return _sys._getframe(2)
+    except (AttributeError, ValueError):
+        global _get_parent_frame
+        def _get_parent_frame():
+            """Return the frame object for the caller's parent stack frame."""
+            try:
+                raise Exception
+            except Exception:
+                try:
+                    return _sys.exc_info()[2].tb_frame.f_back.f_back
+                except Exception:
+                    global _get_parent_frame
+                    def _get_parent_frame():
+                        """Return the frame object for the caller's parent stack frame."""
+                        return None
+                    return _get_parent_frame()
+        return _get_parent_frame()''')
+_INTERNAL_lazymerge =_INTERNAL_Token (lambda lhs ,rhs :_INTERNAL_LazyIterable (lhs ,rhs ))
+
+_INTERNAL_lpipe =_INTERNAL_Token (lambda lhs ,rhs :rhs (lhs ))
+_INTERNAL_rpipe =_INTERNAL_Token (lambda lhs ,rhs :lhs (rhs ))
+_INTERNAL_lspipe =_INTERNAL_Token (lambda lhs ,rhs :rhs (*lhs ))
+_INTERNAL_rspipe =_INTERNAL_Token (lambda lhs ,rhs :lhs (*rhs ))
+
+_INTERNAL_nonereplace =_INTERNAL_Token (lambda lhs ,rhs :lhs if lhs !=None else rhs )
+
+_INTERNAL_lto =_INTERNAL_Token (lambda lhs ,rhs :lhs (*rhs ))
+
+# If you write in other programming languages, this is very, very useful.
+null =None
+nil =None
+void =None
+
+# ITHON END
+def fibonacci (x :int )->list [int ]:
+    start =[0 ,1 ]
+    for i in range (1 ,x ):
+        start .append >>_INTERNAL_rpipe <<start [i ]+start [i -1 ]
+    return start
+a =12 >>_INTERNAL_lpipe <<fibonacci
+b =a >>_INTERNAL_lazymerge <<a >>_INTERNAL_lazymerge <<a >>_INTERNAL_lazymerge <<a
+c =b >>_INTERNAL_lazymerge <<b >>_INTERNAL_lazymerge <<b >>_INTERNAL_lazymerge <<b
+print >>_INTERNAL_rpipe <<[i for i in c ]
+print >>_INTERNAL_rspipe <<('a','b','c')
+d =lambda x :x *2
+#d2 = λ x: x * 2
+
+d3d =curry >>_INTERNAL_rpipe <<(lambda x ,y :x **2 +y **2 )
+print (d3d (2 ,4 ))
+print (d3d (2 )(4 ))
+print (d3d (x =2 )(y =4 ))
+@curry
+def d3d2 (x ,y ):return x **2 +y **2
+print (d3d2 (2 ,4 ))
+print (d3d2 (2 )(4 ))
+print (d3d2 (x =2 )(y =4 ))
+
+a =1
+a +=1
+a >>_INTERNAL_lpipe <<print >>_INTERNAL_nonereplace <<11 >>_INTERNAL_lpipe <<print
+'''a'''
+
+'''
+very bad code that is
+commented out for a very
+good reason
+'''
+a +=1
+'''something '''# something
test1.py  (new file, +16)
@@ -0,0 +1,16 @@
+import sys, types
+def add_fakeimport(name: str, code: str):
+    module = types.ModuleType(name)
+    parent = '.'.join(name.split('.')[:-1]) if '.'.join(name.split('.')[:-1]) else name
+    if not parent == name and parent not in sys.modules:
+        sys.modules[parent] = types.ModuleType(parent)
+    globals = {'__package__': parent}
+    print(globals)
+    module.__dict__.update(globals)
+    exec(code, module.__dict__)
+    sys.modules[name] = module
+
+add_fakeimport('abcxyzaa.b', 'print("hi"); a = lambda: print("hi")')
+add_fakeimport('abcxyzaa', 'from . import b; print("hi"); a = lambda: print("hi")')
+import abcxyzaa as aa
+aa.b.a()
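Review note: the TODO on _INTERNAL_add_fakeimport points at sys.meta_path. A hedged sketch of what that alternative could look like (every name here is illustrative, not part of this commit):

import importlib.abc, importlib.util, sys

class FakeSourceFinder(importlib.abc.MetaPathFinder, importlib.abc.Loader):
    """Serve registered source strings as importable modules."""
    def __init__(self, sources: dict[str, str]):
        self.sources = sources

    def find_spec(self, fullname, path=None, target=None):
        if fullname in self.sources:
            return importlib.util.spec_from_loader(fullname, self)
        return None  # not ours: let the next finder try

    def create_module(self, spec):
        return None  # default module creation is fine

    def exec_module(self, module):
        exec(self.sources[module.__name__], module.__dict__)

sys.meta_path.insert(0, FakeSourceFinder({'fakemod': 'a = lambda: print("hi")'}))
import fakemod  # resolved by the finder above, no file on disk
fakemod.a()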