Diffstat (limited to 'macros/latex/contrib/pythonimmediate/pythonimmediate_script_textopy.py')
-rwxr-xr-x  macros/latex/contrib/pythonimmediate/pythonimmediate_script_textopy.py  2295
1 file changed, 2295 insertions, 0 deletions
diff --git a/macros/latex/contrib/pythonimmediate/pythonimmediate_script_textopy.py b/macros/latex/contrib/pythonimmediate/pythonimmediate_script_textopy.py
new file mode 100755
index 0000000000..7e4f4a7de9
--- /dev/null
+++ b/macros/latex/contrib/pythonimmediate/pythonimmediate_script_textopy.py
@@ -0,0 +1,2295 @@
+#!/bin/python3
+"""
+======== TeX-to-Py half ========
+
+receive commands from TeX, then execute them here
+"""
+
+
+#from __future__ import annotations
+import sys
+import os
+import inspect
+import contextlib
+import io
+import functools
+from typing import Optional, Union, Callable, Any, Iterator, Protocol, Iterable, Sequence, Type, Tuple, List, Dict
+import typing
+from abc import ABC, abstractmethod
+from pathlib import Path
+from dataclasses import dataclass
+import tempfile
+import signal
+import traceback
+import re
+import collections
+import enum
+
+
+def user_documentation(x: Union[Callable, str])->Any:
+ return x
+
+
+
+#debug=functools.partial(print, file=sys.stderr, flush=True) # unfortunately this is async ... or so it seems...?
+#debug_file=open(Path(tempfile.gettempdir())/"pythonimmediate_debug_textopy.txt", "w", encoding='u8', buffering=2)
+#debug=functools.partial(print, file=debug_file, flush=True)
+debug=lambda *args, **kwargs: None
+
+
+import argparse
+parser=argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+parser.add_argument("mode", choices=["multiprocessing-network", "unnamed-pipe"])
+args=parser.parse_args()
+
+expansion_only_can_call_Python=False # normally. May be different in LuaTeX etc.
+
+# ======== setup communication method. Requires raw_readline() and send_raw() methods.
+
+if True:
+ sys.stdin=None # type: ignore
+ # avoid user mistakenly read
+
+ raw_readline=sys.__stdin__.readline # raw_readline() should return "⟨line⟩\n" or "" (if EOF) on each call
+
+if args.mode=="multiprocessing-network":
+ address=("localhost", 7348) # this must be identical to that of the other half-script
+ #address="./pythonimmediate.socket"
+
+ from multiprocessing.connection import Client
+ connection=Client(address)
+ debug("connected")
+
+	def send_raw(s: str)->None: # send_raw() should be passed s = "⟨line⟩\n"
+ global connection
+ connection.send_bytes(s.encode('u8'))
+
+elif args.mode=="unnamed-pipe":
+ pytotex_pid_line=raw_readline()
+	match_=re.fullmatch(r"pytotex_pid=(\d+)\n", pytotex_pid_line)
+ assert match_
+ pytotex_pid=int(match_[1])
+
+ connection_=open("/proc/" + str(pytotex_pid) + "/fd/0", "w", encoding='u8',
+ buffering=1 # line buffering
+ )
+
+ def send_raw(s: str)->None:
+ global connection_
+ connection_.write(s)
+ connection_.flush() # just in case
+
+else:
+ assert False
+
+# ======== done.
+
+# https://stackoverflow.com/questions/5122465/can-i-fake-a-package-or-at-least-a-module-in-python-for-testing-purposes
+from types import ModuleType
+pythonimmediate: Any=ModuleType("pythonimmediate")
+pythonimmediate.__file__="pythonimmediate.py"
+sys.modules["pythonimmediate"]=pythonimmediate
+
+pythonimmediate.debugging=True
+
+def export_function_to_module(f: Callable)->Callable:
+ """
+ the functions decorated with this decorator are accessible from user code with
+
+ import pythonimmediate
+ pythonimmediate.⟨function name⟩(...)
+ """
+ setattr(pythonimmediate, f.__name__, f)
+ return f
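+
+# For instance, get_arg_str() further below is defined as
+#
+#     @export_function_to_module
+#     def get_arg_str()->str: ...
+#
+# and can then be called from user code (e.g. inside a pycode environment) as
+# pythonimmediate.get_arg_str(). The decorator only attaches the function to the
+# module object created above; it does not modify the function itself.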
+
+action_done=False
+
+
+def check_not_finished()->None:
+ global action_done
+ if action_done:
+ raise RuntimeError("can only do one action per block!")
+
+def send_finish(s: str)->None:
+ check_not_finished()
+ global action_done
+ action_done=True
+ send_raw(s)
+
+
+import random
+def surround_delimiter(block: str)->str:
+ while True:
+ delimiter=str(random.randint(0, 10**12))
+ if delimiter not in block: break
+ return delimiter + "\n" + block + "\n" + delimiter + "\n"
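+
+# Illustration (the delimiter is random; the value below is made up): surround_delimiter("xy")
+# might return "314159265358\nxy\n314159265358\n", i.e. the payload framed by a delimiter line
+# that does not occur inside the payload. The reader on the other side (e.g. \__read_block:N
+# on the TeX side, or read_block() below) strips this framing again.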
+
+bootstrap_code: Optional[str]=""
+def mark_bootstrap(code: str)->None:
+ global bootstrap_code
+ assert bootstrap_code is not None
+ bootstrap_code+=code
+
+def substitute_private(code: str)->str:
+ return (code
+ #.replace("\n", ' ') # because there are comments in code, cannot
+ .replace("__", "_" + "pythonimmediate" + "_")
+ )
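+
+# e.g. substitute_private(r"\cs_new_protected:Npn \__foo: { }") returns
+# r"\cs_new_protected:Npn \_pythonimmediate_foo: { }", so the "private" double-underscore
+# names used in the bootstrap code below end up in a pythonimmediate-specific namespace.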
+
+def send_bootstrap_code()->None:
+ global bootstrap_code
+ assert bootstrap_code is not None
+ send_raw(surround_delimiter(substitute_private(bootstrap_code)))
+ bootstrap_code = None
+
+# ========
+
+
+# as the name implies, this reads one "command" from the Python side and executes it.
+# the command might do additional tasks e.g. read more \TeX\ code.
+#
+# e.g. if `block' is read from the communication channel, run |\__run_block:|.
+
+mark_bootstrap(
+r"""
+\cs_new_protected:Npn \__read_do_one_command: {
+ \begingroup
+ \endlinechar=-1~
+ \readline \__read_file to \__line
+ \expandafter
+	\endgroup % also this will give an error instead of silently doing nothing when the command is invalid
+ \csname __run_ \__line :\endcsname
+}
+
+% read documentation of |_peek| commands for details what this command does.
+\cs_new_protected:Npn \pythonimmediatecontinue #1 {
+ \immediate\write \__write_file {r #1}
+ \__read_do_one_command:
+}
+
+\cs_new_protected:Npn \pythonimmediatecontinuenoarg {
+ \pythonimmediatecontinue {}
+}
+
+% internal function. Just send an arbitrary block of data to Python.
+\cs_new_protected:Npn \__send_block:e #1 {
+ \immediate\write \__write_file {
+ #1 ^^J
+ pythonimm?""" + '"""' + r"""?'''? % following character will be newline
+ }
+}
+
+\cs_new_protected:Npn \__send_block:n #1 {
+ \__send_block:e {\unexpanded{#1}}
+}
+
+\AtEndDocument{
+ \immediate\write \__write_file {r}
+}
+""")
+
+
+# ========
+
+# when 'i⟨string⟩' is sent from TeX to Python, the function with index ⟨string⟩ in this dict is called
+TeX_handlers: Dict[str, Callable[[], None]]={}
+
+TeXToPyObjectType=Optional[str]
+
+def run_main_loop()->TeXToPyObjectType:
+ while True:
+ line=readline()
+ if not line: return None
+
+ if line[0]=="i":
+ TeX_handlers[line[1:]]()
+ elif line[0]=="r":
+ return line[1:]
+ else:
+ raise RuntimeError("Internal error: unexpected line "+line)
+
+def run_main_loop_get_return_one()->str:
+ line=readline()
+ assert line[0]=="r"
+ return line[1:]
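+
+# Wire-format sketch: each line TeX sends is either "i<identifier>" (invoke the registered
+# handler, which may in turn read more lines/blocks) or "r<data>" (pass <data> back to the
+# Python caller and leave the loop). For example, \pythonimmediatecontinue{x} in the
+# bootstrap code above writes the line "rx".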
+
+
+
+user_documentation(
+"""
+All exported functions can be accessed through the module as |import pythonimmediate|.
+
+The |_finish| functions are internal functions, which must be called \emph{at most} once in each
+|\pythonimmediate:n| call from \TeX\ to tell \TeX\ what to do.
+
+The |_local| functions simply execute the code. These functions will only return when
+the \TeX\ code finishes executing; nevertheless, the \TeX\ code might recursively execute some Python code
+inside it.
+
+A simple example is |pythonimmediate.run_block_local('123')| which simply typesets |123|.
+
+The |_peek| functions are the same as above; however, the \TeX\ code must contain an explicit command
+|\pythonimmediatecontinue{...}|.
+
+The argument of |\pythonimmediatecontinue| will be |e|-expanded
+by |\write| (note that the written content must not contain any newline character,
+otherwise the behavior is undefined), then returned as a string by the Python code.
+The Python function will only return when |\pythonimmediatecontinue| is called.
+
+In other words, |run_*_local(code)| is almost identical to |run_*_peek(code + "\pythonimmediatecontinue {}")|.
+""")
+
+@export_function_to_module
+def run_block_finish(block: str)->None:
+ send_finish("block\n" + surround_delimiter(block))
+
+
+@user_documentation
+@export_function_to_module
+def execute(block: str)->None:
+ """
+ Run a block of \TeX\ code (might consist of multiple lines).
+ Catcode-changing commands are allowed inside.
+
+ A simple example is |pythonimmediate.run_block_local('123')| which simply typesets |123|.
+
+ A more complicated example is |pythonimmediate.run_block_local(r'\verb+%+')|.
+ """
+ run_block_local(block)
+
+def check_line(line: str, *, braces: bool, newline: bool, continue_: Optional[bool])->None:
+ """
+ check user-provided line before sending to TeX for execution
+ """
+ if braces:
+ assert line.count("{") == line.count("}")
+ if newline:
+ assert '\n' not in line
+ assert '\r' not in line # this is not the line separator but just in case
+ if continue_==True: assert "pythonimmediatecontinue" in line
+ elif continue_==False: assert "pythonimmediatecontinue" not in line
+
+
+
+
+do_run_error_finish=True
+
+
+
+
+
+user_scope: Dict[str, Any]={} # consist of user's local variables etc.
+
+def readline()->str:
+ line=raw_readline()
+ if not line:
+ sys.stderr.write("\n\nTraceback (most recent call last):\n")
+ traceback.print_stack(file=sys.stderr)
+ sys.stderr.write("RuntimeError: Fatal irrecoverable TeX error\n\n")
+ os._exit(1)
+
+
+ assert line[-1]=='\n'
+ line=line[:-1]
+ debug("======== saw line", line)
+ return line
+
+block_delimiter: str="pythonimm?\"\"\"?'''?"
+
+def read_block()->str:
+ """
+	Internal function to read one block sent from \TeX. The final delimiter line is consumed
+	but not included in the returned value.
+ """
+ lines: List[str]=[]
+ while True:
+ line=readline()
+ if line==block_delimiter:
+ return '\n'.join(lines)
+ else:
+ lines.append(line)
+
+
+@export_function_to_module
+class NToken(ABC):
+ """
+ Represent a possibly-notexpanded token.
+ For convenience, a notexpanded token is called a blue token.
+ It's not always possible to determine the notexpanded status of a following token in the input stream.
+ Remark: Token objects must be frozen.
+ """
+
+ @abstractmethod
+ def __str__(self)->str: ...
+
+ @abstractmethod
+ def repr1(self)->str: ...
+
+ @property
+ @abstractmethod
+ def assignable(self)->bool: ...
+
+ def assign(self, other: "NToken")->None:
+ assert self.assignable
+ NTokenList([T.let, self, C.other("="), C.space(' '), other]).execute()
+
+ def assign_future(self)->None:
+ assert self.assignable
+ futurelet_(PTTBalancedTokenList(BalancedTokenList([self.no_blue])))
+
+ def assign_futurenext(self)->None:
+ assert self.assignable
+ futureletnext_(PTTBalancedTokenList(BalancedTokenList([self.no_blue])))
+
+ def meaning_str(self)->str:
+ """
+ get the meaning of this token as a string.
+ """
+ return NTokenList([T.meaning, self]).expand_x().str()
+
+ @property
+ @abstractmethod
+ def blue(self)->"BlueToken": ...
+
+ @property
+ @abstractmethod
+ def no_blue(self)->"Token": ...
+
+ def meaning_equal(self, other: "Token")->bool:
+ return NTokenList([T.ifx, self, other, Catcode.other("1"), T["else"], Catcode.other("0"), T.fi]).expand_x().bool()
+
+ def str(self)->str:
+ """
+ self must represent a character of a TeX string. (i.e. equal to itself when detokenized)
+ return the string content.
+
+ default implementation below. Not necessarily correct.
+ """
+ raise ValueError("Token does not represent a string!")
+
+ def degree(self)->int:
+ """
+ return the imbalance degree for this token ({ -> 1, } -> -1, everything else -> 0)
+
+ default implementation below. Not necessarily correct.
+ """
+ return 0
+
+
+@export_function_to_module
+class Token(NToken):
+ """
+ Represent a TeX token, excluding the notexpanded possibility.
+ See also documentation of NToken.
+ """
+
+ @abstractmethod
+ def serialize(self)->str: ...
+
+ @property
+ def blue(self)->"BlueToken": return BlueToken(self)
+
+ @property
+ def no_blue(self)->"Token": return self
+
+ def __repr__(self)->str:
+ return f"<Token: {self.repr1()}>"
+
+ @staticmethod
+ def deserialize(s: str)->"Token":
+ t=TokenList.deserialize(s)
+ assert len(t)==1
+ return t[0]
+
+ @staticmethod
+ def get_next()->"Token":
+ """
+ Get the following token.
+
+ Note: in LaTeX3 versions without the commit |https://github.com/latex3/latex3/commit/24f7188904d6|
+ sometimes this may error out.
+
+ Note: because of the internal implementation of |\peek_analysis_map_inline:n|, this may
+ tokenize up to 2 tokens ahead (including the returned token),
+ as well as occasionally return the wrong token in unavoidable cases.
+ """
+ return Token.deserialize(str(get_next_()[0]))
+
+ @staticmethod
+ def peek_next()->"Token":
+ """
+ Get the following token without removing it from the input stream.
+
+ Equivalent to get_next() then put_next() immediately. See documentation of get_next() for some notes.
+ """
+ return Token.deserialize(
+ typing.cast(Callable[[], TTPLine], Python_call_TeX_local(
+ r"""
+ \cs_new_protected:Npn \__peek_next_callback: #1 {
+ \immediate\write \__write_file { r^^J #1 }
+ \expandafter % expand the ##1 in (*)
+ \__read_do_one_command:
+ }
+
+ \cs_new_protected:Npn %name% {
+ \peek_analysis_map_inline:n {
+ \peek_analysis_map_break:n {
+ \__tlserialize_char_unchecked:nnNN {##1}{##2}##3 \__peek_next_callback: ##1 % (*)
+ }
+ }
+ }
+ """, recursive=False))()
+ )
+
+ def put_next(self)->None:
+ d=self.degree()
+ if d==0:
+ BalancedTokenList([self]).put_next()
+ else:
+ assert isinstance(self, CharacterToken)
+ if d==1:
+ put_next_bgroup(PTTInt(self.index))
+ else:
+ assert d==-1
+ put_next_egroup(PTTInt(self.index))
+
+
+
+
+
+
+"""
+TeX code for serializing and deserializing a token list.
+Convert a token list from/to a string.
+"""
+
+
+mark_bootstrap(
+r"""
+\precattl_exec:n {
+
+% here #1 is the target token list to store the result to, #2 is a string with the final '.'.
+\cs_new_protected:Npn \__tldeserialize_dot:Nn #1 #2 {
+ \begingroup
+ \tl_set:Nn \__tmp {#2}
+ \tl_replace_all:Nnn \__tmp {~} {\cO\ }
+
+ \def \start ##1 { \csname ##1 \endcsname }
+
+ \def \> ##1 ##2 \cO\ { \csname ##1 \endcsname ##2 \cU\ }
+ \def \\ ##1 \cO\ ##2 { \expandafter \noexpand \csname ##1 \endcsname \csname ##2 \endcsname }
+ \def \1 ##1 ##2 { \char_generate:nn {`##1} {1} \csname ##2 \endcsname }
+ \def \2 ##1 ##2 { \char_generate:nn {`##1} {2} \csname ##2 \endcsname }
+ \def \3 ##1 ##2 { \char_generate:nn {`##1} {3} \csname ##2 \endcsname }
+ \def \4 ##1 ##2 { \char_generate:nn {`##1} {4} \csname ##2 \endcsname }
+ \def \6 ##1 ##2 { #### \char_generate:nn {`##1} {6} \csname ##2 \endcsname }
+ \def \7 ##1 ##2 { \char_generate:nn {`##1} {7} \csname ##2 \endcsname }
+ \def \8 ##1 ##2 { \char_generate:nn {`##1} {8} \csname ##2 \endcsname }
+ \def \A ##1 ##2 { \char_generate:nn {`##1} {10} \csname ##2 \endcsname }
+ \def \B ##1 ##2 { \char_generate:nn {`##1} {11} \csname ##2 \endcsname }
+ \def \C ##1 ##2 { \char_generate:nn {`##1} {12} \csname ##2 \endcsname }
+ \def \D ##1 ##2 { \expandafter \expandafter \expandafter \noexpand \char_generate:nn {`##1} {13} \csname ##2 \endcsname }
+ \def \R ##1 { \cFrozenRelax \csname ##1 \endcsname }
+
+ \let \. \empty
+
+ \exp_args:NNNx
+ \endgroup \tl_set:Nn #1 {\expandafter \start \__tmp}
+}
+
+\cs_new_protected:Npn \__tlserialize_char_unchecked:nnNN #1 #2 #3 #4 {
+ % #1=token, #2=char code, #3=catcode, #4: callback (will be called exactly once and with nothing following the input stream)
+ \int_compare:nNnTF {#2} = {-1} {
+ % token is control sequence
+ \tl_if_eq:onTF {#1} {\cFrozenRelax} {
+ #4 {\cStr{ R }}
+ } {
+ \tl_if_eq:onTF {#1} { \cC{} } {
+ #4 {\cStr{ \\\ }}
+ } {
+ \tl_set:Nx \__name { \expandafter \cs_to_str:N #1 }
+ \exp_args:Nx #4 { \prg_replicate:nn {\str_count_spaces:N \__name} {>} \cStr\\ \__name \cStr\ }
+ }
+ }
+ } {
+ % token is not control sequence
+ % (hex catcode) (character) (or escape sequence with that character)
+ \exp_args:Nx #4 { #3 \expandafter \string #1 }
+ }
+}
+
+}
+
+% deserialize as above but #2 does not end with '.'.
+\cs_new_protected:Npn \__tldeserialize_nodot:Nn #1 #2 {
+ \__tldeserialize_dot:Nn #1 {#2 .}
+}
+
+% serialize the token list in #2 and store the result in #1.
+\cs_new_protected:Npn \__tlserialize_nodot_unchecked:Nn #1 #2 {
+ \tl_build_begin:N #1
+ \tl_set:Nn \__tlserialize_callback { \tl_build_put_right:Nn #1 }
+ \tl_analysis_map_inline:nn {#2} {
+ \__tlserialize_char_unchecked:nnNN {##1}{##2}##3 \__tlserialize_callback
+ }
+ \tl_build_end:N #1
+}
+
+% serialize the token list in #2 and store the result in #1. The T or F branch is called depending on whether serialization succeeds.
+% #1 must be different from \__tlserialize_tmp.
+\cs_new_protected:Npn \__tlserialize_nodot:NnTF #1 #2 {
+ \__tlserialize_nodot_unchecked:Nn #1 {#2}
+ \__tldeserialize_nodot:NV \__tlserialize_nodot_tmp #1
+
+ \tl_if_eq:NnTF \__tlserialize_nodot_tmp {#2} % dangling
+}
+
+\cs_new_protected:Npn \__tlserialize_nodot:NnF #1 #2 {
+ \__tlserialize_nodot:NnTF #1 {#2} {} % dangling
+}
+
+\cs_new_protected:Npn \__tlserialize_nodot:NnT #1 #2 #3 { \__tlserialize_nodot:NnTF #1 {#2} {#3} {} }
+
+\msg_new:nnn {pythonimmediate} {cannot-serialize} {Token~list~cannot~be~serialized}
+
+\cs_new_protected:Npn \__tlserialize_nodot:Nn #1 #2{
+ \__tlserialize_nodot:NnF #1 {#2} {
+ \msg_error:nn {pythonimmediate} {cannot-serialize}
+ }
+}
+
+\cs_generate_variant:Nn \__tldeserialize_dot:Nn {NV}
+\cs_generate_variant:Nn \__tldeserialize_nodot:Nn {NV}
+\cs_generate_variant:Nn \__tlserialize_nodot:Nn {NV}
+""")
+
+
+class ControlSequenceTokenMaker:
+ """
+	shorthand to create control sequence objects in Python more easily.
+ """
+ def __init__(self, prefix: str)->None:
+ self.prefix=prefix
+ def __getattribute__(self, a: str)->"ControlSequenceToken":
+ return ControlSequenceToken(object.__getattribute__(self, "prefix")+a)
+ def __getitem__(self, a: str)->"ControlSequenceToken":
+ return ControlSequenceToken(object.__getattribute__(self, "prefix")+a)
+
+
+@export_function_to_module
+@dataclass(repr=False, frozen=True)
+class ControlSequenceToken(Token):
+ make=typing.cast(ControlSequenceTokenMaker, None) # some interference makes this incorrect. Manually assign below
+ csname: str
+ @property
+ def assignable(self)->bool:
+ return True
+ def __str__(self)->str:
+ if self.csname=="": return r"\csname\endcsname"
+ return "\\"+self.csname
+ def serialize(self)->str:
+ return ">"*self.csname.count(" ") + "\\" + self.csname + " "
+ def repr1(self)->str:
+ return f"\\{self.csname}"
+
+
+ControlSequenceToken.make=ControlSequenceTokenMaker("")
+
+T=ControlSequenceToken.make
+P=ControlSequenceTokenMaker("_pythonimmediate_") # create private tokens
+
+@export_function_to_module
+class Catcode(enum.Enum):
+ begin_group=bgroup=1
+ end_group=egroup=2
+ math_toggle=math=3
+ alignment=4
+ parameter=param=6
+ math_superscript=superscript=7
+ math_subscript=subscript=8
+ space=10
+ letter=11
+ other=12
+ active=13
+
+ escape=0
+ end_of_line=paragraph=line=5
+ ignored=9
+ comment=14
+ invalid=15
+
+ @property
+ def for_token(self)->bool:
+ """
+ Return whether a token may have this catcode.
+ """
+ return self not in (Catcode.escape, Catcode.line, Catcode.ignored, Catcode.comment, Catcode.invalid)
+
+ def __call__(self, ch: Union[str, int])->"CharacterToken":
+ """
+ Shorthand:
+ Catcode.letter("a") = Catcode.letter(97) = CharacterToken(index=97, catcode=Catcode.letter)
+ """
+ if isinstance(ch, str): ch=ord(ch)
+ return CharacterToken(ch, self)
+
+C=Catcode
+
+@export_function_to_module
+@dataclass(repr=False, frozen=True) # must be frozen because bgroup and egroup below are reused
+class CharacterToken(Token):
+ index: int
+ catcode: Catcode
+ @property
+ def chr(self)->str:
+ return chr(self.index)
+ def __post_init__(self)->None:
+ assert self.catcode.for_token
+ def __str__(self)->str:
+ return self.chr
+ def serialize(self)->str:
+ return f"{self.catcode.value:X}{self.chr}"
+ def repr1(self)->str:
+ cat=str(self.catcode.value).translate(str.maketrans("0123456789", "₀₁₂₃₄₅₆₇₈₉"))
+ return f"{self.chr}{cat}"
+ @property
+ def assignable(self)->bool:
+ return self.catcode==Catcode.active
+ def degree(self)->int:
+ if self.catcode==Catcode.bgroup:
+ return 1
+ elif self.catcode==Catcode.egroup:
+ return -1
+ else:
+ return 0
+ def str(self)->str:
+ catcode=Catcode.space if self.index==32 else Catcode.other
+ if catcode!=self.catcode:
+ raise ValueError("this CharacterToken does not represent a string!")
+ return self.chr
+
+class FrozenRelaxToken(Token):
+ def __str__(self)->str:
+ return r"\relax"
+ def serialize(self)->str:
+ return "R"
+ def repr1(self)->str:
+ return r"[frozen]\relax"
+ @property
+ def assignable(self)->bool:
+ return False
+
+frozen_relax_token=FrozenRelaxToken()
+pythonimmediate.frozen_relax_token=frozen_relax_token
+
+# other special tokens later...
+
+bgroup=Catcode.bgroup("{")
+egroup=Catcode.egroup("}")
+space=Catcode.space(" ")
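+
+# Quick illustration of the shorthands above (not executed here):
+#
+#     T.relax          # the control sequence token \relax
+#     P.tmp            # the private control sequence \_pythonimmediate_tmp
+#     C.letter("a")    # the character token "a" with catcode letter
+#     TokenList([T["def"], P.foo, [C.other("1"), C.other("2")]])
+#                      # \def \_pythonimmediate_foo {12} -- a nested Python list becomes
+#                      # a balanced {...} group (see TokenList below)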
+
+
+
+@export_function_to_module
+@dataclass(frozen=True)
+class BlueToken(NToken):
+ token: Token
+
+ @property
+ def blue(self)->"BlueToken": return self
+
+ @property
+ def no_blue(self)->"Token": return self.token
+
+ def __str__(self)->str: return str(self.token)
+
+ def repr1(self)->str: return "notexpanded:"+self.token.repr1()
+
+ @property
+ def assignable(self)->bool: return self.token.assignable
+
+ def put_next(self)->None:
+ put_next_blue(PTTBalancedTokenList(BalancedTokenList([self.token])))
+
+
+doc_catcode_table: Dict[int, Catcode]={}
+doc_catcode_table[ord("{")]=Catcode.begin_group
+doc_catcode_table[ord("}")]=Catcode.end_group
+doc_catcode_table[ord("$")]=Catcode.math_toggle
+doc_catcode_table[ord("&")]=Catcode.alignment
+doc_catcode_table[ord("#")]=Catcode.parameter
+doc_catcode_table[ord("^")]=Catcode.math_superscript
+doc_catcode_table[ord("_")]=Catcode.math_subscript
+doc_catcode_table[ord(" ")]=Catcode.space
+doc_catcode_table[ord("~")]=Catcode.active
+for ch in range(ord('a'), ord('z')+1): doc_catcode_table[ch]=Catcode.letter
+for ch in range(ord('A'), ord('Z')+1): doc_catcode_table[ch]=Catcode.letter
+doc_catcode_table[ord("\\")]=Catcode.escape
+doc_catcode_table[ord("%")]=Catcode.comment
+
+e3_catcode_table=dict(doc_catcode_table)
+e3_catcode_table[ord("_")]=Catcode.letter
+e3_catcode_table[ord(":")]=Catcode.letter
+e3_catcode_table[ord(" ")]=Catcode.ignored
+e3_catcode_table[ord("~")]=Catcode.space
+
+
+TokenListType = typing.TypeVar("TokenListType", bound="TokenList")
+
+if typing.TYPE_CHECKING:
+ TokenListBaseClass = collections.UserList[Token]
+else: # Python 3.8 compatibility
+ TokenListBaseClass = collections.UserList
+
+@export_function_to_module
+class TokenList(TokenListBaseClass):
+ @staticmethod
+ def force_token_list(a: Iterable)->Iterable[Token]:
+ for x in a:
+ if isinstance(x, Token):
+ yield x
+ elif isinstance(x, Sequence):
+ yield bgroup
+ child=BalancedTokenList(x)
+ assert child.is_balanced()
+ yield from child
+ yield egroup
+ else:
+ raise RuntimeError(f"Cannot make TokenList from object {x} of type {type(x)}")
+
+ def is_balanced(self)->bool:
+ """
+ check if this is balanced.
+ """
+ degree=0
+ for x in self:
+ degree+=x.degree()
+ if degree<0: return False
+ return degree==0
+
+ def check_balanced(self)->None:
+ """
+ ensure that this is balanced.
+ """
+ if not self.is_balanced():
+ raise ValueError("Token list is not balanced")
+
+ def balanced_parts(self)->"List[Union[BalancedTokenList, Token]]":
+ """
+ split this TokenList into a list of balanced parts and unbalanced {/}tokens
+ """
+ degree=0
+ min_degree=0, 0
+ for i, token in enumerate(self):
+ degree+=token.degree()
+ min_degree=min(min_degree, (degree, i+1))
+ min_degree_pos=min_degree[1]
+
+ left_half: List[Union[BalancedTokenList, Token]]=[]
+ degree=0
+ last_pos=0
+ for i in range(min_degree_pos):
+ d=self[i].degree()
+ degree+=d
+ if degree<0:
+ degree=0
+ if last_pos!=i:
+ left_half.append(BalancedTokenList(self[last_pos:i]))
+ left_half.append(self[i])
+ last_pos=i+1
+ if min_degree_pos!=last_pos:
+ left_half.append(BalancedTokenList(self[last_pos:min_degree_pos]))
+
+ right_half: List[Union[BalancedTokenList, Token]]=[]
+ degree=0
+ last_pos=len(self)
+ for i in range(len(self)-1, min_degree_pos-1, -1):
+ d=self[i].degree()
+ degree-=d
+ if degree<0:
+ degree=0
+ if i+1!=last_pos:
+ right_half.append(BalancedTokenList(self[i+1:last_pos]))
+ right_half.append(self[i])
+ last_pos=i
+ if min_degree_pos!=last_pos:
+ right_half.append(BalancedTokenList(self[min_degree_pos:last_pos]))
+
+ return left_half+right_half[::-1]
+
+ def put_next(self)->None:
+ for part in reversed(self.balanced_parts()): part.put_next()
+
+ @property
+ def balanced(self)->"BalancedTokenList":
+ """
+ return a BalancedTokenList containing the content of this object.
+ it must be balanced.
+ """
+ return BalancedTokenList(self)
+
+ def __init__(self, a: Iterable=())->None:
+ super().__init__(TokenList.force_token_list(a))
+
+ @staticmethod
+ def iterable_from_string(s: str, get_catcode: Callable[[int], Catcode])->Iterable[Token]:
+ """
+ refer to documentation of from_string() for details.
+ """
+ i=0
+ while i<len(s):
+ ch=s[i]
+ i+=1
+ cat=get_catcode(ord(ch))
+ if cat==Catcode.space:
+ yield space
+ # special case: collapse multiple spaces into one but only if character code is space
+ if get_catcode(32) in (Catcode.space, Catcode.ignored):
+ while i<len(s) and s[i]==' ':
+ i+=1
+ elif cat.for_token:
+ yield cat(ch)
+ elif cat==Catcode.ignored:
+ continue
+ else:
+ assert cat==Catcode.escape, f"cannot create TokenList from string containing catcode {cat}"
+ cat=get_catcode(ord(s[i]))
+ if cat!=Catcode.letter:
+ yield ControlSequenceToken(s[i])
+ i+=1
+ else:
+ csname=s[i]
+ i+=1
+ while i<len(s) and get_catcode(ord(s[i]))==Catcode.letter:
+ csname+=s[i]
+ i+=1
+ yield ControlSequenceToken(csname)
+ # special case: remove spaces after control sequence but only if character code is space
+ if get_catcode(32) in (Catcode.space, Catcode.ignored):
+ while i<len(s) and s[i]==' ':
+ i+=1
+
+ @classmethod
+ def from_string(cls: Type[TokenListType], s: str, get_catcode: Callable[[int], Catcode])->TokenListType:
+ """
+ convert a string to a TokenList approximately.
+		The tokenization algorithm is slightly different from TeX's in the following respects:
+
+		* multiple spaces are collapsed to one space, but only if they have character code space (32).
+		* spaces with character code different from space (32) after a control sequence are not ignored.
+		* ^^ syntax is not supported. Use Python's escape syntax as usual.
+ """
+ return cls(TokenList.iterable_from_string(s, get_catcode))
+
+ @classmethod
+ def e3(cls: Type[TokenListType], s: str)->TokenListType:
+ """
+ approximate tokenizer in expl3 catcode, implemented in Python.
+ refer to documentation of from_string() for details.
+ """
+ return cls.from_string(s, lambda x: e3_catcode_table.get(x, Catcode.other))
+
+ @classmethod
+ def doc(cls: Type[TokenListType], s: str)->TokenListType:
+ """
+ approximate tokenizer in document catcode, implemented in Python.
+ refer to documentation of from_string() for details.
+ """
+ return cls.from_string(s, lambda x: doc_catcode_table.get(x, Catcode.other))
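+
+	# Rough illustration of the two precomposed tokenizers above (not executed here):
+	#
+	#     TokenList.e3(r'\cs_new:Npn \foo {1}')   # "_" and ":" are letters, ASCII spaces are ignored, "~" is a space
+	#     TokenList.doc(r'\textbf{~}')            # document catcodes: space is a space, "~" is active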
+
+ def serialize(self)->str:
+ return "".join(t.serialize() for t in self)
+
+ @classmethod
+ def deserialize(cls: Type[TokenListType], data: str)->TokenListType:
+ result: List[Token]=[]
+ i=0
+ cs_skip_space_count=0
+ while i<len(data):
+ if data[i]==">":
+ cs_skip_space_count+=1
+ i+=1
+ elif data[i]=="\\":
+ j=data.index(' ', i+1)
+ for __ in range(cs_skip_space_count):
+ j=data.index(' ', j+1)
+ cs_skip_space_count=0
+ result.append(ControlSequenceToken(data[i+1:j]))
+ i=j+1
+ elif data[i]=="R":
+ result.append(frozen_relax_token)
+ i+=1
+ else:
+ result.append(CharacterToken(index=ord(data[i+1]), catcode=Catcode(int(data[i], 16))))
+ i+=2
+ return cls(result)
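+
+	# The serialization format is a compact string; e.g. with expl3 catcodes for the input,
+	# TokenList.e3(r'\relax ab').serialize() == r"\relax BaBb" (catcode 11 = hex B), and
+	# deserialize() of that string round-trips back to the same token list.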
+
+ def __repr__(self)->str:
+ return '<' + type(self).__name__ + ': ' + ' '.join(t.repr1() for t in self) + '>'
+
+ def execute(self)->None:
+ NTokenList(self).execute()
+
+ def expand_x(self)->"BalancedTokenList":
+ return NTokenList(self).expand_x()
+
+ def bool(self)->bool:
+ return NTokenList(self).bool()
+
+ def str(self)->str:
+ return NTokenList(self).str()
+
+
+
+@export_function_to_module
+class BalancedTokenList(TokenList):
+ """
+ Represents a balanced token list.
+	Note that runtime checking is not strictly enforced;
+	use the `is_balanced()` method explicitly if you need to check.
+ """
+
+ def __init__(self, a: Iterable=())->None:
+ """
+		Constructor. This must check for balancedness, as the balanced() method depends on it.
+ """
+ super().__init__(a)
+ self.check_balanced()
+
+ def expand_o(self)->"BalancedTokenList":
+ return BalancedTokenList(expand_o_(PTTBalancedTokenList(self))[0]) # type: ignore
+ def expand_x(self)->"BalancedTokenList":
+ return BalancedTokenList(expand_x_(PTTBalancedTokenList(self))[0]) # type: ignore
+ def execute(self)->None:
+ execute_(PTTBalancedTokenList(self))
+
+ def put_next(self)->None:
+ put_next_tokenlist(PTTBalancedTokenList(self))
+
+ @staticmethod
+ def get_next()->"BalancedTokenList":
+ """
+ get an (undelimited) argument from the TeX input stream.
+ """
+ return BalancedTokenList(get_argument_tokenlist_()[0]) # type: ignore
+
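+
+# Example of the expansion API above (a sketch; it only works inside an actual TeX run,
+# since it round-trips through the engine):
+#
+#     BalancedTokenList.e3(r'\the\numexpr 6*7\relax').expand_x().str()   # == "42"
+#     BalancedTokenList.e3(r'\def\foo{x}').execute()                     # runs the code in TeX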
+
+
+if typing.TYPE_CHECKING:
+ NTokenListBaseClass = collections.UserList[NToken]
+else: # Python 3.8 compatibility
+ NTokenListBaseClass = collections.UserList
+
+@export_function_to_module
+class NTokenList(NTokenListBaseClass):
+ @staticmethod
+ def force_token_list(a: Iterable)->Iterable[NToken]:
+ for x in a:
+ if isinstance(x, NToken):
+ yield x
+ elif isinstance(x, Sequence):
+ yield bgroup
+ child=NTokenList(x)
+ assert child.is_balanced()
+ yield from child
+ yield egroup
+ else:
+ raise RuntimeError(f"Cannot make NTokenList from object {x} of type {type(x)}")
+
+ def __init__(self, a: Iterable=())->None:
+ super().__init__(NTokenList.force_token_list(a))
+
+ def is_balanced(self)->bool:
+ return TokenList(self).is_balanced() # a bit inefficient (need to construct a TokenList) but good enough
+
+ def simple_parts(self)->List[Union[BalancedTokenList, Token, BlueToken]]:
+ """
+ Split this NTokenList into a list of balanced non-blue parts, unbalanced {/} tokens, and blue tokens.
+ """
+ parts: List[Union[TokenList, BlueToken]]=[TokenList()]
+ for i in self:
+ if isinstance(i, BlueToken):
+ parts+=i, TokenList()
+ else:
+ assert isinstance(i, Token)
+ last_part=parts[-1]
+ assert isinstance(last_part, TokenList)
+ last_part.append(i)
+ result: List[Union[BalancedTokenList, Token, BlueToken]]=[]
+ for large_part in parts:
+ if isinstance(large_part, BlueToken):
+ result.append(large_part)
+ else:
+ result+=large_part.balanced_parts()
+ return result
+
+ def put_next(self)->None:
+ for part in reversed(self.simple_parts()): part.put_next()
+
+ def execute(self)->None:
+ """
+ Execute self.
+ """
+ parts=self.simple_parts()
+ if len(parts)==1:
+ x=parts[0]
+ if isinstance(x, BalancedTokenList):
+ x.execute()
+ return
+ NTokenList([*self, T.pythonimmediatecontinue, []]).put_next()
+ continue_until_passed_back()
+
+ def expand_x(self)->BalancedTokenList:
+ """
+ x-expand self. The result must be balanced.
+ """
+ NTokenList([T.edef, P.tmp, bgroup, *self, egroup]).execute()
+ return BalancedTokenList([P.tmp]).expand_o()
+
+ def str(self)->str:
+ """
+ self must represent a TeX string. (i.e. equal to itself when detokenized)
+ return the string content.
+ """
+ return "".join(t.str() for t in self)
+
+ def bool(self)->bool:
+ s=self.str()
+ return {"0": False, "1": True}[s]
+
+
+class TeXToPyData(ABC):
+ @staticmethod
+ @abstractmethod
+ def read()->"TeXToPyData":
+ ...
+ @staticmethod
+ @abstractmethod
+ def send_code(arg: str)->str:
+ pass
+ @staticmethod
+ @abstractmethod
+ def send_code_var(var: str)->str:
+ pass
+
+# tried and failed
+#@typing.runtime_checkable
+#class TeXToPyData(Protocol):
+# @staticmethod
+# def read()->"TeXToPyData":
+# ...
+#
+# #send_code: str
+#
+# #@staticmethod
+# #@property
+# #def send_code()->str:
+# # ...
+
+
+class TTPLine(TeXToPyData, str):
+ send_code=r"\immediate \write \__write_file {{\unexpanded{{ {} }}}}".format
+ send_code_var=r"\immediate \write \__write_file {{\unexpanded{{ {} }}}}".format
+ @staticmethod
+ def read()->"TTPLine":
+ return TTPLine(readline())
+
+# some old commands e.g. \$, \^, \_, \~ require \set@display@protect to be robust.
+# ~ needs to be redefined directly.
+mark_bootstrap(
+r"""
+\precattl_exec:n {
+ \cs_new_protected:Npn \__begingroup_setup_estr: {
+ \begingroup
+ \escapechar=-1~
+ \cC{set@display@protect}
+ \let \cA\~ \relax
+ }
+}
+""")
+
+class TTPELine(TeXToPyData, str):
+ """
+ Same as TTPEBlock, but for a single line only.
+ """
+ send_code=r"\__begingroup_setup_estr: \immediate \write \__write_file {{ {} }} \endgroup".format
+ send_code_var=r"\__begingroup_setup_estr: \immediate \write \__write_file {{ {} }} \endgroup".format
+ @staticmethod
+ def read()->"TTPELine":
+ return TTPELine(readline())
+
+class TTPEmbeddedLine(TeXToPyData, str):
+ @staticmethod
+ def send_code(self)->str:
+ raise RuntimeError("Must be manually handled")
+ @staticmethod
+ def send_code_var(self)->str:
+ raise RuntimeError("Must be manually handled")
+ @staticmethod
+ def read()->"TTPEmbeddedLine":
+ raise RuntimeError("Must be manually handled")
+
+class TTPBlock(TeXToPyData, str):
+ send_code=r"\__send_block:n {{ {} }}".format
+ send_code_var=r"\__send_block:V {}".format
+ @staticmethod
+ def read()->"TTPBlock":
+ return TTPBlock(read_block())
+
+class TTPEBlock(TeXToPyData, str):
+ """
+	A kind of argument that interprets "escaped strings" and fully expands anything inside.
+	For example, {\\} sends a single backslash to Python, {\{} sends a single '{' to Python.
+	Done by fully expanding the argument with \escapechar=-1 and converting it to a string.
+	Additional precaution is needed, see the note above.
+ """
+ send_code=r"\__begingroup_setup_estr: \__send_block:e {{ {} }} \endgroup".format
+ send_code_var=r"\__begingroup_setup_estr: \__send_block:e {} \endgroup".format
+ @staticmethod
+ def read()->"TTPEBlock":
+ return TTPEBlock(read_block())
+
+class TTPBalancedTokenList(TeXToPyData, BalancedTokenList):
+ send_code=r"\__tlserialize_nodot:Nn \__tmp {{ {} }} \immediate \write \__write_file {{\unexpanded\expandafter{{ \__tmp }}}}".format
+ send_code_var=r"\__tlserialize_nodot:NV \__tmp {} \immediate \write \__write_file {{\unexpanded\expandafter{{ \__tmp }}}}".format
+ @staticmethod
+ def read()->"TTPBalancedTokenList":
+ return TTPBalancedTokenList(BalancedTokenList.deserialize(readline()))
+
+
+class PyToTeXData(ABC):
+ @staticmethod
+ @abstractmethod
+ def read_code(var: str)->str:
+ ...
+ @abstractmethod
+ def write(self)->None:
+ ...
+
+@dataclass
+class PTTVerbatimLine(PyToTeXData):
+ """
+	Represents a line to be tokenized verbatim. Internally the |\readline| primitive is used; as such, any trailing spaces are stripped.
+ The trailing newline is not included, i.e. it's read under |\endlinechar=-1|.
+ """
+ data: str
+ read_code=r"\ior_str_get:NN \__read_file {} ".format
+ def write(self)->None:
+ assert "\n" not in self.data
+ assert self.data.rstrip()==self.data, "Cannot send verbatim line with trailing spaces!"
+ send_raw(self.data+"\n")
+
+@dataclass
+class PTTInt(PyToTeXData):
+ data: int
+ read_code=PTTVerbatimLine.read_code
+ def write(self)->None:
+ PTTVerbatimLine(str(self.data)).write()
+
+@dataclass
+class PTTTeXLine(PyToTeXData):
+ """
+ Represents a line to be tokenized in \TeX's current catcode regime.
+ The trailing newline is not included, i.e. it's tokenized under |\endlinechar=-1|.
+ """
+ data: str
+ read_code=r"\ior_get:NN \__read_file {} ".format
+ def write(self)->None:
+ assert "\n" not in self.data
+ send_raw(self.data+"\n")
+
+@dataclass
+class PTTBlock(PyToTeXData):
+ data: str
+ read_code=r"\__read_block:N {}".format
+ def write(self)->None:
+ send_raw(surround_delimiter(self.data))
+
+@dataclass
+class PTTBalancedTokenList(PyToTeXData):
+ data: BalancedTokenList
+ read_code=r"\ior_str_get:NN \__read_file {0} \__tldeserialize_dot:NV {0} {0}".format
+ def write(self)->None:
+ PTTVerbatimLine(self.data.serialize()+".").write()
+
+
+# ======== define TeX functions that execute Python code ========
+# ======== implementation of |\py| etc. Doesn't support verbatim argument yet. ========
+
+import itertools
+import string
+
+def random_identifiers()->Iterator[str]: # do this to avoid TeX hash collision while keeping the length short
+ for len_ in itertools.count(0):
+ for value in range(1<<len_):
+ for initial in string.ascii_letters:
+ yield initial + f"{value:0{len_}b}".translate({ord("0"): "a", ord("1"): "b"})
+
+random_identifier_iterable=random_identifiers()
+
+def get_random_identifier()->str:
+ return next(random_identifier_iterable)
+
+
+def define_TeX_call_Python(f: Callable[..., None], name: Optional[str]=None, argtypes: Optional[List[Type[TeXToPyData]]]=None, identifier: Optional[str]=None)->str:
+ """
+	This function sets up some internal data structures, and
+ returns the \TeX\ code to be executed on the \TeX\ side to define the macro.
+
+ f: the Python function to be executed.
+ It should take some arguments and eventually (optionally) call one of the |_finish| functions.
+
+ name: the macro name on the \TeX\ side. This should only consist of letter characters in |expl3| catcode regime.
+
+ argtypes: list of argument types. If it's None it will be automatically deduced from the function |f|'s signature.
+
+ Returns: some code (to be executed in |expl3| catcode regime) as explained above.
+ """
+ if argtypes is None: argtypes=[p.annotation for p in inspect.signature(f).parameters.values()]
+ if name is None: name=f.__name__
+
+ if identifier is None: identifier=get_random_identifier()
+ assert identifier not in TeX_handlers
+
+ @functools.wraps(f)
+ def g()->None:
+ assert argtypes is not None
+ args=[argtype.read() for argtype in argtypes]
+
+
+ global action_done
+ old_action_done=action_done
+
+ action_done=False
+ try:
+ f(*args)
+ except:
+ if action_done:
+ # error occurred after 'finish' is called, cannot signal the error to TeX, will just ignore (after printing out the traceback)...
+ pass
+ else:
+ # TODO what should be done here? What if the error raised below is caught
+ action_done=True
+ raise
+ finally:
+ if not action_done:
+ run_none_finish()
+
+ action_done=old_action_done
+
+
+ TeX_handlers[identifier]=g
+
+ TeX_argspec = ""
+ TeX_send_input_commands = ""
+ for i, argtype in enumerate(argtypes):
+ if isinstance(argtype, str):
+ raise RuntimeError("string annotation or `from __future__ import annotations' not yet supported")
+ if not issubclass(argtype, TeXToPyData):
+ raise RuntimeError(f"Argument type {argtype} is incorrect, should be a subclass of TeXToPyData")
+ arg = f"#{i+1}"
+ TeX_send_input_commands += argtype.send_code(arg)
+ TeX_argspec += arg
+
+ return """
+ \\cs_new_protected:Npn \\""" + name + TeX_argspec + """ {
+ \immediate \write \__write_file { i """ + identifier + """ }
+ """ + TeX_send_input_commands + """
+ \__read_do_one_command:
+ }
+ """
+
+
+def define_internal_handler(f: Callable)->Callable:
+ mark_bootstrap(define_TeX_call_Python(f))
+ return f
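+
+# A handler registered this way becomes a TeX command with the same name. For example, a
+# hypothetical handler
+#
+#     @define_internal_handler
+#     def greet(name: TTPEBlock)->None:
+#         pythonimmediate.run_block_finish("Hello " + name + "!")
+#
+# would make \greet{world} typeset "Hello world!". The real handlers below (py, pyc, pycq,
+# __pycodex, ...) follow exactly this pattern.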
+
+
+import linecache
+
+# https://stackoverflow.com/questions/47183305/file-string-traceback-with-line-preview
+def exec_or_eval_with_linecache(code: str, globals: dict, mode: str)->Any:
+ sourcename: str="<usercode>"
+ i=0
+ while sourcename in linecache.cache:
+ sourcename="<usercode" + str(i) + ">"
+ i+=1
+
+ lines=code.splitlines(keepends=True)
+ linecache.cache[sourcename] = len(code), None, lines, sourcename
+
+ compiled_code=compile(code, sourcename, mode)
+ return (exec if mode=="exec" else eval)(compiled_code, globals)
+
+ #del linecache.cache[sourcename]
+ # we never delete the cache, in case some function is defined here then later are called...
+
+def exec_with_linecache(code: str, globals: Dict[str, Any])->None:
+ exec_or_eval_with_linecache(code, globals, "exec")
+
+def eval_with_linecache(code: str, globals: Dict[str, Any])->Any:
+ return exec_or_eval_with_linecache(code, globals, "eval")
+
+
+@define_internal_handler
+def py(code: TTPEBlock)->None:
+ pythonimmediate.run_block_finish(str(eval_with_linecache(code, user_scope))+"%")
+
+@define_internal_handler
+def pyfile(filename: TTPELine)->None:
+ with open(filename, "r") as f:
+ source=f.read()
+ exec(compile(source, filename, "exec"), user_scope)
+
+def print_TeX(*args, **kwargs)->None:
+ if not hasattr(pythonimmediate, "file"):
+ raise RuntimeError("Internal error: attempt to print to TeX outside any environment!")
+ if pythonimmediate.file is not None:
+ functools.partial(print, file=pythonimmediate.file)(*args, **kwargs) # allow user to override `file` kwarg
+pythonimmediate.print=print_TeX
+
+class RedirectPrintTeX:
+ def __init__(self, t)->None:
+ self.t=t
+
+ def __enter__(self)->None:
+ if hasattr(pythonimmediate, "file"):
+ self.old=pythonimmediate.file
+ pythonimmediate.file=self.t
+
+ def __exit__(self, exc_type, exc_value, tb)->None:
+ if hasattr(self, "old"):
+ pythonimmediate.file=self.old
+ else:
+ del pythonimmediate.file
+
+def run_code_redirect_print_TeX(f: Callable[[], Any])->None:
+ with io.StringIO() as t:
+ with RedirectPrintTeX(t):
+ result=f()
+ if result is not None:
+ t.write(str(result)+"%")
+ content=t.getvalue()
+ if content.endswith("\n"):
+ content=content[:-1]
+ else:
+ #content+=r"\empty" # this works too
+ content+="%"
+ pythonimmediate.run_block_finish(content)
+
+@define_internal_handler
+def pyc(code: TTPEBlock)->None:
+ run_code_redirect_print_TeX(lambda: exec_with_linecache(code, user_scope))
+
+@define_internal_handler
+def pycq(code: TTPEBlock)->None:
+ with RedirectPrintTeX(None):
+ exec_with_linecache(code, user_scope)
+ run_none_finish()
+
+mark_bootstrap(
+r"""
+\NewDocumentCommand\pyv{v}{\py{#1}}
+\NewDocumentCommand\pycv{v}{\pyc{#1}}
+""")
+
+# ======== implementation of |pycode| environment
+mark_bootstrap(
+r"""
+\NewDocumentEnvironment{pycode}{}{
+ \saveenvreinsert \__code {
+ \exp_last_unbraced:Nx \__pycodex {{\__code} {\the\inputlineno} {
+ \ifdefined\currfilename \currfilename \fi
+ } {
+ \ifdefined\currfileabspath \currfileabspath \fi
+ }}
+ }
+}{
+ \endsaveenvreinsert
+}
+""")
+
+def normalize_lines(lines: List[str])->List[str]:
+ return [line.rstrip() for line in lines]
+
+@define_internal_handler
+def __pycodex(code: TTPBlock, lineno_: TTPLine, filename: TTPLine, fileabspath: TTPLine)->None:
+ if not code: return
+
+ lineno=int(lineno_)
+ # find where the code comes from... (for easy meaningful traceback)
+ target_filename: Optional[str] = None
+
+ code_lines_normalized=normalize_lines(code.splitlines(keepends=True))
+
+ for f in (fileabspath, filename):
+ if not f: continue
+ p=Path(f)
+ if not p.is_file(): continue
+ file_lines=p.read_text().splitlines(keepends=True)[lineno-len(code_lines_normalized)-1:lineno-1]
+ if normalize_lines(file_lines)==code_lines_normalized:
+ target_filename=f
+ break
+
+ if not target_filename:
+ raise RuntimeError("Source file not found! (attempted {})".format((fileabspath, filename)))
+
+ with io.StringIO() as t:
+ with RedirectPrintTeX(t):
+ if target_filename:
+ code_=''.join(file_lines) # restore missing trailing spaces
+ code_="\n"*(lineno-len(code_lines_normalized)-1)+code_
+ if target_filename:
+ compiled_code=compile(code_, target_filename, "exec")
+ exec(compiled_code, user_scope)
+ else:
+ exec(code_, user_scope)
+ pythonimmediate.run_block_finish(t.getvalue())
+
+# ======== Python-call-TeX functions
+# ======== additional functions...
+
+user_documentation(
+r"""
+These functions get an argument in the input stream and return it detokenized.
+
+This means, for example, that |#| characters are doubled, multiple spaces might be collapsed into one, and spaces might be introduced
+after a control sequence.
+
+It's undefined behavior if the message's "string representation" contains a "newline character".
+""")
+
+def template_substitute(template: str, pattern: str, substitute: Union[str, Callable[[re.Match], str]], optional: bool=False)->str:
+ """
+ pattern is a regex
+ """
+ if not optional:
+ #assert template.count(pattern)==1
+ assert len(re.findall(pattern, template))==1
+ return re.sub(pattern, substitute, template)
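+
+# e.g. template_substitute(r"\cs_new_protected:Npn %name% { }", "%name%", lambda _: r"\__run_abc:")
+# returns r"\cs_new_protected:Npn \__run_abc: { }"; with optional=False, exactly one occurrence
+# of the pattern must be present. (The identifier \__run_abc: is just an illustrative value.)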
+
+#typing.TypeVarTuple(PyToTeXData)
+
+#PythonCallTeXFunctionType=Callable[[PyToTeXData], Optional[Tuple[TeXToPyData, ...]]]
+
+class PythonCallTeXFunctionType(Protocol): # https://stackoverflow.com/questions/57658879/python-type-hint-for-callable-with-variable-number-of-str-same-type-arguments
+ def __call__(self, *args: PyToTeXData)->Optional[Tuple[TeXToPyData, ...]]: ...
+
+class PythonCallTeXSyncFunctionType(PythonCallTeXFunctionType, Protocol): # https://stackoverflow.com/questions/57658879/python-type-hint-for-callable-with-variable-number-of-str-same-type-arguments
+ def __call__(self, *args: PyToTeXData)->Tuple[TeXToPyData, ...]: ...
+
+
+@dataclass(frozen=True)
+class Python_call_TeX_data:
+ TeX_code: str
+ recursive: bool
+ finish: bool
+ sync: Optional[bool]
+
+@dataclass(frozen=True)
+class Python_call_TeX_extra:
+ ptt_argtypes: Tuple[Type[PyToTeXData], ...]
+ ttp_argtypes: Union[Type[TeXToPyData], Tuple[Type[TeXToPyData], ...]]
+
+Python_call_TeX_defined: Dict[Python_call_TeX_data, Tuple[Python_call_TeX_extra, Callable]]={}
+
+def Python_call_TeX_local(TeX_code: str, *, recursive: bool=True, sync: Optional[bool]=None, finish: bool=False)->Callable:
+ data=Python_call_TeX_data(
+ TeX_code=TeX_code, recursive=recursive, sync=sync, finish=finish
+ )
+ return Python_call_TeX_defined[data][1]
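+
+# The intended call pattern, used throughout this file and recognized by scan_Python_call_TeX()
+# below, wraps the call in typing.cast so the argument/return types are visible both to the type
+# checker and to the scanner, e.g.
+#
+#     typing.cast(Callable[[], None], Python_call_TeX_local(
+#         r"""
+#         \cs_new_protected:Npn %name% { ... \__read_do_one_command: }
+#         """, recursive=False, sync=True))()
+#
+# scan_Python_call_TeX() pre-registers every such occurrence via build_Python_call_TeX, so the
+# lookup in Python_call_TeX_defined above always finds an existing entry at call time.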
+
+def build_Python_call_TeX(T: Type, TeX_code: str, *, recursive: bool=True, sync: Optional[bool]=None, finish: bool=False)->None:
+ assert T.__origin__ == typing.Callable[[], None].__origin__ # type: ignore
+ # might be typing.Callable or collections.abc.Callable depends on Python version
+ data=Python_call_TeX_data(
+ TeX_code=TeX_code, recursive=recursive, sync=sync, finish=finish
+ )
+
+ tmp: Any = T.__args__[-1]
+ ttp_argtypes: Union[Type[TeXToPyData], Tuple[Type[TeXToPyData], ...]]
+ if tmp is type(None):
+ ttp_argtypes = ()
+ elif isinstance(tmp, type) and issubclass(tmp, TeXToPyData):
+ # special case, return a single object instead of a tuple of length 1
+ ttp_argtypes = tmp
+ else:
+ ttp_argtypes = tmp.__args__ # type: ignore
+
+ extra=Python_call_TeX_extra(
+ ptt_argtypes=T.__args__[:-1],
+ ttp_argtypes=ttp_argtypes
+ ) # type: ignore
+ if data in Python_call_TeX_defined:
+ assert Python_call_TeX_defined[data][0]==extra
+ else:
+ if isinstance(ttp_argtypes, type) and issubclass(ttp_argtypes, TeXToPyData):
+ # special case, return a single object instead of a tuple of length 1
+ code, result1=define_Python_call_TeX(TeX_code=TeX_code, ptt_argtypes=[*extra.ptt_argtypes], ttp_argtypes=[ttp_argtypes],
+ recursive=recursive, sync=sync, finish=finish,
+ )
+ def result(*args):
+ [tmp]=result1(*args)
+ return tmp
+ else:
+ code, result=define_Python_call_TeX(TeX_code=TeX_code, ptt_argtypes=[*extra.ptt_argtypes], ttp_argtypes=[*ttp_argtypes],
+ recursive=recursive, sync=sync, finish=finish,
+ )
+ mark_bootstrap(code)
+ Python_call_TeX_defined[data]=extra, result
+
+def scan_Python_call_TeX(filename: str)->None:
+ """
+ scan the file in filename for occurrences of typing.cast(T, Python_call_TeX_local(...)), then call build_Python_call_TeX(T, ...) for each occurrence.
+
+ Don't use on untrusted code.
+ """
+ import ast
+ from copy import deepcopy
+ for node in ast.walk(ast.parse(Path(filename).read_text(), mode="exec")):
+ try:
+ if isinstance(node, ast.Call):
+ if (
+ isinstance(node.func, ast.Attribute) and
+ isinstance(node.func.value, ast.Name) and
+ node.func.value.id == "typing" and
+ node.func.attr == "cast"
+ ):
+ T = node.args[0]
+ if isinstance(node.args[1], ast.Call):
+ f_call = node.args[1]
+ if isinstance(f_call.func, ast.Name):
+ if f_call.func.id == "Python_call_TeX_local":
+ f_call=deepcopy(f_call)
+ assert isinstance(f_call.func, ast.Name)
+ f_call.func.id="build_Python_call_TeX"
+ f_call.args=[T]+f_call.args
+ eval(compile(ast.Expression(body=f_call), "<string>", "eval"))
+ except:
+ print("======== error on line", node.lineno, "========", file=sys.stderr)
+ raise
+
+def define_Python_call_TeX(TeX_code: str, ptt_argtypes: List[Type[PyToTeXData]], ttp_argtypes: List[Type[TeXToPyData]],
+ *,
+ recursive: bool=True,
+ sync: Optional[bool]=None,
+ finish: bool=False,
+ )->Tuple[str, PythonCallTeXFunctionType]:
+ r"""
+ |TeX_code| should be some expl3 code that defines a function with name |%name%| that when called should:
+ * run some \TeX\ code (which includes reading the arguments, if any)
+ * do the following if |sync|:
+ * send |r| to Python (equivalently write %sync%)
+ * send whatever needed for the output (as in |ttp_argtypes|)
+ * call |\__read_do_one_command:| iff not |finish|.
+
+ This is allowed to contain the following:
+ * %name%: the name of the function to be defined as explained above.
+ * %read_arg0(\var_name)%, %read_arg1(...)%: will be expanded to code that reads the input.
+ * %send_arg0(...)%, %send_arg1(...)%: will be expanded to code that sends the content.
+ * %send_arg0_var(\var_name)%, %send_arg1_var(...)%: will be expanded to code that sends the content in the variable.
+ * %optional_sync%: expanded to code that writes |r| (to sync), if |sync| is True.
+
+ ptt_argtypes: list of argument types to be sent from Python to TeX (i.e. input of the TeX function)
+
+ ttp_argtypes: list of argument types to be sent from TeX to Python (i.e. output of the TeX function)
+
+	recursive: whether the TeX_code might call another Python function. Defaults to True.
+ It does not hurt to always specify True, but performance would be a bit slower.
+
+	sync: whether the Python function needs to wait for the TeX function to finish.
+ Required if |ttp_argtypes| is not empty.
+ This should be left to be the default None most of the time. (which will make it always sync if |debugging|,
+ otherwise only sync if needed i.e. there's some output)
+
+ finish: Include this if and only if |\__read_do_one_command:| is omitted.
+ Normally this is not needed, but it can be used as a slight optimization; and it's needed internally to implement
+ |run_none_finish| among others.
+ For each TeX-call-Python layer, \emph{exactly one} |finish| call can be made. If the function itself doesn't call
+ any |finish| call (which happens most of the time), then the wrapper will call |run_none_finish|.
+
+ Return some TeX code to be executed, and a Python function object that when called will call the TeX function
+ and return the result.
+
+ Possible optimizations:
+ * the |r| is not needed if not recursive and |ttp_argtypes| is nonempty
+ (the output itself tells Python when the \TeX\ code finished)
+ * the first line of the output may be on the same line as the |r| itself (done, use TTPEmbeddedLine type, although a bit hacky)
+ """
+ if ttp_argtypes!=[]:
+ assert sync!=False
+ sync=True
+
+ if sync is None:
+ sync=pythonimmediate.debugging
+
+ TeX_code=template_substitute(TeX_code, "%optional_sync%",
+ lambda _: r'\immediate\write\__write_file { r }' if sync else '',)
+
+ TeX_code=template_substitute(TeX_code, "%sync%",
+ lambda _: r'\immediate\write\__write_file { r }' if sync else '', optional=True)
+
+ assert sync is not None
+ if ttp_argtypes: assert sync
+ assert ttp_argtypes.count(TTPEmbeddedLine)<=1
+ identifier=get_random_identifier() # TODO to be fair it isn't necessary to make the identifier both ways distinct, can reuse
+
+ TeX_code=template_substitute(TeX_code, "%name%", lambda _: r"\__run_" + identifier + ":")
+
+ for i, argtype_ in enumerate(ptt_argtypes):
+ TeX_code=template_substitute(TeX_code, r"%read_arg" + str(i) + r"\(([^)]*)\)%",
+ lambda match: argtype_.read_code(match[1]),
+ optional=True)
+
+ for i, argtype in enumerate(ttp_argtypes):
+ TeX_code=template_substitute(TeX_code, f"%send_arg{i}" + r"\(([^)]*)\)%",
+ lambda match: argtype.send_code(match[1]),
+ optional=True)
+ TeX_code=template_substitute(TeX_code, f"%send_arg{i}_var" + r"\(([^)]*)\)%",
+ lambda match: argtype.send_code_var(match[1]),
+ optional=True)
+
+ def f(*args)->Optional[Tuple[TeXToPyData, ...]]:
+ assert len(args)==len(ptt_argtypes)
+
+ # send function header
+ check_not_finished()
+ if finish:
+ global action_done
+ action_done=True
+ send_raw(identifier+"\n")
+
+ # send function args
+ for arg, argtype in zip(args, ptt_argtypes):
+ assert isinstance(arg, argtype)
+ arg.write()
+
+ if not sync: return None
+
+ # wait for the result
+ if recursive:
+ result_=run_main_loop()
+ else:
+ result_=run_main_loop_get_return_one()
+
+ result: List[TeXToPyData]=[]
+ if TTPEmbeddedLine not in ttp_argtypes:
+ assert not result_
+ for argtype_ in ttp_argtypes:
+ if argtype_==TTPEmbeddedLine:
+ result.append(TTPEmbeddedLine(result_))
+ else:
+ result.append(argtype_.read())
+ return tuple(result)
+
+ return TeX_code, f
+
+scan_Python_call_TeX(__file__)
+
+def define_Python_call_TeX_local(*args, **kwargs)->PythonCallTeXFunctionType:
+ """
+	used to define "local" handlers, i.e. handlers used internally by this library.
+ The code will be included in mark_bootstrap().
+ """
+ code, result=define_Python_call_TeX(*args, **kwargs)
+ mark_bootstrap(code)
+ return result
+
+# essentially the same as the above, but the return type is guaranteed to be non-None to satisfy type checkers
+def define_Python_call_TeX_local_sync(*args, **kwargs)->PythonCallTeXSyncFunctionType:
+ return define_Python_call_TeX_local(*args, **kwargs, sync=True) # type: ignore
+
+run_none_finish=define_Python_call_TeX_local(
+r"""
+\cs_new_eq:NN %name% \relax
+""", [], [], finish=True, sync=False)
+
+
+"""
+|run_error_finish| is fatal to TeX, so we only run it when it's fatal to Python.
+
+We want to make sure the Python traceback is printed strictly before run_error_finish() is called,
+so that the Python traceback is not interleaved with TeX error messages.
+"""
+run_error_finish=define_Python_call_TeX_local(
+r"""
+\msg_new:nnn {pythonimmediate} {python-error} {Python~error.}
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \wlog{^^JPython~error~traceback:^^J\__data^^J}
+ \msg_error:nn {pythonimmediate} {python-error}
+}
+""", [PTTBlock], [], finish=True, sync=False)
+
+
+put_next_blue=define_Python_call_TeX_local(
+r"""
+\cs_new_protected:Npn \__put_next_blue_tmp {
+ %optional_sync%
+ \expandafter \__read_do_one_command: \noexpand
+}
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__target)%
+ \expandafter \__put_next_blue_tmp \__target
+}
+"""
+ , [PTTBalancedTokenList], [], recursive=False)
+
+
+put_next_tokenlist=define_Python_call_TeX_local(
+r"""
+\cs_new_protected:Npn \__put_next_tmp {
+ %optional_sync%
+ \__read_do_one_command:
+}
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__target)%
+ \expandafter \__put_next_tmp \__target
+}
+"""
+ , [PTTBalancedTokenList], [], recursive=False)
+
+get_next_=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% {
+ \peek_analysis_map_inline:n {
+ \peek_analysis_map_break:n {
+ \__tlserialize_char_unchecked:nnNN {##1}{##2}##3 \pythonimmediatecontinue
+ }
+ }
+}
+""", [], [TTPEmbeddedLine], recursive=False)
+
+put_next_bgroup=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__index)%
+ \expandafter \expandafter \expandafter \pythonimmediatecontinuenoarg
+ \char_generate:nn {\__index} {1}
+}
+""", [PTTInt], [], recursive=False)
+
+put_next_egroup=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__index)%
+ \expandafter \expandafter \expandafter \pythonimmediatecontinuenoarg
+ \char_generate:nn {\__index} {2}
+}
+""", [PTTInt], [], recursive=False)
+
+
+get_argument_tokenlist_=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% #1 {
+ %sync%
+ %send_arg0(#1)%
+ \__read_do_one_command:
+}
+""", [], [TTPBalancedTokenList], recursive=False)
+
+
+run_tokenized_line_local_=define_Python_call_TeX_local(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \__data
+ %optional_sync%
+ \__read_do_one_command:
+}
+""", [PTTTeXLine], [])
+
+@export_function_to_module
+def run_tokenized_line_local(line: str, *, check_braces: bool=True, check_newline: bool=True, check_continue: bool=True)->None:
+ check_line(line, braces=check_braces, newline=check_newline, continue_=(False if check_continue else None))
+ run_tokenized_line_local_(PTTTeXLine(line))
+
+
+
+@export_function_to_module
+def run_tokenized_line_peek(line: str, *, check_braces: bool=True, check_newline: bool=True, check_continue: bool=True)->str:
+ check_line(line, braces=check_braces, newline=check_newline, continue_=(True if check_continue else None))
+ return typing.cast(
+ Callable[[PTTTeXLine], Tuple[TTPEmbeddedLine]],
+ Python_call_TeX_local(
+ r"""
+ \cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \__data
+ }
+ """)
+ )(PTTTeXLine(line))[0]
+
+
+run_block_local_=define_Python_call_TeX_local(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \begingroup \newlinechar=10~ \expandafter \endgroup
+ \scantokens \expandafter{\__data}
+ % trick described in https://tex.stackexchange.com/q/640274 to scantokens the code with \newlinechar=10
+
+ %optional_sync%
+ \__read_do_one_command:
+}
+""", [PTTBlock], [])
+
+@export_function_to_module
+def run_block_local(block: str)->None:
+ run_block_local_(PTTBlock(block))
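+
+# A minimal usage sketch (never called): run several lines of TeX code at once.
+# The macro names are arbitrary examples.
+def _example_run_block_local()->None:
+	run_block_local(
+		r"\def\myexamplemacroa{1}" "\n"
+		r"\def\myexamplemacrob{22}")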
+
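+# Internal helper: o-expand (expand the leading token of) a token list sent from
+# Python exactly once, then send the result back.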
+expand_o_=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \exp_args:NNV \tl_set:No \__data \__data
+ %sync%
+ %send_arg0_var(\__data)%
+ \__read_do_one_command:
+}
+""", [PTTBalancedTokenList], [TTPBalancedTokenList], recursive=expansion_only_can_call_Python)
+
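+# Internal helper: x-expand (fully expand) a token list sent from Python, then
+# send the result back.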
+expand_x_=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \tl_set:Nx \__data {\__data}
+ %sync%
+ %send_arg0_var(\__data)%
+ \__read_do_one_command:
+}
+""", [PTTBalancedTokenList], [TTPBalancedTokenList], recursive=expansion_only_can_call_Python)
+
+execute_=define_Python_call_TeX_local(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \__data
+ %optional_sync%
+ \__read_do_one_command:
+}
+""", [PTTBalancedTokenList], [])
+
+futurelet_=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \expandafter \futurelet \__data \pythonimmediatecontinuenoarg
+}
+""", [PTTBalancedTokenList], [])
+
+futureletnext_=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__data)%
+ \afterassignment \pythonimmediatecontinuenoarg \expandafter \futurelet \__data
+}
+""", [PTTBalancedTokenList], [])
+
+continue_until_passed_back_=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_eq:NN %name% \relax
+""", [], [TTPEmbeddedLine])
+
+@export_function_to_module
+def continue_until_passed_back_str()->str:
+ """
+ Usage:
+
+	First put some tokens in the input stream that include |\pythonimmediatecontinue{...}|
+	(or |%sync% \__read_do_one_command:|), then call |continue_until_passed_back()|.
+
+	The function will only return when the |\pythonimmediatecontinue| is executed.
+ """
+ return str(continue_until_passed_back_()[0])
+
+@export_function_to_module
+def continue_until_passed_back()->None:
+ """
+	Same as |continue_until_passed_back_str()|, except that nothing should be returned from TeX to Python
+	(the returned string is asserted to be empty).
+ """
+ result=continue_until_passed_back_str()
+ assert not result
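+
+# A minimal usage sketch (never called) of the protocol described above: put tokens
+# containing \pythonimmediatecontinue{...} into the input stream, then wait for TeX
+# to execute them and pass control (and the argument, expanded) back.
+def _example_continue_until_passed_back()->None:
+	put_next(r"\pythonimmediatecontinue{done}")
+	result=continue_until_passed_back_str()
+	assert result=="done", result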
+
+
+@export_function_to_module
+def expand_once()->None:
+ typing.cast(Callable[[], None], Python_call_TeX_local(
+ r"""
+ \cs_new_protected:Npn %name% { \expandafter \pythonimmediatecontinuenoarg }
+ """, recursive=False, sync=True))()
+
+
+@export_function_to_module
+@user_documentation
+def get_arg_str()->str:
+ """
+ Get a mandatory argument.
+ """
+ return typing.cast(Callable[[], TTPEmbeddedLine], Python_call_TeX_local(
+ r"""
+ \cs_new_protected:Npn %name% #1 {
+ \immediate\write\__write_file { \unexpanded {
+ r #1
+ }}
+ \__read_do_one_command:
+ }
+ """, recursive=False))()
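+# Note: the argument is written back through \write with \unexpanded, so
+# get_arg_str() returns it detokenized and without any expansion.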
+
+get_arg_estr_=define_Python_call_TeX_local_sync(
+r"""
+\cs_new_protected:Npn %name% #1 {
+ %sync%
+ %send_arg0(#1)%
+ \__read_do_one_command:
+}
+""", [], [TTPEBlock], recursive=False)
+@export_function_to_module
+@user_documentation
+def get_arg_estr()->str:
+ return str(get_arg_estr_()[0])
+
+
+get_optional_argument_detokenized_=define_Python_call_TeX_local_sync(
+r"""
+\NewDocumentCommand %name% {o} {
+ \immediate\write \__write_file {
+ r ^^J
+ \IfNoValueTF {#1} {
+ 0
+ } {
+ \unexpanded{1 #1}
+ }
+ }
+ \__read_do_one_command:
+}
+""", [], [TTPLine], recursive=False)
+@export_function_to_module
+@user_documentation
+def get_optional_arg_str()->Optional[str]:
+ """
+ Get an optional argument.
+ """
+ [result]=get_optional_argument_detokenized_()
+ result_=str(result)
+ if result_=="0": return None
+ assert result_[0]=="1", result_
+ return result_[1:]
+
+
+get_optional_arg_estr_=define_Python_call_TeX_local_sync(
+r"""
+\NewDocumentCommand %name% {o} {
+ %sync%
+ \IfNoValueTF {#1} {
+ %send_arg0(0)%
+ } {
+ %send_arg0(1 #1)%
+ }
+ \__read_do_one_command:
+}
+""", [], [TTPEBlock], recursive=False)
+
+@export_function_to_module
+@user_documentation
+def get_optional_arg_estr()->Optional[str]:
+ [result]=get_optional_arg_estr_()
+ result_=str(result)
+ if result_=="0": return None
+ assert result_[0]=="1", result_
+ return result_[1:]
+
+
+get_verbatim_argument_=define_Python_call_TeX_local_sync(
+r"""
+\NewDocumentCommand %name% {v} {
+ \immediate\write\__write_file { \unexpanded {
+ r ^^J
+ #1
+ }}
+ \__read_do_one_command:
+}
+""", [], [TTPLine], recursive=False)
+@export_function_to_module
+@user_documentation
+def get_verb_arg()->str:
+ """
+	Get a verbatim argument. Since it's read verbatim, there's no worry about |#| being doubled,
+	but it can only be used at the top level.
+ """
+ return str(get_verbatim_argument_()[0])
+
+get_multiline_verbatim_argument_=define_Python_call_TeX_local_sync(
+r"""
+\NewDocumentCommand %name% {+v} {
+ \immediate\write\__write_file { r }
+ \begingroup
+ \newlinechar=13~ % this is what +v argument type in xparse uses
+ \__send_block:n { #1 }
+ \endgroup
+ \__read_do_one_command:
+}
+""", [], [TTPBlock], recursive=False)
+@export_function_to_module
+@user_documentation
+def get_multiline_verb_arg()->str:
+ """
+ Get a multi-line verbatim argument.
+ """
+ return str(get_multiline_verbatim_argument_()[0])
+
+newcommand2=define_Python_call_TeX_local(
+r"""
+\cs_new_protected:Npn %name% {
+ \begingroup
+ \endlinechar=-1~
+ %read_arg0(\__line)%
+ %read_arg1(\__identifier)%
+ \cs_new_protected:cpx {\__line} {
+ \unexpanded{\immediate\write \__write_file} { i \__identifier }
+ \unexpanded{\__read_do_one_command:}
+ }
+ \endgroup
+ %optional_sync%
+ \__read_do_one_command:
+}
+""", [PTTVerbatimLine, PTTVerbatimLine], [], recursive=False)
+
+renewcommand2=define_Python_call_TeX_local(
+r"""
+\cs_new_protected:Npn %name% {
+ \begingroup
+ \endlinechar=-1~
+ \readline \__read_file to \__line
+ \readline \__read_file to \__identifier
+ \exp_args:Ncx \renewcommand {\__line} {
+ \unexpanded{\immediate\write \__write_file} { i \__identifier }
+ \unexpanded{\__read_do_one_command:}
+ }
+ \exp_args:Nc \MakeRobust {\__line} % also make the command global
+ \endgroup
+ %optional_sync%
+ \__read_do_one_command:
+}
+""", [PTTVerbatimLine, PTTVerbatimLine], [], recursive=False)
+
+def check_function_name(name: str)->None:
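+	# only names consisting of Latin letters are accepted; single-letter (ASCII) names are additionally rejected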
+ if not re.fullmatch("[A-Za-z]+", name) or (len(name)==1 and ord(name)<=0x7f):
+ raise RuntimeError("Invalid function name: "+name)
+
+def newcommand_(name: str, f: Callable)->Callable:
+ identifier=get_random_identifier()
+
+ newcommand2(PTTVerbatimLine(name), PTTVerbatimLine(identifier))
+
+ _code=define_TeX_call_Python(
+ lambda: run_code_redirect_print_TeX(f),
+ name, argtypes=[], identifier=identifier)
+	# _code is ignored; the TeX-side command defined by newcommand2 above is already equivalent
+ return f
+
+def renewcommand_(name: str, f: Callable)->Callable:
+ identifier=get_random_identifier()
+
+ renewcommand2(PTTVerbatimLine(name), PTTVerbatimLine(identifier))
+	# TODO: remove the redundant entry from TeX_handlers (not strictly necessary; it only causes a slight memory leak)
+ #try: del TeX_handlers["u"+name]
+ #except KeyError: pass
+
+ _code=define_TeX_call_Python(
+ lambda: run_code_redirect_print_TeX(f),
+ name, argtypes=[], identifier=identifier)
+	# _code is ignored; the TeX-side command redefined by renewcommand2 above is already equivalent
+ return f
+
+
+
+@export_function_to_module
+def newcommand(x: Union[str, Callable, None]=None, f: Optional[Callable]=None)->Callable:
+ """
+ Define a new \TeX\ command.
+	If the name is not provided, it is automatically deduced from the function's name.
+ """
+ if f is not None: return newcommand(x)(f)
+ if x is None: return newcommand # weird design but okay (allow |@newcommand()| as well as |@newcommand|)
+ if isinstance(x, str): return functools.partial(newcommand_, x)
+ return newcommand_(x.__name__, x)
+
+@export_function_to_module
+def renewcommand(x: Union[str, Callable, None]=None, f: Optional[Callable]=None)->Callable:
+ """
+ Redefine a \TeX\ command.
+	If the name is not provided, it is automatically deduced from the function's name.
+	"""
+	if f is not None: return renewcommand(x)(f)
+	if x is None: return renewcommand # weird design but okay (allow |@renewcommand()| as well as |@renewcommand|)
+ if isinstance(x, str): return functools.partial(renewcommand_, x)
+ return renewcommand_(x.__name__, x)
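+
+# A minimal usage sketch (never called; command and function names are arbitrary).
+# newcommand can be used as a plain decorator, in which case the TeX command name
+# is taken from the Python function's name, or it can be given an explicit name.
+def _example_newcommand_usage()->None:
+	@newcommand
+	def myexamplegreet()->None:  # defines \myexamplegreet; the handler reads one argument itself
+		name=get_arg_str()
+		run_block_local("Hello, " + name + "!")
+
+	@renewcommand("myexamplegreet")
+	def _greet_again()->None:  # redefines \myexamplegreet
+		get_arg_str()  # the argument is still read, but discarded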
+
+
+# ========
+
+put_next_TeX_line=define_Python_call_TeX_local(
+r"""
+\cs_new_protected:Npn \__put_next_tmpa {
+ %optional_sync%
+ \__read_do_one_command:
+}
+\cs_new_protected:Npn %name% {
+ %read_arg0(\__target)%
+ \expandafter \__put_next_tmpa \__target
+}
+"""
+ , [PTTTeXLine], [], recursive=False)
+
+@export_function_to_module
+@user_documentation
+def put_next(arg: Union[str, Token, BalancedTokenList])->None:
+ """
+ Put some content forward in the input stream.
+
+	arg: has type |str| (will be tokenized in the current catcode regime; must be a single line),
+	|Token|, or |BalancedTokenList|.
+ """
+ if isinstance(arg, str): put_next_TeX_line(PTTTeXLine(arg))
+ else: arg.put_next()
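+
+# A minimal usage sketch (never called): put one line of TeX code back into the
+# input stream. Token and BalancedTokenList objects are forwarded to their own
+# .put_next() method instead.
+def _example_put_next()->None:
+	put_next(r"\relax\relax")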
+
+
+
+# TODO I wonder which one is faster. Need to benchmark...
+@export_function_to_module
+@user_documentation
+def peek_next_meaning()->str:
+ """
+ Get the meaning of the following token, as a string, using the current |\escapechar|.
+
+ This is recommended over |peek_next_token()| as it will not tokenize an extra token.
+
+ It's undefined behavior if there's a newline (|\newlinechar| or |^^J|, the latter is OS-specific)
+ in the meaning string.
+ """
+ return typing.cast(Callable[[], TTPEmbeddedLine], Python_call_TeX_local(
+ r"""
+ \cs_new_protected:Npn \__peek_next_meaning_callback: {
+
+ \edef \__tmp {\meaning \__tmp} % just in case |\__tmp| is outer, |\write| will not be able to handle it
+ %\immediate\write \__write_file { r \unexpanded\expandafter{\__tmp} }
+ \immediate\write \__write_file { r \__tmp }
+
+ \__read_do_one_command:
+ }
+ \cs_new_protected:Npn %name% {
+ \futurelet \__tmp \__peek_next_meaning_callback:
+ }
+ """, recursive=False))()
+
+
+if 0:
+ peek_next_char_=define_Python_call_TeX_local_sync(
+
+ # first attempt. Slower than peek_next_meaning.
+ r"""
+ \cs_new_protected:Npn \__peek_next_char_callback: {
+ \edef \__tmpb { \expandafter\str_item:nn\expandafter{\meaning \__tmp} {-1} } % \expandafter just in case \__tmp is \outer
+ \if \noexpand\__tmp \__tmpb % is a character
+ \immediate\write \__write_file { r^^J \__tmpb . }
+ \else % is not?
+ \immediate\write \__write_file { r^^J }
+ \fi
+ \__read_do_one_command:
+ }
+ \cs_new_protected:Npn %name% {
+ \futurelet \__tmp \__peek_next_char_callback:
+ }
+ """
+
+ # second attempt. Faster than before but still slower than peek_next_meaning.
+ #r"""
+ #\cs_new_protected:Npn %name% {
+ # \futurelet \__tmp \__peek_next_char_callback:
+ #}
+ #
+ #\cs_new_protected:Npn \__peek_next_char_callback: {
+ # %\if \noexpand\__tmp \c_space_token % there's also this case and that \__tmp is some TeX primitive conditional...
+ # \expandafter \__peek_next_char_callback_b: \meaning \__tmp \relax
+ #}
+ #
+ #\cs_new_protected:Npn \__peek_next_char_callback_b: #1 #2 {
+ # \ifx #2 \relax
+ # \if \noexpand\__tmp #1 % is a character
+ # \immediate\write \__write_file { r^^J #1 }
+ # \else % is not?
+ # \immediate\write \__write_file { r^^J }
+ # \fi
+ # \expandafter \__read_do_one_command:
+ # \else
+ # \expandafter \__peek_next_char_callback_b: \expandafter #2
+ # \fi
+ #}
+ #
+ #"""
+
+ , [], [TTPLine], recursive=False)
+
+
+
+meaning_str_to_catcode: Dict[str, Catcode]={
+ "begin-group character ": Catcode.bgroup,
+ "end-group character ": Catcode.egroup,
+ "math shift character ": Catcode.math,
+ "alignment tab character ": Catcode.alignment,
+ "macro parameter character ": Catcode.parameter,
+ "superscript character ": Catcode.superscript,
+ "subscript character ": Catcode.subscript,
+ "blank space ": Catcode.space,
+ "the letter ": Catcode.letter,
+ "the character ": Catcode.other,
+ }
+
+def parse_meaning_str(s: str)->Optional[Tuple[Catcode, str]]:
+ if s and s[:-1] in meaning_str_to_catcode:
+ return meaning_str_to_catcode[s[:-1]], s[-1]
+ return None
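+
+# A small self-contained sketch (never called): the \meaning of the character token
+# `a' with catcode letter is "the letter a", which parses as follows.
+def _example_parse_meaning_str()->None:
+	assert parse_meaning_str("the letter a")==(Catcode.letter, "a")
+	assert parse_meaning_str("the character ;")==(Catcode.other, ";")
+	assert parse_meaning_str(r"\relax") is None  # e.g. the meaning of a primitive is not a character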
+
+@export_function_to_module
+@user_documentation
+def peek_next_char()->str:
+ """
+	Get the character of the following token, or the empty string if it's not a character.
+	It will also return a nonempty string if the next token is an implicit character token.
+
+ Uses peek_next_meaning() under the hood to get the meaning of the following token. See peek_next_meaning() for a warning on undefined behavior.
+ """
+
+ #return str(peek_next_char_()[0])
+ # too slow (marginally slower than peek_next_meaning)
+
+ r=parse_meaning_str(peek_next_meaning())
+ if r is None:
+ return ""
+ return r[1]
+
+@export_function_to_module
+def get_next_char()->str:
+ result=Token.get_next()
+ assert isinstance(result, CharacterToken), "Next token is not a character!"
+ return result.chr
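+
+# Note: peek_next_char() only inspects the following token (via its \meaning) and
+# leaves it in the input stream, while get_next_char() actually consumes it.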
+
+# ========
+
+try:
+ send_bootstrap_code()
+	run_main_loop() # if this returns cleanly, TeX finished without error; otherwise some readline() will reach EOF and a stack trace will be printed
+	assert not raw_readline(), "Internal error: TeX sent an extra line"
+
+except:
+ # see also documentation of run_error_finish.
+ sys.stderr.write("\n")
+ traceback.print_exc(file=sys.stderr)
+
+ if do_run_error_finish:
+		action_done=False # force run_error_finish() to run
+ run_error_finish(PTTBlock("".join(traceback.format_exc())))
+
+ os._exit(0)
+