    Iterator,
    List,
    Optional,
+    Set,
    Text,
    Tuple,
    Pattern,
del token
-def group(*choices):
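+# Regex construction helpers: group() joins alternatives into "(a|b)";
+# any() and maybe() append the "*" and "?" quantifiers to such a group.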
+def group(*choices: str) -> str:
return "(" + "|".join(choices) + ")"
-def any(*choices):
+def any(*choices: str) -> str:
    return group(*choices) + "*"
-def maybe(*choices):
+def maybe(*choices: str) -> str:
    return group(*choices) + "?"
-def _combinations(*l):
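+# All concatenations x + y (and each lone x) of the given strings, skipping
+# pairs whose halves match case-insensitively; used to enumerate the legal
+# string prefix combinations such as "Rb" or "fr".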
+def _combinations(*l: str) -> Set[str]:
    return set(x + y for x in l for y in l + ("",) if x.casefold() != y.casefold())
    pass
-def printtoken(type, token, xxx_todo_changeme, xxx_todo_changeme1, line): # for testing
-    (srow, scol) = xxx_todo_changeme
-    (erow, ecol) = xxx_todo_changeme1
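+# A (row, column) position in the source being tokenized.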
+Coord = Tuple[int, int]
+
+
+def printtoken(
+    type: int, token: Text, srow_col: Coord, erow_col: Coord, line: Text
+) -> None:  # for testing
+    (srow, scol) = srow_col
+    (erow, ecol) = erow_col
    print(
        "%d,%d-%d,%d:\t%s\t%s" % (srow, scol, erow, ecol, tok_name[type], repr(token))
    )
-Coord = Tuple[int, int]
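+# Signature of the callback passed to tokenize() and tokenize_loop().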
TokenEater = Callable[[int, Text, Coord, Coord, Text], None]
# backwards compatible interface
-def tokenize_loop(readline, tokeneater):
+def tokenize_loop(readline: Callable[[], Text], tokeneater: TokenEater) -> None:
    for token_info in generate_tokens(readline):
        tokeneater(*token_info)
class Untokenizer:
-
    tokens: List[Text]
    prev_row: int
    prev_col: int
                            or endprogs.get(token[1])
                            or endprogs.get(token[2])
                        )
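+                        # endprogs.get() may return None, so narrow the
+                        # Optional with an assert before using it.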
-                        assert maybe_endprog is not None, f"endprog not found for {token}"
+                        assert (
+                            maybe_endprog is not None
+                        ), f"endprog not found for {token}"
                        endprog = maybe_endprog
                        contstr, needcont = line[start:], 1
                        contline = line
                    if token in ("def", "for"):
                        if stashed and stashed[0] == NAME and stashed[1] == "async":
-
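+                            # "async def": record its indentation so we know when
+                            # the async scope ends (async/await are keywords inside).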
                            if token == "def":
                                async_def = True
                                async_def_indent = indents[-1]