List,
Optional,
Set,
- Text,
Tuple,
Pattern,
Union,
def _combinations(*l: str) -> Set[str]:
- return set(x + y for x in l for y in l + ("",) if x.casefold() != y.casefold())
+ return {x + y for x in l for y in l + ("",) if x.casefold() != y.casefold()}
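# Illustrative only (not part of this change): the comprehension above concatenates
# each distinct pair of prefixes in both orders and also keeps every prefix on its
# own. A hypothetical two-prefix call would behave like this:
#
#     >>> sorted(_combinations("r", "b"))
#     ['b', 'br', 'r', 'rb']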
Whitespace = r"[ \f\t]*"
def printtoken(
- type: int, token: Text, srow_col: Coord, erow_col: Coord, line: Text
+ type: int, token: str, srow_col: Coord, erow_col: Coord, line: str
) -> None: # for testing
(srow, scol) = srow_col
(erow, ecol) = erow_col
)
-TokenEater = Callable[[int, Text, Coord, Coord, Text], None]
+TokenEater = Callable[[int, str, Coord, Coord, str], None]
-def tokenize(readline: Callable[[], Text], tokeneater: TokenEater = printtoken) -> None:
+def tokenize(readline: Callable[[], str], tokeneater: TokenEater = printtoken) -> None:
"""
The tokenize() function accepts two parameters: one representing the
input stream, and one providing an output mechanism for tokenize().
# backwards compatible interface
-def tokenize_loop(readline: Callable[[], Text], tokeneater: TokenEater) -> None:
+def tokenize_loop(readline: Callable[[], str], tokeneater: TokenEater) -> None:
for token_info in generate_tokens(readline):
tokeneater(*token_info)
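# Illustrative only: a minimal sketch of driving the str-based readline interface,
# assuming the module-level generate_tokens() defined further below; io.StringIO
# supplies a readline that returns str, matching the updated Callable[[], str]
# annotations.
#
#     >>> import io
#     >>> toks = list(generate_tokens(io.StringIO("x = 1\n").readline))
#     >>> [tok_string for _type, tok_string, _start, _end, _line in toks]
#     ['x', '=', '1', '\n', '']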
-GoodTokenInfo = Tuple[int, Text, Coord, Coord, Text]
+GoodTokenInfo = Tuple[int, str, Coord, Coord, str]
TokenInfo = Union[Tuple[int, str], GoodTokenInfo]
class Untokenizer:
- tokens: List[Text]
+ tokens: List[str]
prev_row: int
prev_col: int
if col_offset:
self.tokens.append(" " * col_offset)
- def untokenize(self, iterable: Iterable[TokenInfo]) -> Text:
+ def untokenize(self, iterable: Iterable[TokenInfo]) -> str:
for t in iterable:
if len(t) == 2:
self.compat(cast(Tuple[int, str], t), iterable)
break
tok_type, token, start, end, line = cast(
- Tuple[int, Text, Coord, Coord, Text], t
+ Tuple[int, str, Coord, Coord, str], t
)
self.add_whitespace(start)
self.tokens.append(token)
self.prev_col = 0
return "".join(self.tokens)
- def compat(self, token: Tuple[int, Text], iterable: Iterable[TokenInfo]) -> None:
+ def compat(self, token: Tuple[int, str], iterable: Iterable[TokenInfo]) -> None:
startline = False
indents = []
toks_append = self.tokens.append
try:
return readline()
except StopIteration:
- return bytes()
+ return b""
def find_cookie(line: bytes) -> Optional[str]:
try:
return default, [first, second]
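# Illustrative only: the read_or_stop()/find_cookie() fragments above appear to
# belong to a stdlib-style detect_encoding(readline) helper, which consumes a
# bytes readline and returns the declared encoding plus the lines it read. A
# hedged sketch, assuming that helper exists in this module under that name:
#
#     >>> import io
#     >>> encoding, consumed = detect_encoding(
#     ...     io.BytesIO(b"# -*- coding: utf-8 -*-\nx = 1\n").readline
#     ... )
#     >>> encoding
#     'utf-8'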
-def untokenize(iterable: Iterable[TokenInfo]) -> Text:
+def untokenize(iterable: Iterable[TokenInfo]) -> str:
"""Transform tokens back into Python source code.
Each element returned by the iterable must be a token sequence
def generate_tokens(
- readline: Callable[[], Text], grammar: Optional[Grammar] = None
+ readline: Callable[[], str], grammar: Optional[Grammar] = None
) -> Iterator[GoodTokenInfo]:
"""
The generate_tokens() generator requires one argument, readline, which