git.madduck.net Git - etc/vim.git/blobdiff - blib2to3/pgen2/tokenize.pyi

madduck's git repository

Every one of the projects in this repository is available at the canonical URL git://git.madduck.net/madduck/pub/<projectpath> — see each project's metadata for the exact URL.

All patches and comments are welcome. Please squash your changes to logical commits before using git-format-patch and git-send-email to patches@git.madduck.net. If you'd read over the Git project's submission guidelines and adhered to them, I'd be especially grateful.

SSH access, as well as push access, can be individually arranged.

If you use my repositories frequently, consider adding the following snippet to ~/.gitconfig and using the third clone URL listed for each project:

[url "git://git.madduck.net/madduck/"]
  insteadOf = madduck:

Test Windows, macOS and Linux on GitHub Actions (#1085)
[etc/vim.git] / blib2to3 / pgen2 / tokenize.pyi
index ac0f0f1bf6f1f254016e9b006c498e87787f93f6..d3011a8091bfaa83f42ee6b76ff4c934667f79ed 100644 (file)
@@ -1,18 +1,15 @@
 # Stubs for lib2to3.pgen2.tokenize (Python 3.6)
 # NOTE: Only elements from __all__ are present.
 
-from typing import Callable, Iterable, Iterator, List, Text, Tuple
-from attr import dataclass
+from typing import Callable, Iterable, Iterator, List, Optional, Text, Tuple
 from blib2to3.pgen2.token import *  # noqa
+from blib2to3.pygram import Grammar
 
 
 _Coord = Tuple[int, int]
 _TokenEater = Callable[[int, Text, _Coord, _Coord, Text], None]
 _TokenInfo = Tuple[int, Text, _Coord, _Coord, Text]
 
-@dataclass(frozen=True)
-class TokenizerConfig:
-    async_is_reserved_keyword: bool = False
 
 class TokenError(Exception): ...
 class StopTokenizing(Exception): ...
@@ -30,5 +27,6 @@ class Untokenizer:
 
 def untokenize(iterable: Iterable[_TokenInfo]) -> Text: ...
 def generate_tokens(
 
 def untokenize(iterable: Iterable[_TokenInfo]) -> Text: ...
 def generate_tokens(
-    readline: Callable[[], Text]
+    readline: Callable[[], Text],
+    grammar: Optional[Grammar] = ...
 ) -> Iterator[_TokenInfo]: ...