]> git.madduck.net Git - etc/vim.git/blobdiff - blib2to3/pgen2/tokenize.py

madduck's git repository

Every one of the projects in this repository is available at the canonical URL git://git.madduck.net/madduck/pub/<projectpath> — see each project's metadata for the exact URL.

All patches and comments are welcome. Please squash your changes to logical commits before using git-format-patch and git-send-email to patches@git.madduck.net. If you read over the Git project's submission guidelines and adhere to them, I'd be especially grateful.

SSH access, as well as push access, can be individually arranged.

If you use my repositories frequently, consider adding the following snippet to ~/.gitconfig and using the third clone URL listed for each project:

[url "git://git.madduck.net/madduck/"]
  insteadOf = madduck:

Hello github.com/psf!
[etc/vim.git] / blib2to3 / pgen2 / tokenize.py
index 43e1d597bc9b64792dd19229830f7578032fbe41..0912f43b867719f69e987bf1111725f81ed3693a 100644 (file)
@@ -31,7 +31,6 @@ __credits__ = \
 
 import re
 from codecs import BOM_UTF8, lookup
 
 import re
 from codecs import BOM_UTF8, lookup
-from attr import dataclass
 from blib2to3.pgen2.token import *
 
 from . import token
 from blib2to3.pgen2.token import *
 
 from . import token
@@ -138,10 +137,6 @@ single_quoted = (
 
 tabsize = 8
 
 
 tabsize = 8
 
-@dataclass(frozen=True)
-class TokenizerConfig:
-    async_is_reserved_keyword: bool = False
-
 class TokenError(Exception): pass
 
 class StopTokenizing(Exception): pass
 class TokenError(Exception): pass
 
 class StopTokenizing(Exception): pass
@@ -339,7 +334,7 @@ def untokenize(iterable):
     ut = Untokenizer()
     return ut.untokenize(iterable)
 
     ut = Untokenizer()
     return ut.untokenize(iterable)
 
-def generate_tokens(readline, config: TokenizerConfig = TokenizerConfig()):
+def generate_tokens(readline, grammar=None):
     """
     The generate_tokens() generator requires one argument, readline, which
     must be a callable object which provides the same interface as the
     """
     The generate_tokens() generator requires one argument, readline, which
     must be a callable object which provides the same interface as the
@@ -363,7 +358,7 @@ def generate_tokens(readline, config: TokenizerConfig = TokenizerConfig()):
 
     # If we know we're parsing 3.7+, we can unconditionally parse `async` and
     # `await` as keywords.
 
     # If we know we're parsing 3.7+, we can unconditionally parse `async` and
     # `await` as keywords.
-    async_is_reserved_keyword = config.async_is_reserved_keyword
+    async_keywords = False if grammar is None else grammar.async_keywords
     # 'stashed' and 'async_*' are used for async/await parsing
     stashed = None
     async_def = False
     # 'stashed' and 'async_*' are used for async/await parsing
     stashed = None
     async_def = False
@@ -514,7 +509,7 @@ def generate_tokens(readline, config: TokenizerConfig = TokenizerConfig()):
                         yield (STRING, token, spos, epos, line)
                 elif initial.isidentifier():               # ordinary name
                     if token in ('async', 'await'):
                         yield (STRING, token, spos, epos, line)
                 elif initial.isidentifier():               # ordinary name
                     if token in ('async', 'await'):
-                        if async_is_reserved_keyword or async_def:
+                        if async_keywords or async_def:
                             yield (ASYNC if token == 'async' else AWAIT,
                                    token, spos, epos, line)
                             continue
                             yield (ASYNC if token == 'async' else AWAIT,
                                    token, spos, epos, line)
                             continue