Prepare release 23.10.0 (#3951)
[etc/vim.git] / src/blib2to3/pgen2/parse.py
index 17bf118e9fcd8053b655b4efc42e26f840b40465..ad51a3dad08dfbbe1fee7cc9b7845eeb7cb263fd 100644 (file)
@@ -10,24 +10,25 @@ how this parsing engine works.
 
 """
 from contextlib import contextmanager
-
-# Local imports
-from . import grammar, token, tokenize
 from typing import (
-    cast,
+    TYPE_CHECKING,
     Any,
-    Optional,
-    Union,
-    Tuple,
+    Callable,
     Dict,
-    List,
     Iterator,
-    Callable,
+    List,
+    Optional,
     Set,
-    TYPE_CHECKING,
+    Tuple,
+    Union,
+    cast,
 )
+
 from blib2to3.pgen2.grammar import Grammar
-from blib2to3.pytree import convert, NL, Context, RawNode, Leaf, Node
+from blib2to3.pytree import NL, Context, Leaf, Node, RawNode, convert
+
+# Local imports
+from . import grammar, token, tokenize
 
 if TYPE_CHECKING:
     from blib2to3.pgen2.driver import TokenProxy
@@ -112,7 +113,9 @@ class Recorder:
                     args.insert(0, ilabel)
                 func(*args)
 
-    def determine_route(self, value: Optional[str] = None, force: bool = False) -> Optional[int]:
+    def determine_route(
+        self, value: Optional[str] = None, force: bool = False
+    ) -> Optional[int]:
         alive_ilabels = self.ilabels
         if len(alive_ilabels) == 0:
             *_, most_successful_ilabel = self._dead_ilabels
@@ -208,6 +211,7 @@ class Parser:
         # See note in docstring above. TL;DR this is ignored.
         self.convert = convert or lam_sub
         self.is_backtracking = False
+        self.last_token: Optional[int] = None
 
     def setup(self, proxy: "TokenProxy", start: Optional[int] = None) -> None:
         """Prepare for parsing.
@@ -233,6 +237,7 @@ class Parser:
         self.rootnode: Optional[NL] = None
         self.used_names: Set[str] = set()
         self.proxy = proxy
+        self.last_token = None
 
     def addtoken(self, type: int, value: str, context: Context) -> bool:
         """Add a token; return True iff this is the end of the program."""
@@ -314,6 +319,7 @@ class Parser:
                         dfa, state, node = self.stack[-1]
                         states, first = dfa
                     # Done with this token
+                    self.last_token = type
                     return False
 
             else:
@@ -340,9 +346,23 @@ class Parser:
                 return [self.grammar.keywords[value]]
             elif value in self.grammar.soft_keywords:
                 assert type in self.grammar.tokens
+                # Current soft keywords (match, case, type) can only appear at the
+                # beginning of a statement. So as a shortcut, don't try to treat them
+                # like keywords in any other context.
+                # ('_' is also a soft keyword in the real grammar, but for our grammar
+                # it's just an expression, so we don't need to treat it specially.)
+                if self.last_token not in (
+                    None,
+                    token.INDENT,
+                    token.DEDENT,
+                    token.NEWLINE,
+                    token.SEMI,
+                    token.COLON,
+                ):
+                    return [self.grammar.tokens[type]]
                 return [
-                    self.grammar.soft_keywords[value],
                     self.grammar.tokens[type],
+                    self.grammar.soft_keywords[value],
                 ]
 
         ilabel = self.grammar.tokens.get(type)
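The change above works in two parts: the parser records the type of the last token it shifted (self.last_token, initialized in __init__ and reset in setup() so state never leaks between parses), and the label-selection code consults it so that the soft keywords match, case, and type are only tried as keywords where a statement can begin. Below is a minimal, self-contained sketch of that decision, not blib2to3's actual API; the token constants and the candidate_labels() helper are hypothetical stand-ins for the real token module and the method patched in the final hunk.

# Hypothetical stand-ins for blib2to3's token-type constants.
INDENT, DEDENT, NEWLINE, SEMI, COLON, NAME, OP = range(7)

# A statement can only begin at the start of input (None) or right after
# one of these token types -- the same set the diff checks self.last_token
# against.
STATEMENT_BOUNDARY = {None, INDENT, DEDENT, NEWLINE, SEMI, COLON}

SOFT_KEYWORDS = {"match", "case", "type"}

def candidate_labels(last_token, tok_type, value):
    """Return the readings the parser should try for this token."""
    if tok_type == NAME and value in SOFT_KEYWORDS:
        if last_token not in STATEMENT_BOUNDARY:
            # e.g. the "match" in "x = match": it cannot be a keyword here,
            # so skip the soft-keyword alternative entirely.
            return ["plain token"]
        # At a possible statement start, try the plain reading first and
        # the soft-keyword reading second, matching the reordered return
        # in the final hunk.
        return ["plain token", "soft keyword"]
    return ["plain token"]

# "match" right after a NEWLINE may open a match statement:
assert candidate_labels(NEWLINE, NAME, "match") == ["plain token", "soft keyword"]
# "match" after "=" (an operator token) is just an identifier:
assert candidate_labels(OP, NAME, "match") == ["plain token"]

As the in-diff comment says, this is a shortcut: ordinary uses of match, case, or type as names no longer trigger a soft-keyword parse attempt that is guaranteed to fail.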