Fixes #3790
Slightly hacky, but I think this is correct and it should also improve performance somewhat.
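As a quick illustration (not part of the patch), the failing input from #3790 is a `match` subject containing an attribute access named `type`; with this change it parses and formats without error. The snippet below is only a sketch of the repro, assuming Black's public `format_str`/`Mode` API:

```python
# Sketch of the repro for #3790 (assumes `black` is installed; `src` mirrors
# the new test case added in this PR).
import black

src = """\
match (X.type, Y):
    case _:
        pass
"""

# Before this fix, the parser classified `type` after the dot as a soft
# keyword and rejected the file; now the snippet formats cleanly.
print(black.format_str(src, mode=black.Mode()))
```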
<!-- Changes to the parser or to version autodetection -->
+- Fix bug where attributes named `type` were not accepted inside `match` statements
+ (#3950)
- Add support for PEP 695 type aliases containing lambdas and other unusual expressions
(#3949)
# See note in docstring above. TL;DR this is ignored.
self.convert = convert or lam_sub
self.is_backtracking = False
+ self.last_token: Optional[int] = None
def setup(self, proxy: "TokenProxy", start: Optional[int] = None) -> None:
"""Prepare for parsing.
self.rootnode: Optional[NL] = None
self.used_names: Set[str] = set()
self.proxy = proxy
+ self.last_token = None
def addtoken(self, type: int, value: str, context: Context) -> bool:
"""Add a token; return True iff this is the end of the program."""
dfa, state, node = self.stack[-1]
states, first = dfa
# Done with this token
+ self.last_token = type
return False
return [self.grammar.keywords[value]]
elif value in self.grammar.soft_keywords:
assert type in self.grammar.tokens
+ # Current soft keywords (match, case, type) can only appear at the
+ # beginning of a statement. So as a shortcut, don't try to treat them
+ # like keywords in any other context.
+ # ('_' is also a soft keyword in the real grammar, but for our grammar
+ # it's just an expression, so we don't need to treat it specially.)
+ if self.last_token not in (
+ None,
+ token.INDENT,
+ token.DEDENT,
+ token.NEWLINE,
+ token.SEMI,
+ token.COLON,
+ ):
+ return [self.grammar.tokens[type]]
return [
- self.grammar.soft_keywords[value],
self.grammar.tokens[type],
+ self.grammar.soft_keywords[value],
]
ilabel = self.grammar.tokens.get(type)
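The shortcut above relies on the fact that `match`, `case`, and `type` can only start a statement, so the soft-keyword interpretation is only worth trying when the previous token could end a statement or open a suite. Here is an illustrative sketch of that rule using the token constants from the `blib2to3` package that ships with Black; the helper and set names are hypothetical, not part of the patch:

```python
# Illustrative sketch only; `could_be_soft_keyword` and
# STATEMENT_BOUNDARY_TOKENS are hypothetical names, not part of the patch.
from blib2to3.pgen2 import token

STATEMENT_BOUNDARY_TOKENS = {
    None,           # very first token of the file
    token.INDENT,   # start of an indented block
    token.DEDENT,   # end of an indented block
    token.NEWLINE,  # end of the previous logical line
    token.SEMI,     # `;` between simple statements
    token.COLON,    # e.g. `class X: type InClass = int`
}


def could_be_soft_keyword(last_token):
    """True if a soft keyword may begin a new statement at this position."""
    return last_token in STATEMENT_BOUNDARY_TOKENS


# In `match (X.type, Y)` the token before `type` is a DOT, so `type` is
# classified as a plain NAME and the subject parses (the bug in #3790).
assert not could_be_soft_keyword(token.DOT)

# At the start of a line the previous token is NEWLINE (or INDENT/DEDENT),
# so both the soft-keyword and plain-name labels are still tried.
assert could_be_soft_keyword(token.NEWLINE)
```

Skipping the soft-keyword label in every other position is presumably also where the performance gain mentioned in the description comes from: the parser no longer has to backtrack over a doomed soft-keyword interpretation whenever `match`, `case`, or `type` is used as an ordinary name.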
+# issue 3790
+match (X.type, Y):
+ case _:
+ pass
type Alias[T]=lambda: T
type And[T]=T and T
type IfElse[T]=T if T else T
+type One = int; type Another = str
+class X: type InClass = int
type = aliased
print(type(42))

# output

type Alias[T] = lambda: T
type And[T] = T and T
type IfElse[T] = T if T else T
+type One = int
+type Another = str
+
+
+class X:
+ type InClass = int
+
type = aliased
print(type(42))