git.madduck.net Git - etc/vim.git/blobdiff - src/blib2to3/pgen2/parse.py
TYPE_CHECKING,
)
from blib2to3.pgen2.grammar import Grammar
-from blib2to3.pytree import NL, Context, RawNode, Leaf, Node
+from blib2to3.pytree import convert, NL, Context, RawNode, Leaf, Node
if TYPE_CHECKING:
from blib2to3.driver import TokenProxy
finally:
self.parser.stack = self._start_point
- def add_token(
- self, tok_type: int, tok_val: Optional[Text], raw: bool = False
- ) -> None:
+ def add_token(self, tok_type: int, tok_val: Text, raw: bool = False) -> None:
func: Callable[..., Any]
if raw:
func = self.parser._addtoken
args.insert(0, ilabel)
func(*args)
- def determine_route(
- self, value: Optional[Text] = None, force: bool = False
- ) -> Optional[int]:
+ def determine_route(self, value: Text = None, force: bool = False) -> Optional[int]:
alive_ilabels = self.ilabels
if len(alive_ilabels) == 0:
*_, most_successful_ilabel = self._dead_ilabels
to be converted. The syntax tree is converted from the bottom
up.
+ **post-note: the convert argument is ignored since for Black's
+ usage, convert will always be blib2to3.pytree.convert. Allowing
+ this to be dynamic hurts mypyc's ability to use early binding.
+ These docs are left for historical and informational value.
+
A concrete syntax tree node is a (type, value, context, nodes)
tuple, where type is the node type (a token or symbol number),
value is None for symbols and a string for tokens, context is
"""
self.grammar = grammar
"""
self.grammar = grammar
+ # See note in docstring above. TL;DR this is ignored.
self.convert = convert or lam_sub
def setup(self, proxy: "TokenProxy", start: Optional[int] = None) -> None:
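Note on the convert change that runs through this diff: the raw node handed to the converter is the (type, value, context, nodes) tuple described in the docstring above, and the new code calls blib2to3.pytree.convert directly instead of going through the self.convert attribute so that mypyc can bind the call at compile time. A minimal sketch of that difference, with an illustrative make_leaf standing in for the real converter (not taken from this diff):

    # Sketch only: contrasts dynamic dispatch with early binding. make_leaf
    # is a stand-in for blib2to3.pytree.convert, not part of this diff.
    from typing import Any, Callable, List, Optional, Tuple

    RawNode = Tuple[int, Optional[str], Optional[Any], Optional[List[Any]]]

    def make_leaf(grammar: Any, raw: RawNode) -> str:
        type_, value, _context, _nodes = raw
        return f"Leaf({type_}, {value!r})"

    class DynamicParser:
        # Before: the converter lives on the instance, so every call is an
        # attribute lookup that must be dispatched at run time.
        def __init__(self, convert: Optional[Callable[[Any, RawNode], str]] = None) -> None:
            self.convert = convert or make_leaf

        def shift(self, raw: RawNode) -> str:
            return self.convert(None, raw)

    class EarlyBoundParser:
        # After: the module-level function is called directly, so a compiler
        # such as mypyc can resolve the target statically (early binding).
        def shift(self, raw: RawNode) -> str:
            return make_leaf(None, raw)

    print(EarlyBoundParser().shift((1, "pass", None, None)))  # Leaf(1, 'pass')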
self.used_names: Set[str] = set()
self.proxy = proxy
- def addtoken(self, type: int, value: Optional[Text], context: Context) -> bool:
+ def addtoken(self, type: int, value: Text, context: Context) -> bool:
"""Add a token; return True iff this is the end of the program."""
# Map from token to label
ilabels = self.classify(type, value, context)
"""Add a token; return True iff this is the end of the program."""
# Map from token to label
ilabels = self.classify(type, value, context)
next_token_type, next_token_value, *_ = proxy.eat(counter)
if next_token_type == tokenize.OP:
- next_token_type = grammar.opmap[cast(str, next_token_value)]
+ next_token_type = grammar.opmap[next_token_value]
recorder.add_token(next_token_type, next_token_value)
counter += 1
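For context on the cast removal above: token values are now always plain strings, and grammar.opmap simply maps an operator's text to its specific token number, so a generic OP token from the tokenizer is narrowed before being recorded for the lookahead. A rough illustration with placeholder numbers:

    # Placeholder numbers; the real values come from blib2to3.pgen2.token.
    OP = 54
    opmap = {"(": 7, ")": 8, "+": 14}

    def narrow_op(tok_type: int, tok_val: str) -> int:
        # "(" arrives as a generic OP token and leaves as its own token type.
        return opmap[tok_val] if tok_type == OP else tok_type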
return self._addtoken(ilabel, type, value, context)
- def _addtoken(
- self, ilabel: int, type: int, value: Optional[Text], context: Context
- ) -> bool:
+ def _addtoken(self, ilabel: int, type: int, value: Text, context: Context) -> bool:
# Loop until the token is shifted; may raise exceptions
while True:
dfa, state, node = self.stack[-1]
arcs = states[state]
# Look for a state with this label
for i, newstate in arcs:
- t, v = self.grammar.labels[i]
- if ilabel == i:
+ t = self.grammar.labels[i][0]
+ if t >= 256:
+ # See if it's a symbol and if we're in its first set
+ itsdfa = self.grammar.dfas[t]
+ itsstates, itsfirst = itsdfa
+ if ilabel in itsfirst:
+ # Push a symbol
+ self.push(t, itsdfa, newstate, context)
+ break # To continue the outer while loop
+
+ elif ilabel == i:
# Look it up in the list of labels
# Shift a token; we're done with it
self.shift(type, value, newstate, context)
# Pop while we are in an accept-only state
states, first = dfa
# Done with this token
return False
- elif t >= 256:
- # See if it's a symbol and if we're in its first set
- itsdfa = self.grammar.dfas[t]
- itsstates, itsfirst = itsdfa
- if ilabel in itsfirst:
- # Push a symbol
- self.push(t, self.grammar.dfas[t], newstate, context)
- break # To continue the outer while loop
else:
if (0, state) in arcs:
# An accepting state, pop it and try something else
# No success finding a transition
raise ParseError("bad input", type, value, context)
- def classify(self, type: int, value: Optional[Text], context: Context) -> List[int]:
+ def classify(self, type: int, value: Text, context: Context) -> List[int]:
"""Turn a token into a label. (Internal)
Depending on whether the value is a soft-keyword or not,
this function may return multiple labels to choose from."""
if type == token.NAME:
# Keep a listing of all used names
"""Turn a token into a label. (Internal)
Depending on whether the value is a soft-keyword or not,
this function may return multiple labels to choose from."""
if type == token.NAME:
# Keep a listing of all used names
- assert value is not None
self.used_names.add(value)
# Check for reserved words
if value in self.grammar.keywords:
raise ParseError("bad token", type, value, context)
return [ilabel]
raise ParseError("bad token", type, value, context)
return [ilabel]
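As the docstring says, classify can return more than one label: a NAME that is only a soft keyword (for example match in recent grammars) maps both to its keyword label and to the plain NAME label, and the recorder later backtracks to whichever reading parses. A hedged sketch of that shape, assuming keyword and soft-keyword label maps like those on blib2to3's Grammar; the label numbers are invented:

    # Invented label numbers; only the shape of the return value matters.
    from typing import List

    NAME_LABEL = 3
    keywords = {"if": 10, "return": 11}
    soft_keywords = {"match": 12, "case": 13}

    def classify_name(value: str) -> List[int]:
        if value in keywords:
            return [keywords[value]]                   # hard keyword: one label
        if value in soft_keywords:
            return [soft_keywords[value], NAME_LABEL]  # soft keyword: try both
        return [NAME_LABEL]                            # ordinary identifier

    assert classify_name("match") == [12, 3]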
- def shift(
- self, type: int, value: Optional[Text], newstate: int, context: Context
- ) -> None:
+ def shift(self, type: int, value: Text, newstate: int, context: Context) -> None:
"""Shift a token. (Internal)"""
dfa, state, node = self.stack[-1]
"""Shift a token. (Internal)"""
dfa, state, node = self.stack[-1]
- assert value is not None
- assert context is not None
rawnode: RawNode = (type, value, context, None)
- newnode = self.convert(self.grammar, rawnode)
- if newnode is not None:
- assert node[-1] is not None
- node[-1].append(newnode)
+ newnode = convert(self.grammar, rawnode)
+ assert node[-1] is not None
+ node[-1].append(newnode)
self.stack[-1] = (dfa, newstate, node)
def push(self, type: int, newdfa: DFAS, newstate: int, context: Context) -> None:
def pop(self) -> None:
"""Pop a nonterminal. (Internal)"""
popdfa, popstate, popnode = self.stack.pop()
- newnode = self.convert(self.grammar, popnode)
- if newnode is not None:
- if self.stack:
- dfa, state, node = self.stack[-1]
- assert node[-1] is not None
- node[-1].append(newnode)
- else:
- self.rootnode = newnode
- self.rootnode.used_names = self.used_names
+ newnode = convert(self.grammar, popnode)
+ if self.stack:
+ dfa, state, node = self.stack[-1]
+ assert node[-1] is not None
+ node[-1].append(newnode)
+ else:
+ self.rootnode = newnode
+ self.rootnode.used_names = self.used_names
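The is-not-None checks dropped from shift and pop above rely on the converter always producing a node: blib2to3.pytree.convert builds a Leaf for tokens and a Node for symbols, collapsing trivial single-child nodes, and never returns None. An approximate sketch of that contract (simplified from the lib2to3 lineage, not the exact blib2to3 source):

    # Simplified sketch of the convert contract assumed by the new code.
    from blib2to3.pytree import Leaf, Node

    def convert_sketch(gr, raw_node):
        type_, value, context, children = raw_node
        if children or type_ in gr.number2symbol:
            # Symbol: wrap the children in a Node, collapsing single-child
            # nodes so the tree stays shallow.
            if len(children) == 1:
                return children[0]
            return Node(type_, children, context=context)
        # Token: always a Leaf, never None.
        return Leaf(type_, value, context=context)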