madduck's git repository
Every one of the projects in this repository is available at the canonical
URL git://git.madduck.net/madduck/pub/<projectpath> — see
each project's metadata for the exact URL.
All patches and comments are welcome. Please squash your changes to logical
commits before using git-format-patch and git-send-email to send them to
patches@git.madduck.net.
If you read over the Git project's submission guidelines and adhere to them,
I'd be especially grateful.
SSH access, as well as push access, can be individually
arranged.
If you use my repositories frequently, consider adding the following
snippet to ~/.gitconfig and using the third clone URL listed for each
project:
[url "git://git.madduck.net/madduck/"]
insteadOf = madduck:
import sys
from typing import (
Any,
import sys
from typing import (
Any,
+from contextlib import contextmanager
from dataclasses import dataclass, field
# Pgen imports
from . import grammar, parse, token, tokenize, pgen
from logging import Logger
from dataclasses import dataclass, field
# Pgen imports
from . import grammar, parse, token, tokenize, pgen
from logging import Logger
-from blib2to3.pytree import _Convert, NL
+from blib2to3.pytree import NL
from blib2to3.pgen2.grammar import Grammar
from blib2to3.pgen2.grammar import Grammar
-from contextlib import contextmanager
+from blib2to3.pgen2.tokenize import GoodTokenInfo
Path = Union[str, "os.PathLike[str]"]
Path = Union[str, "os.PathLike[str]"]
- def __init__(
- self,
- grammar: Grammar,
- convert: Optional[_Convert] = None,
- logger: Optional[Logger] = None,
- ) -> None:
+ def __init__(self, grammar: Grammar, logger: Optional[Logger] = None) -> None:
self.grammar = grammar
if logger is None:
logger = logging.getLogger(__name__)
self.logger = logger
self.grammar = grammar
if logger is None:
logger = logging.getLogger(__name__)
self.logger = logger
- def parse_tokens(self, tokens: Iterable[Any], debug: bool = False) -> NL:
+ def parse_tokens(self, tokens: Iterable[GoodTokenInfo], debug: bool = False) -> NL:
"""Parse a series of tokens and return the syntax tree."""
# XXX Move the prefix computation into a wrapper around tokenize.
proxy = TokenProxy(tokens)
"""Parse a series of tokens and return the syntax tree."""
# XXX Move the prefix computation into a wrapper around tokenize.
proxy = TokenProxy(tokens)
- p = parse.Parser(self.grammar, self.convert)
+ p = parse.Parser(self.grammar)
p.setup(proxy=proxy)
lineno = 1
column = 0
p.setup(proxy=proxy)
lineno = 1
column = 0
+ indent_columns: List[int] = []
type = value = start = end = line_text = None
prefix = ""
type = value = start = end = line_text = None
prefix = ""
if type == token.OP:
type = grammar.opmap[value]
if debug:
if type == token.OP:
type = grammar.opmap[value]
if debug:
+ assert type is not None
self.logger.debug(
"%s %r (prefix=%r)", token.tok_name[type], value, prefix
)
self.logger.debug(
"%s %r (prefix=%r)", token.tok_name[type], value, prefix
)
elif type == token.DEDENT:
_indent_col = indent_columns.pop()
prefix, _prefix = self._partially_consume_prefix(prefix, _indent_col)
elif type == token.DEDENT:
_indent_col = indent_columns.pop()
prefix, _prefix = self._partially_consume_prefix(prefix, _indent_col)
- if p.addtoken(type, value, (prefix, start)):
+ if p.addtoken(cast(int, type), value, (prefix, start)):
if debug:
self.logger.debug("Stop.")
break
if debug:
self.logger.debug("Stop.")
break