git.madduck.net Git - etc/vim.git/blobdiff - blib2to3/pgen2/driver.py
import io
import logging
import os
import pkgutil
import sys
+from typing import (
+ Any,
+ Callable,
+ IO,
+ Iterable,
+ List,
+ Optional,
+ Text,
+ Tuple,
+ Union,
+ Sequence,
+)
# Pgen imports
from . import grammar, parse, token, tokenize, pgen
+from logging import Logger
+from blib2to3.pytree import _Convert, NL
+from blib2to3.pgen2.grammar import Grammar
+Path = Union[str, "os.PathLike[str]"]
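For context, the Path alias above is the usual str-or-PathLike pattern; a minimal
standalone sketch (not part of the diff) showing that both plain strings and
pathlib.Path values satisfy it:

    import os
    import pathlib
    from typing import Union

    Path = Union[str, "os.PathLike[str]"]

    def open_grammar(p: Path) -> str:
        # os.fspath() normalizes either form to a plain string path.
        return os.fspath(p)

    open_grammar("Grammar.txt")
    open_grammar(pathlib.Path("Grammar.txt"))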
def __init__(
self,
- grammar,
- convert=None,
- logger=None,
- tokenizer_config=tokenize.TokenizerConfig(),
- ):
+ grammar: Grammar,
+ convert: Optional[_Convert] = None,
+ logger: Optional[Logger] = None,
+ ) -> None:
self.grammar = grammar
if logger is None:
logger = logging.getLogger(__name__)
self.logger = logger
self.convert = convert
- self.tokenizer_config = tokenizer_config
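A hedged usage sketch of the annotated constructor (Driver is the class this
file defines and load_grammar() appears further down in the same module; the
file name here is an assumption):

    import logging
    from blib2to3.pgen2 import driver

    # Build a grammar (regenerated or unpickled) and hand it to the Driver.
    grammar = driver.load_grammar("Grammar.txt")
    d = driver.Driver(grammar, logger=logging.getLogger("blib2to3"))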
- def parse_tokens(self, tokens, debug=False):
+ def parse_tokens(self, tokens: Iterable[Any], debug: bool = False) -> NL:
"""Parse a series of tokens and return the syntax tree."""
# XXX Move the prefix computation into a wrapper around tokenize.
p = parse.Parser(self.grammar, self.convert)
"""Parse a series of tokens and return the syntax tree."""
# XXX Move the prefix computation into a wrapper around tokenize.
p = parse.Parser(self.grammar, self.convert)
if type == token.OP:
type = grammar.opmap[value]
if debug:
- self.logger.debug("%s %r (prefix=%r)",
- token.tok_name[type], value, prefix)
+ self.logger.debug(
+ "%s %r (prefix=%r)", token.tok_name[type], value, prefix
+ )
if type == token.INDENT:
indent_columns.append(len(value))
_prefix = prefix + value
column = 0
else:
# We never broke out -- EOF is too soon (how can this happen???)
- raise parse.ParseError("incomplete input",
- type, value, (prefix, start))
+ assert start is not None
+ raise parse.ParseError("incomplete input", type, value, (prefix, start))
+ assert p.rootnode is not None
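The two asserts added above narrow Optional types for the type checker rather
than change runtime behavior; the pattern in isolation:

    from typing import Optional

    def first_char(s: Optional[str]) -> str:
        assert s is not None  # mypy narrows Optional[str] to str past this line
        return s[0]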
- def parse_stream_raw(self, stream, debug=False):
+ def parse_stream_raw(self, stream: IO[Text], debug: bool = False) -> NL:
"""Parse a stream and return the syntax tree."""
"""Parse a stream and return the syntax tree."""
- tokens = tokenize.generate_tokens(stream.readline, config=self.tokenizer_config)
+ tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)
return self.parse_tokens(tokens, debug)
- def parse_stream(self, stream, debug=False):
+ def parse_stream(self, stream: IO[Text], debug: bool = False) -> NL:
"""Parse a stream and return the syntax tree."""
return self.parse_stream_raw(stream, debug)
"""Parse a stream and return the syntax tree."""
return self.parse_stream_raw(stream, debug)
- def parse_file(self, filename, encoding=None, debug=False):
+ def parse_file(
+ self, filename: Path, encoding: Optional[Text] = None, debug: bool = False,
+ ) -> NL:
"""Parse a file and return the syntax tree."""
with io.open(filename, "r", encoding=encoding) as stream:
return self.parse_stream(stream, debug)
"""Parse a file and return the syntax tree."""
with io.open(filename, "r", encoding=encoding) as stream:
return self.parse_stream(stream, debug)
- def parse_string(self, text, debug=False):
+ def parse_string(self, text: Text, debug: bool = False) -> NL:
"""Parse a string and return the syntax tree."""
tokens = tokenize.generate_tokens(
"""Parse a string and return the syntax tree."""
tokens = tokenize.generate_tokens(
- io.StringIO(text).readline,
- config=self.tokenizer_config,
+ io.StringIO(text).readline, grammar=self.grammar
)
return self.parse_tokens(tokens, debug)
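A hedged end-to-end sketch of parse_string(); pytree.convert is the callback
Black's own callers pass, but treat the exact names as assumptions:

    from blib2to3 import pytree
    from blib2to3.pgen2 import driver

    d = driver.Driver(driver.load_grammar("Grammar.txt"), convert=pytree.convert)
    tree = d.parse_string("x = 1\n")
    print(tree)  # the converted root node of the syntax tree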
- def _partially_consume_prefix(self, prefix, column):
- lines = []
+ def _partially_consume_prefix(self, prefix: Text, column: int) -> Tuple[Text, Text]:
+ lines: List[str] = []
current_line = ""
current_column = 0
wait_for_nl = False
for char in prefix:
current_line += char
if wait_for_nl:
current_line = ""
current_column = 0
wait_for_nl = False
for char in prefix:
current_line += char
if wait_for_nl:
if current_line.strip() and current_column < column:
- res = ''.join(lines)
- return res, prefix[len(res):]
+ res = "".join(lines)
+ return res, prefix[len(res) :]
lines.append(current_line)
current_line = ""
current_column = 0
wait_for_nl = False
# unexpected empty line
current_column = 0
else:
# indent is finished
wait_for_nl = True
- return ''.join(lines), current_line
+ return "".join(lines), current_line
-def _generate_pickle_name(gt, cache_dir=None):
+def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> Text:
head, tail = os.path.splitext(gt)
if tail == ".txt":
tail = ""
-def load_grammar(gt="Grammar.txt", gp=None,
- save=True, force=False, logger=None):
+def load_grammar(
+ gt: Text = "Grammar.txt",
+ gp: Optional[Text] = None,
+ save: bool = True,
+ force: bool = False,
+ logger: Optional[Logger] = None,
+) -> Grammar:
"""Load the grammar (maybe from a pickle)."""
if logger is None:
logger = logging.getLogger(__name__)
gp = _generate_pickle_name(gt) if gp is None else gp
if force or not _newer(gp, gt):
logger.info("Generating grammar tables from %s", gt)
"""Load the grammar (maybe from a pickle)."""
if logger is None:
logger = logging.getLogger(__name__)
gp = _generate_pickle_name(gt) if gp is None else gp
if force or not _newer(gp, gt):
logger.info("Generating grammar tables from %s", gt)
- g = pgen.generate_grammar(gt)
+ g: grammar.Grammar = pgen.generate_grammar(gt)
if save:
logger.info("Writing grammar tables to %s", gp)
try:
-def _newer(a, b):
+def _newer(a: Text, b: Text) -> bool:
"""Inquire whether file a was written since file b."""
if not os.path.exists(a):
return False
"""Inquire whether file a was written since file b."""
if not os.path.exists(a):
return False
return os.path.getmtime(a) >= os.path.getmtime(b)
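load_grammar() regenerates the tables only when the pickle is missing or older
than the grammar text, which is exactly what _newer() above decides; a hedged
call that bypasses the cache entirely:

    from blib2to3.pgen2 import driver

    g = driver.load_grammar("Grammar.txt", save=False, force=True)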
-def load_packaged_grammar(package, grammar_source, cache_dir=None):
+def load_packaged_grammar(
+ package: str, grammar_source: Text, cache_dir: Optional[Path] = None
+) -> grammar.Grammar:
"""Normally, loads a pickled grammar by doing
pkgutil.get_data(package, pickled_grammar)
where *pickled_grammar* is computed from *grammar_source* by adding the
"""Normally, loads a pickled grammar by doing
pkgutil.get_data(package, pickled_grammar)
where *pickled_grammar* is computed from *grammar_source* by adding the
return load_grammar(grammar_source, gp=gp)
pickled_name = _generate_pickle_name(os.path.basename(grammar_source), cache_dir)
data = pkgutil.get_data(package, pickled_name)
+ assert data is not None
g = grammar.Grammar()
g.loads(data)
return g
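A hedged call to load_packaged_grammar(), assuming the grammar source ships
inside the package (as blib2to3 does with its Grammar.txt):

    from blib2to3.pgen2 import driver

    g = driver.load_packaged_grammar("blib2to3", "Grammar.txt")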
-def main(*args):
+def main(*args: Text) -> bool:
"""Main program, when run as a script: produce grammar pickle files.
Calls load_grammar for each argument, a path to a grammar text file.
"""
if not args:
"""Main program, when run as a script: produce grammar pickle files.
Calls load_grammar for each argument, a path to a grammar text file.
"""
if not args:
- args = sys.argv[1:]
- logging.basicConfig(level=logging.INFO, stream=sys.stdout,
- format='%(message)s')
+ args = tuple(sys.argv[1:])
+ logging.basicConfig(level=logging.INFO, stream=sys.stdout, format="%(message)s")
for gt in args:
load_grammar(gt, save=True, force=True)
return True
if __name__ == "__main__":
sys.exit(int(not main()))
if __name__ == "__main__":
sys.exit(int(not main()))
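Run as a script (python -m blib2to3.pgen2.driver Grammar.txt), the module
pickles each grammar file named on the command line; main() returns True, so
the int(not main()) at the bottom turns success into exit status 0.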