]>
git.madduck.net Git - etc/vim.git/blobdiff - blib2to3/pgen2/driver.py
madduck's git repository
Every one of the projects in this repository is available at the canonical
URL git://git.madduck.net/madduck/pub/<projectpath> — see
each project's metadata for the exact URL.
All patches and comments are welcome. Please squash your changes to logical
commits before using git-format-patch and git-send-email to
patches@git.madduck.net.
If you'd read over the Git project's submission guidelines and adhered to them,
I'd be especially grateful.
SSH access, as well as push access, can be individually
arranged.
If you use my repositories frequently, consider adding the following
snippet to ~/.gitconfig and using the third clone URL listed for each
project:
[url "git://git.madduck.net/madduck/"]
insteadOf = madduck:
def __init__(
    self,
    grammar,
    convert=None,
    logger=None,
    tokenizer_config=None,
):
    """Initialize the parser driver.

    Args:
        grammar: the Grammar instance the parser is driven by.
        convert: optional callable for converting raw parse-tree nodes.
        logger: optional logging.Logger.  Defaults to a logger named
            after this module (rather than the root logger) so that
            applications can configure and filter its output per-module.
        tokenizer_config: optional tokenize.TokenizerConfig passed to
            the tokenizer.  The previous default of
            ``tokenize.TokenizerConfig()`` was evaluated once at
            function-definition time and shared across every Driver
            instance (the mutable-default-argument pitfall); using
            ``None`` as a sentinel gives each instance a fresh config.
    """
    self.grammar = grammar
    if logger is None:
        logger = logging.getLogger(__name__)
    self.logger = logger
    self.convert = convert
    # NOTE(review): assumes TokenizerConfig is a cheap, default-
    # constructible config object — confirm against blib2to3.pgen2.tokenize.
    if tokenizer_config is None:
        tokenizer_config = tokenize.TokenizerConfig()
    self.tokenizer_config = tokenizer_config
def parse_tokens(self, tokens, debug=False):
"""Parse a series of tokens and return the syntax tree."""
def parse_tokens(self, tokens, debug=False):
"""Parse a series of tokens and return the syntax tree."""
def parse_stream_raw(self, stream, debug=False):
    """Parse a stream and return the syntax tree."""
    token_stream = tokenize.generate_tokens(
        stream.readline, config=self.tokenizer_config
    )
    return self.parse_tokens(token_stream, debug)
def parse_stream(self, stream, debug=False):
return self.parse_tokens(tokens, debug)
def parse_stream(self, stream, debug=False):
def parse_string(self, text, debug=False):
    """Parse a string and return the syntax tree."""
    readline = io.StringIO(text).readline
    token_stream = tokenize.generate_tokens(readline, config=self.tokenizer_config)
    return self.parse_tokens(token_stream, debug)
def _partially_consume_prefix(self, prefix, column):
return self.parse_tokens(tokens, debug)
def _partially_consume_prefix(self, prefix, column):
current_line = ""
current_column = 0
wait_for_nl = False
current_line = ""
current_column = 0
wait_for_nl = False
- elif char == '\t':
- current_column += 4
elif char == '\n':
# unexpected empty line
current_column = 0
elif char == '\n':
# unexpected empty line
current_column = 0
save=True, force=False, logger=None):
"""Load the grammar (maybe from a pickle)."""
if logger is None:
save=True, force=False, logger=None):
"""Load the grammar (maybe from a pickle)."""
if logger is None:
- logger = logging.getLogger()
+ logger = logging.getLogger(__name__ )
gp = _generate_pickle_name(gt) if gp is None else gp
if force or not _newer(gp, gt):
logger.info("Generating grammar tables from %s", gt)
gp = _generate_pickle_name(gt) if gp is None else gp
if force or not _newer(gp, gt):
logger.info("Generating grammar tables from %s", gt)