if debug:
self.logger.debug("%s %r (prefix=%r)",
token.tok_name[type], value, prefix)
- if type in {token.INDENT, token.DEDENT}:
- _prefix = prefix
+ if type == token.INDENT:
+ indent_columns.append(len(value))
+ _prefix = prefix + value
prefix = ""
- if type == token.DEDENT:
+ value = ""
+ elif type == token.DEDENT:
_indent_col = indent_columns.pop()
- prefix, _prefix = self._partially_consume_prefix(_prefix, _indent_col)
+ prefix, _prefix = self._partially_consume_prefix(prefix, _indent_col)
if p.addtoken(type, value, (prefix, start)):
if debug:
self.logger.debug("Stop.")
break
prefix = ""
- if type == token.INDENT:
- indent_columns.append(len(value))
- if _prefix.startswith(value):
- # Don't double-indent. Since we're delaying the prefix that
- # would normally belong to INDENT, we need to put the value
- # at the end versus at the beginning.
- _prefix = _prefix[len(value):] + value
if type in {token.INDENT, token.DEDENT}:
prefix = _prefix
lineno, column = end
elif char == '\t':
current_column += 4
elif char == '\n':
- # enexpected empty line
+ # unexpected empty line
current_column = 0
else:
# indent is finished
wait_for_nl = True
return ''.join(lines), current_line
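For context, _partially_consume_prefix(prefix, column) splits the pending prefix (the whitespace and comments collected since the previous token) at the first non-blank line indented to fewer than `column` columns: everything before that line stays attached to the DEDENT, and the remainder is carried forward as the prefix of the next token. A rough standalone sketch of that splitting behavior, simplified from the method above (it ignores the tab-width and blank-line handling the real code does):

def split_prefix_at_column(prefix, column):
    # Return (head, rest): head ends just before the first non-blank line
    # indented to fewer than `column` columns; rest begins at that line.
    consumed = 0
    for line in prefix.splitlines(keepends=True):
        stripped = line.lstrip(" \t")
        indent = len(line) - len(stripped)
        if stripped.strip() and indent < column:
            break
        consumed += len(line)
    return prefix[:consumed], prefix[consumed:]

# A comment at the old indentation level stays with the DEDENT token;
# the less-indented comment becomes the prefix of the token that follows.
head, rest = split_prefix_at_column("    # inner\n# outer\n", 4)
assert head == "    # inner\n"
assert rest == "# outer\n"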
-def _generate_pickle_name(gt):
+def _generate_pickle_name(gt, cache_dir=None):
head, tail = os.path.splitext(gt)
if tail == ".txt":
tail = ""
- return head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
+ name = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
+ if cache_dir:
+ return os.path.join(cache_dir, os.path.basename(name))
+ else:
+ return name
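For illustration, assuming CPython 3.10.4 (where sys.version_info is (3, 10, 4, 'final', 0)) and a made-up cache directory, the helper would produce names like:

_generate_pickle_name("Grammar.txt")
# -> 'Grammar3.10.4.final.0.pickle'
_generate_pickle_name("Grammar.txt", cache_dir="/tmp/grammar-cache")
# -> '/tmp/grammar-cache/Grammar3.10.4.final.0.pickle'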
def load_grammar(gt="Grammar.txt", gp=None,
return os.path.getmtime(a) >= os.path.getmtime(b)
-def load_packaged_grammar(package, grammar_source):
+def load_packaged_grammar(package, grammar_source, cache_dir=None):
"""Normally, loads a pickled grammar by doing
pkgutil.get_data(package, pickled_grammar)
where *pickled_grammar* is computed from *grammar_source* by adding the
Python version and using a ``.pickle`` extension.
"""
if os.path.isfile(grammar_source):
- return load_grammar(grammar_source)
- pickled_name = _generate_pickle_name(os.path.basename(grammar_source))
+ gp = _generate_pickle_name(grammar_source, cache_dir) if cache_dir else None
+ return load_grammar(grammar_source, gp=gp)
+ pickled_name = _generate_pickle_name(os.path.basename(grammar_source), cache_dir)
data = pkgutil.get_data(package, pickled_name)
g = grammar.Grammar()
g.loads(data)
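A hypothetical call into the patched loader might look like the following; the import path, package name, grammar file, and cache path are placeholders, not names taken from the patch:

from pgen2 import driver  # placeholder import; use the patched module's real location

g = driver.load_packaged_grammar(
    "mypackage",      # package whose data would hold the pickled grammar
    "Grammar.txt",    # grammar source; an existing file routes through load_grammar()
    cache_dir="/tmp/grammar-cache",  # where the generated pickle name is redirected
)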