Set correct return statement for `is_type_comment` function (#929)
[etc/vim.git] / blib2to3 / pgen2 / driver.py
index a51ffc30b9fe9195c1ff936d403387e7fe8b4110..6ba6b929b4b039993cbfa69fb7999226c91312e8 100644 (file)
@@ -28,11 +28,10 @@ from . import grammar, parse, token, tokenize, pgen
 
 
 class Driver(object):
-
     def __init__(self, grammar, convert=None, logger=None):
         self.grammar = grammar
         if logger is None:
-            logger = logging.getLogger()
+            logger = logging.getLogger(__name__)
         self.logger = logger
         self.convert = convert
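A note on the logging change in this hunk: `logging.getLogger(__name__)` returns a logger named after the module (here that resolves to `blib2to3.pgen2.driver`), so an application embedding the parser can tune its verbosity without touching the root logger. A minimal sketch of what that enables (the level chosen is purely illustrative):

    import logging

    # Quiet only the driver's logger; the root logger and other modules are unaffected.
    logging.getLogger("blib2to3.pgen2.driver").setLevel(logging.WARNING)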
 
@@ -68,8 +67,9 @@ class Driver(object):
             if type == token.OP:
                 type = grammar.opmap[value]
             if debug:
-                self.logger.debug("%s %r (prefix=%r)",
-                                  token.tok_name[type], value, prefix)
+                self.logger.debug(
+                    "%s %r (prefix=%r)", token.tok_name[type], value, prefix
+                )
             if type == token.INDENT:
                 indent_columns.append(len(value))
                 _prefix = prefix + value
@@ -91,13 +91,12 @@ class Driver(object):
                 column = 0
         else:
             # We never broke out -- EOF is too soon (how can this happen???)
-            raise parse.ParseError("incomplete input",
-                                   type, value, (prefix, start))
+            raise parse.ParseError("incomplete input", type, value, (prefix, start))
         return p.rootnode
 
     def parse_stream_raw(self, stream, debug=False):
         """Parse a stream and return the syntax tree."""
-        tokens = tokenize.generate_tokens(stream.readline)
+        tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)
         return self.parse_tokens(tokens, debug)
 
     def parse_stream(self, stream, debug=False):
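For context on the new `grammar=` argument: the driver now forwards its own grammar to the tokenizer, presumably so tokenization can adapt to the grammar in use instead of relying on a single global behaviour. A rough usage sketch, with the grammar obtained from this module's `load_grammar` (the source text and the assumption that Grammar.txt is resolvable from the working directory are mine):

    import io
    from blib2to3.pgen2 import driver

    grammar = driver.load_grammar()  # assumes the default "Grammar.txt" can be found
    d = driver.Driver(grammar, convert=None)
    tree = d.parse_stream_raw(io.StringIO("x = 1\n"), debug=False)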
@@ -111,7 +110,9 @@ class Driver(object):
 
     def parse_string(self, text, debug=False):
         """Parse a string and return the syntax tree."""
-        tokens = tokenize.generate_tokens(io.StringIO(text).readline)
+        tokens = tokenize.generate_tokens(
+            io.StringIO(text).readline, grammar=self.grammar
+        )
         return self.parse_tokens(tokens, debug)
 
     def _partially_consume_prefix(self, prefix, column):
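`parse_string` gets the same treatment. Continuing the sketch above, parsing directly from a string would look like:

    tree = d.parse_string("def f(a, b):\n    return a + b\n", debug=False)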
@@ -122,26 +123,24 @@ class Driver(object):
         for char in prefix:
             current_line += char
             if wait_for_nl:
-                if char == '\n':
+                if char == "\n":
                     if current_line.strip() and current_column < column:
-                        res = ''.join(lines)
-                        return res, prefix[len(res):]
+                        res = "".join(lines)
+                        return res, prefix[len(res) :]
 
                     lines.append(current_line)
                     current_line = ""
                     current_column = 0
                     wait_for_nl = False
-            elif char == ' ':
+            elif char in " \t":
                 current_column += 1
-            elif char == '\t':
-                current_column += 4
-            elif char == '\n':
-                # enexpected empty line
+            elif char == "\n":
+                # unexpected empty line
                 current_column = 0
             else:
                 # indent is finished
                 wait_for_nl = True
-        return ''.join(lines), current_line
+        return "".join(lines), current_line
 
 
 def _generate_pickle_name(gt, cache_dir=None):
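Beyond the quoting changes, this hunk changes behaviour: a tab in the prefix now advances `current_column` by 1, like a space, instead of counting as 4 columns. A small illustration of the helper's contract (the example values are mine; `d` is the Driver from the earlier sketch):

    # Split a prefix at the first non-empty line indented to fewer than `column` characters.
    consumed, rest = d._partially_consume_prefix("    # comment\n# top-level\n", column=4)
    # If I read the loop correctly: consumed == "    # comment\n" and rest == "# top-level\n".
    # With the new counting, a tab-indented line counts as one column, not four.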
@@ -155,11 +154,10 @@ def _generate_pickle_name(gt, cache_dir=None):
         return name
 
 
-def load_grammar(gt="Grammar.txt", gp=None,
-                 save=True, force=False, logger=None):
+def load_grammar(gt="Grammar.txt", gp=None, save=True, force=False, logger=None):
     """Load the grammar (maybe from a pickle)."""
     if logger is None:
     """Load the grammar (maybe from a pickle)."""
     if logger is None:
-        logger = logging.getLogger()
+        logger = logging.getLogger(__name__)
     gp = _generate_pickle_name(gt) if gp is None else gp
     if force or not _newer(gp, gt):
         logger.info("Generating grammar tables from %s", gt)
@@ -213,11 +211,11 @@ def main(*args):
     """
     if not args:
         args = sys.argv[1:]
     """
     if not args:
         args = sys.argv[1:]
-    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
-                        format='%(message)s')
+    logging.basicConfig(level=logging.INFO, stream=sys.stdout, format="%(message)s")
     for gt in args:
         load_grammar(gt, save=True, force=True)
     return True
 
+
 if __name__ == "__main__":
     sys.exit(int(not main()))
 if __name__ == "__main__":
     sys.exit(int(not main()))