git.madduck.net Git - etc/vim.git/blobdiff - blib2to3/pgen2/driver.py

madduck's git repository

All projects in this repository are available at the canonical URL git://git.madduck.net/madduck/pub/<projectpath>; see each project's metadata for the exact URL.

All patches and comments are welcome. Please squash your changes into logical commits before using git-format-patch and git-send-email to send them to patches@git.madduck.net. I would be especially grateful if you read the Git project's submission guidelines and adhered to them.
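For example, a typical submission might look like the following (the origin/master upstream reference is an assumption; use whatever branch your work is based on):

    git format-patch origin/master
    git send-email --to=patches@git.madduck.net 0001-*.patch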

SSH access, as well as push access, can be arranged individually.

If you use my repositories frequently, consider adding the following snippet to ~/.gitconfig and using the third clone URL listed for each project:

[url "git://git.madduck.net/madduck/"]
  insteadOf = madduck:
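
With that snippet in place, the madduck: prefix expands to git://git.madduck.net/madduck/, so cloning this project would look something like the following (the exact project path is inferred from the canonical URL pattern above):

    git clone madduck:pub/etc/vim.git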

fix tests
[etc/vim.git] / blib2to3 / pgen2 / driver.py
index e681b526a2219ae46dff248bb991173622c7698e..6452c57a1fa4b4a9e4c35541205c4919d6e48ddc 100644 (file)
@@ -34,14 +34,12 @@ class Driver(object):
         grammar,
         convert=None,
         logger=None,
-        tokenizer_config=tokenize.TokenizerConfig(),
     ):
         self.grammar = grammar
         if logger is None:
             logger = logging.getLogger(__name__)
         self.logger = logger
         self.convert = convert
-        self.tokenizer_config = tokenizer_config
 
     def parse_tokens(self, tokens, debug=False):
         """Parse a series of tokens and return the syntax tree."""
@@ -104,7 +102,7 @@ class Driver(object):
 
     def parse_stream_raw(self, stream, debug=False):
         """Parse a stream and return the syntax tree."""
-        tokens = tokenize.generate_tokens(stream.readline, config=self.tokenizer_config)
+        tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)
         return self.parse_tokens(tokens, debug)
 
     def parse_stream(self, stream, debug=False):
@@ -120,7 +118,7 @@ class Driver(object):
         """Parse a string and return the syntax tree."""
         tokens = tokenize.generate_tokens(
             io.StringIO(text).readline,
-            config=self.tokenizer_config,
+            grammar=self.grammar
         )
         return self.parse_tokens(tokens, debug)
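
A minimal sketch of how a caller drives the changed API after this diff: the Driver is constructed without a tokenizer_config, and the grammar it holds is forwarded to generate_tokens internally. The pygram/pytree import paths and the pytree.convert callback are assumptions borrowed from lib2to3's layout, not taken from this page.

    from blib2to3 import pygram, pytree
    from blib2to3.pgen2 import driver

    # No tokenizer_config argument anymore; the grammar itself now
    # controls tokenization via generate_tokens(..., grammar=...).
    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("x = 1\n")
    print(tree)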