madduck's git repository
Every project in this repository is available at the canonical
URL git://git.madduck.net/madduck/pub/<projectpath>; see
each project's metadata for the exact URL.
All patches and comments are welcome. Please squash your changes into logical
commits before using git-format-patch and git-send-email to send them to
patches@git.madduck.net.
If you read over the Git project's submission guidelines and adhere to them,
I'd be especially grateful.
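For example, a typical submission might look like this (the base branch name
is illustrative):

    git rebase -i origin/master     # squash into logical commits
    git format-patch origin/master  # one patch file per commit
    git send-email --to=patches@git.madduck.net *.patch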
SSH access, as well as push access, can be arranged individually.
If you use my repositories frequently, consider adding the following
snippet to ~/.gitconfig and using the third clone URL listed for each
project:
[url "git://git.madduck.net/madduck/"]
insteadOf = madduck:
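With that in place, Git rewrites the short form to the full URL, so a clone
can be as brief as (the project path is a placeholder):

    git clone madduck:pub/<projectpath>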
def __init__(
    self,
    grammar,
    convert=None,
    logger=None,
-    tokenizer_config=tokenize.TokenizerConfig(),
):
    self.grammar = grammar
    if logger is None:
        logger = logging.getLogger(__name__)
    self.logger = logger
    self.convert = convert
-    self.tokenizer_config = tokenizer_config

def parse_tokens(self, tokens, debug=False):
    """Parse a series of tokens and return the syntax tree."""

def parse_stream_raw(self, stream, debug=False):
    """Parse a stream and return the syntax tree."""
-    tokens = tokenize.generate_tokens(stream.readline, config=self.tokenizer_config)
+    tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)
    return self.parse_tokens(tokens, debug)

def parse_stream(self, stream, debug=False):
"""Parse a string and return the syntax tree."""
tokens = tokenize.generate_tokens(
io.StringIO(text).readline,
"""Parse a string and return the syntax tree."""
tokens = tokenize.generate_tokens(
io.StringIO(text).readline,
- config=self.tokenizer_config,
)
return self.parse_tokens(tokens, debug)
)
return self.parse_tokens(tokens, debug)
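For callers, the visible effect of this change is that tokenizer behaviour is
now derived from the grammar object rather than from a separate
TokenizerConfig. A minimal sketch of driver usage after the change, assuming a
stock lib2to3-style layout (the import path and grammar file name are
assumptions, not taken from this repository):

    import logging

    # Assumed lib2to3-style import; the actual package path may differ.
    from lib2to3.pgen2 import driver

    # After this commit, Driver takes only grammar, convert, and logger;
    # any tokenizer-relevant settings travel with the grammar itself.
    grammar = driver.load_grammar("Grammar.txt")  # illustrative grammar file
    d = driver.Driver(grammar, logger=logging.getLogger(__name__))

    # Internally this tokenizes via generate_tokens(..., grammar=self.grammar),
    # as shown in the hunks above.
    tree = d.parse_string("x = 1\n")
    print(tree)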