madduck's git repository
Every one of the projects in this repository is available at the canonical
URL git://git.madduck.net/madduck/pub/<projectpath> — see
each project's metadata for the exact URL.
All patches and comments are welcome. Please squash your changes into logical
commits before using git-format-patch and git-send-email to send them to
patches@git.madduck.net.
If you'd read over the Git project's submission guidelines and adhered to them,
I'd be especially grateful.
SSH access, as well as push access, can be individually
arranged.
If you use my repositories frequently, consider adding the following
snippet to ~/.gitconfig and using the third clone URL listed for each
project:
[url "git://git.madduck.net/madduck/"]
insteadOf = madduck:
# Python imports
import io
# Python imports
import io
import pkgutil
import sys
import pkgutil
import sys
-from typing import (
- Any,
- cast,
- IO,
- Iterable,
- List,
- Optional,
- Text,
- Iterator,
- Tuple,
- TypeVar,
- Generic,
- Union,
-)
from contextlib import contextmanager
from dataclasses import dataclass, field
from contextlib import contextmanager
from dataclasses import dataclass, field
-
-# Pgen imports
-from . import grammar, parse, token, tokenize, pgen
from logging import Logger
from logging import Logger
-from blib2to3.pytree import NL
+from typing import IO, Any, Iterable, Iterator, List, Optional, Tuple, Union, cast
+
from blib2to3.pgen2.grammar import Grammar
from blib2to3.pgen2.tokenize import GoodTokenInfo
from blib2to3.pgen2.grammar import Grammar
from blib2to3.pgen2.tokenize import GoodTokenInfo
+from blib2to3.pytree import NL
+
+# Pgen imports
+from . import grammar, parse, pgen, token, tokenize
Path = Union[str, "os.PathLike[str]"]
Path = Union[str, "os.PathLike[str]"]
def can_advance(self, to: int) -> bool:
# Try to eat, fail if it can't. The eat operation is cached
def can_advance(self, to: int) -> bool:
# Try to eat, fail if it can't. The eat operation is cached
- # so there wont be any additional cost of eating here
+ # so there won't be any additional cost of eating here
try:
self.eat(to)
except StopIteration:
try:
self.eat(to)
except StopIteration:
def __init__(self, grammar: Grammar, logger: Optional[Logger] = None) -> None:
self.grammar = grammar
if logger is None:
def __init__(self, grammar: Grammar, logger: Optional[Logger] = None) -> None:
self.grammar = grammar
if logger is None:
assert p.rootnode is not None
return p.rootnode
assert p.rootnode is not None
return p.rootnode
- def parse_stream_raw(self, stream: IO[Text], debug: bool = False) -> NL:
+ def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> NL:
"""Parse a stream and return the syntax tree."""
tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)
return self.parse_tokens(tokens, debug)
"""Parse a stream and return the syntax tree."""
tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)
return self.parse_tokens(tokens, debug)
- def parse_stream(self, stream: IO[Text], debug: bool = False) -> NL:
+ def parse_stream(self, stream: IO[str], debug: bool = False) -> NL:
"""Parse a stream and return the syntax tree."""
return self.parse_stream_raw(stream, debug)
def parse_file(
"""Parse a stream and return the syntax tree."""
return self.parse_stream_raw(stream, debug)
def parse_file(
- self, filename: Path, encoding: Optional[Text] = None, debug: bool = False
+ self, filename: Path, encoding: Optional[str] = None, debug: bool = False
) -> NL:
"""Parse a file and return the syntax tree."""
) -> NL:
"""Parse a file and return the syntax tree."""
- with io.open(filename, "r", encoding=encoding) as stream:
+ with open(filename, encoding=encoding) as stream:
return self.parse_stream(stream, debug)
return self.parse_stream(stream, debug)
- def parse_string(self, text: Text, debug: bool = False) -> NL:
+ def parse_string(self, text: str, debug: bool = False) -> NL:
"""Parse a string and return the syntax tree."""
tokens = tokenize.generate_tokens(
io.StringIO(text).readline, grammar=self.grammar
)
return self.parse_tokens(tokens, debug)
"""Parse a string and return the syntax tree."""
tokens = tokenize.generate_tokens(
io.StringIO(text).readline, grammar=self.grammar
)
return self.parse_tokens(tokens, debug)
- def _partially_consume_prefix(self, prefix: Text, column: int) -> Tuple[Text, Text]:
+ def _partially_consume_prefix(self, prefix: str, column: int) -> Tuple[str, str]:
lines: List[str] = []
current_line = ""
current_column = 0
lines: List[str] = []
current_line = ""
current_column = 0
return "".join(lines), current_line
return "".join(lines), current_line
-def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> Text:
+def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> str:
head, tail = os.path.splitext(gt)
if tail == ".txt":
tail = ""
head, tail = os.path.splitext(gt)
if tail == ".txt":
tail = ""
- gt: Text = "Grammar.txt",
- gp: Optional[Text] = None,
+ gt: str = "Grammar.txt",
+ gp: Optional[str] = None,
save: bool = True,
force: bool = False,
logger: Optional[Logger] = None,
save: bool = True,
force: bool = False,
logger: Optional[Logger] = None,
-def _newer(a: Text, b: Text) -> bool:
+def _newer(a: str, b: str) -> bool:
"""Inquire whether file a was written since file b."""
if not os.path.exists(a):
return False
"""Inquire whether file a was written since file b."""
if not os.path.exists(a):
return False
def load_packaged_grammar(
def load_packaged_grammar(
- package: str, grammar_source: Text, cache_dir: Optional[Path] = None
+ package: str, grammar_source: str, cache_dir: Optional[Path] = None
) -> grammar.Grammar:
"""Normally, loads a pickled grammar by doing
pkgutil.get_data(package, pickled_grammar)
) -> grammar.Grammar:
"""Normally, loads a pickled grammar by doing
pkgutil.get_data(package, pickled_grammar)
-def main(*args: Text) -> bool:
+def main(*args: str) -> bool:
"""Main program, when run as a script: produce grammar pickle files.
Calls load_grammar for each argument, a path to a grammar text file.
"""Main program, when run as a script: produce grammar pickle files.
Calls load_grammar for each argument, a path to a grammar text file.