"""
import sys
-from typing import (
-    Collection,
-    Generic,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
+from typing import Generic, Iterator, List, Optional, Set, Tuple, TypeVar, Union
if sys.version_info >= (3, 8):
    from typing import Final
from mypy_extensions import mypyc_attr
-# lib2to3 fork
-from blib2to3.pytree import Node, Leaf, type_repr, NL
-from blib2to3 import pygram
-from blib2to3.pgen2 import token
-
from black.cache import CACHE_DIR
from black.strings import has_triple_quotes
-
+from blib2to3 import pygram
+from blib2to3.pgen2 import token
+from blib2to3.pytree import NL, Leaf, Node, type_repr
pygram.initialize(CACHE_DIR)
syms: Final = pygram.python_symbols
    syms.term,
    syms.power,
}
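+# Annotated parameter names: "tname", plus "tname_star" whose annotation is a star
+# expression (as in "*args: *Ts").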
+TYPED_NAMES: Final = {syms.tname, syms.tname_star}
ASSIGNMENTS: Final = {
"=",
"+=",
                    # that, too.
                    return prevp.prefix
+        elif (
+            prevp.type == token.STAR
+            and parent_type(prevp) == syms.star_expr
+            and parent_type(prevp.parent) == syms.subscriptlist
+        ):
+            # No space between typevar tuples.
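+            # e.g. the "*" in "tuple[int, *Ts]" attaches directly to the name.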
+            return NO
+
        elif prevp.type in VARARGS_SPECIALS:
            if is_vararg(prevp, within=VARARGS_PARENTS | UNPACKING_PARENTS):
                return NO
            return NO
        if t == token.EQUAL:
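+            # "=" keeps surrounding spaces only after an annotated parameter.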
-            if prev.type != syms.tname:
+            if prev.type not in TYPED_NAMES:
                return NO
        elif prev.type == token.EQUAL:
        elif prev.type != token.COMMA:
            return NO
-    elif p.type == syms.tname:
+    elif p.type in TYPED_NAMES:
        # type names
        if not prev:
            prevp = preceding_leaf(p)
    elif p.type == syms.sliceop:
        return NO
+    elif p.type == syms.except_clause:
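+        # PEP 654 "except*": the star attaches directly to "except".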
+        if t == token.STAR:
+            return NO
+
    return SPACE
    return prev_siblings_are(node.prev_sibling, tokens[:-1])
-def last_two_except(leaves: List[Leaf], omit: Collection[LeafID]) -> Tuple[Leaf, Leaf]:
- """Return (penultimate, last) leaves skipping brackets in `omit` and contents."""
- stop_after: Optional[Leaf] = None
- last: Optional[Leaf] = None
- for leaf in reversed(leaves):
- if stop_after:
- if leaf is stop_after:
- stop_after = None
- continue
-
- if last:
- return leaf, last
-
- if id(leaf) in omit:
- stop_after = leaf.opening_bracket
- else:
- last = leaf
- else:
- raise LookupError("Last two leaves were also skipped")
-
-
def parent_type(node: Optional[LN]) -> Optional[NodeType]:
"""
Returns:
        return None
-def first_child_is_arith(node: Node) -> bool:
- """Whether first child is an arithmetic or a binary arithmetic expression"""
- expr_types = {
+def is_arith_like(node: LN) -> bool:
+ """Whether node is an arithmetic or a binary arithmetic expression"""
+ return node.type in {
syms.arith_expr,
syms.shift_expr,
syms.xor_expr,
syms.and_expr,
}
- return bool(node.children and node.children[0].type in expr_types)
def is_docstring(leaf: Leaf) -> bool:
)
-def is_one_tuple_between(opening: Leaf, closing: Leaf, leaves: List[Leaf]) -> bool:
- """Return True if content between `opening` and `closing` looks like a one-tuple."""
- if opening.type != token.LPAR and closing.type != token.RPAR:
+def is_one_sequence_between(
+ opening: Leaf,
+ closing: Leaf,
+ leaves: List[Leaf],
+ brackets: Tuple[int, int] = (token.LPAR, token.RPAR),
+) -> bool:
+ """Return True if content between `opening` and `closing` is a one-sequence."""
+    if (opening.type, closing.type) != brackets:
        return False
    depth = closing.bracket_depth + 1
def is_string_token(nl: NL) -> TypeGuard[Leaf]:
    return nl.type == token.STRING
+
+
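+# The TypeGuard return type lets type checkers narrow "nl" to Leaf, as above.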
+def is_number_token(nl: NL) -> TypeGuard[Leaf]:
+    return nl.type == token.NUMBER