is_rpar_token,
is_stub_body,
is_stub_suite,
+ is_tuple_containing_walrus,
is_vararg,
is_walrus_assignment,
is_yield,
get_string_prefix,
normalize_string_prefix,
normalize_string_quotes,
+ normalize_unicode_escape_sequences,
)
from black.trans import (
CannotTransform,
in ways that will no longer stringify to valid Python code on the tree.
"""
- def __init__(self, mode: Mode) -> None:
+ def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
self.mode = mode
+ self.features = features
self.current_line: Line
self.__post_init__()
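
# Illustrative sketch (not an excerpt from black; it only uses names visible in
# this diff and black's public modules): the visitor is now constructed from a
# Mode *and* the Features resolved for the selected target versions, so
# individual visit_* methods can consult both.
from black.linegen import LineGenerator
from black.mode import Feature, Mode

_generator = LineGenerator(
    mode=Mode(), features={Feature.PARENTHESIZED_CONTEXT_MANAGERS}
)
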
"""Default `visit_*()` implementation. Recurses to children of `node`."""
if isinstance(node, Leaf):
any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
- for comment in generate_comments(node, preview=self.mode.preview):
+ for comment in generate_comments(node):
if any_open_brackets:
# any comment within brackets is subject to splitting
self.current_line.append(comment)
`parens` holds a set of string leaf values immediately after which
invisible parens should be put.
"""
- normalize_invisible_parens(node, parens_after=parens, preview=self.mode.preview)
+ normalize_invisible_parens(
+ node, parens_after=parens, mode=self.mode, features=self.features
+ )
for child in node.children:
if is_name_token(child) and child.value in keywords:
yield from self.line()
def visit_funcdef(self, node: Node) -> Iterator[Line]:
"""Visit function definition."""
- if Preview.annotation_parens not in self.mode:
- yield from self.visit_stmt(node, keywords={"def"}, parens=set())
- else:
- yield from self.line()
+ yield from self.line()
- # Remove redundant brackets around return type annotation.
- is_return_annotation = False
- for child in node.children:
- if child.type == token.RARROW:
- is_return_annotation = True
- elif is_return_annotation:
- if child.type == syms.atom and child.children[0].type == token.LPAR:
- if maybe_make_parens_invisible_in_atom(
- child,
- parent=node,
- remove_brackets_around_comma=False,
- ):
- wrap_in_parentheses(node, child, visible=False)
- else:
+ # Remove redundant brackets around return type annotation.
+ is_return_annotation = False
+ for child in node.children:
+ if child.type == token.RARROW:
+ is_return_annotation = True
+ elif is_return_annotation:
+ if child.type == syms.atom and child.children[0].type == token.LPAR:
+ if maybe_make_parens_invisible_in_atom(
+ child,
+ parent=node,
+ remove_brackets_around_comma=False,
+ ):
wrap_in_parentheses(node, child, visible=False)
- is_return_annotation = False
+ else:
+ wrap_in_parentheses(node, child, visible=False)
+ is_return_annotation = False
- for child in node.children:
- yield from self.visit(child)
+ for child in node.children:
+ yield from self.visit(child)
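
# Sketch of the behaviour the visit_funcdef hunk above makes unconditional
# (the expected output is an editor assumption stated via black's public API,
# not a quoted test case):
import black

print(black.format_str("def f() -> (int):\n    pass\n", mode=black.Mode()))
# expected, roughly: the redundant parentheses around the return annotation
# are dropped, i.e. "def f() -> int:".
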
def visit_match_case(self, node: Node) -> Iterator[Line]:
"""Visit either a match or case statement."""
- normalize_invisible_parens(node, parens_after=set(), preview=self.mode.preview)
+ normalize_invisible_parens(
+ node, parens_after=set(), mode=self.mode, features=self.features
+ )
yield from self.line()
for child in node.children:
):
wrap_in_parentheses(node, leaf)
- if Preview.remove_redundant_parens in self.mode:
- remove_await_parens(node)
+ remove_await_parens(node)
yield from self.visit_default(node)
yield from self.visit_default(node)
def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
+ if Preview.hex_codes_in_unicode_sequences in self.mode:
+ normalize_unicode_escape_sequences(leaf)
+
if is_docstring(leaf) and "\\\n" not in leaf.value:
# We're ignoring docstrings with backslash newline escapes because changing
# indentation of those changes the AST representation of the code.
- if Preview.normalize_docstring_quotes_and_prefixes_properly in self.mode:
- # There was a bug where --skip-string-normalization wouldn't stop us
- # from normalizing docstring prefixes. To maintain stability, we can
- # only address this buggy behaviour while the preview style is enabled.
- if self.mode.string_normalization:
- docstring = normalize_string_prefix(leaf.value)
- # visit_default() does handle string normalization for us, but
- # since this method acts differently depending on quote style (ex.
- # see padding logic below), there's a possibility for unstable
- # formatting as visit_default() is called *after*. To avoid a
- # situation where this function formats a docstring differently on
- # the second pass, normalize it early.
- docstring = normalize_string_quotes(docstring)
- else:
- docstring = leaf.value
- else:
- # ... otherwise, we'll keep the buggy behaviour >.<
+ if self.mode.string_normalization:
docstring = normalize_string_prefix(leaf.value)
+ # visit_default() does handle string normalization for us, but
+ # since this method acts differently depending on quote style (ex.
+ # see padding logic below), there's a possibility for unstable
+ # formatting as visit_default() is called *after*. To avoid a
+ # situation where this function formats a docstring differently on
+ # the second pass, normalize it early.
+ docstring = normalize_string_quotes(docstring)
+ else:
+ docstring = leaf.value
prefix = get_string_prefix(docstring)
docstring = docstring[len(prefix) :] # Remove the prefix
quote_char = docstring[0]
quote = quote_char * quote_len
# It's invalid to put closing single-character quotes on a new line.
- if Preview.long_docstring_quotes_on_newline in self.mode and quote_len == 3:
+ if quote_len == 3:
# We need to find the length of the last line of the docstring
# to find if we can add the closing quotes to the line without
# exceeding the maximum line length.
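
# Sketch of the docstring path above: the prefix and quotes are normalized
# early, and only when string normalization is enabled; the preview-gated
# normalize_unicode_escape_sequences() call runs before any of this. Expected
# outputs below are editor assumptions.
import black

src = "def f():\n    R'''Docstring.'''\n"
print(black.format_str(src, mode=black.Mode()))
# expected, roughly: lowercased prefix and triple double quotes.
print(black.format_str(src, mode=black.Mode(string_normalization=False)))
# expected, roughly: prefix and quotes left untouched.
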
self.visit_try_stmt = partial(
v, keywords={"try", "except", "else", "finally"}, parens=Ø
)
- if self.mode.preview:
- self.visit_except_clause = partial(
- v, keywords={"except"}, parens={"except"}
- )
- self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
- else:
- self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
- self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
+ self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"})
+ self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
else:
def _rhs(
- self: object, line: Line, features: Collection[Feature]
+ self: object, line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
"""Wraps calls to `right_hand_split`.
tail = auto()
-def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
+def left_hand_split(
+ line: Line, _features: Collection[Feature], mode: Mode
+) -> Iterator[Line]:
"""Split line into many lines, starting with the first matching bracket pair.
Note: this usually looks weird, only use this for function definitions.
break
leaves_to_track: Set[LeafID] = set()
- if (
- Preview.handle_trailing_commas_in_head in original.mode
- and component is _BracketSplitComponent.head
- ):
+ if component is _BracketSplitComponent.head:
leaves_to_track = get_leaves_inside_matching_brackets(leaves)
# Populate the line
for leaf in leaves:
"""
@wraps(split_func)
- def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
- for split_line in split_func(line, features):
+ def split_wrapper(
+ line: Line, features: Collection[Feature], mode: Mode
+ ) -> Iterator[Line]:
+ for split_line in split_func(line, features, mode):
normalize_prefix(split_line.leaves[0], inside_brackets=True)
yield split_line
return split_wrapper
+def _get_last_non_comment_leaf(line: Line) -> Optional[int]:
+ for leaf_idx in range(len(line.leaves) - 1, 0, -1):
+ if line.leaves[leaf_idx].type != STANDALONE_COMMENT:
+ return leaf_idx
+ return None
+
+
+def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line:
+ if (
+ safe
+ and delimiter_priority == COMMA_PRIORITY
+ and line.leaves[-1].type != token.COMMA
+ and line.leaves[-1].type != STANDALONE_COMMENT
+ ):
+ new_comma = Leaf(token.COMMA, ",")
+ line.append(new_comma)
+ return line
+
+
@dont_increase_indentation
-def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
+def delimiter_split(
+ line: Line, features: Collection[Feature], mode: Mode
+) -> Iterator[Line]:
"""Split according to delimiters of the highest priority.
If the appropriate Features are given, the split will add trailing commas
)
current_line.append(leaf)
- for leaf in line.leaves:
+ last_non_comment_leaf = _get_last_non_comment_leaf(line)
+ for leaf_idx, leaf in enumerate(line.leaves):
yield from append_to_line(leaf)
for comment_after in line.comments_after(leaf):
trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
)
+ if (
+ Preview.add_trailing_comma_consistently in mode
+ and last_leaf.type == STANDALONE_COMMENT
+ and leaf_idx == last_non_comment_leaf
+ ):
+ current_line = _safe_add_trailing_comma(
+ trailing_comma_safe, delimiter_priority, current_line
+ )
+
leaf_priority = bt.delimiters.get(id(leaf))
if leaf_priority == delimiter_priority:
yield current_line
mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
)
if current_line:
- if (
- trailing_comma_safe
- and delimiter_priority == COMMA_PRIORITY
- and current_line.leaves[-1].type != token.COMMA
- and current_line.leaves[-1].type != STANDALONE_COMMENT
- ):
- new_comma = Leaf(token.COMMA, ",")
- current_line.append(new_comma)
+ current_line = _safe_add_trailing_comma(
+ trailing_comma_safe, delimiter_priority, current_line
+ )
yield current_line
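
# Sketch of what _get_last_non_comment_leaf, _safe_add_trailing_comma and the
# preview-gated block above cooperate on (the exact output is an editor
# assumption): with Preview.add_trailing_comma_consistently, the trailing comma
# is attached to the last real element even when a standalone comment follows.
import black

src = (
    "foo = [\n"
    "    first_long_element_aaaaaaaaaaaaaaaaaaaaaaaa, second_long_element_bbbbbbbbbbbbbbbbbbbbbb\n"
    "    # trailing note\n"
    "]\n"
)
print(black.format_str(src, mode=black.Mode(preview=True)))
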
@dont_increase_indentation
def standalone_comment_split(
- line: Line, features: Collection[Feature] = ()
+ line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
"""Split standalone comments from the rest of the line."""
if not line.contains_standalone_comments(0):
def normalize_invisible_parens(
- node: Node, parens_after: Set[str], *, preview: bool
+ node: Node, parens_after: Set[str], *, mode: Mode, features: Collection[Feature]
) -> None:
"""Make existing optional parentheses invisible or create new ones.
Standardizes on visible parentheses for single-element tuples, and keeps
existing visible parentheses for other tuples and generator expressions.
"""
- for pc in list_comments(node.prefix, is_endmarker=False, preview=preview):
+ for pc in list_comments(node.prefix, is_endmarker=False):
if pc.value in FMT_OFF:
# This `node` has a prefix with `# fmt: off`, don't mess with parens.
return
+
+ # The multiple context managers grammar has a different pattern, thus this is
+ # separate from the for-loop below. This possibly wraps them in invisible parens,
+ # and the parens are removed later in remove_with_parens when needed.
+ if node.type == syms.with_stmt:
+ _maybe_wrap_cms_in_parens(node, mode, features)
+
check_lpar = False
for index, child in enumerate(list(node.children)):
# Fixes a bug where invisible parens are not properly stripped from
# assignment statements that contain type annotations.
if isinstance(child, Node) and child.type == syms.annassign:
normalize_invisible_parens(
- child, parens_after=parens_after, preview=preview
+ child, parens_after=parens_after, mode=mode, features=features
)
# Add parentheses around long tuple unpacking in assignments.
if check_lpar:
if (
- preview
- and child.type == syms.atom
+ child.type == syms.atom
and node.type == syms.for_stmt
and isinstance(child.prev_sibling, Leaf)
and child.prev_sibling.type == token.NAME
remove_brackets_around_comma=True,
):
wrap_in_parentheses(node, child, visible=False)
- elif preview and isinstance(child, Node) and node.type == syms.with_stmt:
+ elif isinstance(child, Node) and node.type == syms.with_stmt:
remove_with_parens(child, node)
elif child.type == syms.atom:
if maybe_make_parens_invisible_in_atom(
elif is_one_tuple(child):
wrap_in_parentheses(node, child, visible=True)
elif node.type == syms.import_from:
- # "import from" nodes store parentheses directly as part of
- # the statement
- if is_lpar_token(child):
- assert is_rpar_token(node.children[-1])
- # make parentheses invisible
- child.value = ""
- node.children[-1].value = ""
- elif child.type != token.STAR:
- # insert invisible parentheses
- node.insert_child(index, Leaf(token.LPAR, ""))
- node.append_child(Leaf(token.RPAR, ""))
+ _normalize_import_from(node, child, index)
break
elif (
index == 1
elif not (isinstance(child, Leaf) and is_multiline_string(child)):
wrap_in_parentheses(node, child, visible=False)
- comma_check = child.type == token.COMMA if preview else False
+ comma_check = child.type == token.COMMA
check_lpar = isinstance(child, Leaf) and (
child.value in parens_after or comma_check
)
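
# Small demonstration of the invisible-parens policy this function implements
# (expected outputs are editor assumptions): redundant parentheses become
# invisible, while single-element tuples keep visible ones.
import black

print(black.format_str("def f(x):\n    return (x)\n", mode=black.Mode()))
# expected, roughly: "return x"
print(black.format_str("t = (1,)\n", mode=black.Mode()))
# expected, roughly: "t = (1,)" -- the one-tuple keeps its parentheses
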
+def _normalize_import_from(parent: Node, child: LN, index: int) -> None:
+ # "import from" nodes store parentheses directly as part of
+ # the statement
+ if is_lpar_token(child):
+ assert is_rpar_token(parent.children[-1])
+ # make parentheses invisible
+ child.value = ""
+ parent.children[-1].value = ""
+ elif child.type != token.STAR:
+ # insert invisible parentheses
+ parent.insert_child(index, Leaf(token.LPAR, ""))
+ parent.append_child(Leaf(token.RPAR, ""))
+
+
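
# Sketch of the import-from handling factored out above (expected output is an
# editor assumption): existing parentheses are made invisible and only become
# visible again if the import has to be split across lines.
import black

print(black.format_str("from math import (sqrt)\n", mode=black.Mode()))
# expected, roughly: "from math import sqrt"
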
def remove_await_parens(node: Node) -> None:
if node.children[0].type == token.AWAIT and len(node.children) > 1:
if (
# N.B. We've still removed any redundant nested brackets though :)
opening_bracket = cast(Leaf, node.children[1].children[0])
closing_bracket = cast(Leaf, node.children[1].children[-1])
- bracket_contents = cast(Node, node.children[1].children[1])
- if bracket_contents.type != syms.power:
- ensure_visible(opening_bracket)
- ensure_visible(closing_bracket)
- elif (
- bracket_contents.type == syms.power
- and bracket_contents.children[0].type == token.AWAIT
- ):
- ensure_visible(opening_bracket)
- ensure_visible(closing_bracket)
- # If we are in a nested await then recurse down.
- remove_await_parens(bracket_contents)
+ bracket_contents = node.children[1].children[1]
+ if isinstance(bracket_contents, Node):
+ if bracket_contents.type != syms.power:
+ ensure_visible(opening_bracket)
+ ensure_visible(closing_bracket)
+ elif (
+ bracket_contents.type == syms.power
+ and bracket_contents.children[0].type == token.AWAIT
+ ):
+ ensure_visible(opening_bracket)
+ ensure_visible(closing_bracket)
+ # If we are in a nested await then recurse down.
+ remove_await_parens(bracket_contents)
+
+
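
# Sketch of remove_await_parens, which the hunk above now calls unconditionally
# (expected output is an editor assumption): redundant parentheses directly
# around an awaited expression are dropped.
import black

src = "async def main():\n    await (fetch())\n"
print(black.format_str(src, mode=black.Mode()))
# expected, roughly: "await fetch()"
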
+def _maybe_wrap_cms_in_parens(
+ node: Node, mode: Mode, features: Collection[Feature]
+) -> None:
+ """When enabled and safe, wrap the multiple context managers in invisible parens.
+
+ It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS.
+ """
+ if (
+ Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features
+ or Preview.wrap_multiple_context_managers_in_parens not in mode
+ or len(node.children) <= 2
+ # If it's an atom, it's already wrapped in parens.
+ or node.children[1].type == syms.atom
+ ):
+ return
+ colon_index: Optional[int] = None
+ for i in range(2, len(node.children)):
+ if node.children[i].type == token.COLON:
+ colon_index = i
+ break
+ if colon_index is not None:
+ lpar = Leaf(token.LPAR, "")
+ rpar = Leaf(token.RPAR, "")
+ context_managers = node.children[1:colon_index]
+ for child in context_managers:
+ child.remove()
+ # After wrapping, the with_stmt will look like this:
+ # with_stmt
+ # NAME 'with'
+ # atom
+ # LPAR ''
+ # testlist_gexp
+ # ... <-- context_managers
+ # /testlist_gexp
+ # RPAR ''
+ # /atom
+ # COLON ':'
+ new_child = Node(
+ syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar]
+ )
+ node.insert_child(1, new_child)
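
# Sketch of the wrapping performed above (the exact split is an editor
# assumption): it requires the preview style and target versions that all
# support parenthesized context managers (Python 3.9+).
import black

src = (
    "with make_context_manager_one() as cm1,"
    " make_context_manager_two() as cm2,"
    " make_context_manager_three() as cm3:\n"
    "    pass\n"
)
mode = black.Mode(preview=True, target_versions={black.TargetVersion.PY39})
print(black.format_str(src, mode=mode))
# expected, roughly: the context managers end up inside "with (...):",
# one per line, once the original line is too long.
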
def remove_with_parens(node: Node, parent: Node) -> None:
not remove_brackets_around_comma
and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
)
+ or is_tuple_containing_walrus(node)
):
return False
syms.return_stmt,
syms.except_clause,
syms.funcdef,
+ syms.with_stmt,
# these ones aren't useful to end users, but they do please fuzzers
syms.for_stmt,
syms.del_stmt,
]:
return False
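
# Sketch of the guard strengthened above (expected output is an editor
# assumption): parentheses around a tuple that contains a walrus assignment
# are kept, since removing them would change or break the statement.
import black

print(black.format_str("coords = (x := 1, y := 2)\n", mode=black.Mode()))
# expected, roughly: the parentheses are preserved as written.
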
if not line_str:
line_str = line_to_string(line)
result: List[Line] = []
- for transformed_line in transform(line, features):
+ for transformed_line in transform(line, features, mode):
if str(transformed_line).strip("\n") == line_str:
raise CannotTransform("Line transformer returned an unchanged result")