"""
Generating lines of code.
"""
from functools import partial, wraps
import sys

from typing import Collection, Iterator, List, Optional, Set, Union

from black.nodes import WHITESPACE, RARROW, STATEMENT, STANDALONE_COMMENT
from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
from black.nodes import Visitor, syms, first_child_is_arith, ensure_visible
from black.nodes import is_docstring, is_empty_tuple, is_one_tuple, is_one_tuple_between
from black.nodes import is_name_token, is_lpar_token, is_rpar_token
from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
from black.nodes import wrap_in_parentheses
from black.brackets import max_delimiter_priority_in_atom
from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
from black.lines import Line, line_to_string, is_line_short_enough
from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
from black.comments import generate_comments, list_comments, FMT_OFF
from black.numerics import normalize_numeric_literal
from black.strings import get_string_prefix, fix_docstring
from black.strings import normalize_string_prefix, normalize_string_quotes
from black.trans import Transformer, CannotTransform, StringMerger
from black.trans import StringSplitter, StringParenWrapper, StringParenStripper
from black.mode import Mode
from black.mode import Feature

from blib2to3.pytree import Node, Leaf
from blib2to3.pgen2 import token

# types
LeafID = int
LN = Union[Leaf, Node]


class CannotSplit(CannotTransform):
    """A readable split that fits the allotted line length is impossible."""


# This isn't a dataclass because @dataclass + Generic breaks mypyc.
# See also https://github.com/mypyc/mypyc/issues/827.
class LineGenerator(Visitor[Line]):
    """Generates reformatted Line objects. Empty lines are not emitted.

    Note: destroys the tree it's visiting by mutating prefixes of its leaves
    in ways that will no longer stringify to valid Python code on the tree.
    """

    def __init__(self, mode: Mode) -> None:
        self.mode = mode
        self.current_line: Line
        self.__post_init__()

    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one unnecessary.

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line

    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()
                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.mode.string_normalization and node.type == token.STRING:
                node.value = normalize_string_prefix(node.value)
                node.value = normalize_string_quotes(node.value)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)

    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)

    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments. At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level. Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)

    def visit_stmt(
        self, node: Node, keywords: Set[str], parens: Set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert`, and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
        normalize_invisible_parens(node, parens_after=parens)
        for child in node.children:
            if is_name_token(child) and child.value in keywords:
                yield from self.line()

            yield from self.visit(child)

    def visit_match_case(self, node: Node) -> Iterator[Line]:
        """Visit either a match or case statement."""
        normalize_invisible_parens(node, parens_after=set())

        yield from self.line()
        for child in node.children:
            yield from self.visit(child)

    def visit_suite(self, node: Node) -> Iterator[Line]:
        """Visit a suite."""
        if self.mode.is_pyi and is_stub_suite(node):
            yield from self.visit(node.children[2])
        else:
            yield from self.visit_default(node)

    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        if first_child_is_arith(node):
            wrap_in_parentheses(node, node.children[0], visible=False)
        is_suite_like = node.parent and node.parent.type in STATEMENT
        if is_suite_like:
            if self.mode.is_pyi and is_stub_body(node):
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if (
                not self.mode.is_pyi
                or not node.parent
                or not is_stub_suite(node.parent)
            ):
                yield from self.line()
            yield from self.visit_default(node)

    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC:
                break

        internal_stmt = next(children)
        for child in internal_stmt.children:
            yield from self.visit(child)

    def visit_decorators(self, node: Node) -> Iterator[Line]:
        """Visit decorators."""
        for child in node.children:
            yield from self.line()
            yield from self.visit(child)

    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        yield from self.line()

    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()

    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)

    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)
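
    # Illustrative sketch (not part of the original module): given `-2 ** 8`, the
    # `factor` node's children are the unary `-` and a `power` node for `2 ** 8`.
    # The branch above removes the `power` operand and reinserts it wrapped in a
    # new atom node `( 2 ** 8 )`, so the line is later rendered as `-(2 ** 8)`,
    # matching the precedence the parser already assigned.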

    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        if is_docstring(leaf) and "\\\n" not in leaf.value:
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            docstring = normalize_string_prefix(leaf.value)
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring

            if is_multiline_string(leaf):
                indent = " " * 4 * self.current_line.depth
                docstring = fix_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
            elif not docstring_started_empty:
                # The docstring became empty after stripping; keep a single space
                # so the value doesn't collapse into six consecutive quotes.
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len
            leaf.value = prefix + quote + docstring + quote

        yield from self.visit_default(leaf)
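
    # Illustrative example (not part of the original module): with string
    # normalization on, a docstring written as
    #
    #     def f():
    #         '''  Frobnicate the widget.   '''
    #
    # is re-emitted with normalized quotes, stripped padding, and the enclosing
    # indentation applied:
    #
    #     def f():
    #         """Frobnicate the widget."""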

    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
        Ø: Set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
        self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
        self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators

        # PEP 634
        self.visit_match_stmt = self.visit_match_case
        self.visit_case_block = self.visit_match_case
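
# A rough usage sketch (not part of the original module; `lib2to3_parse` comes from
# black.parsing, and real formatting additionally runs EmptyLineTracker and
# transform_line over the generated lines):
#
#     from black.mode import Mode
#     from black.parsing import lib2to3_parse
#
#     source = "def f(a,b):\n    return(a+b)\n"
#     tree = lib2to3_parse(source)
#     generator = LineGenerator(mode=Mode())
#     for line in generator.visit(tree):
#         print(str(line), end="")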


def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
    line_str = line_to_string(line)

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: List[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if mode.experimental_string_processing:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def:
        transformers = [left_hand_split]
    else:

        def _rhs(
            self: object, line: Line, features: Collection[Feature]
        ) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(
                    right_hand_split(line, mode.line_length, features, omit=omit)
                )
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length. This is true only
                # for simple cases. All others require running more transforms via
                # `transform_line()`. This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], line_length=mode.line_length):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(
                line, line_length=mode.line_length, features=features
            )

        # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
        # __name__ attribute which is needed in `run_transformer` further down.
        # Unfortunately a nested class breaks mypyc too. So a class must be created
        # via type ... https://github.com/mypyc/mypyc/issues/884
        rhs = type("rhs", (), {"__call__": _rhs})()

        if mode.experimental_string_processing:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge, string_paren_strip, string_split, string_paren_wrap, rhs
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        yield line
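
# Illustrative example (not part of the original module): for a line such as
#
#     result = some_object.method_one(argument_one, argument_two, argument_three)
#
# that exceeds the line length, the transformers above are tried in order and the
# first one that succeeds wins; a plain right-hand split would yield
#
#     result = some_object.method_one(
#         argument_one, argument_two, argument_three
#     )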


def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise. This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = head_leaves
    matching_bracket: Optional[Leaf] = None
    for leaf in line.leaves:
        if (
            current_leaves is body_leaves
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is matching_bracket
        ):
            current_leaves = tail_leaves if body_leaves else head_leaves
        current_leaves.append(leaf)
        if current_leaves is head_leaves:
            if leaf.type in OPENING_BRACKETS:
                matching_bracket = leaf
                current_leaves = body_leaves
    if not matching_bracket:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(head_leaves, line, matching_bracket)
    body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result
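
# Illustrative example (not part of the original module): for a long signature like
#
#     def frobnicate(ham: int, spam: str = "x", *, eggs: float = 0.0) -> None: ...
#
# the left-hand split anchors on the first opening bracket, producing
#
#     def frobnicate(
#         ham: int, spam: str = "x", *, eggs: float = 0.0
#     ) -> None: ...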


def right_hand_split(
    line: Line,
    line_length: int,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail. Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()
    head = bracket_split_build_line(head_leaves, line, opening_bracket)
    body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and opening_bracket.type == token.LPAR
        and not opening_bracket.value
        # the closing bracket is an optional paren
        and closing_bracket.type == token.RPAR
        and not closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(body, line_length, omit_on_explode=omit)
    ):
        omit = {id(closing_bracket), *omit}
        try:
            yield from right_hand_split(line, line_length, features=features, omit=omit)
            return

        except CannotSplit as e:
            if not (
                can_be_split(body)
                or is_line_short_enough(body, line_length=line_length)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                ) from e

            elif head.contains_multiline_strings() or tail.contains_multiline_strings():
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                ) from e

    ensure_visible(opening_bracket)
    ensure_visible(closing_bracket)
    for result in (head, body, tail):
        if result:
            yield result
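
# Illustrative example (not part of the original module): for
#
#     value = compute(transform(data), cache=make_cache(size=128))
#
# the split anchors on the last closing bracket that isn't omitted, here the one
# closing `compute(`, so in the simplest case the result is
#
#     value = compute(
#         transform(data), cache=make_cache(size=128)
#     )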


def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
    tail_len = len(str(tail).strip())
    if not body:
        if tail_len == 0:
            raise CannotSplit("Splitting brackets produced the same line")

        if tail_len < 3:
            raise CannotSplit(
                f"Splitting brackets on an empty body to save {tail_len} characters is"
                " not worth it"
            )


def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
    result = Line(mode=original.mode, depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (
                original.is_def
                and opening_bracket.value == "("
                and not any(leaf.type == token.COMMA for leaf in leaves)
                # In particular, don't add one within a parenthesized return annotation.
                # Unfortunately the indicator we're in a return annotation (RARROW) may
                # be defined directly in the parent node, the parent of the parent ...
                # and so on depending on how complex the return annotation is.
                # This isn't perfect and there's some false negatives but they are in
                # contexts where a comma is actually fine.
                and not any(
                    node.prev_sibling.type == RARROW
                    for node in (
                        leaves[0].parent,
                        getattr(leaves[0].parent, "parent", None),
                    )
                    if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
                )
            )

            if original.is_import or no_commas:
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    # Populate the line with the given leaves and their comments.
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body and should_split_line(result, opening_bracket):
        result.should_split_rhs = True
    return result
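
# Illustrative example (not part of the original module): when the body of
#
#     from pkg import (alpha, beta, gamma)
#
# is rebuilt here, `original.is_import` holds, so a trailing comma is appended after
# `gamma`; once the rest of the pipeline splits on commas the statement ends up as
#
#     from pkg import (
#         alpha,
#         beta,
#         gamma,
#     )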


def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
        for line in split_func(line, features):
            normalize_prefix(line.leaves[0], inside_brackets=True)
            yield line

    return split_wrapper


@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty") from None

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            new_comma = Leaf(token.COMMA, ",")
            current_line.append(new_comma)
        yield current_line
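
# Illustrative example (not part of the original module): with commas as the highest
# priority delimiter, a bracket body such as
#
#     first_item, second_item, third_item
#
# is emitted one element per line, with a trailing comma added when
# `trailing_comma_safe` holds:
#
#     first_item,
#     second_item,
#     third_item,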


@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line


def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if not inside_brackets:
        spl = leaf.prefix.split("#")
        if "\\" not in spl[0]:
            nl_count = spl[-1].count("\n")
            if len(spl) > 1:
                nl_count -= 1
            leaf.prefix = "\n" * nl_count
            return

    leaf.prefix = ""


def normalize_invisible_parens(node: Node, parens_after: Set[str]) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return

    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(child, parens_after=parens_after)

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(child, parent=node):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement.
                if is_lpar_token(child):
                    assert is_rpar_token(node.children[-1])
                    # make parentheses invisible
                    child.value = ""
                    node.children[-1].value = ""
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                wrap_in_parentheses(node, child, visible=False)

        check_lpar = isinstance(child, Leaf) and child.value in parens_after
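
# Illustrative example (not part of the original module): in `return (1 + 2)` the
# atom that follows `return` gets its parentheses made invisible, so the statement
# is rendered as `return 1 + 2`, while a one-element tuple such as `return (1,)`
# keeps visible parentheses.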


def maybe_make_parens_invisible_in_atom(node: LN, parent: LN) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.
    """
    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
    ):
        return False

    if is_walrus_assignment(node):
        return False

    if parent.type in {syms.for_stmt, syms.del_stmt}:
        # these ones aren't useful to end users, but they do please fuzzers
        return False

    first = node.children[0]
    last = node.children[-1]
    if is_lpar_token(first) and is_rpar_token(last):
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""
        last.value = ""
        maybe_make_parens_invisible_in_atom(middle, parent=parent)

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        return False

    return True


def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""

    if not (opening_bracket.parent and opening_bracket.value in "[{("):
        return False

    # We're essentially checking if the body is delimited by commas and there's more
    # than one of them (we're excluding the trailing comma and if the delimiter
    # priority is still commas, that means there's more).
    exclude = set()
    trailing_comma = False
    try:
        last_leaf = line.leaves[-1]
        if last_leaf.type == token.COMMA:
            trailing_comma = True
            exclude.add(id(last_leaf))
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
    except (IndexError, ValueError):
        return False

    return max_priority == COMMA_PRIORITY and (
        (line.mode.magic_trailing_comma and trailing_comma)
        # always explode imports
        or opening_bracket.parent.type in {syms.atom, syms.import_from}
    )
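
# Illustrative example (not part of the original module): for
#
#     items = [first, second,]
#
# the body is comma-delimited and ends with a magic trailing comma, so this
# predicate returns True and the collection is later exploded onto one element
# per line.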


def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too). First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """

    omit: Set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    length = 4 * line.depth
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
        length += leaf_length
        if length > line_length:
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and not is_one_tuple_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (e.g. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and not is_one_tuple_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
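
# Illustrative example (not part of the original module): for a chained call such as
#
#     config = load_settings(path).merge(defaults).validate()
#
# the yielded sets progressively let the right-hand split skip the trailing
# `.validate()` and `.merge(defaults)` bracket pairs, so the split can land on an
# earlier bracket once the later trailers already fit on one line.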


def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> List[Line]:
    if not line_str:
        line_str = line_to_string(line)
    result: List[Line] = []
    for transformed_line in transform(line, features):
        if str(transformed_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")

        result.extend(transform_line(transformed_line, mode=mode, features=features))

    if (
        transform.__class__.__name__ != "rhs"
        or not line.bracket_tracker.invisible
        or any(bracket.value for bracket in line.bracket_tracker.invisible)
        or line.contains_multiline_strings()
        or result[0].contains_uncollapsable_type_comments()
        or result[0].contains_unsplittable_type_ignore()
        or is_line_short_enough(result[0], line_length=mode.line_length)
        # If any leaves have no parents (which _can_ occur since
        # `transform(line)` potentially destroys the line's underlying node
        # structure), then we can't proceed. Doing so would cause the below
        # call to `append_leaves()` to fail.
        or any(leaf.parent is None for leaf in line.leaves)
    ):
        return result

    line_copy = line.clone()
    append_leaves(line_copy, line, line.leaves)
    features_fop = set(features) | {Feature.FORCE_OPTIONAL_PARENTHESES}
    second_opinion = run_transformer(
        line_copy, transform, mode, features_fop, line_str=line_str
    )
    if all(
        is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
    ):
        result = second_opinion
    return result