# src/black/linegen.py
"""
Generating lines of code.
"""
from functools import partial, wraps
import sys
from typing import Collection, Iterator, List, Optional, Set, Union

from black.nodes import WHITESPACE, RARROW, STATEMENT, STANDALONE_COMMENT
from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
from black.nodes import Visitor, syms, first_child_is_arith, ensure_visible
from black.nodes import is_docstring, is_empty_tuple, is_one_tuple, is_one_tuple_between
from black.nodes import is_name_token, is_lpar_token, is_rpar_token
from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
from black.nodes import wrap_in_parentheses
from black.brackets import max_delimiter_priority_in_atom
from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
from black.lines import Line, line_to_string, is_line_short_enough
from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
from black.comments import generate_comments, list_comments, FMT_OFF
from black.numerics import normalize_numeric_literal
from black.strings import get_string_prefix, fix_docstring
from black.strings import normalize_string_prefix, normalize_string_quotes
from black.trans import Transformer, CannotTransform, StringMerger, StringSplitter
from black.trans import StringParenWrapper, StringParenStripper, hug_power_op
from black.mode import Mode, Feature, Preview

from blib2to3.pytree import Node, Leaf
from blib2to3.pgen2 import token


# types
LeafID = int
LN = Union[Leaf, Node]


class CannotSplit(CannotTransform):
    """A readable split that fits the allotted line length is impossible."""


# This isn't a dataclass because @dataclass + Generic breaks mypyc.
# See also https://github.com/mypyc/mypyc/issues/827.
class LineGenerator(Visitor[Line]):
    """Generates reformatted Line objects.  Empty lines are not emitted.

    Note: destroys the tree it's visiting by mutating prefixes of its leaves
    in ways that will no longer stringify to valid Python code on the tree.
    """

    def __init__(self, mode: Mode) -> None:
        self.mode = mode
        self.current_line: Line
        self.__post_init__()

    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one is unnecessary.

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line

    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.mode.string_normalization and node.type == token.STRING:
                node.value = normalize_string_prefix(node.value)
                node.value = normalize_string_quotes(node.value)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)

    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)

    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments.  At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level.  Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)

    def visit_stmt(
        self, node: Node, keywords: Set[str], parens: Set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert`, and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
        normalize_invisible_parens(node, parens_after=parens)
        for child in node.children:
            if is_name_token(child) and child.value in keywords:
                yield from self.line()

            yield from self.visit(child)

    def visit_match_case(self, node: Node) -> Iterator[Line]:
        """Visit either a match or case statement."""
        normalize_invisible_parens(node, parens_after=set())

        yield from self.line()
        for child in node.children:
            yield from self.visit(child)

    def visit_suite(self, node: Node) -> Iterator[Line]:
        """Visit a suite."""
        if self.mode.is_pyi and is_stub_suite(node):
            yield from self.visit(node.children[2])
        else:
            yield from self.visit_default(node)

    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        if first_child_is_arith(node):
            wrap_in_parentheses(node, node.children[0], visible=False)
        is_suite_like = node.parent and node.parent.type in STATEMENT
        if is_suite_like:
            if self.mode.is_pyi and is_stub_body(node):
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if (
                not self.mode.is_pyi
                or not node.parent
                or not is_stub_suite(node.parent)
            ):
                yield from self.line()
            yield from self.visit_default(node)

    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC:
                break

        internal_stmt = next(children)
        for child in internal_stmt.children:
            yield from self.visit(child)

    def visit_decorators(self, node: Node) -> Iterator[Line]:
        """Visit decorators."""
        for child in node.children:
            yield from self.line()
            yield from self.visit(child)

    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        yield from self.line()

    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()

    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)

    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)

    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        if is_docstring(leaf) and "\\\n" not in leaf.value:
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            docstring = normalize_string_prefix(leaf.value)
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring

            if is_multiline_string(leaf):
                indent = " " * 4 * self.current_line.depth
                docstring = fix_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
            elif not docstring_started_empty:
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len
            leaf.value = prefix + quote + docstring + quote

        yield from self.visit_default(leaf)

    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
        Ø: Set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
        self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
        self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators

        # PEP 634
        self.visit_match_stmt = self.visit_match_case
        self.visit_case_block = self.visit_match_case


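# Illustrative usage sketch (not part of this module): black's formatting entry
# point drives the visitor roughly like this, feeding every generated Line
# through transform_line() below.  The names `src_node`, `dst_contents`, and
# `split_features` are placeholders, not the real driver's variables.
#
#     line_generator = LineGenerator(mode=mode)
#     for current_line in line_generator.visit(src_node):
#         for formatted in transform_line(
#             current_line, mode=mode, features=split_features
#         ):
#             dst_contents.append(str(formatted))
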
def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: List[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if Preview.string_processing in mode:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def:
        transformers = [left_hand_split]
    else:

        def _rhs(
            self: object, line: Line, features: Collection[Feature]
        ) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(
                    right_hand_split(line, mode.line_length, features, omit=omit)
                )
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length.  This is true only
                # for simple cases.  All others require running more transforms via
                # `transform_line()`.  This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], line_length=mode.line_length):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(
                line, line_length=mode.line_length, features=features
            )

        # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
        # __name__ attribute which is needed in `run_transformer` further down.
        # Unfortunately a nested class breaks mypyc too. So a class must be created
        # via type ... https://github.com/mypyc/mypyc/issues/884
        rhs = type("rhs", (), {"__call__": _rhs})()

        if Preview.string_processing in mode:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]
    # It's always safe to attempt hugging of power operations and pretty much every line
    # could match.
    transformers.append(hug_power_op)

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        yield line


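# Illustrative sketch of the `rhs` strategy above (hypothetical input, not taken
# from the original source):
#
#     result = registry.lookup(name).configure(options).build(context)
#
# The first attempt splits on the last bracket pair (`.build(context)`); each
# subsequent attempt omits one more trailing pair, moving the split to an
# earlier bracket pair while the omitted trailers stay glued together.  If no
# attempt produces a short-enough first line, a best-effort right_hand_split()
# with no omits is used.
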
def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise.  This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = head_leaves
    matching_bracket: Optional[Leaf] = None
    for leaf in line.leaves:
        if (
            current_leaves is body_leaves
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is matching_bracket
        ):
            current_leaves = tail_leaves if body_leaves else head_leaves
        current_leaves.append(leaf)
        if current_leaves is head_leaves:
            if leaf.type in OPENING_BRACKETS:
                matching_bracket = leaf
                current_leaves = body_leaves
    if not matching_bracket:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(head_leaves, line, matching_bracket)
    body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result


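# Illustrative example for left_hand_split above (hypothetical code, not taken
# from the original source): a `def` that exceeds the line length is split at
# its first bracket pair, so roughly
#
#     def process_records(records: List[Record], *, strict: bool = False) -> None:
#
# becomes
#
#     def process_records(
#         records: List[Record], *, strict: bool = False
#     ) -> None:
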
def right_hand_split(
    line: Line,
    line_length: int,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing bracket, that means the split failed and
        # all content is in the tail.  Otherwise, if `head_leaves` is empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()
    head = bracket_split_build_line(head_leaves, line, opening_bracket)
    body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and opening_bracket.type == token.LPAR
        and not opening_bracket.value
        # the closing bracket is an optional paren
        and closing_bracket.type == token.RPAR
        and not closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(body, line_length, omit_on_explode=omit)
    ):
        omit = {id(closing_bracket), *omit}
        try:
            yield from right_hand_split(line, line_length, features=features, omit=omit)
            return

        except CannotSplit as e:
            if not (
                can_be_split(body)
                or is_line_short_enough(body, line_length=line_length)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                ) from e

            elif head.contains_multiline_strings() or tail.contains_multiline_strings():
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                ) from e

    ensure_visible(opening_bracket)
    ensure_visible(closing_bracket)
    for result in (head, body, tail):
        if result:
            yield result


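# Illustrative example for right_hand_split above (hypothetical code, not taken
# from the original source): the split starts from the last bracket pair, so a
# long assignment whose right-hand side is a call ends up roughly as
#
#     totals = compute_totals(first_series, second_series, normalize=True)
#
#     totals = compute_totals(
#         first_series, second_series, normalize=True
#     )
#
# When the split happened on invisible (optional) parentheses, the function
# recurses with that pair added to `omit` to look for a better split without them.
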
def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
    tail_len = len(str(tail).strip())
    if not body:
        if tail_len == 0:
            raise CannotSplit("Splitting brackets produced the same line")

        elif tail_len < 3:
            raise CannotSplit(
                f"Splitting brackets on an empty body to save {tail_len} characters is"
                " not worth it"
            )


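# Illustrative failure case for the check above (hypothetical code, not taken
# from the original source): splitting `print()` at its brackets would produce
# the head "print(", an empty body, and the tail ")"; an empty body with a
# one-character tail trips the "not worth it" branch and raises CannotSplit.
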
def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
    result = Line(mode=original.mode, depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (
                original.is_def
                and opening_bracket.value == "("
                and not any(leaf.type == token.COMMA for leaf in leaves)
                # In particular, don't add one within a parenthesized return annotation.
                # Unfortunately the indicator we're in a return annotation (RARROW) may
                # be defined directly in the parent node, the parent of the parent ...
                # and so on depending on how complex the return annotation is.
                # This isn't perfect and there are some false negatives but they are in
                # contexts where a comma is actually fine.
                and not any(
                    node.prev_sibling.type == RARROW
                    for node in (
                        leaves[0].parent,
                        getattr(leaves[0].parent, "parent", None),
                    )
                    if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
                )
            )

            if original.is_import or no_commas:
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body and should_split_line(result, opening_bracket):
        result.should_split_rhs = True
    return result


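# Illustrative example of the trailing-comma handling above (hypothetical code,
# not taken from the original source): when an import is too long to stay on one
# line, a comma is appended after the last non-comment leaf of its body, so
#
#     from package.module import (first_name, second_name)
#
# ends up, after the later per-element split, roughly as
#
#     from package.module import (
#         first_name,
#         second_name,
#     )
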
def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
        for line in split_func(line, features):
            normalize_prefix(line.leaves[0], inside_brackets=True)
            yield line

    return split_wrapper


@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty") from None

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            new_comma = Leaf(token.COMMA, ",")
            current_line.append(new_comma)
        yield current_line


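# Illustrative example for delimiter_split above (hypothetical input, not taken
# from the original source): for a bracketed body whose highest-priority
# delimiters are commas, each element is emitted on its own line and, when the
# relevant Feature permits it, a trailing comma is appended:
#
#     first_argument, second_argument, third_argument
#
#     first_argument,
#     second_argument,
#     third_argument,
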
@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line


def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if not inside_brackets:
        spl = leaf.prefix.split("#")
        if "\\" not in spl[0]:
            nl_count = spl[-1].count("\n")
            if len(spl) > 1:
                nl_count -= 1
            leaf.prefix = "\n" * nl_count
            return

    leaf.prefix = ""


def normalize_invisible_parens(node: Node, parens_after: Set[str]) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return
    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(child, parens_after=parens_after)

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(child, parent=node):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement
                if is_lpar_token(child):
                    assert is_rpar_token(node.children[-1])
                    # make parentheses invisible
                    child.value = ""
                    node.children[-1].value = ""
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                wrap_in_parentheses(node, child, visible=False)

        check_lpar = isinstance(child, Leaf) and child.value in parens_after


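# Illustrative examples for normalize_invisible_parens above (hypothetical code,
# not taken from the original source), assuming "return" is in `parens_after` as
# set up in LineGenerator.__post_init__:
#
#     return (value)    ->  return value      (redundant parens made invisible)
#     return value      ->  return value      (invisible parens added so a later
#                                              split can make use of them)
#     return (value,)   ->  return (value,)   (one-tuples keep visible parens)
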
def maybe_make_parens_invisible_in_atom(node: LN, parent: LN) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.

    """

    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
    ):
        return False

    if is_walrus_assignment(node):
        if parent.type in [
            syms.annassign,
            syms.expr_stmt,
            syms.assert_stmt,
            syms.return_stmt,
            # these ones aren't useful to end users, but they do please fuzzers
            syms.for_stmt,
            syms.del_stmt,
        ]:
            return False

    first = node.children[0]
    last = node.children[-1]
    if is_lpar_token(first) and is_rpar_token(last):
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""
        last.value = ""
        maybe_make_parens_invisible_in_atom(middle, parent=parent)

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        return False

    return True


def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""

    if not (opening_bracket.parent and opening_bracket.value in "[{("):
        return False

    # We're essentially checking if the body is delimited by commas and there's more
    # than one of them (we exclude the trailing comma, so if the max delimiter
    # priority is still commas, there must be more than one).
    exclude = set()
    trailing_comma = False
    try:
        last_leaf = line.leaves[-1]
        if last_leaf.type == token.COMMA:
            trailing_comma = True
            exclude.add(id(last_leaf))
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
    except (IndexError, ValueError):
        return False

    return max_priority == COMMA_PRIORITY and (
        (line.mode.magic_trailing_comma and trailing_comma)
        # always explode imports
        or opening_bracket.parent.type in {syms.atom, syms.import_from}
    )


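# Illustrative example for should_split_line above (hypothetical code, not taken
# from the original source): a collection literal that carries a magic trailing
# comma is exploded even though it would fit on one line, roughly
#
#     names = ["alice", "bob",]
#
#     names = [
#         "alice",
#         "bob",
#     ]
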
def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too).  First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """

    omit: Set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    length = 4 * line.depth
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
        length += leaf_length
        if length > line_length:
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and not is_one_tuple_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (i.e. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and not is_one_tuple_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf


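# Illustrative example for generate_trailers_to_omit above (hypothetical code,
# not taken from the original source): for a chained call such as
#
#     config.load(path).validate(schema).apply(settings)
#
# the generator first yields an empty set (split on the last pair, ".apply(...)"),
# then sets that additionally omit the closing brackets of ".apply(settings)" and
# ".validate(schema)", letting right_hand_split try earlier bracket pairs while
# the omitted trailers stay on one line (as long as everything scanned so far
# still fits the line length).
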
def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> List[Line]:
    if not line_str:
        line_str = line_to_string(line)
    result: List[Line] = []
    for transformed_line in transform(line, features):
        if str(transformed_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")

        result.extend(transform_line(transformed_line, mode=mode, features=features))

    if (
        transform.__class__.__name__ != "rhs"
        or not line.bracket_tracker.invisible
        or any(bracket.value for bracket in line.bracket_tracker.invisible)
        or line.contains_multiline_strings()
        or result[0].contains_uncollapsable_type_comments()
        or result[0].contains_unsplittable_type_ignore()
        or is_line_short_enough(result[0], line_length=mode.line_length)
        # If any leaves have no parents (which _can_ occur since
        # `transform(line)` potentially destroys the line's underlying node
        # structure), then we can't proceed. Doing so would cause the below
        # call to `append_leaves()` to fail.
        or any(leaf.parent is None for leaf in line.leaves)
    ):
        return result

    line_copy = line.clone()
    append_leaves(line_copy, line, line.leaves)
    features_fop = set(features) | {Feature.FORCE_OPTIONAL_PARENTHESES}
    second_opinion = run_transformer(
        line_copy, transform, mode, features_fop, line_str=line_str
    )
    if all(
        is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
    ):
        result = second_opinion
    return result