[etc/vim.git] / src / black / linegen.py
Commit: Resolve new flake8-bugbear errors (B020) (GH-2950)
1 """
2 Generating lines of code.
3 """
4 from functools import partial, wraps
5 import sys
6 from typing import Collection, Iterator, List, Optional, Set, Union
7
8 from black.nodes import WHITESPACE, RARROW, STATEMENT, STANDALONE_COMMENT
9 from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
10 from black.nodes import Visitor, syms, is_arith_like, ensure_visible
11 from black.nodes import (
12     is_docstring,
13     is_empty_tuple,
14     is_one_tuple,
15     is_one_sequence_between,
16 )
17 from black.nodes import is_name_token, is_lpar_token, is_rpar_token
18 from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
19 from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
20 from black.nodes import wrap_in_parentheses
21 from black.brackets import max_delimiter_priority_in_atom
22 from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
23 from black.lines import Line, line_to_string, is_line_short_enough
24 from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
25 from black.comments import generate_comments, list_comments, FMT_OFF
26 from black.numerics import normalize_numeric_literal
27 from black.strings import get_string_prefix, fix_docstring
28 from black.strings import normalize_string_prefix, normalize_string_quotes
29 from black.trans import Transformer, CannotTransform, StringMerger, StringSplitter
30 from black.trans import StringParenWrapper, StringParenStripper, hug_power_op
31 from black.mode import Mode, Feature, Preview
32
33 from blib2to3.pytree import Node, Leaf
34 from blib2to3.pgen2 import token
35
36
37 # types
38 LeafID = int
39 LN = Union[Leaf, Node]
40
41
42 class CannotSplit(CannotTransform):
43     """A readable split that fits the allotted line length is impossible."""
44
45
46 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
47 # See also https://github.com/mypyc/mypyc/issues/827.
48 class LineGenerator(Visitor[Line]):
49     """Generates reformatted Line objects.  Empty lines are not emitted.
50
51     Note: destroys the tree it's visiting by mutating prefixes of its leaves
52     in ways that mean the tree will no longer stringify to valid Python code.
53     """
54
55     def __init__(self, mode: Mode) -> None:
56         self.mode = mode
57         self.current_line: Line
58         self.__post_init__()
59
60     def line(self, indent: int = 0) -> Iterator[Line]:
61         """Generate a line.
62
63         If the line is empty, only emit if it makes sense.
64         If the line is too long, split it first and then generate.
65
66         If any lines were generated, set up a new current_line.
67         """
68         if not self.current_line:
69             self.current_line.depth += indent
70             return  # Line is empty, don't emit. Creating a new one is unnecessary.
71
72         complete_line = self.current_line
73         self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
74         yield complete_line
75
76     def visit_default(self, node: LN) -> Iterator[Line]:
77         """Default `visit_*()` implementation. Recurses to children of `node`."""
78         if isinstance(node, Leaf):
79             any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
80             for comment in generate_comments(node, preview=self.mode.preview):
81                 if any_open_brackets:
82                     # any comment within brackets is subject to splitting
83                     self.current_line.append(comment)
84                 elif comment.type == token.COMMENT:
85                     # regular trailing comment
86                     self.current_line.append(comment)
87                     yield from self.line()
88
89                 else:
90                     # regular standalone comment
91                     yield from self.line()
92
93                     self.current_line.append(comment)
94                     yield from self.line()
95
96             normalize_prefix(node, inside_brackets=any_open_brackets)
97             if self.mode.string_normalization and node.type == token.STRING:
98                 node.value = normalize_string_prefix(node.value)
99                 node.value = normalize_string_quotes(node.value)
100             if node.type == token.NUMBER:
101                 normalize_numeric_literal(node)
102             if node.type not in WHITESPACE:
103                 self.current_line.append(node)
104         yield from super().visit_default(node)
105
106     def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
107         """Increase indentation level, maybe yield a line."""
108         # In blib2to3 INDENT never holds comments.
109         yield from self.line(+1)
110         yield from self.visit_default(node)
111
112     def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
113         """Decrease indentation level, maybe yield a line."""
114         # The current line might still wait for trailing comments.  At DEDENT time
115         # there won't be any (they would be prefixes on the preceding NEWLINE).
116         # Emit the line then.
117         yield from self.line()
118
119         # While DEDENT has no value, its prefix may contain standalone comments
120         # that belong to the current indentation level.  Get 'em.
121         yield from self.visit_default(node)
122
123         # Finally, emit the dedent.
124         yield from self.line(-1)
125
126     def visit_stmt(
127         self, node: Node, keywords: Set[str], parens: Set[str]
128     ) -> Iterator[Line]:
129         """Visit a statement.
130
131         This implementation is shared for `if`, `while`, `for`, `try`, `except`,
132         `def`, `with`, `class`, `assert`, and assignments.
133
134         The relevant Python language `keywords` for a given statement will be
135         NAME leaves within it. This method puts those on a separate line.
136
137         `parens` holds a set of string leaf values immediately after which
138         invisible parens should be put.
139         """
140         normalize_invisible_parens(node, parens_after=parens, preview=self.mode.preview)
141         for child in node.children:
142             if is_name_token(child) and child.value in keywords:
143                 yield from self.line()
144
145             yield from self.visit(child)
146
147     def visit_match_case(self, node: Node) -> Iterator[Line]:
148         """Visit either a match or case statement."""
149         normalize_invisible_parens(node, parens_after=set(), preview=self.mode.preview)
150
151         yield from self.line()
152         for child in node.children:
153             yield from self.visit(child)
154
155     def visit_suite(self, node: Node) -> Iterator[Line]:
156         """Visit a suite."""
157         if self.mode.is_pyi and is_stub_suite(node):
158             yield from self.visit(node.children[2])
159         else:
160             yield from self.visit_default(node)
161
162     def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
163         """Visit a statement without nested statements."""
164         prev_type: Optional[int] = None
165         for child in node.children:
166             if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
167                 wrap_in_parentheses(node, child, visible=False)
168             prev_type = child.type
169
170         is_suite_like = node.parent and node.parent.type in STATEMENT
171         if is_suite_like:
172             if self.mode.is_pyi and is_stub_body(node):
173                 yield from self.visit_default(node)
174             else:
175                 yield from self.line(+1)
176                 yield from self.visit_default(node)
177                 yield from self.line(-1)
178
179         else:
180             if (
181                 not self.mode.is_pyi
182                 or not node.parent
183                 or not is_stub_suite(node.parent)
184             ):
185                 yield from self.line()
186             yield from self.visit_default(node)
187
188     def visit_async_stmt(self, node: Node) -> Iterator[Line]:
189         """Visit `async def`, `async for`, `async with`."""
190         yield from self.line()
191
192         children = iter(node.children)
193         for child in children:
194             yield from self.visit(child)
195
196             if child.type == token.ASYNC:
197                 break
198
199         internal_stmt = next(children)
200         for child in internal_stmt.children:
201             yield from self.visit(child)
202
203     def visit_decorators(self, node: Node) -> Iterator[Line]:
204         """Visit decorators."""
205         for child in node.children:
206             yield from self.line()
207             yield from self.visit(child)
208
209     def visit_power(self, node: Node) -> Iterator[Line]:
210         for idx, leaf in enumerate(node.children[:-1]):
211             next_leaf = node.children[idx + 1]
212
213             if not isinstance(leaf, Leaf):
214                 continue
215
216             value = leaf.value.lower()
217             if (
218                 leaf.type == token.NUMBER
219                 and next_leaf.type == syms.trailer
220                 # Ensure that we are in an attribute trailer
221                 and next_leaf.children[0].type == token.DOT
222                 # It shouldn't wrap hexadecimal, binary and octal literals
223                 and not value.startswith(("0x", "0b", "0o"))
224                 # It shouldn't wrap complex literals
225                 and "j" not in value
226             ):
227                 wrap_in_parentheses(node, leaf)
228
229         yield from self.visit_default(node)
230
231     def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
232         """Remove a semicolon and put the other statement on a separate line."""
233         yield from self.line()
234
235     def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
236         """End of file. Process outstanding comments and end with a newline."""
237         yield from self.visit_default(leaf)
238         yield from self.line()
239
240     def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
241         if not self.current_line.bracket_tracker.any_open_brackets():
242             yield from self.line()
243         yield from self.visit_default(leaf)
244
245     def visit_factor(self, node: Node) -> Iterator[Line]:
246         """Force parentheses between a unary op and a binary power:
247
248         -2 ** 8 -> -(2 ** 8)
249         """
250         _operator, operand = node.children
251         if (
252             operand.type == syms.power
253             and len(operand.children) == 3
254             and operand.children[1].type == token.DOUBLESTAR
255         ):
256             lpar = Leaf(token.LPAR, "(")
257             rpar = Leaf(token.RPAR, ")")
258             index = operand.remove() or 0
259             node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
260         yield from self.visit_default(node)
261
262     def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
263         if is_docstring(leaf) and "\\\n" not in leaf.value:
264             # We're ignoring docstrings with backslash newline escapes because changing
265             # indentation of those changes the AST representation of the code.
266             docstring = normalize_string_prefix(leaf.value)
267             prefix = get_string_prefix(docstring)
268             docstring = docstring[len(prefix) :]  # Remove the prefix
269             quote_char = docstring[0]
270             # A natural way to remove the outer quotes is to do:
271             #   docstring = docstring.strip(quote_char)
272             # but that breaks on """""x""" (which is '""x').
273             # So we actually need to remove the first character and the next two
274             # characters but only if they are the same as the first.
275             quote_len = 1 if docstring[1] != quote_char else 3
276             docstring = docstring[quote_len:-quote_len]
277             docstring_started_empty = not docstring
278
279             if is_multiline_string(leaf):
280                 indent = " " * 4 * self.current_line.depth
281                 docstring = fix_docstring(docstring, indent)
282             else:
283                 docstring = docstring.strip()
284
285             if docstring:
286                 # Add some padding if the docstring starts / ends with a quote mark.
287                 if docstring[0] == quote_char:
288                     docstring = " " + docstring
289                 if docstring[-1] == quote_char:
290                     docstring += " "
291                 if docstring[-1] == "\\":
292                     backslash_count = len(docstring) - len(docstring.rstrip("\\"))
293                     if backslash_count % 2:
294                     # Odd number of trailing backslashes, add some padding to
295                         # avoid escaping the closing string quote.
296                         docstring += " "
297             elif not docstring_started_empty:
298                 docstring = " "
299
300             # We could enforce triple quotes at this point.
301             quote = quote_char * quote_len
302             leaf.value = prefix + quote + docstring + quote
303
304         yield from self.visit_default(leaf)
305
306     def __post_init__(self) -> None:
307         """You are in a twisty little maze of passages."""
308         self.current_line = Line(mode=self.mode)
309
310         v = self.visit_stmt
311         Ø: Set[str] = set()
312         self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
313         self.visit_if_stmt = partial(
314             v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
315         )
316         self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
317         self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
318         self.visit_try_stmt = partial(
319             v, keywords={"try", "except", "else", "finally"}, parens=Ø
320         )
321         self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
322         self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
323         self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
324         self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
325         self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
326         self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
327         self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
328         self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
329         self.visit_async_funcdef = self.visit_async_stmt
330         self.visit_decorated = self.visit_decorators
331
332         # PEP 634
333         self.visit_match_stmt = self.visit_match_case
334         self.visit_case_block = self.visit_match_case
335
336
337 def transform_line(
338     line: Line, mode: Mode, features: Collection[Feature] = ()
339 ) -> Iterator[Line]:
340     """Transform a `line`, potentially splitting it into many lines.
341
342     They should fit in the allotted `line_length` but might not be able to.
343
344     `features` are syntactical features that may be used in the output.
345     """
346     if line.is_comment:
347         yield line
348         return
349
350     line_str = line_to_string(line)
351
352     ll = mode.line_length
353     sn = mode.string_normalization
354     string_merge = StringMerger(ll, sn)
355     string_paren_strip = StringParenStripper(ll, sn)
356     string_split = StringSplitter(ll, sn)
357     string_paren_wrap = StringParenWrapper(ll, sn)
358
359     transformers: List[Transformer]
360     if (
361         not line.contains_uncollapsable_type_comments()
362         and not line.should_split_rhs
363         and not line.magic_trailing_comma
364         and (
365             is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
366             or line.contains_unsplittable_type_ignore()
367         )
368         and not (line.inside_brackets and line.contains_standalone_comments())
369     ):
370         # Only apply basic string preprocessing, since lines shouldn't be split here.
371         if Preview.string_processing in mode:
372             transformers = [string_merge, string_paren_strip]
373         else:
374             transformers = []
375     elif line.is_def:
376         transformers = [left_hand_split]
377     else:
378
379         def _rhs(
380             self: object, line: Line, features: Collection[Feature]
381         ) -> Iterator[Line]:
382             """Wraps calls to `right_hand_split`.
383
384             The calls increasingly `omit` right-hand trailers (bracket pairs with
385             content), meaning the trailers get glued together to split on another
386             bracket pair instead.
387             """
388             for omit in generate_trailers_to_omit(line, mode.line_length):
389                 lines = list(
390                     right_hand_split(line, mode.line_length, features, omit=omit)
391                 )
392                 # Note: this check is only able to figure out if the first line of the
393                 # *current* transformation fits in the line length.  This is true only
394                 # for simple cases.  All others require running more transforms via
395                 # `transform_line()`.  This check doesn't know if those would succeed.
396                 if is_line_short_enough(lines[0], line_length=mode.line_length):
397                     yield from lines
398                     return
399
400             # All splits failed, best effort split with no omits.
401             # This mostly happens to multiline strings that are by definition
402             # reported as not fitting a single line, as well as lines that contain
403             # trailing commas (those have to be exploded).
404             yield from right_hand_split(
405                 line, line_length=mode.line_length, features=features
406             )
407
408         # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
409         # __name__ attribute which is needed in `run_transformer` further down.
410         # Unfortunately a nested class breaks mypyc too. So a class must be created
411         # via type ... https://github.com/mypyc/mypyc/issues/884
412         rhs = type("rhs", (), {"__call__": _rhs})()
413
414         if Preview.string_processing in mode:
415             if line.inside_brackets:
416                 transformers = [
417                     string_merge,
418                     string_paren_strip,
419                     string_split,
420                     delimiter_split,
421                     standalone_comment_split,
422                     string_paren_wrap,
423                     rhs,
424                 ]
425             else:
426                 transformers = [
427                     string_merge,
428                     string_paren_strip,
429                     string_split,
430                     string_paren_wrap,
431                     rhs,
432                 ]
433         else:
434             if line.inside_brackets:
435                 transformers = [delimiter_split, standalone_comment_split, rhs]
436             else:
437                 transformers = [rhs]
438     # It's always safe to attempt hugging of power operations and pretty much every line
439     # could match.
440     transformers.append(hug_power_op)
441
442     for transform in transformers:
443         # We are accumulating lines in `result` because we might want to abort
444         # mission and return the original line in the end, or attempt a different
445         # split altogether.
446         try:
447             result = run_transformer(line, transform, mode, features, line_str=line_str)
448         except CannotTransform:
449             continue
450         else:
451             yield from result
452             break
453
454     else:
455         yield line
456
457
458 def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
459     """Split line into many lines, starting with the first matching bracket pair.
460
461     Note: this usually looks weird; only use this for function definitions.
462     Prefer RHS otherwise.  This is why this function is not symmetrical with
463     :func:`right_hand_split` which also handles optional parentheses.
464     """
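    # Hedged illustration (added comment, not part of the algorithm): for a
    # definition such as
    #     def process(items, *, limit=10): ...
    # the first bracket pair is the parameter list, so the head ends with
    # "def process(", the body holds "items, *, limit=10", and the tail starts
    # with "):".  The names here are invented for the example.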
465     tail_leaves: List[Leaf] = []
466     body_leaves: List[Leaf] = []
467     head_leaves: List[Leaf] = []
468     current_leaves = head_leaves
469     matching_bracket: Optional[Leaf] = None
470     for leaf in line.leaves:
471         if (
472             current_leaves is body_leaves
473             and leaf.type in CLOSING_BRACKETS
474             and leaf.opening_bracket is matching_bracket
475         ):
476             current_leaves = tail_leaves if body_leaves else head_leaves
477         current_leaves.append(leaf)
478         if current_leaves is head_leaves:
479             if leaf.type in OPENING_BRACKETS:
480                 matching_bracket = leaf
481                 current_leaves = body_leaves
482     if not matching_bracket:
483         raise CannotSplit("No brackets found")
484
485     head = bracket_split_build_line(head_leaves, line, matching_bracket)
486     body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
487     tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
488     bracket_split_succeeded_or_raise(head, body, tail)
489     for result in (head, body, tail):
490         if result:
491             yield result
492
493
494 def right_hand_split(
495     line: Line,
496     line_length: int,
497     features: Collection[Feature] = (),
498     omit: Collection[LeafID] = (),
499 ) -> Iterator[Line]:
500     """Split line into many lines, starting with the last matching bracket pair.
501
502     If the split was by optional parentheses, attempt splitting without them, too.
503     `omit` is a collection of closing bracket IDs that shouldn't be considered for
504     this split.
505
506     Note: running this function modifies `bracket_depth` on the leaves of `line`.
507     """
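    # Hedged illustration (added comment): for a line such as
    #     result = compute(alpha, beta)[0]
    # the last bracket pair is "[0]", so the head ends with the opening "[",
    # the body is "0" and the tail is "]".  Callers like transform_line() pass
    # `omit` (from generate_trailers_to_omit) to skip such trailing pairs and
    # split on an earlier pair instead.  All identifiers are invented.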
508     tail_leaves: List[Leaf] = []
509     body_leaves: List[Leaf] = []
510     head_leaves: List[Leaf] = []
511     current_leaves = tail_leaves
512     opening_bracket: Optional[Leaf] = None
513     closing_bracket: Optional[Leaf] = None
514     for leaf in reversed(line.leaves):
515         if current_leaves is body_leaves:
516             if leaf is opening_bracket:
517                 current_leaves = head_leaves if body_leaves else tail_leaves
518         current_leaves.append(leaf)
519         if current_leaves is tail_leaves:
520             if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
521                 opening_bracket = leaf.opening_bracket
522                 closing_bracket = leaf
523                 current_leaves = body_leaves
524     if not (opening_bracket and closing_bracket and head_leaves):
525         # If there is no opening or closing_bracket that means the split failed and
526         # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
527         # the matching `opening_bracket` wasn't available on `line` anymore.
528         raise CannotSplit("No brackets found")
529
530     tail_leaves.reverse()
531     body_leaves.reverse()
532     head_leaves.reverse()
533     head = bracket_split_build_line(head_leaves, line, opening_bracket)
534     body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
535     tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
536     bracket_split_succeeded_or_raise(head, body, tail)
537     if (
538         Feature.FORCE_OPTIONAL_PARENTHESES not in features
539         # the opening bracket is an optional paren
540         and opening_bracket.type == token.LPAR
541         and not opening_bracket.value
542         # the closing bracket is an optional paren
543         and closing_bracket.type == token.RPAR
544         and not closing_bracket.value
545         # it's not an import (optional parens are the only thing we can split on
546         # in this case; attempting a split without them is a waste of time)
547         and not line.is_import
548         # there are no standalone comments in the body
549         and not body.contains_standalone_comments(0)
550         # and we can actually remove the parens
551         and can_omit_invisible_parens(body, line_length)
552     ):
553         omit = {id(closing_bracket), *omit}
554         try:
555             yield from right_hand_split(line, line_length, features=features, omit=omit)
556             return
557
558         except CannotSplit as e:
559             if not (
560                 can_be_split(body)
561                 or is_line_short_enough(body, line_length=line_length)
562             ):
563                 raise CannotSplit(
564                     "Splitting failed, body is still too long and can't be split."
565                 ) from e
566
567             elif head.contains_multiline_strings() or tail.contains_multiline_strings():
568                 raise CannotSplit(
569                     "The current optional pair of parentheses is bound to fail to"
570                     " satisfy the splitting algorithm because the head or the tail"
571                     " contains multiline strings which by definition never fit one"
572                     " line."
573                 ) from e
574
575     ensure_visible(opening_bracket)
576     ensure_visible(closing_bracket)
577     for result in (head, body, tail):
578         if result:
579             yield result
580
581
582 def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
583     """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.
584
585     Do nothing otherwise.
586
587     A left- or right-hand split is based on a pair of brackets. Content before
588     (and including) the opening bracket is left on one line, content inside the
589     brackets is put on a separate line, and finally content starting with and
590     following the closing bracket is put on a separate line.
591
592     Those are called `head`, `body`, and `tail`, respectively. If the split
593     produced the same line (all content in `head`) or ended up with an empty `body`
594     and the `tail` is just the closing bracket, then it's considered failed.
595     """
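    # Hedged illustration (added comment): splitting "print(value)" on its
    # bracket pair gives head "print(", body "value" and tail ")".  A split is
    # rejected when everything stayed in the head, or when the body is empty
    # and the stripped tail is shorter than 3 characters.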
596     tail_len = len(str(tail).strip())
597     if not body:
598         if tail_len == 0:
599             raise CannotSplit("Splitting brackets produced the same line")
600
601         elif tail_len < 3:
602             raise CannotSplit(
603                 f"Splitting brackets on an empty body to save {tail_len} characters is"
604                 " not worth it"
605             )
606
607
608 def bracket_split_build_line(
609     leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
610 ) -> Line:
611     """Return a new line with given `leaves` and respective comments from `original`.
612
613     If `is_body` is True, the result line is one-indented inside brackets and as such
614     has its first leaf's prefix normalized and a trailing comma added when expected.
615     """
616     result = Line(mode=original.mode, depth=original.depth)
617     if is_body:
618         result.inside_brackets = True
619         result.depth += 1
620         if leaves:
621             # Since body is a new indent level, remove spurious leading whitespace.
622             normalize_prefix(leaves[0], inside_brackets=True)
623             # Ensure a trailing comma for imports and standalone function arguments, but
624             # be careful not to add one after any comments or within type annotations.
625             no_commas = (
626                 original.is_def
627                 and opening_bracket.value == "("
628                 and not any(leaf.type == token.COMMA for leaf in leaves)
629                 # In particular, don't add one within a parenthesized return annotation.
630                 # Unfortunately the indicator we're in a return annotation (RARROW) may
631                 # be defined directly in the parent node, the parent of the parent ...
632                 # and so on depending on how complex the return annotation is.
633                 # This isn't perfect and there are some false negatives, but
634                 # they are in contexts where a comma is actually fine.
635                 and not any(
636                     node.prev_sibling.type == RARROW
637                     for node in (
638                         leaves[0].parent,
639                         getattr(leaves[0].parent, "parent", None),
640                     )
641                     if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
642                 )
643             )
644
645             if original.is_import or no_commas:
646                 for i in range(len(leaves) - 1, -1, -1):
647                     if leaves[i].type == STANDALONE_COMMENT:
648                         continue
649
650                     if leaves[i].type != token.COMMA:
651                         new_comma = Leaf(token.COMMA, ",")
652                         leaves.insert(i + 1, new_comma)
653                     break
654
655     # Populate the line
656     for leaf in leaves:
657         result.append(leaf, preformatted=True)
658         for comment_after in original.comments_after(leaf):
659             result.append(comment_after, preformatted=True)
660     if is_body and should_split_line(result, opening_bracket):
661         result.should_split_rhs = True
662     return result
663
664
665 def dont_increase_indentation(split_func: Transformer) -> Transformer:
666     """Normalize prefix of the first leaf in every line returned by `split_func`.
667
668     This is a decorator over relevant split functions.
669     """
670
671     @wraps(split_func)
672     def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
673         for split_line in split_func(line, features):
674             normalize_prefix(split_line.leaves[0], inside_brackets=True)
675             yield split_line
676
677     return split_wrapper
678
679
680 @dont_increase_indentation
681 def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
682     """Split according to delimiters of the highest priority.
683
684     If the appropriate Features are given, the split will add trailing commas
685     also in function signatures and calls that contain `*` and `**`.
686     """
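    # Hedged illustration (added comment): for a bracketed body line like
    #     first_item, second_item, third_item
    # where the comma is the highest-priority delimiter, one line is yielded per
    # delimiter-terminated chunk, and a trailing comma is appended to the last
    # chunk when it is safe to do so.  The item names are invented.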
687     try:
688         last_leaf = line.leaves[-1]
689     except IndexError:
690         raise CannotSplit("Line empty") from None
691
692     bt = line.bracket_tracker
693     try:
694         delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
695     except ValueError:
696         raise CannotSplit("No delimiters found") from None
697
698     if delimiter_priority == DOT_PRIORITY:
699         if bt.delimiter_count_with_priority(delimiter_priority) == 1:
700             raise CannotSplit("Splitting a single attribute from its owner looks wrong")
701
702     current_line = Line(
703         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
704     )
705     lowest_depth = sys.maxsize
706     trailing_comma_safe = True
707
708     def append_to_line(leaf: Leaf) -> Iterator[Line]:
709         """Append `leaf` to current line or to new line if appending impossible."""
710         nonlocal current_line
711         try:
712             current_line.append_safe(leaf, preformatted=True)
713         except ValueError:
714             yield current_line
715
716             current_line = Line(
717                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
718             )
719             current_line.append(leaf)
720
721     for leaf in line.leaves:
722         yield from append_to_line(leaf)
723
724         for comment_after in line.comments_after(leaf):
725             yield from append_to_line(comment_after)
726
727         lowest_depth = min(lowest_depth, leaf.bracket_depth)
728         if leaf.bracket_depth == lowest_depth:
729             if is_vararg(leaf, within={syms.typedargslist}):
730                 trailing_comma_safe = (
731                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
732                 )
733             elif is_vararg(leaf, within={syms.arglist, syms.argument}):
734                 trailing_comma_safe = (
735                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
736                 )
737
738         leaf_priority = bt.delimiters.get(id(leaf))
739         if leaf_priority == delimiter_priority:
740             yield current_line
741
742             current_line = Line(
743                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
744             )
745     if current_line:
746         if (
747             trailing_comma_safe
748             and delimiter_priority == COMMA_PRIORITY
749             and current_line.leaves[-1].type != token.COMMA
750             and current_line.leaves[-1].type != STANDALONE_COMMENT
751         ):
752             new_comma = Leaf(token.COMMA, ",")
753             current_line.append(new_comma)
754         yield current_line
755
756
757 @dont_increase_indentation
758 def standalone_comment_split(
759     line: Line, features: Collection[Feature] = ()
760 ) -> Iterator[Line]:
761     """Split standalone comments from the rest of the line."""
762     if not line.contains_standalone_comments(0):
763         raise CannotSplit("Line does not have any standalone comments")
764
765     current_line = Line(
766         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
767     )
768
769     def append_to_line(leaf: Leaf) -> Iterator[Line]:
770         """Append `leaf` to current line or to new line if appending impossible."""
771         nonlocal current_line
772         try:
773             current_line.append_safe(leaf, preformatted=True)
774         except ValueError:
775             yield current_line
776
777             current_line = Line(
778                 line.mode, depth=line.depth, inside_brackets=line.inside_brackets
779             )
780             current_line.append(leaf)
781
782     for leaf in line.leaves:
783         yield from append_to_line(leaf)
784
785         for comment_after in line.comments_after(leaf):
786             yield from append_to_line(comment_after)
787
788     if current_line:
789         yield current_line
790
791
792 def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
793     """Leave existing extra newlines if not `inside_brackets`. Remove everything
794     else.
795
796     Note: don't use backslashes for formatting or you'll lose your voting rights.
797     """
798     if not inside_brackets:
799         spl = leaf.prefix.split("#")
800         if "\\" not in spl[0]:
801             nl_count = spl[-1].count("\n")
802             if len(spl) > 1:
803                 nl_count -= 1
804             leaf.prefix = "\n" * nl_count
805             return
806
807     leaf.prefix = ""
808
809
810 def normalize_invisible_parens(
811     node: Node, parens_after: Set[str], *, preview: bool
812 ) -> None:
813     """Make existing optional parentheses invisible or create new ones.
814
815     `parens_after` is a set of string leaf values immediately after which parens
816     should be put.
817
818     Standardizes on visible parentheses for single-element tuples, and keeps
819     existing visible parentheses for other tuples and generator expressions.
820     """
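    # Hedged illustration (added comment): with parens_after={"return"}, a
    # statement like "return (1 + 2)" gets its parentheses made invisible
    # (their leaf values become ""), while "return (1,)" keeps visible
    # parentheses because one-tuples are standardized on the parenthesized form.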
821     for pc in list_comments(node.prefix, is_endmarker=False, preview=preview):
822         if pc.value in FMT_OFF:
823             # This `node` has a prefix with `# fmt: off`, don't mess with parens.
824             return
825     check_lpar = False
826     for index, child in enumerate(list(node.children)):
827         # Fixes a bug where invisible parens are not properly stripped from
828         # assignment statements that contain type annotations.
829         if isinstance(child, Node) and child.type == syms.annassign:
830             normalize_invisible_parens(
831                 child, parens_after=parens_after, preview=preview
832             )
833
834         # Add parentheses around long tuple unpacking in assignments.
835         if (
836             index == 0
837             and isinstance(child, Node)
838             and child.type == syms.testlist_star_expr
839         ):
840             check_lpar = True
841
842         if check_lpar:
843             if child.type == syms.atom:
844                 if maybe_make_parens_invisible_in_atom(
845                     child,
846                     parent=node,
847                     preview=preview,
848                 ):
849                     wrap_in_parentheses(node, child, visible=False)
850             elif is_one_tuple(child):
851                 wrap_in_parentheses(node, child, visible=True)
852             elif node.type == syms.import_from:
853                 # "import from" nodes store parentheses directly as part of
854                 # the statement
855                 if is_lpar_token(child):
856                     assert is_rpar_token(node.children[-1])
857                     # make parentheses invisible
858                     child.value = ""
859                     node.children[-1].value = ""
860                 elif child.type != token.STAR:
861                     # insert invisible parentheses
862                     node.insert_child(index, Leaf(token.LPAR, ""))
863                     node.append_child(Leaf(token.RPAR, ""))
864                 break
865
866             elif not (isinstance(child, Leaf) and is_multiline_string(child)):
867                 wrap_in_parentheses(node, child, visible=False)
868
869         check_lpar = isinstance(child, Leaf) and child.value in parens_after
870
871
872 def maybe_make_parens_invisible_in_atom(
873     node: LN,
874     parent: LN,
875     preview: bool = False,
876 ) -> bool:
877     """If it's safe, make the parens in the atom `node` invisible, recursively.
878     Additionally, remove repeated, adjacent invisible parens from the atom `node`
879     as they are redundant.
880
881     Returns whether the node should itself be wrapped in invisible parentheses.
882
883     """
884     if (
885         preview
886         and parent.type == syms.for_stmt
887         and isinstance(node.prev_sibling, Leaf)
888         and node.prev_sibling.type == token.NAME
889         and node.prev_sibling.value == "for"
890     ):
891         for_stmt_check = False
892     else:
893         for_stmt_check = True
894
895     if (
896         node.type != syms.atom
897         or is_empty_tuple(node)
898         or is_one_tuple(node)
899         or (is_yield(node) and parent.type != syms.expr_stmt)
900         or (max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY and for_stmt_check)
901     ):
902         return False
903
904     if is_walrus_assignment(node):
905         if parent.type in [
906             syms.annassign,
907             syms.expr_stmt,
908             syms.assert_stmt,
909             syms.return_stmt,
910             # these ones aren't useful to end users, but they do please fuzzers
911             syms.for_stmt,
912             syms.del_stmt,
913         ]:
914             return False
915
916     first = node.children[0]
917     last = node.children[-1]
918     if is_lpar_token(first) and is_rpar_token(last):
919         middle = node.children[1]
920         # make parentheses invisible
921         first.value = ""
922         last.value = ""
923         maybe_make_parens_invisible_in_atom(middle, parent=parent, preview=preview)
924
925         if is_atom_with_invisible_parens(middle):
926             # Strip the invisible parens from `middle` by replacing
927             # it with the child in-between the invisible parens
928             middle.replace(middle.children[1])
929
930         return False
931
932     return True
933
934
935 def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
936     """Should `line` be immediately split with `delimiter_split()` after RHS?"""
937
938     if not (opening_bracket.parent and opening_bracket.value in "[{("):
939         return False
940
941     # We're essentially checking if the body is delimited by commas and there's
942     # more than one of them (we exclude the trailing comma; if the delimiter
943     # priority is still commas, that means there's more).
944     exclude = set()
945     trailing_comma = False
946     try:
947         last_leaf = line.leaves[-1]
948         if last_leaf.type == token.COMMA:
949             trailing_comma = True
950             exclude.add(id(last_leaf))
951         max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
952     except (IndexError, ValueError):
953         return False
954
955     return max_priority == COMMA_PRIORITY and (
956         (line.mode.magic_trailing_comma and trailing_comma)
957         # always explode imports
958         or opening_bracket.parent.type in {syms.atom, syms.import_from}
959     )
960
961
962 def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
963     """Generate sets of closing bracket IDs that should be omitted in a RHS.
964
965     Brackets can be omitted if the entire trailer up to and including
966     a preceding closing bracket fits in one line.
967
968     Yielded sets are cumulative (contain results of previous yields, too).  The
969     first set is empty, unless the line should explode, in which case bracket
970     pairs until the one that needs to explode are omitted.
971     """
972
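    # Hedged illustration (added comment): for a chained call such as
    #     value = config.get("key").strip().encode("utf-8")
    # the first yielded set is empty (unless a magic trailing comma is present),
    # so the last bracket pair is tried first; later sets add the closing
    # brackets of trailers that already fit, letting right_hand_split() try
    # progressively earlier bracket pairs.  The names are invented.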
973     omit: Set[LeafID] = set()
974     if not line.magic_trailing_comma:
975         yield omit
976
977     length = 4 * line.depth
978     opening_bracket: Optional[Leaf] = None
979     closing_bracket: Optional[Leaf] = None
980     inner_brackets: Set[LeafID] = set()
981     for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
982         length += leaf_length
983         if length > line_length:
984             break
985
986         has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
987         if leaf.type == STANDALONE_COMMENT or has_inline_comment:
988             break
989
990         if opening_bracket:
991             if leaf is opening_bracket:
992                 opening_bracket = None
993             elif leaf.type in CLOSING_BRACKETS:
994                 prev = line.leaves[index - 1] if index > 0 else None
995                 if (
996                     prev
997                     and prev.type == token.COMMA
998                     and leaf.opening_bracket is not None
999                     and not is_one_sequence_between(
1000                         leaf.opening_bracket, leaf, line.leaves
1001                     )
1002                 ):
1003                     # Never omit bracket pairs with trailing commas.
1004                     # We need to explode on those.
1005                     break
1006
1007                 inner_brackets.add(id(leaf))
1008         elif leaf.type in CLOSING_BRACKETS:
1009             prev = line.leaves[index - 1] if index > 0 else None
1010             if prev and prev.type in OPENING_BRACKETS:
1011                 # Empty brackets would fail a split so treat them as "inner"
1012                 # brackets (e.g. only add them to the `omit` set if another
1013                 # pair of brackets was good enough).
1014                 inner_brackets.add(id(leaf))
1015                 continue
1016
1017             if closing_bracket:
1018                 omit.add(id(closing_bracket))
1019                 omit.update(inner_brackets)
1020                 inner_brackets.clear()
1021                 yield omit
1022
1023             if (
1024                 prev
1025                 and prev.type == token.COMMA
1026                 and leaf.opening_bracket is not None
1027                 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
1028             ):
1029                 # Never omit bracket pairs with trailing commas.
1030                 # We need to explode on those.
1031                 break
1032
1033             if leaf.value:
1034                 opening_bracket = leaf.opening_bracket
1035                 closing_bracket = leaf
1036
1037
1038 def run_transformer(
1039     line: Line,
1040     transform: Transformer,
1041     mode: Mode,
1042     features: Collection[Feature],
1043     *,
1044     line_str: str = "",
1045 ) -> List[Line]:
1046     if not line_str:
1047         line_str = line_to_string(line)
1048     result: List[Line] = []
1049     for transformed_line in transform(line, features):
1050         if str(transformed_line).strip("\n") == line_str:
1051             raise CannotTransform("Line transformer returned an unchanged result")
1052
1053         result.extend(transform_line(transformed_line, mode=mode, features=features))
1054
1055     if (
1056         transform.__class__.__name__ != "rhs"
1057         or not line.bracket_tracker.invisible
1058         or any(bracket.value for bracket in line.bracket_tracker.invisible)
1059         or line.contains_multiline_strings()
1060         or result[0].contains_uncollapsable_type_comments()
1061         or result[0].contains_unsplittable_type_ignore()
1062         or is_line_short_enough(result[0], line_length=mode.line_length)
1063         # If any leaves have no parents (which _can_ occur since
1064         # `transform(line)` potentially destroys the line's underlying node
1065         # structure), then we can't proceed. Doing so would cause the below
1066         # call to `append_leaves()` to fail.
1067         or any(leaf.parent is None for leaf in line.leaves)
1068     ):
1069         return result
1070
1071     line_copy = line.clone()
1072     append_leaves(line_copy, line, line.leaves)
1073     features_fop = set(features) | {Feature.FORCE_OPTIONAL_PARENTHESES}
1074     second_opinion = run_transformer(
1075         line_copy, transform, mode, features_fop, line_str=line_str
1076     )
1077     if all(
1078         is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
1079     ):
1080         result = second_opinion
1081     return result