git.madduck.net Git - etc/vim.git/blob - src/black/linegen.py

Fix bug introduced in #3564. (#3615)
[etc/vim.git] / src / black / linegen.py
1 """
2 Generating lines of code.
3 """
4 import sys
5 from dataclasses import replace
6 from enum import Enum, auto
7 from functools import partial, wraps
8 from typing import Collection, Iterator, List, Optional, Set, Union, cast
9
10 from black.brackets import (
11     COMMA_PRIORITY,
12     DOT_PRIORITY,
13     get_leaves_inside_matching_brackets,
14     max_delimiter_priority_in_atom,
15 )
16 from black.comments import FMT_OFF, generate_comments, list_comments
17 from black.lines import (
18     Line,
19     RHSResult,
20     append_leaves,
21     can_be_split,
22     can_omit_invisible_parens,
23     is_line_short_enough,
24     line_to_string,
25 )
26 from black.mode import Feature, Mode, Preview
27 from black.nodes import (
28     ASSIGNMENTS,
29     BRACKETS,
30     CLOSING_BRACKETS,
31     OPENING_BRACKETS,
32     RARROW,
33     STANDALONE_COMMENT,
34     STATEMENT,
35     WHITESPACE,
36     Visitor,
37     ensure_visible,
38     is_arith_like,
39     is_async_stmt_or_funcdef,
40     is_atom_with_invisible_parens,
41     is_docstring,
42     is_empty_tuple,
43     is_lpar_token,
44     is_multiline_string,
45     is_name_token,
46     is_one_sequence_between,
47     is_one_tuple,
48     is_rpar_token,
49     is_stub_body,
50     is_stub_suite,
51     is_tuple_containing_walrus,
52     is_vararg,
53     is_walrus_assignment,
54     is_yield,
55     syms,
56     wrap_in_parentheses,
57 )
58 from black.numerics import normalize_numeric_literal
59 from black.strings import (
60     fix_docstring,
61     get_string_prefix,
62     normalize_string_prefix,
63     normalize_string_quotes,
64     normalize_unicode_escape_sequences,
65 )
66 from black.trans import (
67     CannotTransform,
68     StringMerger,
69     StringParenStripper,
70     StringParenWrapper,
71     StringSplitter,
72     Transformer,
73     hug_power_op,
74 )
75 from blib2to3.pgen2 import token
76 from blib2to3.pytree import Leaf, Node
77
78 # types
79 LeafID = int
80 LN = Union[Leaf, Node]
81
82
83 class CannotSplit(CannotTransform):
84     """A readable split that fits the allotted line length is impossible."""
85
86
87 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
88 # See also https://github.com/mypyc/mypyc/issues/827.
89 class LineGenerator(Visitor[Line]):
90     """Generates reformatted Line objects.  Empty lines are not emitted.
91
92     Note: destroys the tree it's visiting by mutating the prefixes of its
93     leaves, so the tree no longer stringifies to valid Python code.
94     """
95
96     def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
97         self.mode = mode
98         self.features = features
99         self.current_line: Line
100         self.__post_init__()
101
102     def line(self, indent: int = 0) -> Iterator[Line]:
103         """Generate a line.
104
105         If the line is empty, only emit if it makes sense.
106         If the line is too long, split it first and then generate.
107
108         If any lines were generated, set up a new current_line.
109         """
110         if not self.current_line:
111             self.current_line.depth += indent
112             return  # Line is empty, don't emit. Creating a new one is unnecessary.
113
114         if (
115             Preview.improved_async_statements_handling in self.mode
116             and len(self.current_line.leaves) == 1
117             and is_async_stmt_or_funcdef(self.current_line.leaves[0])
118         ):
119             # Special case for async def/for/with statements. `visit_async_stmt`
120             # adds an `ASYNC` leaf then visits the child def/for/with statement
121             # nodes. The lines yielded from those nodes shouldn't treat the
122             # preceding `ASYNC` leaf as a complete line.
123             return
124
125         complete_line = self.current_line
126         self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
127         yield complete_line
128
129     def visit_default(self, node: LN) -> Iterator[Line]:
130         """Default `visit_*()` implementation. Recurses to children of `node`."""
131         if isinstance(node, Leaf):
132             any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
133             for comment in generate_comments(node):
134                 if any_open_brackets:
135                     # any comment within brackets is subject to splitting
136                     self.current_line.append(comment)
137                 elif comment.type == token.COMMENT:
138                     # regular trailing comment
139                     self.current_line.append(comment)
140                     yield from self.line()
141
142                 else:
143                     # regular standalone comment
144                     yield from self.line()
145
146                     self.current_line.append(comment)
147                     yield from self.line()
148
149             normalize_prefix(node, inside_brackets=any_open_brackets)
150             if self.mode.string_normalization and node.type == token.STRING:
151                 node.value = normalize_string_prefix(node.value)
152                 node.value = normalize_string_quotes(node.value)
153             if node.type == token.NUMBER:
154                 normalize_numeric_literal(node)
155             if node.type not in WHITESPACE:
156                 self.current_line.append(node)
157         yield from super().visit_default(node)
158
159     def visit_test(self, node: Node) -> Iterator[Line]:
160         """Visit an `x if y else z` test"""
161
162         if Preview.parenthesize_conditional_expressions in self.mode:
163             already_parenthesized = (
164                 node.prev_sibling and node.prev_sibling.type == token.LPAR
165             )
166
167             if not already_parenthesized:
168                 lpar = Leaf(token.LPAR, "")
169                 rpar = Leaf(token.RPAR, "")
170                 node.insert_child(0, lpar)
171                 node.append_child(rpar)
172
173         yield from self.visit_default(node)
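        # Illustrative sketch (not from the upstream file; the input line is
        # assumed): under this preview flag an over-long conditional expression
        #     value = compute_default(data) if data is not None else fall_back()
        # gains invisible parentheses around the whole `x if y else z` atom, so
        # the splitters further down can render it as
        #     value = (
        #         compute_default(data) if data is not None else fall_back()
        #     )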
174
175     def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
176         """Increase indentation level, maybe yield a line."""
177         # In blib2to3 INDENT never holds comments.
178         yield from self.line(+1)
179         yield from self.visit_default(node)
180
181     def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
182         """Decrease indentation level, maybe yield a line."""
183         # The current line might still wait for trailing comments.  At DEDENT time
184         # there won't be any (they would be prefixes on the preceding NEWLINE).
185         # Emit the line then.
186         yield from self.line()
187
188         # While DEDENT has no value, its prefix may contain standalone comments
189         # that belong to the current indentation level.  Get 'em.
190         yield from self.visit_default(node)
191
192         # Finally, emit the dedent.
193         yield from self.line(-1)
194
195     def visit_stmt(
196         self, node: Node, keywords: Set[str], parens: Set[str]
197     ) -> Iterator[Line]:
198         """Visit a statement.
199
200         This implementation is shared for `if`, `while`, `for`, `try`, `except`,
201         `def`, `with`, `class`, `assert`, and assignments.
202
203         The relevant Python language `keywords` for a given statement will be
204         NAME leaves within it. This method puts those on a separate line.
205
206         `parens` holds a set of string leaf values immediately after which
207         invisible parens should be put.
208         """
209         normalize_invisible_parens(
210             node, parens_after=parens, mode=self.mode, features=self.features
211         )
212         for child in node.children:
213             if is_name_token(child) and child.value in keywords:
214                 yield from self.line()
215
216             yield from self.visit(child)
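        # Illustrative sketch (not from the upstream file; the input is assumed):
        # for an `if` statement, __post_init__ passes keywords={"if", "elif",
        # "else"} and parens={"if", "elif"}, so each keyword starts a fresh line
        # and the condition after `if`/`elif` gets invisible parens, allowing
        #     if (
        #         first_condition
        #         and second_condition
        #     ):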
217
218     def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
219         if Preview.wrap_long_dict_values_in_parens in self.mode:
220             for i, child in enumerate(node.children):
221                 if i == 0:
222                     continue
223                 if node.children[i - 1].type == token.COLON:
224                     if child.type == syms.atom and child.children[0].type == token.LPAR:
225                         if maybe_make_parens_invisible_in_atom(
226                             child,
227                             parent=node,
228                             remove_brackets_around_comma=False,
229                         ):
230                             wrap_in_parentheses(node, child, visible=False)
231                     else:
232                         wrap_in_parentheses(node, child, visible=False)
233         yield from self.visit_default(node)
234
235     def visit_funcdef(self, node: Node) -> Iterator[Line]:
236         """Visit function definition."""
237         yield from self.line()
238
239         # Remove redundant brackets around return type annotation.
240         is_return_annotation = False
241         for child in node.children:
242             if child.type == token.RARROW:
243                 is_return_annotation = True
244             elif is_return_annotation:
245                 if child.type == syms.atom and child.children[0].type == token.LPAR:
246                     if maybe_make_parens_invisible_in_atom(
247                         child,
248                         parent=node,
249                         remove_brackets_around_comma=False,
250                     ):
251                         wrap_in_parentheses(node, child, visible=False)
252                 else:
253                     wrap_in_parentheses(node, child, visible=False)
254                 is_return_annotation = False
255
256         for child in node.children:
257             yield from self.visit(child)
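        # Illustrative sketch (not from the upstream file; the input is assumed):
        # redundant parens around a return annotation are dropped, e.g.
        #     def f(x) -> (int):    becomes    def f(x) -> int:
        # while an unparenthesized annotation is wrapped in an invisible pair so
        # a long return type can still be split onto its own lines.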
258
259     def visit_match_case(self, node: Node) -> Iterator[Line]:
260         """Visit either a match or case statement."""
261         normalize_invisible_parens(
262             node, parens_after=set(), mode=self.mode, features=self.features
263         )
264
265         yield from self.line()
266         for child in node.children:
267             yield from self.visit(child)
268
269     def visit_suite(self, node: Node) -> Iterator[Line]:
270         """Visit a suite."""
271         if self.mode.is_pyi and is_stub_suite(node):
272             yield from self.visit(node.children[2])
273         else:
274             yield from self.visit_default(node)
275
276     def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
277         """Visit a statement without nested statements."""
278         prev_type: Optional[int] = None
279         for child in node.children:
280             if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
281                 wrap_in_parentheses(node, child, visible=False)
282             prev_type = child.type
283
284         is_suite_like = node.parent and node.parent.type in STATEMENT
285         if is_suite_like:
286             if self.mode.is_pyi and is_stub_body(node):
287                 yield from self.visit_default(node)
288             else:
289                 yield from self.line(+1)
290                 yield from self.visit_default(node)
291                 yield from self.line(-1)
292
293         else:
294             if (
295                 not self.mode.is_pyi
296                 or not node.parent
297                 or not is_stub_suite(node.parent)
298             ):
299                 yield from self.line()
300             yield from self.visit_default(node)
301
302     def visit_async_stmt(self, node: Node) -> Iterator[Line]:
303         """Visit `async def`, `async for`, `async with`."""
304         yield from self.line()
305
306         children = iter(node.children)
307         for child in children:
308             yield from self.visit(child)
309
310             if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
311                 # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
312                 # line.
313                 break
314
315         internal_stmt = next(children)
316         if Preview.improved_async_statements_handling in self.mode:
317             yield from self.visit(internal_stmt)
318         else:
319             for child in internal_stmt.children:
320                 yield from self.visit(child)
321
322     def visit_decorators(self, node: Node) -> Iterator[Line]:
323         """Visit decorators."""
324         for child in node.children:
325             yield from self.line()
326             yield from self.visit(child)
327
328     def visit_power(self, node: Node) -> Iterator[Line]:
329         for idx, leaf in enumerate(node.children[:-1]):
330             next_leaf = node.children[idx + 1]
331
332             if not isinstance(leaf, Leaf):
333                 continue
334
335             value = leaf.value.lower()
336             if (
337                 leaf.type == token.NUMBER
338                 and next_leaf.type == syms.trailer
339                 # Ensure that we are in an attribute trailer
340                 and next_leaf.children[0].type == token.DOT
341                 # It shouldn't wrap hexadecimal, binary and octal literals
342                 and not value.startswith(("0x", "0b", "0o"))
343                 # It shouldn't wrap complex literals
344                 and "j" not in value
345             ):
346                 wrap_in_parentheses(node, leaf)
347
348         remove_await_parens(node)
349
350         yield from self.visit_default(node)
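        # Illustrative sketch (not from the upstream file; the inputs are
        # assumed): a plain numeric literal followed by an attribute trailer is
        # wrapped in parentheses, e.g.
        #     10 .real     ->  (10).real
        #     1.5 .hex()   ->  (1.5).hex()
        # while hex/binary/octal and complex literals such as 0xFF.real or
        # 10j.imag are left alone, as the checks above require.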
351
352     def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
353         """Remove a semicolon and put the other statement on a separate line."""
354         yield from self.line()
355
356     def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
357         """End of file. Process outstanding comments and end with a newline."""
358         yield from self.visit_default(leaf)
359         yield from self.line()
360
361     def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
362         if not self.current_line.bracket_tracker.any_open_brackets():
363             yield from self.line()
364         yield from self.visit_default(leaf)
365
366     def visit_factor(self, node: Node) -> Iterator[Line]:
367         """Force parentheses between a unary op and a binary power:
368
369         -2 ** 8 -> -(2 ** 8)
370         """
371         _operator, operand = node.children
372         if (
373             operand.type == syms.power
374             and len(operand.children) == 3
375             and operand.children[1].type == token.DOUBLESTAR
376         ):
377             lpar = Leaf(token.LPAR, "(")
378             rpar = Leaf(token.RPAR, ")")
379             index = operand.remove() or 0
380             node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
381         yield from self.visit_default(node)
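        # Note for readers: `**` binds more tightly than a leading unary minus,
        # so -2 ** 8 already evaluates to -(2 ** 8) == -256; the inserted
        # parentheses only make that precedence explicit, they don't change the
        # value.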
382
383     def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
384         if Preview.hex_codes_in_unicode_sequences in self.mode:
385             normalize_unicode_escape_sequences(leaf)
386
387         if is_docstring(leaf) and "\\\n" not in leaf.value:
388             # We're ignoring docstrings with backslash newline escapes because changing
389             # indentation of those changes the AST representation of the code.
390             if self.mode.string_normalization:
391                 docstring = normalize_string_prefix(leaf.value)
392                 # visit_default() does handle string normalization for us, but
393                 # since this method acts differently depending on quote style (ex.
394                 # see padding logic below), there's a possibility for unstable
395                 # formatting as visit_default() is called *after*. To avoid a
396                 # situation where this function formats a docstring differently on
397                 # the second pass, normalize it early.
398                 docstring = normalize_string_quotes(docstring)
399             else:
400                 docstring = leaf.value
401             prefix = get_string_prefix(docstring)
402             docstring = docstring[len(prefix) :]  # Remove the prefix
403             quote_char = docstring[0]
404             # A natural way to remove the outer quotes is to do:
405             #   docstring = docstring.strip(quote_char)
406             # but that breaks on """""x""" (which is '""x').
407             # So we actually need to remove the first character and the next two
408             # characters but only if they are the same as the first.
409             quote_len = 1 if docstring[1] != quote_char else 3
410             docstring = docstring[quote_len:-quote_len]
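            # Worked example: for the docstring literal """""x""" the value is
            # the 9-character string '"""""x"""'; quote_len is 3, so
            # docstring[3:-3] == '""x' (the real content), whereas
            # docstring.strip('"') would wrongly give 'x'.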
411             docstring_started_empty = not docstring
412             indent = " " * 4 * self.current_line.depth
413
414             if is_multiline_string(leaf):
415                 docstring = fix_docstring(docstring, indent)
416             else:
417                 docstring = docstring.strip()
418
419             has_trailing_backslash = False
420             if docstring:
421                 # Add some padding if the docstring starts / ends with a quote mark.
422                 if docstring[0] == quote_char:
423                     docstring = " " + docstring
424                 if docstring[-1] == quote_char:
425                     docstring += " "
426                 if docstring[-1] == "\\":
427                     backslash_count = len(docstring) - len(docstring.rstrip("\\"))
428                     if backslash_count % 2:
429                         # Odd number of trailing backslashes, add some padding to
430                         # avoid escaping the closing string quote.
431                         docstring += " "
432                         has_trailing_backslash = True
433             elif not docstring_started_empty:
434                 docstring = " "
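            # Illustrative sketch (not from the upstream file; the input is
            # assumed): without the padding, a docstring whose content ends in a
            # quote, e.g. the text  He said "hi"  , would be emitted as
            #     """He said "hi""""
            # which doesn't parse; the extra space yields the valid form
            #     """He said "hi" """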
435
436             # We could enforce triple quotes at this point.
437             quote = quote_char * quote_len
438
439             # It's invalid to put closing single-character quotes on a new line.
440             if self.mode and quote_len == 3:
441                 # We need to find the length of the last line of the docstring
442                 # to find if we can add the closing quotes to the line without
443                 # exceeding the maximum line length.
444                 # If docstring is one line, we don't put the closing quotes on a
445                 # separate line because it looks ugly (#3320).
446                 lines = docstring.splitlines()
447                 last_line_length = len(lines[-1]) if docstring else 0
448
449                 # If adding closing quotes would cause the last line to exceed
450                 # the maximum line length then put a line break before the
451                 # closing quotes
452                 if (
453                     len(lines) > 1
454                     and last_line_length + quote_len > self.mode.line_length
455                     and len(indent) + quote_len <= self.mode.line_length
456                     and not has_trailing_backslash
457                 ):
458                     leaf.value = prefix + quote + docstring + "\n" + indent + quote
459                 else:
460                     leaf.value = prefix + quote + docstring + quote
461             else:
462                 leaf.value = prefix + quote + docstring + quote
463
464         yield from self.visit_default(leaf)
465
466     def __post_init__(self) -> None:
467         """You are in a twisty little maze of passages."""
468         self.current_line = Line(mode=self.mode)
469
470         v = self.visit_stmt
471         Ø: Set[str] = set()
472         self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
473         self.visit_if_stmt = partial(
474             v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
475         )
476         self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
477         self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
478         self.visit_try_stmt = partial(
479             v, keywords={"try", "except", "else", "finally"}, parens=Ø
480         )
481         self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"})
482         self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
483         self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
484         self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
485         self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
486         self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
487         self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
488         self.visit_async_funcdef = self.visit_async_stmt
489         self.visit_decorated = self.visit_decorators
490
491         # PEP 634
492         self.visit_match_stmt = self.visit_match_case
493         self.visit_case_block = self.visit_match_case
494
495
496 def transform_line(
497     line: Line, mode: Mode, features: Collection[Feature] = ()
498 ) -> Iterator[Line]:
499     """Transform a `line`, potentially splitting it into many lines.
500
501     They should fit in the allotted `line_length` but might not be able to.
502
503     `features` are syntactical features that may be used in the output.
504     """
505     if line.is_comment:
506         yield line
507         return
508
509     line_str = line_to_string(line)
510
511     ll = mode.line_length
512     sn = mode.string_normalization
513     string_merge = StringMerger(ll, sn)
514     string_paren_strip = StringParenStripper(ll, sn)
515     string_split = StringSplitter(ll, sn)
516     string_paren_wrap = StringParenWrapper(ll, sn)
517
518     transformers: List[Transformer]
519     if (
520         not line.contains_uncollapsable_type_comments()
521         and not line.should_split_rhs
522         and not line.magic_trailing_comma
523         and (
524             is_line_short_enough(line, mode=mode, line_str=line_str)
525             or line.contains_unsplittable_type_ignore()
526         )
527         and not (line.inside_brackets and line.contains_standalone_comments())
528     ):
529         # Only apply basic string preprocessing, since lines shouldn't be split here.
530         if Preview.string_processing in mode:
531             transformers = [string_merge, string_paren_strip]
532         else:
533             transformers = []
534     elif line.is_def:
535         transformers = [left_hand_split]
536     else:
537
538         def _rhs(
539             self: object, line: Line, features: Collection[Feature], mode: Mode
540         ) -> Iterator[Line]:
541             """Wraps calls to `right_hand_split`.
542
543             The calls increasingly `omit` right-hand trailers (bracket pairs with
544             content), meaning the trailers get glued together to split on another
545             bracket pair instead.
546             """
547             for omit in generate_trailers_to_omit(line, mode.line_length):
548                 lines = list(right_hand_split(line, mode, features, omit=omit))
549                 # Note: this check is only able to figure out if the first line of the
550                 # *current* transformation fits in the line length.  This is true only
551                 # for simple cases.  All others require running more transforms via
552                 # `transform_line()`.  This check doesn't know if those would succeed.
553                 if is_line_short_enough(lines[0], mode=mode):
554                     yield from lines
555                     return
556
557             # All splits failed, best effort split with no omits.
558             # This mostly happens to multiline strings that are by definition
559             # reported as not fitting a single line, as well as lines that contain
560             # trailing commas (those have to be exploded).
561             yield from right_hand_split(line, mode, features=features)
562
563         # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
564         # __name__ attribute which is needed in `run_transformer` further down.
565         # Unfortunately a nested class breaks mypyc too. So a class must be created
566         # via type ... https://github.com/mypyc/mypyc/issues/884
567         rhs = type("rhs", (), {"__call__": _rhs})()
568
569         if Preview.string_processing in mode:
570             if line.inside_brackets:
571                 transformers = [
572                     string_merge,
573                     string_paren_strip,
574                     string_split,
575                     delimiter_split,
576                     standalone_comment_split,
577                     string_paren_wrap,
578                     rhs,
579                 ]
580             else:
581                 transformers = [
582                     string_merge,
583                     string_paren_strip,
584                     string_split,
585                     string_paren_wrap,
586                     rhs,
587                 ]
588         else:
589             if line.inside_brackets:
590                 transformers = [delimiter_split, standalone_comment_split, rhs]
591             else:
592                 transformers = [rhs]
593     # It's always safe to attempt hugging of power operations and pretty much every line
594     # could match.
595     transformers.append(hug_power_op)
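    # Illustrative sketch (not from the upstream file; the input is assumed):
    # for an over-long assignment such as
    #     result = some_function(argument_one, argument_two, argument_three)
    # the rhs transformer splits at the last bracket pair, producing
    #     result = some_function(
    #         argument_one, argument_two, argument_three
    #     )
    # and, if that body still doesn't fit, delimiter_split explodes it further,
    # one argument per line with a trailing comma.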
596
597     for transform in transformers:
598         # We are accumulating lines in `result` because we might want to abort
599         # mission and return the original line in the end, or attempt a different
600         # split altogether.
601         try:
602             result = run_transformer(line, transform, mode, features, line_str=line_str)
603         except CannotTransform:
604             continue
605         else:
606             yield from result
607             break
608
609     else:
610         yield line
611
612
613 class _BracketSplitComponent(Enum):
614     head = auto()
615     body = auto()
616     tail = auto()
617
618
619 def left_hand_split(
620     line: Line, _features: Collection[Feature], mode: Mode
621 ) -> Iterator[Line]:
622     """Split line into many lines, starting with the first matching bracket pair.
623
624     Note: this usually looks weird; only use it for function definitions.
625     Prefer RHS otherwise.  This is why this function is not symmetrical with
626     :func:`right_hand_split` which also handles optional parentheses.
627     """
628     tail_leaves: List[Leaf] = []
629     body_leaves: List[Leaf] = []
630     head_leaves: List[Leaf] = []
631     current_leaves = head_leaves
632     matching_bracket: Optional[Leaf] = None
633     for leaf in line.leaves:
634         if (
635             current_leaves is body_leaves
636             and leaf.type in CLOSING_BRACKETS
637             and leaf.opening_bracket is matching_bracket
638             and isinstance(matching_bracket, Leaf)
639         ):
640             ensure_visible(leaf)
641             ensure_visible(matching_bracket)
642             current_leaves = tail_leaves if body_leaves else head_leaves
643         current_leaves.append(leaf)
644         if current_leaves is head_leaves:
645             if leaf.type in OPENING_BRACKETS:
646                 matching_bracket = leaf
647                 current_leaves = body_leaves
648     if not matching_bracket:
649         raise CannotSplit("No brackets found")
650
651     head = bracket_split_build_line(
652         head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
653     )
654     body = bracket_split_build_line(
655         body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
656     )
657     tail = bracket_split_build_line(
658         tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
659     )
660     bracket_split_succeeded_or_raise(head, body, tail)
661     for result in (head, body, tail):
662         if result:
663             yield result
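    # Illustrative sketch (not from the upstream file; the input is assumed):
    # for a too-long
    #     def process(first_argument, second_argument) -> Result:
    # the head is "def process(", the body is the parameter list, and the tail
    # is ") -> Result:", giving
    #     def process(
    #         first_argument, second_argument
    #     ) -> Result: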
664
665
666 def right_hand_split(
667     line: Line,
668     mode: Mode,
669     features: Collection[Feature] = (),
670     omit: Collection[LeafID] = (),
671 ) -> Iterator[Line]:
672     """Split line into many lines, starting with the last matching bracket pair.
673
674     If the split was by optional parentheses, attempt splitting without them, too.
675     `omit` is a collection of closing bracket IDs that shouldn't be considered for
676     this split.
677
678     Note: running this function modifies `bracket_depth` on the leaves of `line`.
679     """
680     rhs_result = _first_right_hand_split(line, omit=omit)
681     yield from _maybe_split_omitting_optional_parens(
682         rhs_result, line, mode, features=features, omit=omit
683     )
684
685
686 def _first_right_hand_split(
687     line: Line,
688     omit: Collection[LeafID] = (),
689 ) -> RHSResult:
690     """Split the line into head, body, tail starting with the last bracket pair.
691
692     Note: this function should not have side effects. It's relied upon by
693     _maybe_split_omitting_optional_parens to get an opinion on whether to prefer
694     splitting on the right side of an assignment statement.
695     """
696     tail_leaves: List[Leaf] = []
697     body_leaves: List[Leaf] = []
698     head_leaves: List[Leaf] = []
699     current_leaves = tail_leaves
700     opening_bracket: Optional[Leaf] = None
701     closing_bracket: Optional[Leaf] = None
702     for leaf in reversed(line.leaves):
703         if current_leaves is body_leaves:
704             if leaf is opening_bracket:
705                 current_leaves = head_leaves if body_leaves else tail_leaves
706         current_leaves.append(leaf)
707         if current_leaves is tail_leaves:
708             if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
709                 opening_bracket = leaf.opening_bracket
710                 closing_bracket = leaf
711                 current_leaves = body_leaves
712     if not (opening_bracket and closing_bracket and head_leaves):
713         # If there is no opening or closing_bracket that means the split failed and
714         # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
715         # the matching `opening_bracket` wasn't available on `line` anymore.
716         raise CannotSplit("No brackets found")
717
718     tail_leaves.reverse()
719     body_leaves.reverse()
720     head_leaves.reverse()
721     head = bracket_split_build_line(
722         head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
723     )
724     body = bracket_split_build_line(
725         body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
726     )
727     tail = bracket_split_build_line(
728         tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
729     )
730     bracket_split_succeeded_or_raise(head, body, tail)
731     return RHSResult(head, body, tail, opening_bracket, closing_bracket)
732
733
734 def _maybe_split_omitting_optional_parens(
735     rhs: RHSResult,
736     line: Line,
737     mode: Mode,
738     features: Collection[Feature] = (),
739     omit: Collection[LeafID] = (),
740 ) -> Iterator[Line]:
741     if (
742         Feature.FORCE_OPTIONAL_PARENTHESES not in features
743         # the opening bracket is an optional paren
744         and rhs.opening_bracket.type == token.LPAR
745         and not rhs.opening_bracket.value
746         # the closing bracket is an optional paren
747         and rhs.closing_bracket.type == token.RPAR
748         and not rhs.closing_bracket.value
749         # it's not an import (optional parens are the only thing we can split on
750         # in this case; attempting a split without them is a waste of time)
751         and not line.is_import
752         # there are no standalone comments in the body
753         and not rhs.body.contains_standalone_comments(0)
754         # and we can actually remove the parens
755         and can_omit_invisible_parens(rhs, mode.line_length)
756     ):
757         omit = {id(rhs.closing_bracket), *omit}
758         try:
759             # The RHSResult Omitting Optional Parens.
760             rhs_oop = _first_right_hand_split(line, omit=omit)
761             if not (
762                 Preview.prefer_splitting_right_hand_side_of_assignments in line.mode
763                 # the split is right after `=`
764                 and len(rhs.head.leaves) >= 2
765                 and rhs.head.leaves[-2].type == token.EQUAL
766                 # the left side of assignment contains brackets
767                 and any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1])
768                 # the left side of assignment is short enough (the -1 is for the ending
769                 # optional paren)
770                 and is_line_short_enough(
771                     rhs.head, mode=replace(mode, line_length=mode.line_length - 1)
772                 )
773                 # the left side of assignment won't explode further because of magic
774                 # trailing comma
775                 and rhs.head.magic_trailing_comma is None
776                 # the split by omitting optional parens isn't preferred by some other
777                 # reason
778                 and not _prefer_split_rhs_oop(rhs_oop, mode)
779             ):
780                 yield from _maybe_split_omitting_optional_parens(
781                     rhs_oop, line, mode, features=features, omit=omit
782                 )
783                 return
784
785         except CannotSplit as e:
786             if not (
787                 can_be_split(rhs.body) or is_line_short_enough(rhs.body, mode=mode)
788             ):
789                 raise CannotSplit(
790                     "Splitting failed, body is still too long and can't be split."
791                 ) from e
792
793             elif (
794                 rhs.head.contains_multiline_strings()
795                 or rhs.tail.contains_multiline_strings()
796             ):
797                 raise CannotSplit(
798                     "The current optional pair of parentheses is bound to fail to"
799                     " satisfy the splitting algorithm because the head or the tail"
800                     " contains multiline strings which by definition never fit one"
801                     " line."
802                 ) from e
803
804     ensure_visible(rhs.opening_bracket)
805     ensure_visible(rhs.closing_bracket)
806     for result in (rhs.head, rhs.body, rhs.tail):
807         if result:
808             yield result
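    # Illustrative sketch (not from the upstream file; the input is assumed):
    # for an over-long
    #     result = some_object.some_method(first_argument, second_argument)
    # the first RHS split uses the invisible optional parens around the whole
    # right-hand side; when they can safely be omitted, the preferred result
    # splits inside the call instead:
    #     result = some_object.some_method(
    #         first_argument, second_argument
    #     )
    # rather than wrapping the entire right-hand side in a visible pair.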
809
810
811 def _prefer_split_rhs_oop(rhs_oop: RHSResult, mode: Mode) -> bool:
812     """
813     Returns whether we should prefer the result from a split omitting optional parens.
814     """
815     has_closing_bracket_after_assign = False
816     for leaf in reversed(rhs_oop.head.leaves):
817         if leaf.type == token.EQUAL:
818             break
819         if leaf.type in CLOSING_BRACKETS:
820             has_closing_bracket_after_assign = True
821             break
822     return (
823         # contains matching brackets after the `=` (done by checking there is a
824         # closing bracket)
825         has_closing_bracket_after_assign
826         or (
827             # the split is actually from inside the optional parens (done by checking
828             # the first line still contains the `=`)
829             any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
830             # the first line is short enough
831             and is_line_short_enough(rhs_oop.head, mode=mode)
832         )
833         # contains unsplittable type ignore
834         or rhs_oop.head.contains_unsplittable_type_ignore()
835         or rhs_oop.body.contains_unsplittable_type_ignore()
836         or rhs_oop.tail.contains_unsplittable_type_ignore()
837     )
838
839
840 def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
841     """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.
842
843     Do nothing otherwise.
844
845     A left- or right-hand split is based on a pair of brackets. Content before
846     (and including) the opening bracket is left on one line, content inside the
847     brackets is put on a separate line, and finally content starting with and
848     following the closing bracket is put on a separate line.
849
850     Those are called `head`, `body`, and `tail`, respectively. If the split
851     produced the same line (all content in `head`) or ended up with an empty `body`
852     and the `tail` is just the closing bracket, then it's considered failed.
853     """
854     tail_len = len(str(tail).strip())
855     if not body:
856         if tail_len == 0:
857             raise CannotSplit("Splitting brackets produced the same line")
858
859         elif tail_len < 3:
860             raise CannotSplit(
861                 f"Splitting brackets on an empty body to save {tail_len} characters is"
862                 " not worth it"
863             )
864
865
866 def bracket_split_build_line(
867     leaves: List[Leaf],
868     original: Line,
869     opening_bracket: Leaf,
870     *,
871     component: _BracketSplitComponent,
872 ) -> Line:
873     """Return a new line with given `leaves` and respective comments from `original`.
874
875     If it's the head component, brackets will be tracked so trailing commas are
876     respected.
877
878     If it's the body component, the result line is one-indented inside brackets and as
879     such has its first leaf's prefix normalized and a trailing comma added when
880     expected.
881     """
882     result = Line(mode=original.mode, depth=original.depth)
883     if component is _BracketSplitComponent.body:
884         result.inside_brackets = True
885         result.depth += 1
886         if leaves:
887             # Since body is a new indent level, remove spurious leading whitespace.
888             normalize_prefix(leaves[0], inside_brackets=True)
889             # Ensure a trailing comma for imports and standalone function arguments, but
890             # be careful not to add one after any comments or within type annotations.
891             no_commas = (
892                 original.is_def
893                 and opening_bracket.value == "("
894                 and not any(leaf.type == token.COMMA for leaf in leaves)
895                 # In particular, don't add one within a parenthesized return annotation.
896                 # Unfortunately the indicator we're in a return annotation (RARROW) may
897                 # be defined directly in the parent node, the parent of the parent ...
898                 # and so on depending on how complex the return annotation is.
899                 # This isn't perfect and there are some false negatives, but
900                 # they are in contexts where a comma is actually fine.
901                 and not any(
902                     node.prev_sibling.type == RARROW
903                     for node in (
904                         leaves[0].parent,
905                         getattr(leaves[0].parent, "parent", None),
906                     )
907                     if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
908                 )
909             )
910
911             if original.is_import or no_commas:
912                 for i in range(len(leaves) - 1, -1, -1):
913                     if leaves[i].type == STANDALONE_COMMENT:
914                         continue
915
916                     if leaves[i].type != token.COMMA:
917                         new_comma = Leaf(token.COMMA, ",")
918                         leaves.insert(i + 1, new_comma)
919                     break
920
921     leaves_to_track: Set[LeafID] = set()
922     if component is _BracketSplitComponent.head:
923         leaves_to_track = get_leaves_inside_matching_brackets(leaves)
924     # Populate the line
925     for leaf in leaves:
926         result.append(
927             leaf,
928             preformatted=True,
929             track_bracket=id(leaf) in leaves_to_track,
930         )
931         for comment_after in original.comments_after(leaf):
932             result.append(comment_after, preformatted=True)
933     if component is _BracketSplitComponent.body and should_split_line(
934         result, opening_bracket
935     ):
936         result.should_split_rhs = True
937     return result
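    # Illustrative sketch (not from the upstream file; the input is assumed):
    # when an import has to be exploded, the body component receives a trailing
    # comma, e.g.
    #     from some.module import (
    #         first_name,
    #         second_name,
    #     )
    # and the same applies to a def whose parameter list is split even though it
    # contains no comma yet (a single long parameter).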
938
939
940 def dont_increase_indentation(split_func: Transformer) -> Transformer:
941     """Normalize prefix of the first leaf in every line returned by `split_func`.
942
943     This is a decorator over relevant split functions.
944     """
945
946     @wraps(split_func)
947     def split_wrapper(
948         line: Line, features: Collection[Feature], mode: Mode
949     ) -> Iterator[Line]:
950         for split_line in split_func(line, features, mode):
951             normalize_prefix(split_line.leaves[0], inside_brackets=True)
952             yield split_line
953
954     return split_wrapper
955
956
957 def _get_last_non_comment_leaf(line: Line) -> Optional[int]:
958     for leaf_idx in range(len(line.leaves) - 1, 0, -1):
959         if line.leaves[leaf_idx].type != STANDALONE_COMMENT:
960             return leaf_idx
961     return None
962
963
964 def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line:
965     if (
966         safe
967         and delimiter_priority == COMMA_PRIORITY
968         and line.leaves[-1].type != token.COMMA
969         and line.leaves[-1].type != STANDALONE_COMMENT
970     ):
971         new_comma = Leaf(token.COMMA, ",")
972         line.append(new_comma)
973     return line
974
975
976 @dont_increase_indentation
977 def delimiter_split(
978     line: Line, features: Collection[Feature], mode: Mode
979 ) -> Iterator[Line]:
980     """Split according to delimiters of the highest priority.
981
982     If the appropriate Features are given, the split will add trailing commas
983     also in function signatures and calls that contain `*` and `**`.
984     """
985     try:
986         last_leaf = line.leaves[-1]
987     except IndexError:
988         raise CannotSplit("Line empty") from None
989
990     bt = line.bracket_tracker
991     try:
992         delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
993     except ValueError:
994         raise CannotSplit("No delimiters found") from None
995
996     if delimiter_priority == DOT_PRIORITY:
997         if bt.delimiter_count_with_priority(delimiter_priority) == 1:
998             raise CannotSplit("Splitting a single attribute from its owner looks wrong")
999
1000     current_line = Line(
1001         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1002     )
1003     lowest_depth = sys.maxsize
1004     trailing_comma_safe = True
1005
1006     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1007         """Append `leaf` to current line or to new line if appending impossible."""
1008         nonlocal current_line
1009         try:
1010             current_line.append_safe(leaf, preformatted=True)
1011         except ValueError:
1012             yield current_line
1013
1014             current_line = Line(
1015                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1016             )
1017             current_line.append(leaf)
1018
1019     last_non_comment_leaf = _get_last_non_comment_leaf(line)
1020     for leaf_idx, leaf in enumerate(line.leaves):
1021         yield from append_to_line(leaf)
1022
1023         for comment_after in line.comments_after(leaf):
1024             yield from append_to_line(comment_after)
1025
1026         lowest_depth = min(lowest_depth, leaf.bracket_depth)
1027         if leaf.bracket_depth == lowest_depth:
1028             if is_vararg(leaf, within={syms.typedargslist}):
1029                 trailing_comma_safe = (
1030                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
1031                 )
1032             elif is_vararg(leaf, within={syms.arglist, syms.argument}):
1033                 trailing_comma_safe = (
1034                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
1035                 )
1036
1037         if (
1038             Preview.add_trailing_comma_consistently in mode
1039             and last_leaf.type == STANDALONE_COMMENT
1040             and leaf_idx == last_non_comment_leaf
1041         ):
1042             current_line = _safe_add_trailing_comma(
1043                 trailing_comma_safe, delimiter_priority, current_line
1044             )
1045
1046         leaf_priority = bt.delimiters.get(id(leaf))
1047         if leaf_priority == delimiter_priority:
1048             yield current_line
1049
1050             current_line = Line(
1051                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1052             )
1053     if current_line:
1054         current_line = _safe_add_trailing_comma(
1055             trailing_comma_safe, delimiter_priority, current_line
1056         )
1057         yield current_line
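    # Illustrative sketch (not from the upstream file; the input is assumed):
    # when the highest-priority delimiter is the comma, a bracketed body such as
    #     first_item, second_item, third_item
    # is split one element per line and, when safe, a trailing comma is added:
    #     first_item,
    #     second_item,
    #     third_item,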
1058
1059
1060 @dont_increase_indentation
1061 def standalone_comment_split(
1062     line: Line, features: Collection[Feature], mode: Mode
1063 ) -> Iterator[Line]:
1064     """Split standalone comments from the rest of the line."""
1065     if not line.contains_standalone_comments(0):
1066         raise CannotSplit("Line does not have any standalone comments")
1067
1068     current_line = Line(
1069         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1070     )
1071
1072     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1073         """Append `leaf` to current line or to new line if appending impossible."""
1074         nonlocal current_line
1075         try:
1076             current_line.append_safe(leaf, preformatted=True)
1077         except ValueError:
1078             yield current_line
1079
1080             current_line = Line(
1081                 line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1082             )
1083             current_line.append(leaf)
1084
1085     for leaf in line.leaves:
1086         yield from append_to_line(leaf)
1087
1088         for comment_after in line.comments_after(leaf):
1089             yield from append_to_line(comment_after)
1090
1091     if current_line:
1092         yield current_line
1093
1094
1095 def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
1096     """Leave existing extra newlines if not `inside_brackets`. Remove everything
1097     else.
1098
1099     Note: don't use backslashes for formatting or you'll lose your voting rights.
1100     """
1101     if not inside_brackets:
1102         spl = leaf.prefix.split("#")
1103         if "\\" not in spl[0]:
1104             nl_count = spl[-1].count("\n")
1105             if len(spl) > 1:
1106                 nl_count -= 1
1107             leaf.prefix = "\n" * nl_count
1108             return
1109
1110     leaf.prefix = ""
1111
1112
1113 def normalize_invisible_parens(
1114     node: Node, parens_after: Set[str], *, mode: Mode, features: Collection[Feature]
1115 ) -> None:
1116     """Make existing optional parentheses invisible or create new ones.
1117
1118     `parens_after` is a set of string leaf values immediately after which parens
1119     should be put.
1120
1121     Standardizes on visible parentheses for single-element tuples, and keeps
1122     existing visible parentheses for other tuples and generator expressions.
1123     """
1124     for pc in list_comments(node.prefix, is_endmarker=False):
1125         if pc.value in FMT_OFF:
1126             # This `node` has a prefix with `# fmt: off`, don't mess with parens.
1127             return
1128
1129     # The multiple context managers grammar has a different pattern, thus this is
1130     # separate from the for-loop below. This possibly wraps them in invisible parens,
1131     # which are later removed in remove_with_parens when needed.
1132     if node.type == syms.with_stmt:
1133         _maybe_wrap_cms_in_parens(node, mode, features)
1134
1135     check_lpar = False
1136     for index, child in enumerate(list(node.children)):
1137         # Fixes a bug where invisible parens are not properly stripped from
1138         # assignment statements that contain type annotations.
1139         if isinstance(child, Node) and child.type == syms.annassign:
1140             normalize_invisible_parens(
1141                 child, parens_after=parens_after, mode=mode, features=features
1142             )
1143
1144         # Add parentheses around long tuple unpacking in assignments.
1145         if (
1146             index == 0
1147             and isinstance(child, Node)
1148             and child.type == syms.testlist_star_expr
1149         ):
1150             check_lpar = True
1151
1152         if check_lpar:
1153             if (
1154                 child.type == syms.atom
1155                 and node.type == syms.for_stmt
1156                 and isinstance(child.prev_sibling, Leaf)
1157                 and child.prev_sibling.type == token.NAME
1158                 and child.prev_sibling.value == "for"
1159             ):
1160                 if maybe_make_parens_invisible_in_atom(
1161                     child,
1162                     parent=node,
1163                     remove_brackets_around_comma=True,
1164                 ):
1165                     wrap_in_parentheses(node, child, visible=False)
1166             elif isinstance(child, Node) and node.type == syms.with_stmt:
1167                 remove_with_parens(child, node)
1168             elif child.type == syms.atom:
1169                 if maybe_make_parens_invisible_in_atom(
1170                     child,
1171                     parent=node,
1172                 ):
1173                     wrap_in_parentheses(node, child, visible=False)
1174             elif is_one_tuple(child):
1175                 wrap_in_parentheses(node, child, visible=True)
1176             elif node.type == syms.import_from:
1177                 _normalize_import_from(node, child, index)
1178                 break
1179             elif (
1180                 index == 1
1181                 and child.type == token.STAR
1182                 and node.type == syms.except_clause
1183             ):
1184                 # In except* (PEP 654), the star is actually part of
1185                 # the keyword. So we need to skip the insertion of
1186                 # invisible parentheses to work more precisely.
1187                 continue
1188
1189             elif not (isinstance(child, Leaf) and is_multiline_string(child)):
1190                 wrap_in_parentheses(node, child, visible=False)
1191
1192         comma_check = child.type == token.COMMA
1193
1194         check_lpar = isinstance(child, Leaf) and (
1195             child.value in parens_after or comma_check
1196         )
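    # Illustrative sketch (not from the upstream file; the inputs are assumed):
    #     return (some_value)    ->  return some_value    (parens made invisible)
    #     x = 1,                 ->  x = (1,)             (one-tuple made visible)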
1197
1198
1199 def _normalize_import_from(parent: Node, child: LN, index: int) -> None:
1200     # "import from" nodes store parentheses directly as part of
1201     # the statement
1202     if is_lpar_token(child):
1203         assert is_rpar_token(parent.children[-1])
1204         # make parentheses invisible
1205         child.value = ""
1206         parent.children[-1].value = ""
1207     elif child.type != token.STAR:
1208         # insert invisible parentheses
1209         parent.insert_child(index, Leaf(token.LPAR, ""))
1210         parent.append_child(Leaf(token.RPAR, ""))
1211
1212
1213 def remove_await_parens(node: Node) -> None:
1214     if node.children[0].type == token.AWAIT and len(node.children) > 1:
1215         if (
1216             node.children[1].type == syms.atom
1217             and node.children[1].children[0].type == token.LPAR
1218         ):
1219             if maybe_make_parens_invisible_in_atom(
1220                 node.children[1],
1221                 parent=node,
1222                 remove_brackets_around_comma=True,
1223             ):
1224                 wrap_in_parentheses(node, node.children[1], visible=False)
1225
1226             # Since await is an expression we shouldn't remove
1227             # brackets in cases where this would change
1228             # the AST due to operator precedence.
1229             # Therefore we only aim to remove brackets around
1230             # power nodes that aren't also await expressions themselves.
1231             # https://peps.python.org/pep-0492/#updated-operator-precedence-table
1232             # N.B. We've still removed any redundant nested brackets though :)
1233             opening_bracket = cast(Leaf, node.children[1].children[0])
1234             closing_bracket = cast(Leaf, node.children[1].children[-1])
1235             bracket_contents = node.children[1].children[1]
1236             if isinstance(bracket_contents, Node):
1237                 if bracket_contents.type != syms.power:
1238                     ensure_visible(opening_bracket)
1239                     ensure_visible(closing_bracket)
1240                 elif (
1241                     bracket_contents.type == syms.power
1242                     and bracket_contents.children[0].type == token.AWAIT
1243                 ):
1244                     ensure_visible(opening_bracket)
1245                     ensure_visible(closing_bracket)
1246                     # If we are in a nested await then recurse down.
1247                     remove_await_parens(bracket_contents)
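    # Illustrative sketch (not from the upstream file; the inputs are assumed):
    #     await (fetch_data())   ->  await fetch_data()
    #     await (x + y)          keeps its parentheses, because "await x + y"
    #                            parses as "(await x) + y".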
1248
1249
1250 def _maybe_wrap_cms_in_parens(
1251     node: Node, mode: Mode, features: Collection[Feature]
1252 ) -> None:
1253     """When enabled and safe, wrap the multiple context managers in invisible parens.
1254
1255     It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS.
1256     """
1257     if (
1258         Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features
1259         or Preview.wrap_multiple_context_managers_in_parens not in mode
1260         or len(node.children) <= 2
1261         # If it's an atom, it's already wrapped in parens.
1262         or node.children[1].type == syms.atom
1263     ):
1264         return
1265     colon_index: Optional[int] = None
1266     for i in range(2, len(node.children)):
1267         if node.children[i].type == token.COLON:
1268             colon_index = i
1269             break
1270     if colon_index is not None:
1271         lpar = Leaf(token.LPAR, "")
1272         rpar = Leaf(token.RPAR, "")
1273         context_managers = node.children[1:colon_index]
1274         for child in context_managers:
1275             child.remove()
1276         # After wrapping, the with_stmt will look like this:
1277         #   with_stmt
1278         #     NAME 'with'
1279         #     atom
1280         #       LPAR ''
1281         #       testlist_gexp
1282         #         ... <-- context_managers
1283         #       /testlist_gexp
1284         #       RPAR ''
1285         #     /atom
1286         #     COLON ':'
1287         new_child = Node(
1288             syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar]
1289         )
1290         node.insert_child(1, new_child)
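    # Illustrative sketch (not from the upstream file; the input is assumed):
    # when every target version supports parenthesized context managers, a long
    #     with make_resource() as first, make_other_resource() as second:
    # is given an invisible pair of parens so it can later be rendered as
    #     with (
    #         make_resource() as first,
    #         make_other_resource() as second,
    #     ):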
1291
1292
1293 def remove_with_parens(node: Node, parent: Node) -> None:
1294     """Recursively hide optional parens in `with` statements."""
1295     # Removing all unnecessary parentheses in with statements in one pass is a tad
1296     # complex as different variations of bracketed statements result in pretty
1297     # different parse trees:
1298     #
1299     # with (open("file")) as f:                       # this is an asexpr_test
1300     #     ...
1301     #
1302     # with (open("file") as f):                       # this is an atom containing an
1303     #     ...                                         # asexpr_test
1304     #
1305     # with (open("file")) as f, (open("file")) as f:  # this is asexpr_test, COMMA,
1306     #     ...                                         # asexpr_test
1307     #
1308     # with (open("file") as f, open("file") as f):    # an atom containing a
1309     #     ...                                         # testlist_gexp which then
1310     #                                                 # contains multiple asexpr_test(s)
1311     if node.type == syms.atom:
1312         if maybe_make_parens_invisible_in_atom(
1313             node,
1314             parent=parent,
1315             remove_brackets_around_comma=True,
1316         ):
1317             wrap_in_parentheses(parent, node, visible=False)
1318         if isinstance(node.children[1], Node):
1319             remove_with_parens(node.children[1], node)
1320     elif node.type == syms.testlist_gexp:
1321         for child in node.children:
1322             if isinstance(child, Node):
1323                 remove_with_parens(child, node)
1324     elif node.type == syms.asexpr_test and not any(
1325         leaf.type == token.COLONEQUAL for leaf in node.leaves()
1326     ):
1327         if maybe_make_parens_invisible_in_atom(
1328             node.children[0],
1329             parent=node,
1330             remove_brackets_around_comma=True,
1331         ):
1332             wrap_in_parentheses(node, node.children[0], visible=False)
1333
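# A rough illustration (not part of this module): the pass above hides the
# optional parentheses in the `with` shapes listed in its comment, so short
# statements collapse back to the unparenthesized form, e.g.
#
#     with (open("file")) as f:      ->  with open("file") as f:
#     with (open("file") as f):      ->  with open("file") as f:
#
# as long as the result still fits on one line and there is no magic trailing
# comma forcing a split.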
1334
1335 def maybe_make_parens_invisible_in_atom(
1336     node: LN,
1337     parent: LN,
1338     remove_brackets_around_comma: bool = False,
1339 ) -> bool:
1340     """If it's safe, make the parens in the atom `node` invisible, recursively.
1341     Additionally, remove repeated, adjacent invisible parens from the atom `node`
1342     as they are redundant.
1343
1344     Returns whether the node should itself be wrapped in invisible parentheses.
1345     """
1346     if (
1347         node.type != syms.atom
1348         or is_empty_tuple(node)
1349         or is_one_tuple(node)
1350         or (is_yield(node) and parent.type != syms.expr_stmt)
1351         or (
1352             # This condition tries to prevent removing non-optional brackets
1353             # around a tuple. However, it can be a bit overzealous, so we
1354             # provide an option to skip this check for `for` and `with` statements.
1355             not remove_brackets_around_comma
1356             and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
1357         )
1358         or is_tuple_containing_walrus(node)
1359     ):
1360         return False
1361
1362     if is_walrus_assignment(node):
1363         if parent.type in [
1364             syms.annassign,
1365             syms.expr_stmt,
1366             syms.assert_stmt,
1367             syms.return_stmt,
1368             syms.except_clause,
1369             syms.funcdef,
1370             syms.with_stmt,
1371             # these ones aren't useful to end users, but they do please fuzzers
1372             syms.for_stmt,
1373             syms.del_stmt,
1375         ]:
1376             return False
1377
1378     first = node.children[0]
1379     last = node.children[-1]
1380     if is_lpar_token(first) and is_rpar_token(last):
1381         middle = node.children[1]
1382         # make parentheses invisible
1383         first.value = ""
1384         last.value = ""
1385         maybe_make_parens_invisible_in_atom(
1386             middle,
1387             parent=parent,
1388             remove_brackets_around_comma=remove_brackets_around_comma,
1389         )
1390
1391         if is_atom_with_invisible_parens(middle):
1392             # Strip the invisible parens from `middle` by replacing
1393             # it with the child in-between the invisible parens
1394             middle.replace(middle.children[1])
1395
1396         return False
1397
1398     return True
1399
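# A rough illustration (not part of this module): "invisible" parens are real
# LPAR/RPAR leaves whose value is the empty string, so they stay in the tree
# but render as nothing.  Hypothetical before/after pairs:
#
#     return (x + y)      ->  return x + y        # parens made invisible
#     return (1,)         ->  return (1,)         # one-tuples keep their parens
#     x = (y := 5)        ->  x = (y := 5)        # walrus parens are kept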
1400
1401 def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
1402     """Should `line` be immediately split with `delimiter_split()` after RHS?"""
1403
1404     if not (opening_bracket.parent and opening_bracket.value in "[{("):
1405         return False
1406
1407     # We're essentially checking if the body is delimited by commas and there's more
1408     # than one of them (we exclude the trailing comma; if the delimiter priority is
1409     # still commas, that means there's more than one).
1410     exclude = set()
1411     trailing_comma = False
1412     try:
1413         last_leaf = line.leaves[-1]
1414         if last_leaf.type == token.COMMA:
1415             trailing_comma = True
1416             exclude.add(id(last_leaf))
1417         max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
1418     except (IndexError, ValueError):
1419         return False
1420
1421     return max_priority == COMMA_PRIORITY and (
1422         (line.mode.magic_trailing_comma and trailing_comma)
1423         # always explode imports
1424         or opening_bracket.parent.type in {syms.atom, syms.import_from}
1425     )
1426
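# A rough illustration (not part of this module): this check is part of what
# makes a "magic trailing comma" explode a collection even when it would fit on
# one line.  Hypothetical input and output:
#
#     xs = [1, 2, 3,]
#
# becomes
#
#     xs = [
#         1,
#         2,
#         3,
#     ]
#
# while `xs = [1, 2, 3]` (no trailing comma) stays on a single line.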
1427
1428 def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
1429     """Generate sets of closing bracket IDs that should be omitted in a RHS.
1430
1431     Brackets can be omitted if the entire trailer up to and including
1432     a preceding closing bracket fits in one line.
1433
1434     Yielded sets are cumulative (they contain results of previous yields, too).  The
1435     first set is empty, unless the line should explode, in which case bracket pairs up
1436     to the one that needs to explode are omitted.
1437     """
1438
1439     omit: Set[LeafID] = set()
1440     if not line.magic_trailing_comma:
1441         yield omit
1442
1443     length = 4 * line.depth
1444     opening_bracket: Optional[Leaf] = None
1445     closing_bracket: Optional[Leaf] = None
1446     inner_brackets: Set[LeafID] = set()
1447     for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
1448         length += leaf_length
1449         if length > line_length:
1450             break
1451
1452         has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
1453         if leaf.type == STANDALONE_COMMENT or has_inline_comment:
1454             break
1455
1456         if opening_bracket:
1457             if leaf is opening_bracket:
1458                 opening_bracket = None
1459             elif leaf.type in CLOSING_BRACKETS:
1460                 prev = line.leaves[index - 1] if index > 0 else None
1461                 if (
1462                     prev
1463                     and prev.type == token.COMMA
1464                     and leaf.opening_bracket is not None
1465                     and not is_one_sequence_between(
1466                         leaf.opening_bracket, leaf, line.leaves
1467                     )
1468                 ):
1469                     # Never omit bracket pairs with trailing commas.
1470                     # We need to explode on those.
1471                     break
1472
1473                 inner_brackets.add(id(leaf))
1474         elif leaf.type in CLOSING_BRACKETS:
1475             prev = line.leaves[index - 1] if index > 0 else None
1476             if prev and prev.type in OPENING_BRACKETS:
1477                 # Empty brackets would fail a split, so treat them as "inner"
1478                 # brackets (i.e. only add them to the `omit` set if another
1479                 # pair of brackets was good enough).
1480                 inner_brackets.add(id(leaf))
1481                 continue
1482
1483             if closing_bracket:
1484                 omit.add(id(closing_bracket))
1485                 omit.update(inner_brackets)
1486                 inner_brackets.clear()
1487                 yield omit
1488
1489             if (
1490                 prev
1491                 and prev.type == token.COMMA
1492                 and leaf.opening_bracket is not None
1493                 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
1494             ):
1495                 # Never omit bracket pairs with trailing commas.
1496                 # We need to explode on those.
1497                 break
1498
1499             if leaf.value:
1500                 opening_bracket = leaf.opening_bracket
1501                 closing_bracket = leaf
1502
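# A rough illustration (not part of this module): omitting trailer brackets is
# what lets a chained call split only at its last bracket pair when everything
# before it fits.  With hypothetical names, a long call chain is typically
# rendered as
#
#     result = some_object.method_one().method_two(
#         a_rather_long_argument, another_long_argument
#     )
#
# because the brackets of `method_one()` were yielded in an `omit` set, so the
# right-hand-side split leaves them alone and only explodes `method_two(...)`.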
1503
1504 def run_transformer(
1505     line: Line,
1506     transform: Transformer,
1507     mode: Mode,
1508     features: Collection[Feature],
1509     *,
1510     line_str: str = "",
1511 ) -> List[Line]:
1512     if not line_str:
1513         line_str = line_to_string(line)
1514     result: List[Line] = []
1515     for transformed_line in transform(line, features, mode):
1516         if str(transformed_line).strip("\n") == line_str:
1517             raise CannotTransform("Line transformer returned an unchanged result")
1518
1519         result.extend(transform_line(transformed_line, mode=mode, features=features))
1520
1521     features_set = set(features)
1522     if (
1523         Feature.FORCE_OPTIONAL_PARENTHESES in features_set
1524         or transform.__class__.__name__ != "rhs"
1525         or not line.bracket_tracker.invisible
1526         or any(bracket.value for bracket in line.bracket_tracker.invisible)
1527         or line.contains_multiline_strings()
1528         or result[0].contains_uncollapsable_type_comments()
1529         or result[0].contains_unsplittable_type_ignore()
1530         or is_line_short_enough(result[0], mode=mode)
1531         # If any leaves have no parents (which _can_ occur since
1532         # `transform(line)` potentially destroys the line's underlying node
1533         # structure), then we can't proceed. Doing so would cause the below
1534         # call to `append_leaves()` to fail.
1535         or any(leaf.parent is None for leaf in line.leaves)
1536     ):
1537         return result
1538
1539     line_copy = line.clone()
1540     append_leaves(line_copy, line, line.leaves)
1541     features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES}
1542     second_opinion = run_transformer(
1543         line_copy, transform, mode, features_fop, line_str=line_str
1544     )
1545     if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion):
1546         result = second_opinion
1547     return result
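# A rough illustration (not part of this module): the fallback above re-runs the
# same transformer with Feature.FORCE_OPTIONAL_PARENTHESES added, which splits
# at the invisible parentheses instead of omitting them.  For a hypothetical
# assignment that is slightly too long, the retry can yield
#
#     this = (
#         one_item + another_item + yet_another_item
#     )
#
# and this "second opinion" replaces the first result only if every line in it
# fits within the configured line length.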