]> git.madduck.net Git - etc/vim.git/blob - src/black/linegen.py

madduck's git repository

Every one of the projects in this repository is available at the canonical URL git://git.madduck.net/madduck/pub/<projectpath> — see each project's metadata for the exact URL.

All patches and comments are welcome. Please squash your changes to logical commits before using git-format-patch and git-send-email to patches@git.madduck.net. If you'd read over the Git project's submission guidelines and adhered to them, I'd be especially grateful.

SSH access, as well as push access, can be individually arranged.

If you use my repositories frequently, consider adding the following snippet to ~/.gitconfig and using the third clone URL listed for each project:

[url "git://git.madduck.net/madduck/"]
  insteadOf = madduck:

Fix up changelog (#3910)
[etc/vim.git] / src / black / linegen.py
1 """
2 Generating lines of code.
3 """
4 import sys
5 from dataclasses import replace
6 from enum import Enum, auto
7 from functools import partial, wraps
8 from typing import Collection, Iterator, List, Optional, Set, Union, cast
9
10 from black.brackets import (
11     COMMA_PRIORITY,
12     DOT_PRIORITY,
13     get_leaves_inside_matching_brackets,
14     max_delimiter_priority_in_atom,
15 )
16 from black.comments import FMT_OFF, generate_comments, list_comments
17 from black.lines import (
18     Line,
19     RHSResult,
20     append_leaves,
21     can_be_split,
22     can_omit_invisible_parens,
23     is_line_short_enough,
24     line_to_string,
25 )
26 from black.mode import Feature, Mode, Preview
27 from black.nodes import (
28     ASSIGNMENTS,
29     BRACKETS,
30     CLOSING_BRACKETS,
31     OPENING_BRACKETS,
32     RARROW,
33     STANDALONE_COMMENT,
34     STATEMENT,
35     WHITESPACE,
36     Visitor,
37     ensure_visible,
38     is_arith_like,
39     is_async_stmt_or_funcdef,
40     is_atom_with_invisible_parens,
41     is_docstring,
42     is_empty_tuple,
43     is_lpar_token,
44     is_multiline_string,
45     is_name_token,
46     is_one_sequence_between,
47     is_one_tuple,
48     is_rpar_token,
49     is_stub_body,
50     is_stub_suite,
51     is_tuple_containing_walrus,
52     is_type_ignore_comment_string,
53     is_vararg,
54     is_walrus_assignment,
55     is_yield,
56     syms,
57     wrap_in_parentheses,
58 )
59 from black.numerics import normalize_numeric_literal
60 from black.strings import (
61     fix_docstring,
62     get_string_prefix,
63     normalize_string_prefix,
64     normalize_string_quotes,
65     normalize_unicode_escape_sequences,
66 )
67 from black.trans import (
68     CannotTransform,
69     StringMerger,
70     StringParenStripper,
71     StringParenWrapper,
72     StringSplitter,
73     Transformer,
74     hug_power_op,
75 )
76 from blib2to3.pgen2 import token
77 from blib2to3.pytree import Leaf, Node
78
79 # types
80 LeafID = int
81 LN = Union[Leaf, Node]
82
83
class CannotSplit(CannotTransform):
    """Raised when no readable split fitting the allotted line length exists."""
86
87
88 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
89 # See also https://github.com/mypyc/mypyc/issues/827.
90 class LineGenerator(Visitor[Line]):
91     """Generates reformatted Line objects.  Empty lines are not emitted.
92
93     Note: destroys the tree it's visiting by mutating prefixes of its leaves
94     in ways that will no longer stringify to valid Python code on the tree.
95     """
96
    def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
        # Formatting options and target-version features available for output.
        self.mode = mode
        self.features = features
        # Declared (not assigned) here; real initialization happens in
        # __post_init__() — see the class comment about mypyc and @dataclass.
        self.current_line: Line
        self.__post_init__()
102
    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            # Nothing accumulated yet: fold the indent change into the still
            # empty current line instead of emitting it.
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one unnecessary.

        if (
            Preview.improved_async_statements_handling in self.mode
            and len(self.current_line.leaves) == 1
            and is_async_stmt_or_funcdef(self.current_line.leaves[0])
        ):
            # Special case for async def/for/with statements. `visit_async_stmt`
            # adds an `ASYNC` leaf then visits the child def/for/with statement
            # nodes. Line yields from those nodes shouldn't treat the former
            # `ASYNC` leaf as a complete line.
            return

        complete_line = self.current_line
        # Start the fresh line (at the adjusted depth) *before* yielding, so
        # recursive visits append to the new line, not to the emitted one.
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line
129
    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            # Comments live in the leaf's prefix; materialize them first so
            # they land on the correct line relative to the leaf itself.
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.mode.string_normalization and node.type == token.STRING:
                node.value = normalize_string_prefix(node.value)
                node.value = normalize_string_quotes(node.value)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                # Whitespace tokens are never appended; everything else joins
                # the line being built.
                self.current_line.append(node)
        yield from super().visit_default(node)
159
160     def visit_test(self, node: Node) -> Iterator[Line]:
161         """Visit an `x if y else z` test"""
162
163         if Preview.parenthesize_conditional_expressions in self.mode:
164             already_parenthesized = (
165                 node.prev_sibling and node.prev_sibling.type == token.LPAR
166             )
167
168             if not already_parenthesized:
169                 lpar = Leaf(token.LPAR, "")
170                 rpar = Leaf(token.RPAR, "")
171                 node.insert_child(0, lpar)
172                 node.append_child(rpar)
173
174         yield from self.visit_default(node)
175
    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        # Emit whatever was accumulated; the new current line is one level deeper.
        yield from self.line(+1)
        yield from self.visit_default(node)
181
    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments.  At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level.  Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent: subsequent lines are one level shallower.
        yield from self.line(-1)
195
    def visit_stmt(
        self, node: Node, keywords: Set[str], parens: Set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert`, and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
        normalize_invisible_parens(
            node, parens_after=parens, mode=self.mode, features=self.features
        )
        for child in node.children:
            # A matching keyword NAME leaf (e.g. `else`, `elif`, `finally`)
            # starts a new logical line.
            if is_name_token(child) and child.value in keywords:
                yield from self.line()

            yield from self.visit(child)
218
    def visit_typeparams(self, node: Node) -> Iterator[Line]:
        """Visit a PEP 695 type-parameter list; drop the space before it."""
        yield from self.visit_default(node)
        # Clear the first child's prefix so no whitespace separates the list
        # from the preceding name (presumably the opening bracket — confirm).
        node.children[0].prefix = ""
222
    def visit_typevartuple(self, node: Node) -> Iterator[Line]:
        """Visit a `*Ts` type-variable tuple parameter."""
        yield from self.visit_default(node)
        # Clear the second child's prefix so no space follows the `*`.
        node.children[1].prefix = ""
226
    def visit_paramspec(self, node: Node) -> Iterator[Line]:
        """Visit a `**P` param-spec parameter."""
        yield from self.visit_default(node)
        # Clear the second child's prefix so no space follows the `**`.
        node.children[1].prefix = ""
230
    def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
        """Visit a dict/set maker; optionally parenthesize long dict values."""
        if Preview.wrap_long_dict_values_in_parens in self.mode:
            for i, child in enumerate(node.children):
                if i == 0:
                    continue
                # Only children that directly follow a ":" are dict values.
                if node.children[i - 1].type == token.COLON:
                    if child.type == syms.atom and child.children[0].type == token.LPAR:
                        # Already parenthesized: demote redundant visible parens
                        # to invisible ones, then re-wrap invisibly.
                        if maybe_make_parens_invisible_in_atom(
                            child,
                            parent=node,
                            remove_brackets_around_comma=False,
                        ):
                            wrap_in_parentheses(node, child, visible=False)
                    else:
                        wrap_in_parentheses(node, child, visible=False)
        yield from self.visit_default(node)
247
    def visit_funcdef(self, node: Node) -> Iterator[Line]:
        """Visit function definition."""
        yield from self.line()

        # Remove redundant brackets around return type annotation.
        is_return_annotation = False
        for child in node.children:
            if child.type == token.RARROW:
                # The child following "->" is the return annotation.
                is_return_annotation = True
            elif is_return_annotation:
                if child.type == syms.atom and child.children[0].type == token.LPAR:
                    if maybe_make_parens_invisible_in_atom(
                        child,
                        parent=node,
                        remove_brackets_around_comma=False,
                    ):
                        wrap_in_parentheses(node, child, visible=False)
                else:
                    wrap_in_parentheses(node, child, visible=False)
                is_return_annotation = False

        for child in node.children:
            yield from self.visit(child)
271
    def visit_match_case(self, node: Node) -> Iterator[Line]:
        """Visit either a match or case statement."""
        # No particular leaf triggers invisible parens here (empty parens_after);
        # normalization still handles the statement's subject/pattern.
        normalize_invisible_parens(
            node, parens_after=set(), mode=self.mode, features=self.features
        )

        yield from self.line()
        for child in node.children:
            yield from self.visit(child)
281
282     def visit_suite(self, node: Node) -> Iterator[Line]:
283         """Visit a suite."""
284         if (
285             self.mode.is_pyi or Preview.dummy_implementations in self.mode
286         ) and is_stub_suite(node):
287             yield from self.visit(node.children[2])
288         else:
289             yield from self.visit_default(node)
290
    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        prev_type: Optional[int] = None
        for child in node.children:
            # Invisibly parenthesize arithmetic-like children that start the
            # statement (or follow a ";") so they can be split when too long.
            if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
                wrap_in_parentheses(node, child, visible=False)
            prev_type = child.type

        # A simple statement directly under a compound-statement node is a
        # same-line suite (e.g. the body of `if x: pass`).
        is_suite_like = node.parent and node.parent.type in STATEMENT
        if is_suite_like:
            if (
                self.mode.is_pyi or Preview.dummy_implementations in self.mode
            ) and is_stub_body(node):
                # Keep stub bodies on the same line as their header.
                yield from self.visit_default(node)
            else:
                # Otherwise explode the suite onto its own indented line.
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if (
                not (self.mode.is_pyi or Preview.dummy_implementations in self.mode)
                or not node.parent
                or not is_stub_suite(node.parent)
            ):
                yield from self.line()
            yield from self.visit_default(node)
318
    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
                # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
                # line.
                break

        # The child right after ASYNC is the wrapped def/for/with statement.
        internal_stmt = next(children)
        if Preview.improved_async_statements_handling in self.mode:
            # Visit the statement node itself; line() special-cases the lone
            # ASYNC leaf so it isn't emitted as a complete line prematurely.
            yield from self.visit(internal_stmt)
        else:
            # Legacy behavior: skip the statement node and visit its children
            # directly so they join the current (async) line.
            for child in internal_stmt.children:
                yield from self.visit(child)
338
339     def visit_decorators(self, node: Node) -> Iterator[Line]:
340         """Visit decorators."""
341         for child in node.children:
342             yield from self.line()
343             yield from self.visit(child)
344
    def visit_power(self, node: Node) -> Iterator[Line]:
        """Visit a power expression; wrap decimal-literal attribute access in parens."""
        for idx, leaf in enumerate(node.children[:-1]):
            next_leaf = node.children[idx + 1]

            if not isinstance(leaf, Leaf):
                continue

            value = leaf.value.lower()
            if (
                leaf.type == token.NUMBER
                and next_leaf.type == syms.trailer
                # Ensure that we are in an attribute trailer
                and next_leaf.children[0].type == token.DOT
                # It shouldn't wrap hexadecimal, binary and octal literals
                and not value.startswith(("0x", "0b", "0o"))
                # It shouldn't wrap complex literals
                and "j" not in value
            ):
                # e.g. `1 .real` becomes `(1).real` (visible parens by default).
                wrap_in_parentheses(node, leaf)

        remove_await_parens(node)

        yield from self.visit_default(node)
368
    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        # The SEMI leaf itself is never appended (visit_default isn't called);
        # emitting the current line here splits the compound statement.
        yield from self.line()
372
    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        # visit_default materializes comments from the ENDMARKER's prefix.
        yield from self.visit_default(leaf)
        yield from self.line()
377
    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        """Emit a standalone comment on its own line, unless within brackets."""
        # Inside brackets the comment stays on the current line and is handled
        # by the splitting machinery later.
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)
382
    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            # remove() detaches the operand and returns its old index in the
            # parent (`or 0` guards the None it returns when parentless);
            # reinsert the parenthesized atom at that same position.
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)
399
    def visit_tname(self, node: Node) -> Iterator[Line]:
        """
        Add potential parentheses around types in function parameter lists to be made
        into real parentheses in case the type hint is too long to fit on a line
        Examples:
        def foo(a: int, b: float = 7): ...

        ->

        def foo(a: (int), b: (float) = 7): ...
        """
        if Preview.parenthesize_long_type_hints in self.mode:
            # tname has exactly three children; children[2] is the annotation.
            assert len(node.children) == 3
            if maybe_make_parens_invisible_in_atom(node.children[2], parent=node):
                wrap_in_parentheses(node, node.children[2], visible=False)

        yield from self.visit_default(node)
417
    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        """Normalize a docstring leaf in place: prefix, quotes, indent, padding."""
        if Preview.hex_codes_in_unicode_sequences in self.mode:
            normalize_unicode_escape_sequences(leaf)

        if is_docstring(leaf) and "\\\n" not in leaf.value:
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            if self.mode.string_normalization:
                docstring = normalize_string_prefix(leaf.value)
                # visit_default() does handle string normalization for us, but
                # since this method acts differently depending on quote style (ex.
                # see padding logic below), there's a possibility for unstable
                # formatting as visit_default() is called *after*. To avoid a
                # situation where this function formats a docstring differently on
                # the second pass, normalize it early.
                docstring = normalize_string_quotes(docstring)
            else:
                docstring = leaf.value
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring
            indent = " " * 4 * self.current_line.depth

            if is_multiline_string(leaf):
                docstring = fix_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            has_trailing_backslash = False
            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of tailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
                        has_trailing_backslash = True
            elif not docstring_started_empty:
                # Content collapsed to empty but wasn't originally: keep one
                # space so the opening and closing quotes don't fuse.
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len

            # It's invalid to put closing single-character quotes on a new line.
            if self.mode and quote_len == 3:
                # We need to find the length of the last line of the docstring
                # to find if we can add the closing quotes to the line without
                # exceeding the maximum line length.
                # If docstring is one line, we don't put the closing quotes on a
                # separate line because it looks ugly (#3320).
                lines = docstring.splitlines()
                last_line_length = len(lines[-1]) if docstring else 0

                # If adding closing quotes would cause the last line to exceed
                # the maximum line length then put a line break before the
                # closing quotes
                if (
                    len(lines) > 1
                    and last_line_length + quote_len > self.mode.line_length
                    and len(indent) + quote_len <= self.mode.line_length
                    and not has_trailing_backslash
                ):
                    leaf.value = prefix + quote + docstring + "\n" + indent + quote
                else:
                    leaf.value = prefix + quote + docstring + quote
            else:
                leaf.value = prefix + quote + docstring + quote

        yield from self.visit_default(leaf)
500
    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
        # Ø: the empty set, used where a statement contributes no line-starting
        # keywords or no paren-triggering leaf values.
        Ø: Set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"})
        self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)

        # When this is moved out of preview, add ":" directly to ASSIGNMENTS in nodes.py
        if Preview.parenthesize_long_type_hints in self.mode:
            assignments = ASSIGNMENTS | {":"}
        else:
            assignments = ASSIGNMENTS
        self.visit_expr_stmt = partial(v, keywords=Ø, parens=assignments)

        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators

        # PEP 634
        self.visit_match_stmt = self.visit_match_case
        self.visit_case_block = self.visit_match_case
536
537
def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: List[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, mode=mode, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if Preview.string_processing in mode:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def:
        transformers = [left_hand_split]
    else:

        def _rhs(
            self: object, line: Line, features: Collection[Feature], mode: Mode
        ) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(right_hand_split(line, mode, features, omit=omit))
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length.  This is true only
                # for simple cases.  All others require running more transforms via
                # `transform_line()`.  This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], mode=mode):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(line, mode, features=features)

        # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
        # __name__ attribute which is needed in `run_transformer` further down.
        # Unfortunately a nested class breaks mypyc too. So a class must be created
        # via type ... https://github.com/mypyc/mypyc/issues/884
        rhs = type("rhs", (), {"__call__": _rhs})()

        if Preview.string_processing in mode:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]
    # It's always safe to attempt hugging of power operations and pretty much every line
    # could match.
    transformers.append(hug_power_op)

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        # for/else: no transformer succeeded — emit the line unchanged.
        yield line
653
654
655 class _BracketSplitComponent(Enum):
656     head = auto()
657     body = auto()
658     tail = auto()
659
660
def left_hand_split(
    line: Line, _features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise.  This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = head_leaves
    matching_bracket: Optional[Leaf] = None
    for leaf in line.leaves:
        if (
            current_leaves is body_leaves
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is matching_bracket
            and isinstance(matching_bracket, Leaf)
        ):
            # Reached the closing bracket of the first pair: make both
            # brackets visible and start collecting the tail.
            ensure_visible(leaf)
            ensure_visible(matching_bracket)
            current_leaves = tail_leaves if body_leaves else head_leaves
        current_leaves.append(leaf)
        if current_leaves is head_leaves:
            if leaf.type in OPENING_BRACKETS:
                # The first opening bracket ends the head and starts the body.
                matching_bracket = leaf
                current_leaves = body_leaves
    if not matching_bracket:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(
        head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
    )
    body = bracket_split_build_line(
        body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
    )
    tail = bracket_split_build_line(
        tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
    )
    bracket_split_succeeded_or_raise(head, body, tail)
    # Only yield non-empty parts.
    for result in (head, body, tail):
        if result:
            yield result
706
707
def right_hand_split(
    line: Line,
    mode: Mode,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
    # Compute the split first, then let the helper decide whether optional
    # parentheses should be omitted and the split retried.
    rhs_result = _first_right_hand_split(line, omit=omit)
    yield from _maybe_split_omitting_optional_parens(
        rhs_result, line, mode, features=features, omit=omit
    )
726
727
def _first_right_hand_split(
    line: Line,
    omit: Collection[LeafID] = (),
) -> RHSResult:
    """Produce head/body/tail lines from the last bracket pair of `line`.

    Note: this function should not have side effects. It's relied upon by
    _maybe_split_omitting_optional_parens to get an opinion whether to prefer
    splitting on the right side of an assignment statement.
    """
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    # Scan right-to-left, filling tail first, then body, then head.
    bucket = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if bucket is body_leaves and leaf is opening_bracket:
            # Reached the matching opener; everything further left is the head
            # (or still the tail, if the body ended up empty).
            bucket = head_leaves if body_leaves else tail_leaves
        bucket.append(leaf)
        if (
            bucket is tail_leaves
            and leaf.type in CLOSING_BRACKETS
            and id(leaf) not in omit
        ):
            opening_bracket = leaf.opening_bracket
            closing_bracket = leaf
            bucket = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    # The leaves were collected in reverse order; restore source order.
    for collected in (tail_leaves, body_leaves, head_leaves):
        collected.reverse()

    head = bracket_split_build_line(
        head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
    )
    body = bracket_split_build_line(
        body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
    )
    tail = bracket_split_build_line(
        tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
    )
    bracket_split_succeeded_or_raise(head, body, tail)
    return RHSResult(head, body, tail, opening_bracket, closing_bracket)
775
def _maybe_split_omitting_optional_parens(
    rhs: RHSResult,
    line: Line,
    mode: Mode,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Emit the `rhs` split, first retrying the split without optional parens.

    When the bracket pair chosen by `_first_right_hand_split` is a pair of
    invisible (optional) parentheses, attempt a second split that omits them
    and prefer that result unless the assignment heuristics below veto it.
    Recursion terminates because each retry adds one more closing bracket id
    to `omit`.
    """
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and rhs.opening_bracket.type == token.LPAR
        and not rhs.opening_bracket.value
        # the closing bracket is an optional paren
        and rhs.closing_bracket.type == token.RPAR
        and not rhs.closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not rhs.body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(rhs, mode.line_length)
    ):
        omit = {id(rhs.closing_bracket), *omit}
        try:
            # The RHSResult Omitting Optional Parens.
            rhs_oop = _first_right_hand_split(line, omit=omit)
            if not (
                Preview.prefer_splitting_right_hand_side_of_assignments in line.mode
                # the split is right after `=`
                and len(rhs.head.leaves) >= 2
                and rhs.head.leaves[-2].type == token.EQUAL
                # the left side of assignment contains brackets
                and any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1])
                # the left side of assignment is short enough (the -1 is for the ending
                # optional paren)
                and is_line_short_enough(
                    rhs.head, mode=replace(mode, line_length=mode.line_length - 1)
                )
                # the left side of assignment won't explode further because of magic
                # trailing comma
                and rhs.head.magic_trailing_comma is None
                # the split by omitting optional parens isn't preferred by some other
                # reason
                and not _prefer_split_rhs_oop(rhs_oop, mode)
            ):
                yield from _maybe_split_omitting_optional_parens(
                    rhs_oop, line, mode, features=features, omit=omit
                )
                return

        except CannotSplit as e:
            # The retry without parens failed; fall through to emitting `rhs`,
            # but only if that fallback can actually produce acceptable lines.
            if not (
                can_be_split(rhs.body) or is_line_short_enough(rhs.body, mode=mode)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                ) from e

            elif (
                rhs.head.contains_multiline_strings()
                or rhs.tail.contains_multiline_strings()
            ):
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                ) from e

    # Keeping the optional parens: make both visible before emitting the lines.
    ensure_visible(rhs.opening_bracket)
    ensure_visible(rhs.closing_bracket)
    for result in (rhs.head, rhs.body, rhs.tail):
        if result:
            yield result
851
852
def _prefer_split_rhs_oop(rhs_oop: RHSResult, mode: Mode) -> bool:
    """
    Returns whether we should prefer the result from a split omitting optional parens.
    """
    # Look for a closing bracket after the `=` in the head: that means the left
    # side of the assignment contains matching brackets.
    closing_bracket_after_assign = False
    for leaf in reversed(rhs_oop.head.leaves):
        if leaf.type == token.EQUAL:
            break
        if leaf.type in CLOSING_BRACKETS:
            closing_bracket_after_assign = True
            break
    if closing_bracket_after_assign:
        return True

    # The split came from inside the optional parens (the `=` stayed on the
    # first line) and the first line still fits.
    head_keeps_equal = any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
    if head_keeps_equal and is_line_short_enough(rhs_oop.head, mode=mode):
        return True

    # An unsplittable `# type: ignore` anywhere forces this variant.
    return (
        rhs_oop.head.contains_unsplittable_type_ignore()
        or rhs_oop.body.contains_unsplittable_type_ignore()
        or rhs_oop.tail.contains_unsplittable_type_ignore()
    )
880
881
def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
    if body:
        # A non-empty body means the split produced real content; nothing to check.
        return

    tail_len = len(str(tail).strip())
    if tail_len == 0:
        raise CannotSplit("Splitting brackets produced the same line")
    if tail_len < 3:
        raise CannotSplit(
            f"Splitting brackets on an empty body to save {tail_len} characters is"
            " not worth it"
        )
906
907
def bracket_split_build_line(
    leaves: List[Leaf],
    original: Line,
    opening_bracket: Leaf,
    *,
    component: _BracketSplitComponent,
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If it's the head component, brackets will be tracked so trailing commas are
    respected.

    If it's the body component, the result line is one-indented inside brackets and as
    such has its first leaf's prefix normalized and a trailing comma added when
    expected.
    """
    result = Line(mode=original.mode, depth=original.depth)
    if component is _BracketSplitComponent.body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (
                original.is_def
                and opening_bracket.value == "("
                and not any(leaf.type == token.COMMA for leaf in leaves)
                # In particular, don't add one within a parenthesized return annotation.
                # Unfortunately the indicator we're in a return annotation (RARROW) may
                # be defined directly in the parent node, the parent of the parent ...
                # and so on depending on how complex the return annotation is.
                # This isn't perfect and there's some false negatives but they are in
                # contexts were a comma is actually fine.
                and not any(
                    node.prev_sibling.type == RARROW
                    for node in (
                        leaves[0].parent,
                        getattr(leaves[0].parent, "parent", None),
                    )
                    if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
                )
                # Except the false negatives above for PEP 604 unions where we
                # can't add the comma.
                and not (
                    leaves[0].parent
                    and leaves[0].parent.next_sibling
                    and leaves[0].parent.next_sibling.type == token.VBAR
                )
            )

            if original.is_import or no_commas:
                # Walk backwards past any standalone comments to find the last
                # real leaf; append a comma there unless one already exists.
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    leaves_to_track: Set[LeafID] = set()
    if component is _BracketSplitComponent.head:
        leaves_to_track = get_leaves_inside_matching_brackets(leaves)
    # Populate the line
    for leaf in leaves:
        result.append(
            leaf,
            preformatted=True,
            track_bracket=id(leaf) in leaves_to_track,
        )
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if component is _BracketSplitComponent.body and should_split_line(
        result, opening_bracket
    ):
        # `should_split_line` decided the body must be exploded further on the
        # right-hand side (e.g. because of a magic trailing comma).
        result.should_split_rhs = True
    return result
987
988
def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def wrapped(
        line: Line, features: Collection[Feature], mode: Mode
    ) -> Iterator[Line]:
        # Strip leading whitespace from each produced line before yielding it.
        for produced in split_func(line, features, mode):
            normalize_prefix(produced.leaves[0], inside_brackets=True)
            yield produced

    return wrapped
1004
1005
def _get_last_non_comment_leaf(line: Line) -> Optional[int]:
    """Return the index of the last leaf that is not a standalone comment.

    Index 0 is never examined, so None is returned when every leaf from index 1
    onward is a standalone comment.
    # NOTE(review): the scan deliberately stops before index 0 — confirm that
    # excluding the very first leaf is intended.
    """
    for idx in reversed(range(1, len(line.leaves))):
        if line.leaves[idx].type != STANDALONE_COMMENT:
            return idx
    return None
1011
1012
def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line:
    """Append a trailing comma to `line` when it is safe and sensible to do so.

    Only acts when `safe` is set, the split delimiter is the comma, and the
    line does not already end in a comma or a standalone comment.
    """
    if not safe or delimiter_priority != COMMA_PRIORITY:
        return line
    last_type = line.leaves[-1].type
    if last_type != token.COMMA and last_type != STANDALONE_COMMENT:
        line.append(Leaf(token.COMMA, ","))
    return line
1023
1024
@dont_increase_indentation
def delimiter_split(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty") from None

    bt = line.bracket_tracker
    try:
        # Exclude the last leaf: a delimiter in final position shouldn't count.
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            # `leaf` can't legally join `current_line`: flush it and start a
            # fresh line with `leaf` as the first element.
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    last_non_comment_leaf = _get_last_non_comment_leaf(line)
    for leaf_idx, leaf in enumerate(line.leaves):
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            # A vararg at the outermost depth makes a trailing comma unsafe
            # unless the corresponding Feature says the target versions allow it.
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        if (
            Preview.add_trailing_comma_consistently in mode
            and last_leaf.type == STANDALONE_COMMENT
            and leaf_idx == last_non_comment_leaf
        ):
            # The line ends in standalone comments: place the trailing comma
            # right after the last non-comment leaf, not after the comments.
            current_line = _safe_add_trailing_comma(
                trailing_comma_safe, delimiter_priority, current_line
            )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            # This leaf carries the split delimiter: end the current line here.
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
    if current_line:
        current_line = _safe_add_trailing_comma(
            trailing_comma_safe, delimiter_priority, current_line
        )
        yield current_line
1107
1108
@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def emit(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to the current line, flushing it first if necessary."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from emit(leaf)

        for trailing_comment in line.comments_after(leaf):
            yield from emit(trailing_comment)

    if current_line:
        yield current_line
1142
1143
def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if inside_brackets:
        leaf.prefix = ""
        return

    parts = leaf.prefix.split("#")
    if "\\" in parts[0]:
        # A backslash continuation before any comment: drop the prefix entirely.
        leaf.prefix = ""
        return

    newlines = parts[-1].count("\n")
    if len(parts) > 1:
        # One newline terminates the comment itself; don't count it.
        newlines -= 1
    leaf.prefix = "\n" * newlines
1160
1161
def normalize_invisible_parens(
    node: Node, parens_after: Set[str], *, mode: Mode, features: Collection[Feature]
) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return

    # The multiple context managers grammar has a different pattern, thus this is
    # separate from the for-loop below. This possibly wraps them in invisible parens,
    # and later will be removed in remove_with_parens when needed.
    if node.type == syms.with_stmt:
        _maybe_wrap_cms_in_parens(node, mode, features)

    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(
                child, parens_after=parens_after, mode=mode, features=features
            )

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            # Atoms that are `for` targets get the stricter treatment that also
            # removes brackets around commas, hence this dedicated branch.
            if (
                child.type == syms.atom
                and node.type == syms.for_stmt
                and isinstance(child.prev_sibling, Leaf)
                and child.prev_sibling.type == token.NAME
                and child.prev_sibling.value == "for"
            ):
                if maybe_make_parens_invisible_in_atom(
                    child,
                    parent=node,
                    remove_brackets_around_comma=True,
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif isinstance(child, Node) and node.type == syms.with_stmt:
                remove_with_parens(child, node)
            elif child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(
                    child,
                    parent=node,
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                _normalize_import_from(node, child, index)
                break
            elif (
                index == 1
                and child.type == token.STAR
                and node.type == syms.except_clause
            ):
                # In except* (PEP 654), the star is actually part of
                # the keyword. So we need to skip the insertion of
                # invisible parentheses to work more precisely.
                continue

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                wrap_in_parentheses(node, child, visible=False)

        comma_check = child.type == token.COMMA

        # Parens are considered after any leaf listed in `parens_after` and
        # after commas.
        check_lpar = isinstance(child, Leaf) and (
            child.value in parens_after or comma_check
        )
1246
1247
def _normalize_import_from(parent: Node, child: LN, index: int) -> None:
    """Hide existing parens or insert invisible ones on a `from ... import`.

    "import from" nodes store parentheses directly as part of the statement.
    """
    if is_lpar_token(child):
        # Existing pair: blank both paren values so they become invisible.
        assert is_rpar_token(parent.children[-1])
        child.value = ""
        parent.children[-1].value = ""
        return

    if child.type == token.STAR:
        # `from x import *` never takes parentheses.
        return

    # No parens present: insert an invisible pair around the imported names.
    parent.insert_child(index, Leaf(token.LPAR, ""))
    parent.append_child(Leaf(token.RPAR, ""))
1260
1261
def remove_await_parens(node: Node) -> None:
    """Make redundant parentheses directly following `await` invisible.

    Only acts when `node` starts with an AWAIT token followed by a
    parenthesized atom.  Brackets around `power` nodes are kept visible for
    operator-precedence reasons, recursing into nested awaits.
    """
    if node.children[0].type == token.AWAIT and len(node.children) > 1:
        if (
            node.children[1].type == syms.atom
            and node.children[1].children[0].type == token.LPAR
        ):
            if maybe_make_parens_invisible_in_atom(
                node.children[1],
                parent=node,
                remove_brackets_around_comma=True,
            ):
                wrap_in_parentheses(node, node.children[1], visible=False)

            # Since await is an expression we shouldn't remove
            # brackets in cases where this would change
            # the AST due to operator precedence.
            # Therefore we only aim to remove brackets around
            # power nodes that aren't also await expressions themselves.
            # https://peps.python.org/pep-0492/#updated-operator-precedence-table
            # N.B. We've still removed any redundant nested brackets though :)
            opening_bracket = cast(Leaf, node.children[1].children[0])
            closing_bracket = cast(Leaf, node.children[1].children[-1])
            bracket_contents = node.children[1].children[1]
            if isinstance(bracket_contents, Node):
                if bracket_contents.type != syms.power:
                    ensure_visible(opening_bracket)
                    ensure_visible(closing_bracket)
                elif (
                    bracket_contents.type == syms.power
                    and bracket_contents.children[0].type == token.AWAIT
                ):
                    ensure_visible(opening_bracket)
                    ensure_visible(closing_bracket)
                    # If we are in a nested await then recurse down.
                    remove_await_parens(bracket_contents)
1297
1298
def _maybe_wrap_cms_in_parens(
    node: Node, mode: Mode, features: Collection[Feature]
) -> None:
    """When enabled and safe, wrap the multiple context managers in invisible parens.

    It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS.
    """
    if Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features:
        return
    if Preview.wrap_multiple_context_managers_in_parens not in mode:
        return
    if len(node.children) <= 2:
        return
    # If it's an atom, it's already wrapped in parens.
    if node.children[1].type == syms.atom:
        return

    # Find the COLON that terminates the context-manager list.
    colon_index = next(
        (
            i
            for i in range(2, len(node.children))
            if node.children[i].type == token.COLON
        ),
        None,
    )
    if colon_index is None:
        return

    context_managers = node.children[1:colon_index]
    for child in context_managers:
        child.remove()
    # After wrapping, the with_stmt will look like this:
    #   with_stmt
    #     NAME 'with'
    #     atom
    #       LPAR ''
    #       testlist_gexp
    #         ... <-- context_managers
    #       /testlist_gexp
    #       RPAR ''
    #     /atom
    #     COLON ':'
    node.insert_child(
        1,
        Node(
            syms.atom,
            [
                Leaf(token.LPAR, ""),
                Node(syms.testlist_gexp, context_managers),
                Leaf(token.RPAR, ""),
            ],
        ),
    )
1340
1341
def remove_with_parens(node: Node, parent: Node) -> None:
    """Recursively hide optional parens in `with` statements."""
    # Removing all unnecessary parentheses in with statements in one pass is a tad
    # complex as different variations of bracketed statements result in pretty
    # different parse trees:
    #
    # with (open("file")) as f:                       # this is an asexpr_test
    #     ...
    #
    # with (open("file") as f):                       # this is an atom containing an
    #     ...                                         # asexpr_test
    #
    # with (open("file")) as f, (open("file")) as f:  # this is asexpr_test, COMMA,
    #     ...                                         # asexpr_test
    #
    # with (open("file") as f, open("file") as f):    # an atom containing a
    #     ...                                         # testlist_gexp which then
    #                                                 # contains multiple asexpr_test(s)
    if node.type == syms.atom:
        if maybe_make_parens_invisible_in_atom(
            node,
            parent=parent,
            remove_brackets_around_comma=True,
        ):
            wrap_in_parentheses(parent, node, visible=False)
        # Recurse into the atom's contents (between the parens).
        if isinstance(node.children[1], Node):
            remove_with_parens(node.children[1], node)
    elif node.type == syms.testlist_gexp:
        # Multiple comma-separated context managers: handle each one.
        for child in node.children:
            if isinstance(child, Node):
                remove_with_parens(child, node)
    elif node.type == syms.asexpr_test and not any(
        leaf.type == token.COLONEQUAL for leaf in node.leaves()
    ):
        # `expr as name` — hide parens on the expression side, but leave
        # walrus-containing expressions untouched.
        if maybe_make_parens_invisible_in_atom(
            node.children[0],
            parent=node,
            remove_brackets_around_comma=True,
        ):
            wrap_in_parentheses(node, node.children[0], visible=False)
1382
1383
def maybe_make_parens_invisible_in_atom(
    node: LN,
    parent: LN,
    remove_brackets_around_comma: bool = False,
) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.
    """
    if (
        node.type not in (syms.atom, syms.expr)
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or (
            # This condition tries to prevent removing non-optional brackets
            # around a tuple, however, can be a bit overzealous so we provide
            # and option to skip this check for `for` and `with` statements.
            not remove_brackets_around_comma
            and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
        )
        or is_tuple_containing_walrus(node)
    ):
        return False

    if is_walrus_assignment(node):
        # Keep the parens around a parenthesized walrus in these parent
        # contexts.  (Was a list with `syms.for_stmt` duplicated; now a
        # deduplicated set for O(1) membership.)
        if parent.type in {
            syms.annassign,
            syms.expr_stmt,
            syms.assert_stmt,
            syms.return_stmt,
            syms.except_clause,
            syms.funcdef,
            syms.with_stmt,
            syms.tname,
            # these ones aren't useful to end users, but they do please fuzzers
            syms.for_stmt,
            syms.del_stmt,
        }:
            return False

    first = node.children[0]
    last = node.children[-1]
    if is_lpar_token(first) and is_rpar_token(last):
        middle = node.children[1]
        # make parentheses invisible
        if (
            # If the prefix of `middle` includes a type comment with
            # ignore annotation, then we do not remove the parentheses
            not is_type_ignore_comment_string(middle.prefix.strip())
        ):
            first.value = ""
            last.value = ""
        maybe_make_parens_invisible_in_atom(
            middle,
            parent=parent,
            remove_brackets_around_comma=remove_brackets_around_comma,
        )

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        return False

    return True
1454
1455
def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""
    # De Morgan of the original guard; note that an invisible bracket (empty
    # string value) still satisfies `value in "[{("`.
    if not opening_bracket.parent or opening_bracket.value not in "[{(":
        return False

    # We're essentially checking if the body is delimited by commas and there's more
    # than one of them (we're excluding the trailing comma and if the delimiter priority
    # is still commas, that means there's more).
    exclude = set()
    trailing_comma = False
    try:
        last_leaf = line.leaves[-1]
        if last_leaf.type == token.COMMA:
            trailing_comma = True
            exclude.add(id(last_leaf))
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
    except (IndexError, ValueError):
        return False

    if max_priority != COMMA_PRIORITY:
        return False
    if line.mode.magic_trailing_comma and trailing_comma:
        return True
    # always explode imports
    return opening_bracket.parent.type in {syms.atom, syms.import_from}
1481
1482
def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too).  First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """

    omit: Set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    # Running width of the suffix scanned so far, starting from the
    # indentation prefix (4 columns per depth level).
    length = 4 * line.depth
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    inner_brackets: Set[LeafID] = set()
    # Scan the line right-to-left so that trailers are encountered
    # outermost-first.
    for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
        length += leaf_length
        if length > line_length:
            # The suffix scanned so far no longer fits on one line; no
            # further omission candidates are possible.
            break

        # leaf_length exceeding the visible value + prefix width indicates
        # extra attached text — presumably an inline comment's width is
        # folded into leaf_length by enumerate_with_length (the variable
        # name suggests so; confirm in Line.enumerate_with_length).
        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            # Currently inside a candidate bracket pair, scanning toward its
            # opening bracket.
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and leaf.opening_bracket is not None
                    and not is_one_sequence_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (e.g. only add them to the `omit` set if another
                # pair of brackets was good enough.
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                # A previously found trailer fit within the budget: omit it
                # (plus everything nested inside it) and offer this set to
                # the caller.
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and leaf.opening_bracket is not None
                and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                # Remember this visible bracket pair as the next candidate
                # trailer boundary.
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
1557
1558
def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> List[Line]:
    """Apply `transform` to `line` and recursively re-split every produced line.

    When the transformer is the right-hand-side splitter and only invisible
    parentheses were available, a second pass with forced optional parentheses
    is attempted; the second result is preferred if all of its lines fit.

    Raises CannotTransform if the transformer returns the line unchanged.
    """
    if not line_str:
        line_str = line_to_string(line)

    result: List[Line] = []
    for produced_line in transform(line, features, mode):
        if str(produced_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")
        result.extend(transform_line(produced_line, mode=mode, features=features))

    features_set = set(features)
    # Decide whether a second attempt with FORCE_OPTIONAL_PARENTHESES could
    # help; bail out when it cannot apply or cannot improve the result.
    second_pass_useless = (
        Feature.FORCE_OPTIONAL_PARENTHESES in features_set
        or transform.__class__.__name__ != "rhs"
        or not line.bracket_tracker.invisible
        or any(bracket.value for bracket in line.bracket_tracker.invisible)
        or line.contains_multiline_strings()
        or result[0].contains_uncollapsable_type_comments()
        or result[0].contains_unsplittable_type_ignore()
        or is_line_short_enough(result[0], mode=mode)
        # If any leaves have no parents (which _can_ occur since
        # `transform(line)` potentially destroys the line's underlying node
        # structure), then we can't proceed. Doing so would cause the below
        # call to `append_leaves()` to fail.
        or any(leaf.parent is None for leaf in line.leaves)
    )
    if second_pass_useless:
        return result

    duplicate = line.clone()
    append_leaves(duplicate, line, line.leaves)
    second_opinion = run_transformer(
        duplicate,
        transform,
        mode,
        features_set | {Feature.FORCE_OPTIONAL_PARENTHESES},
        line_str=line_str,
    )
    if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion):
        return second_opinion
    return result