Fix crash with walrus + await + with (#3473)
[etc/vim.git] / src / black / linegen.py
1 """
2 Generating lines of code.
3 """
4 import sys
5 from dataclasses import dataclass
6 from enum import Enum, auto
7 from functools import partial, wraps
8 from typing import Collection, Iterator, List, Optional, Set, Union, cast
9
10 from black.brackets import (
11     COMMA_PRIORITY,
12     DOT_PRIORITY,
13     get_leaves_inside_matching_brackets,
14     max_delimiter_priority_in_atom,
15 )
16 from black.comments import FMT_OFF, generate_comments, list_comments
17 from black.lines import (
18     Line,
19     append_leaves,
20     can_be_split,
21     can_omit_invisible_parens,
22     is_line_short_enough,
23     line_to_string,
24 )
25 from black.mode import Feature, Mode, Preview
26 from black.nodes import (
27     ASSIGNMENTS,
28     BRACKETS,
29     CLOSING_BRACKETS,
30     OPENING_BRACKETS,
31     RARROW,
32     STANDALONE_COMMENT,
33     STATEMENT,
34     WHITESPACE,
35     Visitor,
36     ensure_visible,
37     is_arith_like,
38     is_atom_with_invisible_parens,
39     is_docstring,
40     is_empty_tuple,
41     is_lpar_token,
42     is_multiline_string,
43     is_name_token,
44     is_one_sequence_between,
45     is_one_tuple,
46     is_rpar_token,
47     is_stub_body,
48     is_stub_suite,
49     is_tuple_containing_walrus,
50     is_vararg,
51     is_walrus_assignment,
52     is_yield,
53     syms,
54     wrap_in_parentheses,
55 )
56 from black.numerics import normalize_numeric_literal
57 from black.strings import (
58     fix_docstring,
59     get_string_prefix,
60     normalize_string_prefix,
61     normalize_string_quotes,
62 )
63 from black.trans import (
64     CannotTransform,
65     StringMerger,
66     StringParenStripper,
67     StringParenWrapper,
68     StringSplitter,
69     Transformer,
70     hug_power_op,
71 )
72 from blib2to3.pgen2 import token
73 from blib2to3.pytree import Leaf, Node
74
75 # types
76 LeafID = int
77 LN = Union[Leaf, Node]
78
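# Rough usage sketch (editor's comment, not part of the original module; simplified
# from the driver in black/__init__.py; names like `src_node`, `features` and
# `dst_contents` are placeholders):
#
#     line_generator = LineGenerator(mode=mode)
#     for line in line_generator.visit(src_node):
#         for formatted in transform_line(line, mode=mode, features=features):
#             dst_contents.append(str(formatted))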
79
80 class CannotSplit(CannotTransform):
81     """A readable split that fits the allotted line length is impossible."""
82
83
84 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
85 # See also https://github.com/mypyc/mypyc/issues/827.
86 class LineGenerator(Visitor[Line]):
87     """Generates reformatted Line objects.  Empty lines are not emitted.
88
89     Note: destroys the tree it's visiting by mutating the prefixes of its leaves
90     in ways that mean the tree will no longer stringify to valid Python code.
91     """
92
93     def __init__(self, mode: Mode) -> None:
94         self.mode = mode
95         self.current_line: Line
96         self.__post_init__()
97
98     def line(self, indent: int = 0) -> Iterator[Line]:
99         """Generate a line.
100
101         If the line is empty, only emit if it makes sense.
102         If the line is too long, split it first and then generate.
103
104         If any lines were generated, set up a new current_line.
105         """
106         if not self.current_line:
107             self.current_line.depth += indent
108             return  # Line is empty, don't emit. Creating a new one is unnecessary.
109
110         complete_line = self.current_line
111         self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
112         yield complete_line
113
114     def visit_default(self, node: LN) -> Iterator[Line]:
115         """Default `visit_*()` implementation. Recurses to children of `node`."""
116         if isinstance(node, Leaf):
117             any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
118             for comment in generate_comments(node, preview=self.mode.preview):
119                 if any_open_brackets:
120                     # any comment within brackets is subject to splitting
121                     self.current_line.append(comment)
122                 elif comment.type == token.COMMENT:
123                     # regular trailing comment
124                     self.current_line.append(comment)
125                     yield from self.line()
126
127                 else:
128                     # regular standalone comment
129                     yield from self.line()
130
131                     self.current_line.append(comment)
132                     yield from self.line()
133
134             normalize_prefix(node, inside_brackets=any_open_brackets)
135             if self.mode.string_normalization and node.type == token.STRING:
136                 node.value = normalize_string_prefix(node.value)
137                 node.value = normalize_string_quotes(node.value)
138             if node.type == token.NUMBER:
139                 normalize_numeric_literal(node)
140             if node.type not in WHITESPACE:
141                 self.current_line.append(node)
142         yield from super().visit_default(node)
143
144     def visit_test(self, node: Node) -> Iterator[Line]:
145         """Visit an `x if y else z` test"""
146
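        # Added sketch (comment only, not from the original source): under this
        # preview feature a conditional expression used as a long right-hand side,
        # e.g.
        #     value = compute_default() if config is None else config.value
        # gets invisible parentheses around the `... if ... else ...` part so the
        # right-hand-side splitter can later wrap it across lines inside parens.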
147         if Preview.parenthesize_conditional_expressions in self.mode:
148             already_parenthesized = (
149                 node.prev_sibling and node.prev_sibling.type == token.LPAR
150             )
151
152             if not already_parenthesized:
153                 lpar = Leaf(token.LPAR, "")
154                 rpar = Leaf(token.RPAR, "")
155                 node.insert_child(0, lpar)
156                 node.append_child(rpar)
157
158         yield from self.visit_default(node)
159
160     def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
161         """Increase indentation level, maybe yield a line."""
162         # In blib2to3 INDENT never holds comments.
163         yield from self.line(+1)
164         yield from self.visit_default(node)
165
166     def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
167         """Decrease indentation level, maybe yield a line."""
168         # The current line might still wait for trailing comments.  At DEDENT time
169         # there won't be any (they would be prefixes on the preceding NEWLINE).
170         # Emit the line then.
171         yield from self.line()
172
173         # While DEDENT has no value, its prefix may contain standalone comments
174         # that belong to the current indentation level.  Get 'em.
175         yield from self.visit_default(node)
176
177         # Finally, emit the dedent.
178         yield from self.line(-1)
179
180     def visit_stmt(
181         self, node: Node, keywords: Set[str], parens: Set[str]
182     ) -> Iterator[Line]:
183         """Visit a statement.
184
185         This implementation is shared for `if`, `while`, `for`, `try`, `except`,
186         `def`, `with`, `class`, `assert`, and assignments.
187
188         The relevant Python language `keywords` for a given statement will be
189         NAME leaves within it. This method puts those on a separate line.
190
191         `parens` holds a set of string leaf values immediately after which
192         invisible parens should be put.
193         """
194         normalize_invisible_parens(node, parens_after=parens, preview=self.mode.preview)
195         for child in node.children:
196             if is_name_token(child) and child.value in keywords:
197                 yield from self.line()
198
199             yield from self.visit(child)
200
201     def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
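        # Added sketch (comment only): with this preview feature, each dict/set value
        # after a colon gains invisible parentheses, so an overlong value such as
        #     {"key": first_component + second_component + third_component}
        # can later be wrapped in visible parentheses on its own lines if needed.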
202         if Preview.wrap_long_dict_values_in_parens in self.mode:
203             for i, child in enumerate(node.children):
204                 if i == 0:
205                     continue
206                 if node.children[i - 1].type == token.COLON:
207                     if child.type == syms.atom and child.children[0].type == token.LPAR:
208                         if maybe_make_parens_invisible_in_atom(
209                             child,
210                             parent=node,
211                             remove_brackets_around_comma=False,
212                         ):
213                             wrap_in_parentheses(node, child, visible=False)
214                     else:
215                         wrap_in_parentheses(node, child, visible=False)
216         yield from self.visit_default(node)
217
218     def visit_funcdef(self, node: Node) -> Iterator[Line]:
219         """Visit function definition."""
220         if Preview.annotation_parens not in self.mode:
221             yield from self.visit_stmt(node, keywords={"def"}, parens=set())
222         else:
223             yield from self.line()
224
225             # Remove redundant brackets around return type annotation.
226             is_return_annotation = False
227             for child in node.children:
228                 if child.type == token.RARROW:
229                     is_return_annotation = True
230                 elif is_return_annotation:
231                     if child.type == syms.atom and child.children[0].type == token.LPAR:
232                         if maybe_make_parens_invisible_in_atom(
233                             child,
234                             parent=node,
235                             remove_brackets_around_comma=False,
236                         ):
237                             wrap_in_parentheses(node, child, visible=False)
238                     else:
239                         wrap_in_parentheses(node, child, visible=False)
240                     is_return_annotation = False
241
242             for child in node.children:
243                 yield from self.visit(child)
244
245     def visit_match_case(self, node: Node) -> Iterator[Line]:
246         """Visit either a match or case statement."""
247         normalize_invisible_parens(node, parens_after=set(), preview=self.mode.preview)
248
249         yield from self.line()
250         for child in node.children:
251             yield from self.visit(child)
252
253     def visit_suite(self, node: Node) -> Iterator[Line]:
254         """Visit a suite."""
255         if self.mode.is_pyi and is_stub_suite(node):
256             yield from self.visit(node.children[2])
257         else:
258             yield from self.visit_default(node)
259
260     def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
261         """Visit a statement without nested statements."""
262         prev_type: Optional[int] = None
263         for child in node.children:
264             if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
265                 wrap_in_parentheses(node, child, visible=False)
266             prev_type = child.type
267
268         is_suite_like = node.parent and node.parent.type in STATEMENT
269         if is_suite_like:
270             if self.mode.is_pyi and is_stub_body(node):
271                 yield from self.visit_default(node)
272             else:
273                 yield from self.line(+1)
274                 yield from self.visit_default(node)
275                 yield from self.line(-1)
276
277         else:
278             if (
279                 not self.mode.is_pyi
280                 or not node.parent
281                 or not is_stub_suite(node.parent)
282             ):
283                 yield from self.line()
284             yield from self.visit_default(node)
285
286     def visit_async_stmt(self, node: Node) -> Iterator[Line]:
287         """Visit `async def`, `async for`, `async with`."""
288         yield from self.line()
289
290         children = iter(node.children)
291         for child in children:
292             yield from self.visit(child)
293
294             if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
295                 # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
296                 # line.
297                 break
298
299         internal_stmt = next(children)
300         for child in internal_stmt.children:
301             yield from self.visit(child)
302
303     def visit_decorators(self, node: Node) -> Iterator[Line]:
304         """Visit decorators."""
305         for child in node.children:
306             yield from self.line()
307             yield from self.visit(child)
308
309     def visit_power(self, node: Node) -> Iterator[Line]:
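        # Added sketch (comment only): attribute access on a plain decimal literal,
        # e.g.
        #     x = 10 .bit_length()
        # is wrapped as
        #     x = (10).bit_length()
        # while hex/binary/octal and complex literals are deliberately left alone.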
310         for idx, leaf in enumerate(node.children[:-1]):
311             next_leaf = node.children[idx + 1]
312
313             if not isinstance(leaf, Leaf):
314                 continue
315
316             value = leaf.value.lower()
317             if (
318                 leaf.type == token.NUMBER
319                 and next_leaf.type == syms.trailer
320                 # Ensure that we are in an attribute trailer
321                 and next_leaf.children[0].type == token.DOT
322                 # It shouldn't wrap hexadecimal, binary and octal literals
323                 and not value.startswith(("0x", "0b", "0o"))
324                 # It shouldn't wrap complex literals
325                 and "j" not in value
326             ):
327                 wrap_in_parentheses(node, leaf)
328
329         if Preview.remove_redundant_parens in self.mode:
330             remove_await_parens(node)
331
332         yield from self.visit_default(node)
333
334     def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
335         """Remove a semicolon and put the other statement on a separate line."""
336         yield from self.line()
337
338     def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
339         """End of file. Process outstanding comments and end with a newline."""
340         yield from self.visit_default(leaf)
341         yield from self.line()
342
343     def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
344         if not self.current_line.bracket_tracker.any_open_brackets():
345             yield from self.line()
346         yield from self.visit_default(leaf)
347
348     def visit_factor(self, node: Node) -> Iterator[Line]:
349         """Force parentheses between a unary op and a binary power:
350
351         -2 ** 8 -> -(2 ** 8)
352         """
353         _operator, operand = node.children
354         if (
355             operand.type == syms.power
356             and len(operand.children) == 3
357             and operand.children[1].type == token.DOUBLESTAR
358         ):
359             lpar = Leaf(token.LPAR, "(")
360             rpar = Leaf(token.RPAR, ")")
361             index = operand.remove() or 0
362             node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
363         yield from self.visit_default(node)
364
365     def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
366         if is_docstring(leaf) and "\\\n" not in leaf.value:
367             # We're ignoring docstrings with backslash newline escapes because changing
368             # indentation of those changes the AST representation of the code.
369             if Preview.normalize_docstring_quotes_and_prefixes_properly in self.mode:
370                 # There was a bug where --skip-string-normalization wouldn't stop us
371                 # from normalizing docstring prefixes. To maintain stability, we can
372                 # only address this buggy behaviour while the preview style is enabled.
373                 if self.mode.string_normalization:
374                     docstring = normalize_string_prefix(leaf.value)
375                     # visit_default() does handle string normalization for us, but
376                     # since this method acts differently depending on quote style (ex.
377                     # see padding logic below), there's a possibility for unstable
378                     # formatting as visit_default() is called *after*. To avoid a
379                     # situation where this function formats a docstring differently on
380                     # the second pass, normalize it early.
381                     docstring = normalize_string_quotes(docstring)
382                 else:
383                     docstring = leaf.value
384             else:
385                 # ... otherwise, we'll keep the buggy behaviour >.<
386                 docstring = normalize_string_prefix(leaf.value)
387             prefix = get_string_prefix(docstring)
388             docstring = docstring[len(prefix) :]  # Remove the prefix
389             quote_char = docstring[0]
390             # A natural way to remove the outer quotes is to do:
391             #   docstring = docstring.strip(quote_char)
392             # but that breaks on """""x""" (which is '""x').
393             # So we actually need to remove the first character and the next two
394             # characters but only if they are the same as the first.
395             quote_len = 1 if docstring[1] != quote_char else 3
396             docstring = docstring[quote_len:-quote_len]
397             docstring_started_empty = not docstring
398             indent = " " * 4 * self.current_line.depth
399
400             if is_multiline_string(leaf):
401                 docstring = fix_docstring(docstring, indent)
402             else:
403                 docstring = docstring.strip()
404
405             has_trailing_backslash = False
406             if docstring:
407                 # Add some padding if the docstring starts / ends with a quote mark.
408                 if docstring[0] == quote_char:
409                     docstring = " " + docstring
410                 if docstring[-1] == quote_char:
411                     docstring += " "
412                 if docstring[-1] == "\\":
413                     backslash_count = len(docstring) - len(docstring.rstrip("\\"))
414                     if backslash_count % 2:
415                         # Odd number of trailing backslashes, add some padding to
416                         # avoid escaping the closing string quote.
417                         docstring += " "
418                         has_trailing_backslash = True
419             elif not docstring_started_empty:
420                 docstring = " "
421
422             # We could enforce triple quotes at this point.
423             quote = quote_char * quote_len
424
425             # It's invalid to put closing single-character quotes on a new line.
426             if Preview.long_docstring_quotes_on_newline in self.mode and quote_len == 3:
427                 # We need to find the length of the last line of the docstring
428                 # to find if we can add the closing quotes to the line without
429                 # exceeding the maximum line length.
430                 # If docstring is one line, we don't put the closing quotes on a
431                 # separate line because it looks ugly (#3320).
432                 lines = docstring.splitlines()
433                 last_line_length = len(lines[-1]) if docstring else 0
434
435                 # If adding closing quotes would cause the last line to exceed
436                 # the maximum line length then put a line break before the
437                 # closing quotes
438                 if (
439                     len(lines) > 1
440                     and last_line_length + quote_len > self.mode.line_length
441                     and len(indent) + quote_len <= self.mode.line_length
442                     and not has_trailing_backslash
443                 ):
444                     leaf.value = prefix + quote + docstring + "\n" + indent + quote
445                 else:
446                     leaf.value = prefix + quote + docstring + quote
447             else:
448                 leaf.value = prefix + quote + docstring + quote
449
450         yield from self.visit_default(leaf)
451
452     def __post_init__(self) -> None:
453         """You are in a twisty little maze of passages."""
454         self.current_line = Line(mode=self.mode)
455
456         v = self.visit_stmt
457         Ø: Set[str] = set()
458         self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
459         self.visit_if_stmt = partial(
460             v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
461         )
462         self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
463         self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
464         self.visit_try_stmt = partial(
465             v, keywords={"try", "except", "else", "finally"}, parens=Ø
466         )
467         if self.mode.preview:
468             self.visit_except_clause = partial(
469                 v, keywords={"except"}, parens={"except"}
470             )
471             self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
472         else:
473             self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
474             self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
475         self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
476         self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
477         self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
478         self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
479         self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
480         self.visit_async_funcdef = self.visit_async_stmt
481         self.visit_decorated = self.visit_decorators
482
483         # PEP 634
484         self.visit_match_stmt = self.visit_match_case
485         self.visit_case_block = self.visit_match_case
486
487
488 def transform_line(
489     line: Line, mode: Mode, features: Collection[Feature] = ()
490 ) -> Iterator[Line]:
491     """Transform a `line`, potentially splitting it into many lines.
492
493     They should fit in the allotted `line_length` but might not be able to.
494
495     `features` are syntactical features that may be used in the output.
496     """
497     if line.is_comment:
498         yield line
499         return
500
501     line_str = line_to_string(line)
502
503     ll = mode.line_length
504     sn = mode.string_normalization
505     string_merge = StringMerger(ll, sn)
506     string_paren_strip = StringParenStripper(ll, sn)
507     string_split = StringSplitter(ll, sn)
508     string_paren_wrap = StringParenWrapper(ll, sn)
509
510     transformers: List[Transformer]
511     if (
512         not line.contains_uncollapsable_type_comments()
513         and not line.should_split_rhs
514         and not line.magic_trailing_comma
515         and (
516             is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
517             or line.contains_unsplittable_type_ignore()
518         )
519         and not (line.inside_brackets and line.contains_standalone_comments())
520     ):
521         # Only apply basic string preprocessing, since lines shouldn't be split here.
522         if Preview.string_processing in mode:
523             transformers = [string_merge, string_paren_strip]
524         else:
525             transformers = []
526     elif line.is_def:
527         transformers = [left_hand_split]
528     else:
529
530         def _rhs(
531             self: object, line: Line, features: Collection[Feature]
532         ) -> Iterator[Line]:
533             """Wraps calls to `right_hand_split`.
534
535             The calls increasingly `omit` right-hand trailers (bracket pairs with
536             content), meaning the trailers get glued together to split on another
537             bracket pair instead.
538             """
539             for omit in generate_trailers_to_omit(line, mode.line_length):
540                 lines = list(
541                     right_hand_split(line, mode.line_length, features, omit=omit)
542                 )
543                 # Note: this check is only able to figure out if the first line of the
544                 # *current* transformation fits in the line length.  This is true only
545                 # for simple cases.  All others require running more transforms via
546                 # `transform_line()`.  This check doesn't know if those would succeed.
547                 if is_line_short_enough(lines[0], line_length=mode.line_length):
548                     yield from lines
549                     return
550
551             # All splits failed, best effort split with no omits.
552             # This mostly happens to multiline strings that are by definition
553             # reported as not fitting a single line, as well as lines that contain
554             # trailing commas (those have to be exploded).
555             yield from right_hand_split(
556                 line, line_length=mode.line_length, features=features
557             )
558
559         # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
560         # __name__ attribute which is needed in `run_transformer` further down.
561         # Unfortunately a nested class breaks mypyc too. So a class must be created
562         # via type ... https://github.com/mypyc/mypyc/issues/884
563         rhs = type("rhs", (), {"__call__": _rhs})()
564
565         if Preview.string_processing in mode:
566             if line.inside_brackets:
567                 transformers = [
568                     string_merge,
569                     string_paren_strip,
570                     string_split,
571                     delimiter_split,
572                     standalone_comment_split,
573                     string_paren_wrap,
574                     rhs,
575                 ]
576             else:
577                 transformers = [
578                     string_merge,
579                     string_paren_strip,
580                     string_split,
581                     string_paren_wrap,
582                     rhs,
583                 ]
584         else:
585             if line.inside_brackets:
586                 transformers = [delimiter_split, standalone_comment_split, rhs]
587             else:
588                 transformers = [rhs]
589     # It's always safe to attempt hugging of power operations and pretty much every line
590     # could match.
591     transformers.append(hug_power_op)
592
593     for transform in transformers:
594         # We are accumulating lines in `result` because we might want to abort
595         # mission and return the original line in the end, or attempt a different
596         # split altogether.
597         try:
598             result = run_transformer(line, transform, mode, features, line_str=line_str)
599         except CannotTransform:
600             continue
601         else:
602             yield from result
603             break
604
605     else:
606         yield line
607
608
609 class _BracketSplitComponent(Enum):
610     head = auto()
611     body = auto()
612     tail = auto()
613
614
615 def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
616     """Split line into many lines, starting with the first matching bracket pair.
617
618     Note: this usually looks weird, only use this for function definitions.
619     Prefer RHS otherwise.  This is why this function is not symmetrical with
620     :func:`right_hand_split` which also handles optional parentheses.
621     """
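    # Added sketch (comment only): for a definition header such as
    #     def frobnicate(alpha, beta, gamma) -> SomeType:
    # the head is `def frobnicate(`, the body is `alpha, beta, gamma`, and the tail
    # is `) -> SomeType:`, which is why this split suits function definitions.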
622     tail_leaves: List[Leaf] = []
623     body_leaves: List[Leaf] = []
624     head_leaves: List[Leaf] = []
625     current_leaves = head_leaves
626     matching_bracket: Optional[Leaf] = None
627     for leaf in line.leaves:
628         if (
629             current_leaves is body_leaves
630             and leaf.type in CLOSING_BRACKETS
631             and leaf.opening_bracket is matching_bracket
632             and isinstance(matching_bracket, Leaf)
633         ):
634             ensure_visible(leaf)
635             ensure_visible(matching_bracket)
636             current_leaves = tail_leaves if body_leaves else head_leaves
637         current_leaves.append(leaf)
638         if current_leaves is head_leaves:
639             if leaf.type in OPENING_BRACKETS:
640                 matching_bracket = leaf
641                 current_leaves = body_leaves
642     if not matching_bracket:
643         raise CannotSplit("No brackets found")
644
645     head = bracket_split_build_line(
646         head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
647     )
648     body = bracket_split_build_line(
649         body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
650     )
651     tail = bracket_split_build_line(
652         tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
653     )
654     bracket_split_succeeded_or_raise(head, body, tail)
655     for result in (head, body, tail):
656         if result:
657             yield result
658
659
660 @dataclass
661 class _RHSResult:
662     """Intermediate split result from a right hand split."""
663
664     head: Line
665     body: Line
666     tail: Line
667     opening_bracket: Leaf
668     closing_bracket: Leaf
669
670
671 def right_hand_split(
672     line: Line,
673     line_length: int,
674     features: Collection[Feature] = (),
675     omit: Collection[LeafID] = (),
676 ) -> Iterator[Line]:
677     """Split line into many lines, starting with the last matching bracket pair.
678
679     If the split was by optional parentheses, attempt splitting without them, too.
680     `omit` is a collection of closing bracket IDs that shouldn't be considered for
681     this split.
682
683     Note: running this function modifies `bracket_depth` on the leaves of `line`.
684     """
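    # Added sketch (comment only): for an overlong call such as
    #     do_something(first_argument, second_argument)
    # the last bracket pair yields head `do_something(`, body
    # `first_argument, second_argument`, and tail `)`.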
685     rhs_result = _first_right_hand_split(line, omit=omit)
686     yield from _maybe_split_omitting_optional_parens(
687         rhs_result, line, line_length, features=features, omit=omit
688     )
689
690
691 def _first_right_hand_split(
692     line: Line,
693     omit: Collection[LeafID] = (),
694 ) -> _RHSResult:
695     """Split the line into head, body, tail starting with the last bracket pair.
696
697     Note: this function should not have side effects. It's relied upon by
698     _maybe_split_omitting_optional_parens to get an opinion whether to prefer
699     splitting on the right side of an assignment statement.
700     """
701     tail_leaves: List[Leaf] = []
702     body_leaves: List[Leaf] = []
703     head_leaves: List[Leaf] = []
704     current_leaves = tail_leaves
705     opening_bracket: Optional[Leaf] = None
706     closing_bracket: Optional[Leaf] = None
707     for leaf in reversed(line.leaves):
708         if current_leaves is body_leaves:
709             if leaf is opening_bracket:
710                 current_leaves = head_leaves if body_leaves else tail_leaves
711         current_leaves.append(leaf)
712         if current_leaves is tail_leaves:
713             if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
714                 opening_bracket = leaf.opening_bracket
715                 closing_bracket = leaf
716                 current_leaves = body_leaves
717     if not (opening_bracket and closing_bracket and head_leaves):
718         # If there is no opening or closing_bracket that means the split failed and
719         # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
720         # the matching `opening_bracket` wasn't available on `line` anymore.
721         raise CannotSplit("No brackets found")
722
723     tail_leaves.reverse()
724     body_leaves.reverse()
725     head_leaves.reverse()
726     head = bracket_split_build_line(
727         head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
728     )
729     body = bracket_split_build_line(
730         body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
731     )
732     tail = bracket_split_build_line(
733         tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
734     )
735     bracket_split_succeeded_or_raise(head, body, tail)
736     return _RHSResult(head, body, tail, opening_bracket, closing_bracket)
737
738
739 def _maybe_split_omitting_optional_parens(
740     rhs: _RHSResult,
741     line: Line,
742     line_length: int,
743     features: Collection[Feature] = (),
744     omit: Collection[LeafID] = (),
745 ) -> Iterator[Line]:
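    # Added sketch (comment only): when the bracket pair used by the first RHS split
    # is a pair of Black's own invisible parentheses, e.g. the ones wrapping the
    # right-hand side of
    #     result = some_object.method(argument_one, argument_two)
    # this retries the split with those parens omitted so the line can instead break
    # on the call's own brackets, keeping whichever attempt the heuristics prefer.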
746     if (
747         Feature.FORCE_OPTIONAL_PARENTHESES not in features
748         # the opening bracket is an optional paren
749         and rhs.opening_bracket.type == token.LPAR
750         and not rhs.opening_bracket.value
751         # the closing bracket is an optional paren
752         and rhs.closing_bracket.type == token.RPAR
753         and not rhs.closing_bracket.value
754         # it's not an import (optional parens are the only thing we can split on
755         # in this case; attempting a split without them is a waste of time)
756         and not line.is_import
757         # there are no standalone comments in the body
758         and not rhs.body.contains_standalone_comments(0)
759         # and we can actually remove the parens
760         and can_omit_invisible_parens(rhs.body, line_length)
761     ):
762         omit = {id(rhs.closing_bracket), *omit}
763         try:
764             # The _RHSResult obtained by omitting the optional parens (hence "oop").
765             rhs_oop = _first_right_hand_split(line, omit=omit)
766             if not (
767                 Preview.prefer_splitting_right_hand_side_of_assignments in line.mode
768                 # the split is right after `=`
769                 and len(rhs.head.leaves) >= 2
770                 and rhs.head.leaves[-2].type == token.EQUAL
771                 # the left side of assignment contains brackets
772                 and any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1])
773                 # the left side of assignment is short enough (the -1 is for the ending
774                 # optional paren)
775                 and is_line_short_enough(rhs.head, line_length=line_length - 1)
776                 # the left side of assignment won't explode further because of magic
777                 # trailing comma
778                 and rhs.head.magic_trailing_comma is None
779                 # the split by omitting optional parens isn't preferred by some other
780                 # reason
781                 and not _prefer_split_rhs_oop(rhs_oop, line_length=line_length)
782             ):
783                 yield from _maybe_split_omitting_optional_parens(
784                     rhs_oop, line, line_length, features=features, omit=omit
785                 )
786                 return
787
788         except CannotSplit as e:
789             if not (
790                 can_be_split(rhs.body)
791                 or is_line_short_enough(rhs.body, line_length=line_length)
792             ):
793                 raise CannotSplit(
794                     "Splitting failed, body is still too long and can't be split."
795                 ) from e
796
797             elif (
798                 rhs.head.contains_multiline_strings()
799                 or rhs.tail.contains_multiline_strings()
800             ):
801                 raise CannotSplit(
802                     "The current optional pair of parentheses is bound to fail to"
803                     " satisfy the splitting algorithm because the head or the tail"
804                     " contains multiline strings which by definition never fit one"
805                     " line."
806                 ) from e
807
808     ensure_visible(rhs.opening_bracket)
809     ensure_visible(rhs.closing_bracket)
810     for result in (rhs.head, rhs.body, rhs.tail):
811         if result:
812             yield result
813
814
815 def _prefer_split_rhs_oop(rhs_oop: _RHSResult, line_length: int) -> bool:
816     """
817     Returns whether we should prefer the result from a split omitting optional parens.
818     """
819     has_closing_bracket_after_assign = False
820     for leaf in reversed(rhs_oop.head.leaves):
821         if leaf.type == token.EQUAL:
822             break
823         if leaf.type in CLOSING_BRACKETS:
824             has_closing_bracket_after_assign = True
825             break
826     return (
827         # contains matching brackets after the `=` (done by checking there is a
828         # closing bracket)
829         has_closing_bracket_after_assign
830         or (
831             # the split is actually from inside the optional parens (done by checking
832             # the first line still contains the `=`)
833             any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
834             # the first line is short enough
835             and is_line_short_enough(rhs_oop.head, line_length=line_length)
836         )
837         # contains unsplittable type ignore
838         or rhs_oop.head.contains_unsplittable_type_ignore()
839         or rhs_oop.body.contains_unsplittable_type_ignore()
840         or rhs_oop.tail.contains_unsplittable_type_ignore()
841     )
842
843
844 def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
845     """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.
846
847     Do nothing otherwise.
848
849     A left- or right-hand split is based on a pair of brackets. Content before
850     (and including) the opening bracket is left on one line, content inside the
851     brackets is put on a separate line, and finally content starting with and
852     following the closing bracket is put on a separate line.
853
854     Those are called `head`, `body`, and `tail`, respectively. If the split
855     produced the same line (all content in `head`) or ended up with an empty `body`
856     and the `tail` is just the closing bracket, then it's considered failed.
857     """
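    # Added sketch (comment only): splitting `print()` would leave an empty body and
    # a one-character tail `)`, so it is rejected as not worth it; a split where all
    # content stays in the head is rejected as producing the same line.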
858     tail_len = len(str(tail).strip())
859     if not body:
860         if tail_len == 0:
861             raise CannotSplit("Splitting brackets produced the same line")
862
863         elif tail_len < 3:
864             raise CannotSplit(
865                 f"Splitting brackets on an empty body to save {tail_len} characters is"
866                 " not worth it"
867             )
868
869
870 def bracket_split_build_line(
871     leaves: List[Leaf],
872     original: Line,
873     opening_bracket: Leaf,
874     *,
875     component: _BracketSplitComponent,
876 ) -> Line:
877     """Return a new line with given `leaves` and respective comments from `original`.
878
879     If it's the head component, brackets will be tracked so trailing commas are
880     respected.
881
882     If it's the body component, the result line is one-indented inside brackets and as
883     such has its first leaf's prefix normalized and a trailing comma added when
884     expected.
885     """
886     result = Line(mode=original.mode, depth=original.depth)
887     if component is _BracketSplitComponent.body:
888         result.inside_brackets = True
889         result.depth += 1
890         if leaves:
891             # Since body is a new indent level, remove spurious leading whitespace.
892             normalize_prefix(leaves[0], inside_brackets=True)
893             # Ensure a trailing comma for imports and standalone function arguments, but
894             # be careful not to add one after any comments or within type annotations.
895             no_commas = (
896                 original.is_def
897                 and opening_bracket.value == "("
898                 and not any(leaf.type == token.COMMA for leaf in leaves)
899                 # In particular, don't add one within a parenthesized return annotation.
900                 # Unfortunately the indicator we're in a return annotation (RARROW) may
901                 # be defined directly in the parent node, the parent of the parent ...
902                 # and so on depending on how complex the return annotation is.
903                 # This isn't perfect and there are some false negatives, but they
904                 # are in contexts where a comma is actually fine.
905                 and not any(
906                     node.prev_sibling.type == RARROW
907                     for node in (
908                         leaves[0].parent,
909                         getattr(leaves[0].parent, "parent", None),
910                     )
911                     if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
912                 )
913             )
914
915             if original.is_import or no_commas:
916                 for i in range(len(leaves) - 1, -1, -1):
917                     if leaves[i].type == STANDALONE_COMMENT:
918                         continue
919
920                     if leaves[i].type != token.COMMA:
921                         new_comma = Leaf(token.COMMA, ",")
922                         leaves.insert(i + 1, new_comma)
923                     break
924
925     leaves_to_track: Set[LeafID] = set()
926     if (
927         Preview.handle_trailing_commas_in_head in original.mode
928         and component is _BracketSplitComponent.head
929     ):
930         leaves_to_track = get_leaves_inside_matching_brackets(leaves)
931     # Populate the line
932     for leaf in leaves:
933         result.append(
934             leaf,
935             preformatted=True,
936             track_bracket=id(leaf) in leaves_to_track,
937         )
938         for comment_after in original.comments_after(leaf):
939             result.append(comment_after, preformatted=True)
940     if component is _BracketSplitComponent.body and should_split_line(
941         result, opening_bracket
942     ):
943         result.should_split_rhs = True
944     return result
945
946
947 def dont_increase_indentation(split_func: Transformer) -> Transformer:
948     """Normalize prefix of the first leaf in every line returned by `split_func`.
949
950     This is a decorator over relevant split functions.
951     """
952
953     @wraps(split_func)
954     def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
955         for split_line in split_func(line, features):
956             normalize_prefix(split_line.leaves[0], inside_brackets=True)
957             yield split_line
958
959     return split_wrapper
960
961
962 @dont_increase_indentation
963 def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
964     """Split according to delimiters of the highest priority.
965
966     If the appropriate Features are given, the split will add trailing commas
967     also in function signatures and calls that contain `*` and `**`.
968     """
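    # Added sketch (comment only): for the bracketed body `1, 2, 3` of an overlong
    # collection, the highest-priority delimiter is the comma, so this yields the
    # lines `1,`, `2,` and `3,` (the last trailing comma is added when safe).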
969     try:
970         last_leaf = line.leaves[-1]
971     except IndexError:
972         raise CannotSplit("Line empty") from None
973
974     bt = line.bracket_tracker
975     try:
976         delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
977     except ValueError:
978         raise CannotSplit("No delimiters found") from None
979
980     if delimiter_priority == DOT_PRIORITY:
981         if bt.delimiter_count_with_priority(delimiter_priority) == 1:
982             raise CannotSplit("Splitting a single attribute from its owner looks wrong")
983
984     current_line = Line(
985         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
986     )
987     lowest_depth = sys.maxsize
988     trailing_comma_safe = True
989
990     def append_to_line(leaf: Leaf) -> Iterator[Line]:
991         """Append `leaf` to current line or to new line if appending impossible."""
992         nonlocal current_line
993         try:
994             current_line.append_safe(leaf, preformatted=True)
995         except ValueError:
996             yield current_line
997
998             current_line = Line(
999                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1000             )
1001             current_line.append(leaf)
1002
1003     for leaf in line.leaves:
1004         yield from append_to_line(leaf)
1005
1006         for comment_after in line.comments_after(leaf):
1007             yield from append_to_line(comment_after)
1008
1009         lowest_depth = min(lowest_depth, leaf.bracket_depth)
1010         if leaf.bracket_depth == lowest_depth:
1011             if is_vararg(leaf, within={syms.typedargslist}):
1012                 trailing_comma_safe = (
1013                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
1014                 )
1015             elif is_vararg(leaf, within={syms.arglist, syms.argument}):
1016                 trailing_comma_safe = (
1017                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
1018                 )
1019
1020         leaf_priority = bt.delimiters.get(id(leaf))
1021         if leaf_priority == delimiter_priority:
1022             yield current_line
1023
1024             current_line = Line(
1025                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1026             )
1027     if current_line:
1028         if (
1029             trailing_comma_safe
1030             and delimiter_priority == COMMA_PRIORITY
1031             and current_line.leaves[-1].type != token.COMMA
1032             and current_line.leaves[-1].type != STANDALONE_COMMENT
1033         ):
1034             new_comma = Leaf(token.COMMA, ",")
1035             current_line.append(new_comma)
1036         yield current_line
1037
1038
1039 @dont_increase_indentation
1040 def standalone_comment_split(
1041     line: Line, features: Collection[Feature] = ()
1042 ) -> Iterator[Line]:
1043     """Split standalone comments from the rest of the line."""
1044     if not line.contains_standalone_comments(0):
1045         raise CannotSplit("Line does not have any standalone comments")
1046
1047     current_line = Line(
1048         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1049     )
1050
1051     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1052         """Append `leaf` to current line or to new line if appending impossible."""
1053         nonlocal current_line
1054         try:
1055             current_line.append_safe(leaf, preformatted=True)
1056         except ValueError:
1057             yield current_line
1058
1059             current_line = Line(
1060                 line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1061             )
1062             current_line.append(leaf)
1063
1064     for leaf in line.leaves:
1065         yield from append_to_line(leaf)
1066
1067         for comment_after in line.comments_after(leaf):
1068             yield from append_to_line(comment_after)
1069
1070     if current_line:
1071         yield current_line
1072
1073
1074 def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
1075     """Leave existing extra newlines if not `inside_brackets`. Remove everything
1076     else.
1077
1078     Note: don't use backslashes for formatting or you'll lose your voting rights.
1079     """
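    # Added sketch (comment only): outside brackets a prefix like "\n\n\n    " keeps
    # only its newlines ("\n\n\n"); inside brackets the prefix is cleared entirely.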
1080     if not inside_brackets:
1081         spl = leaf.prefix.split("#")
1082         if "\\" not in spl[0]:
1083             nl_count = spl[-1].count("\n")
1084             if len(spl) > 1:
1085                 nl_count -= 1
1086             leaf.prefix = "\n" * nl_count
1087             return
1088
1089     leaf.prefix = ""
1090
1091
1092 def normalize_invisible_parens(
1093     node: Node, parens_after: Set[str], *, preview: bool
1094 ) -> None:
1095     """Make existing optional parentheses invisible or create new ones.
1096
1097     `parens_after` is a set of string leaf values immediately after which parens
1098     should be put.
1099
1100     Standardizes on visible parentheses for single-element tuples, and keeps
1101     existing visible parentheses for other tuples and generator expressions.
1102     """
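    # Added sketch (comment only): redundant parens after a keyword become invisible,
    # e.g. `if (x):` renders as `if x:`, while a bare single-element tuple such as
    # `t = 1,` gains visible parentheses and renders as `t = (1,)`.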
1103     for pc in list_comments(node.prefix, is_endmarker=False, preview=preview):
1104         if pc.value in FMT_OFF:
1105             # This `node` has a prefix with `# fmt: off`, don't mess with parens.
1106             return
1107     check_lpar = False
1108     for index, child in enumerate(list(node.children)):
1109         # Fixes a bug where invisible parens are not properly stripped from
1110         # assignment statements that contain type annotations.
1111         if isinstance(child, Node) and child.type == syms.annassign:
1112             normalize_invisible_parens(
1113                 child, parens_after=parens_after, preview=preview
1114             )
1115
1116         # Add parentheses around long tuple unpacking in assignments.
1117         if (
1118             index == 0
1119             and isinstance(child, Node)
1120             and child.type == syms.testlist_star_expr
1121         ):
1122             check_lpar = True
1123
1124         if check_lpar:
1125             if (
1126                 preview
1127                 and child.type == syms.atom
1128                 and node.type == syms.for_stmt
1129                 and isinstance(child.prev_sibling, Leaf)
1130                 and child.prev_sibling.type == token.NAME
1131                 and child.prev_sibling.value == "for"
1132             ):
1133                 if maybe_make_parens_invisible_in_atom(
1134                     child,
1135                     parent=node,
1136                     remove_brackets_around_comma=True,
1137                 ):
1138                     wrap_in_parentheses(node, child, visible=False)
1139             elif preview and isinstance(child, Node) and node.type == syms.with_stmt:
1140                 remove_with_parens(child, node)
1141             elif child.type == syms.atom:
1142                 if maybe_make_parens_invisible_in_atom(
1143                     child,
1144                     parent=node,
1145                 ):
1146                     wrap_in_parentheses(node, child, visible=False)
1147             elif is_one_tuple(child):
1148                 wrap_in_parentheses(node, child, visible=True)
1149             elif node.type == syms.import_from:
1150                 # "import from" nodes store parentheses directly as part of
1151                 # the statement
1152                 if is_lpar_token(child):
1153                     assert is_rpar_token(node.children[-1])
1154                     # make parentheses invisible
1155                     child.value = ""
1156                     node.children[-1].value = ""
1157                 elif child.type != token.STAR:
1158                     # insert invisible parentheses
1159                     node.insert_child(index, Leaf(token.LPAR, ""))
1160                     node.append_child(Leaf(token.RPAR, ""))
1161                 break
1162             elif (
1163                 index == 1
1164                 and child.type == token.STAR
1165                 and node.type == syms.except_clause
1166             ):
1167                 # In except* (PEP 654), the star is actually part of
1168                 # the keyword. So we need to skip the insertion of
1169                 # invisible parentheses to work more precisely.
1170                 continue
1171
1172             elif not (isinstance(child, Leaf) and is_multiline_string(child)):
1173                 wrap_in_parentheses(node, child, visible=False)
1174
1175         comma_check = child.type == token.COMMA if preview else False
1176
1177         check_lpar = isinstance(child, Leaf) and (
1178             child.value in parens_after or comma_check
1179         )
1180
1181
1182 def remove_await_parens(node: Node) -> None:
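    # Added sketch (comment only): `await (coro())` loses its parentheses and renders
    # as `await coro()`, while `await (a + b)` keeps them because `await` binds more
    # tightly than `+` and removing the parens would change the meaning.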
1183     if node.children[0].type == token.AWAIT and len(node.children) > 1:
1184         if (
1185             node.children[1].type == syms.atom
1186             and node.children[1].children[0].type == token.LPAR
1187         ):
1188             if maybe_make_parens_invisible_in_atom(
1189                 node.children[1],
1190                 parent=node,
1191                 remove_brackets_around_comma=True,
1192             ):
1193                 wrap_in_parentheses(node, node.children[1], visible=False)
1194
1195             # Since await is an expression we shouldn't remove
1196             # brackets in cases where this would change
1197             # the AST due to operator precedence.
1198             # Therefore we only aim to remove brackets around
1199             # power nodes that aren't also await expressions themselves.
1200             # https://peps.python.org/pep-0492/#updated-operator-precedence-table
1201             # N.B. We've still removed any redundant nested brackets though :)
1202             opening_bracket = cast(Leaf, node.children[1].children[0])
1203             closing_bracket = cast(Leaf, node.children[1].children[-1])
1204             bracket_contents = cast(Node, node.children[1].children[1])
1205             if bracket_contents.type != syms.power:
1206                 ensure_visible(opening_bracket)
1207                 ensure_visible(closing_bracket)
1208             elif (
1209                 bracket_contents.type == syms.power
1210                 and bracket_contents.children[0].type == token.AWAIT
1211             ):
1212                 ensure_visible(opening_bracket)
1213                 ensure_visible(closing_bracket)
1214                 # If we are in a nested await then recurse down.
1215                 remove_await_parens(bracket_contents)
1216
1217
1218 def remove_with_parens(node: Node, parent: Node) -> None:
1219     """Recursively hide optional parens in `with` statements."""
1220     # Removing all unnecessary parentheses in with statements in one pass is a tad
1221     # complex as different variations of bracketed statements result in pretty
1222     # different parse trees:
1223     #
1224     # with (open("file")) as f:                       # this is an asexpr_test
1225     #     ...
1226     #
1227     # with (open("file") as f):                       # this is an atom containing an
1228     #     ...                                         # asexpr_test
1229     #
1230     # with (open("file")) as f, (open("file")) as f:  # this is asexpr_test, COMMA,
1231     #     ...                                         # asexpr_test
1232     #
1233     # with (open("file") as f, open("file") as f):    # an atom containing a
1234     #     ...                                         # testlist_gexp which then
1235     #                                                 # contains multiple asexpr_test(s)
1236     if node.type == syms.atom:
1237         if maybe_make_parens_invisible_in_atom(
1238             node,
1239             parent=parent,
1240             remove_brackets_around_comma=True,
1241         ):
1242             wrap_in_parentheses(parent, node, visible=False)
1243         if isinstance(node.children[1], Node):
1244             remove_with_parens(node.children[1], node)
1245     elif node.type == syms.testlist_gexp:
1246         for child in node.children:
1247             if isinstance(child, Node):
1248                 remove_with_parens(child, node)
1249     elif node.type == syms.asexpr_test and not any(
1250         leaf.type == token.COLONEQUAL for leaf in node.leaves()
1251     ):
1252         if maybe_make_parens_invisible_in_atom(
1253             node.children[0],
1254             parent=node,
1255             remove_brackets_around_comma=True,
1256         ):
1257             wrap_in_parentheses(node, node.children[0], visible=False)
1258
1259
1260 def maybe_make_parens_invisible_in_atom(
1261     node: LN,
1262     parent: LN,
1263     remove_brackets_around_comma: bool = False,
1264 ) -> bool:
1265     """If it's safe, make the parens in the atom `node` invisible, recursively.
1266     Additionally, remove repeated, adjacent invisible parens from the atom `node`
1267     as they are redundant.
1268
1269     Returns whether the node should itself be wrapped in invisible parentheses.
1270     """
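    # Added sketch (comment only): the parens in `return (x)` are made invisible, so
    # it renders as `return x`; the parens in `return (x, y)` are kept because the
    # comma-delimited tuple trips the COMMA_PRIORITY check below.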
1271     if (
1272         node.type != syms.atom
1273         or is_empty_tuple(node)
1274         or is_one_tuple(node)
1275         or (is_yield(node) and parent.type != syms.expr_stmt)
1276         or (
1277             # This condition tries to prevent removing non-optional brackets
1278             # around a tuple; however, it can be a bit overzealous, so we provide
1279             # an option to skip this check for `for` and `with` statements.
1280             not remove_brackets_around_comma
1281             and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
1282         )
1283         or is_tuple_containing_walrus(node)
1284     ):
1285         return False
1286
1287     if is_walrus_assignment(node):
1288         if parent.type in [
1289             syms.annassign,
1290             syms.expr_stmt,
1291             syms.assert_stmt,
1292             syms.return_stmt,
1293             syms.except_clause,
1294             syms.funcdef,
1295             syms.with_stmt,
1296             # these ones aren't useful to end users, but they do please fuzzers
1297             syms.for_stmt,
1298             syms.del_stmt,
1300         ]:
1301             return False
1302
1303     first = node.children[0]
1304     last = node.children[-1]
1305     if is_lpar_token(first) and is_rpar_token(last):
1306         middle = node.children[1]
1307         # make parentheses invisible
1308         first.value = ""
1309         last.value = ""
1310         maybe_make_parens_invisible_in_atom(
1311             middle,
1312             parent=parent,
1313             remove_brackets_around_comma=remove_brackets_around_comma,
1314         )
1315
1316         if is_atom_with_invisible_parens(middle):
1317             # Strip the invisible parens from `middle` by replacing
1318             # it with the child in-between the invisible parens
1319             middle.replace(middle.children[1])
1320
1321         return False
1322
1323     return True
1324
1325
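# --- Illustrative sketch (not part of upstream Black) -------------------------
# "Invisible" parentheses are LPAR/RPAR leaves whose value is set to "" above,
# so they no longer appear when the line is rendered.  The end-to-end effect,
# including the walrus special-casing, can be observed through the public
# `format_str`/`Mode` API; the helper name and sample inputs below are
# assumptions for illustration only.
def _demo_invisible_parens() -> None:  # hypothetical helper, not upstream API
    import black

    # Redundant parentheses around a simple right-hand side are typically made
    # invisible, i.e. dropped from the output.
    print(black.format_str("value = (1 + 2)\n", mode=black.Mode()), end="")
    # A parenthesized walrus on the right-hand side of an assignment hits the
    # expr_stmt entry in the parent.type check above, so its parens are kept.
    print(black.format_str("x = (y := 5)\n", mode=black.Mode()), end="")

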
1326 def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
1327     """Should `line` be immediately split with `delimiter_split()` after RHS?"""
1328
1329     if not (opening_bracket.parent and opening_bracket.value in "[{("):
1330         return False
1331
1332     # We're essentially checking if the body is delimited by commas and there's more
1333     # than one of them (we exclude the trailing comma; if the delimiter priority is
1334     # still commas, that means there's more than one).
1335     exclude = set()
1336     trailing_comma = False
1337     try:
1338         last_leaf = line.leaves[-1]
1339         if last_leaf.type == token.COMMA:
1340             trailing_comma = True
1341             exclude.add(id(last_leaf))
1342         max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
1343     except (IndexError, ValueError):
1344         return False
1345
1346     return max_priority == COMMA_PRIORITY and (
1347         (line.mode.magic_trailing_comma and trailing_comma)
1348         # always explode imports
1349         or opening_bracket.parent.type in {syms.atom, syms.import_from}
1350     )
1351
1352
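# --- Illustrative sketch (not part of upstream Black) -------------------------
# `should_split_line` is part of what makes a "magic trailing comma" explode a
# comma-delimited body onto one element per line even when it would fit.  The
# effect is easy to observe via the public `format_str`/`Mode` API; the helper
# name and sample inputs below are assumptions for illustration only.
def _demo_magic_trailing_comma() -> None:  # hypothetical helper, not upstream API
    import black

    # Without a trailing comma, a short collection is collapsed onto one line.
    print(
        black.format_str("x = [\n    1,\n    2,\n    3\n]\n", mode=black.Mode()),
        end="",
    )
    # With a trailing comma, each element is kept on its own line.
    print(black.format_str("x = [1, 2, 3,]\n", mode=black.Mode()), end="")

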
1353 def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
1354     """Generate sets of closing bracket IDs that should be omitted in a RHS.
1355
1356     Brackets can be omitted if the entire trailer up to and including
1357     a preceding closing bracket fits in one line.
1358
1359     Yielded sets are cumulative (contain results of previous yields, too).  First
1360     set is empty, unless the line should explode, in which case bracket pairs until
1361     the one that needs to explode are omitted.
1362     """
1363
1364     omit: Set[LeafID] = set()
1365     if not line.magic_trailing_comma:
1366         yield omit
1367
1368     length = 4 * line.depth
1369     opening_bracket: Optional[Leaf] = None
1370     closing_bracket: Optional[Leaf] = None
1371     inner_brackets: Set[LeafID] = set()
1372     for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
1373         length += leaf_length
1374         if length > line_length:
1375             break
1376
1377         has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
1378         if leaf.type == STANDALONE_COMMENT or has_inline_comment:
1379             break
1380
1381         if opening_bracket:
1382             if leaf is opening_bracket:
1383                 opening_bracket = None
1384             elif leaf.type in CLOSING_BRACKETS:
1385                 prev = line.leaves[index - 1] if index > 0 else None
1386                 if (
1387                     prev
1388                     and prev.type == token.COMMA
1389                     and leaf.opening_bracket is not None
1390                     and not is_one_sequence_between(
1391                         leaf.opening_bracket, leaf, line.leaves
1392                     )
1393                 ):
1394                     # Never omit bracket pairs with trailing commas.
1395                     # We need to explode on those.
1396                     break
1397
1398                 inner_brackets.add(id(leaf))
1399         elif leaf.type in CLOSING_BRACKETS:
1400             prev = line.leaves[index - 1] if index > 0 else None
1401             if prev and prev.type in OPENING_BRACKETS:
1402                 # Empty brackets would fail a split so treat them as "inner"
1403                 # brackets (i.e. only add them to the `omit` set if another
1404                 # pair of brackets was good enough).
1405                 inner_brackets.add(id(leaf))
1406                 continue
1407
1408             if closing_bracket:
1409                 omit.add(id(closing_bracket))
1410                 omit.update(inner_brackets)
1411                 inner_brackets.clear()
1412                 yield omit
1413
1414             if (
1415                 prev
1416                 and prev.type == token.COMMA
1417                 and leaf.opening_bracket is not None
1418                 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
1419             ):
1420                 # Never omit bracket pairs with trailing commas.
1421                 # We need to explode on those.
1422                 break
1423
1424             if leaf.value:
1425                 opening_bracket = leaf.opening_bracket
1426                 closing_bracket = leaf
1427
1428
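# --- Illustrative sketch (not part of upstream Black) -------------------------
# `generate_trailers_to_omit` feeds the right-hand-split logic: it yields
# progressively larger sets of closing brackets whose pairs may stay joined,
# so a call chain is split at as few brackets as possible.  The visible effect
# can be checked end to end with the public `format_str`/`Mode` API; the helper
# name and the long chained call below are assumptions for illustration only.
def _demo_trailer_omission() -> None:  # hypothetical helper, not upstream API
    import black

    src = (
        "result = some_object.first_method().second_method("
        "argument_one, argument_two, argument_three, argument_four)\n"
    )
    # With the default 88-character limit, Black typically splits inside the
    # final bracket pair and leaves the earlier, short trailers intact.
    print(black.format_str(src, mode=black.Mode()), end="")

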
1429 def run_transformer(
1430     line: Line,
1431     transform: Transformer,
1432     mode: Mode,
1433     features: Collection[Feature],
1434     *,
1435     line_str: str = "",
1436 ) -> List[Line]:
1437     if not line_str:
1438         line_str = line_to_string(line)
1439     result: List[Line] = []
1440     for transformed_line in transform(line, features):
1441         if str(transformed_line).strip("\n") == line_str:
1442             raise CannotTransform("Line transformer returned an unchanged result")
1443
1444         result.extend(transform_line(transformed_line, mode=mode, features=features))
1445
1446     features_set = set(features)
1447     if (
1448         Feature.FORCE_OPTIONAL_PARENTHESES in features_set
1449         or transform.__class__.__name__ != "rhs"
1450         or not line.bracket_tracker.invisible
1451         or any(bracket.value for bracket in line.bracket_tracker.invisible)
1452         or line.contains_multiline_strings()
1453         or result[0].contains_uncollapsable_type_comments()
1454         or result[0].contains_unsplittable_type_ignore()
1455         or is_line_short_enough(result[0], line_length=mode.line_length)
1456         # If any leaves have no parents (which _can_ occur since
1457         # `transform(line)` potentially destroys the line's underlying node
1458         # structure), then we can't proceed. Doing so would cause the below
1459         # call to `append_leaves()` to fail.
1460         or any(leaf.parent is None for leaf in line.leaves)
1461     ):
1462         return result
1463
1464     line_copy = line.clone()
1465     append_leaves(line_copy, line, line.leaves)
1466     features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES}
1467     second_opinion = run_transformer(
1468         line_copy, transform, mode, features_fop, line_str=line_str
1469     )
1470     if all(
1471         is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
1472     ):
1473         result = second_opinion
1474     return result
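

# --- Illustrative sketch (not part of upstream Black) -------------------------
# `run_transformer` retries a right-hand split with FORCE_OPTIONAL_PARENTHESES
# as a "second opinion" and keeps that result only if every produced line fits.
# There is no public switch for the forced variant, but the overall splitting
# behavior can be observed by formatting a long right-hand side; the helper
# name and sample input below are assumptions for illustration only.
def _demo_long_rhs_split() -> None:  # hypothetical helper, not upstream API
    import black

    src = (
        "user_count = first_value + second_value + third_value"
        " + fourth_value + fifth_value + sixth_value\n"
    )
    # Depending on which candidate split fits, the output either wraps the
    # right-hand side in parentheses or splits at the operator delimiters.
    print(black.format_str(src, mode=black.Mode()), end="")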