src/black/linegen.py  (blob d12ca39d0372972cef62043b230fa5785d67bd7f)
1 """
2 Generating lines of code.
3 """
4
5 import sys
6 from dataclasses import replace
7 from enum import Enum, auto
8 from functools import partial, wraps
9 from typing import Collection, Iterator, List, Optional, Set, Union, cast
10
11 from black.brackets import (
12     COMMA_PRIORITY,
13     DOT_PRIORITY,
14     get_leaves_inside_matching_brackets,
15     max_delimiter_priority_in_atom,
16 )
17 from black.comments import FMT_OFF, generate_comments, list_comments
18 from black.lines import (
19     Line,
20     RHSResult,
21     append_leaves,
22     can_be_split,
23     can_omit_invisible_parens,
24     is_line_short_enough,
25     line_to_string,
26 )
27 from black.mode import Feature, Mode, Preview
28 from black.nodes import (
29     ASSIGNMENTS,
30     BRACKETS,
31     CLOSING_BRACKETS,
32     OPENING_BRACKETS,
33     RARROW,
34     STANDALONE_COMMENT,
35     STATEMENT,
36     WHITESPACE,
37     Visitor,
38     ensure_visible,
39     is_arith_like,
40     is_async_stmt_or_funcdef,
41     is_atom_with_invisible_parens,
42     is_docstring,
43     is_empty_tuple,
44     is_lpar_token,
45     is_multiline_string,
46     is_name_token,
47     is_one_sequence_between,
48     is_one_tuple,
49     is_rpar_token,
50     is_stub_body,
51     is_stub_suite,
52     is_tuple_containing_walrus,
53     is_type_ignore_comment_string,
54     is_vararg,
55     is_walrus_assignment,
56     is_yield,
57     syms,
58     wrap_in_parentheses,
59 )
60 from black.numerics import normalize_numeric_literal
61 from black.strings import (
62     fix_docstring,
63     get_string_prefix,
64     normalize_string_prefix,
65     normalize_string_quotes,
66     normalize_unicode_escape_sequences,
67 )
68 from black.trans import (
69     CannotTransform,
70     StringMerger,
71     StringParenStripper,
72     StringParenWrapper,
73     StringSplitter,
74     Transformer,
75     hug_power_op,
76 )
77 from blib2to3.pgen2 import token
78 from blib2to3.pytree import Leaf, Node
79
80 # types
81 LeafID = int
82 LN = Union[Leaf, Node]
83
84
85 class CannotSplit(CannotTransform):
86     """A readable split that fits the allotted line length is impossible."""
87
88
89 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
90 # See also https://github.com/mypyc/mypyc/issues/827.
91 class LineGenerator(Visitor[Line]):
92     """Generates reformatted Line objects.  Empty lines are not emitted.
93
94     Note: destroys the tree it's visiting by mutating prefixes of its leaves
95     in ways that will no longer stringify to valid Python code on the tree.
96     """
97
98     def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
99         self.mode = mode
100         self.features = features
101         self.current_line: Line
102         self.__post_init__()
103
104     def line(self, indent: int = 0) -> Iterator[Line]:
105         """Generate a line.
106
107         If the line is empty, only emit if it makes sense.
108         If the line is too long, split it first and then generate.
109
110         If any lines were generated, set up a new current_line.
111         """
112         if not self.current_line:
113             self.current_line.depth += indent
114             return  # Line is empty, don't emit. Creating a new one is unnecessary.
115
116         if (
117             Preview.improved_async_statements_handling in self.mode
118             and len(self.current_line.leaves) == 1
119             and is_async_stmt_or_funcdef(self.current_line.leaves[0])
120         ):
121             # Special case for async def/for/with statements. `visit_async_stmt`
122             # adds an `ASYNC` leaf then visits the child def/for/with statement
123             # nodes. Line yields from those nodes shouldn't treat the former
124             # `ASYNC` leaf as a complete line.
125             return
126
127         complete_line = self.current_line
128         self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
129         yield complete_line
130
131     def visit_default(self, node: LN) -> Iterator[Line]:
132         """Default `visit_*()` implementation. Recurses to children of `node`."""
133         if isinstance(node, Leaf):
134             any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
135             for comment in generate_comments(node):
136                 if any_open_brackets:
137                     # any comment within brackets is subject to splitting
138                     self.current_line.append(comment)
139                 elif comment.type == token.COMMENT:
140                     # regular trailing comment
141                     self.current_line.append(comment)
142                     yield from self.line()
143
144                 else:
145                     # regular standalone comment
146                     yield from self.line()
147
148                     self.current_line.append(comment)
149                     yield from self.line()
150
151             normalize_prefix(node, inside_brackets=any_open_brackets)
152             if self.mode.string_normalization and node.type == token.STRING:
153                 node.value = normalize_string_prefix(node.value)
154                 node.value = normalize_string_quotes(node.value)
155             if node.type == token.NUMBER:
156                 normalize_numeric_literal(node)
157             if node.type not in WHITESPACE:
158                 self.current_line.append(node)
159         yield from super().visit_default(node)
160
161     def visit_test(self, node: Node) -> Iterator[Line]:
162         """Visit an `x if y else z` test"""
163
164         if Preview.parenthesize_conditional_expressions in self.mode:
165             already_parenthesized = (
166                 node.prev_sibling and node.prev_sibling.type == token.LPAR
167             )
168
169             if not already_parenthesized:
170                 lpar = Leaf(token.LPAR, "")
171                 rpar = Leaf(token.RPAR, "")
172                 node.insert_child(0, lpar)
173                 node.append_child(rpar)
174
175         yield from self.visit_default(node)
176
177     def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
178         """Increase indentation level, maybe yield a line."""
179         # In blib2to3 INDENT never holds comments.
180         yield from self.line(+1)
181         yield from self.visit_default(node)
182
183     def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
184         """Decrease indentation level, maybe yield a line."""
185         # The current line might still wait for trailing comments.  At DEDENT time
186         # there won't be any (they would be prefixes on the preceding NEWLINE).
187         # Emit the line then.
188         yield from self.line()
189
190         # While DEDENT has no value, its prefix may contain standalone comments
191         # that belong to the current indentation level.  Get 'em.
192         yield from self.visit_default(node)
193
194         # Finally, emit the dedent.
195         yield from self.line(-1)
196
197     def visit_stmt(
198         self, node: Node, keywords: Set[str], parens: Set[str]
199     ) -> Iterator[Line]:
200         """Visit a statement.
201
202         This implementation is shared for `if`, `while`, `for`, `try`, `except`,
203         `def`, `with`, `class`, `assert`, and assignments.
204
205         The relevant Python language `keywords` for a given statement will be
206         NAME leaves within it. This method puts those on a separate line.
207
208         `parens` holds a set of string leaf values immediately after which
209         invisible parens should be put.
210         """
211         normalize_invisible_parens(
212             node, parens_after=parens, mode=self.mode, features=self.features
213         )
214         for child in node.children:
215             if is_name_token(child) and child.value in keywords:
216                 yield from self.line()
217
218             yield from self.visit(child)
219
220     def visit_typeparams(self, node: Node) -> Iterator[Line]:
221         yield from self.visit_default(node)
222         node.children[0].prefix = ""
223
224     def visit_typevartuple(self, node: Node) -> Iterator[Line]:
225         yield from self.visit_default(node)
226         node.children[1].prefix = ""
227
228     def visit_paramspec(self, node: Node) -> Iterator[Line]:
229         yield from self.visit_default(node)
230         node.children[1].prefix = ""
231
232     def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
233         if Preview.wrap_long_dict_values_in_parens in self.mode:
234             for i, child in enumerate(node.children):
235                 if i == 0:
236                     continue
237                 if node.children[i - 1].type == token.COLON:
238                     if child.type == syms.atom and child.children[0].type == token.LPAR:
239                         if maybe_make_parens_invisible_in_atom(
240                             child,
241                             parent=node,
242                             remove_brackets_around_comma=False,
243                         ):
244                             wrap_in_parentheses(node, child, visible=False)
245                     else:
246                         wrap_in_parentheses(node, child, visible=False)
247         yield from self.visit_default(node)
248
249     def visit_funcdef(self, node: Node) -> Iterator[Line]:
250         """Visit function definition."""
251         yield from self.line()
252
253         # Remove redundant brackets around return type annotation.
254         is_return_annotation = False
255         for child in node.children:
256             if child.type == token.RARROW:
257                 is_return_annotation = True
258             elif is_return_annotation:
259                 if child.type == syms.atom and child.children[0].type == token.LPAR:
260                     if maybe_make_parens_invisible_in_atom(
261                         child,
262                         parent=node,
263                         remove_brackets_around_comma=False,
264                     ):
265                         wrap_in_parentheses(node, child, visible=False)
266                 else:
267                     wrap_in_parentheses(node, child, visible=False)
268                 is_return_annotation = False
269
270         for child in node.children:
271             yield from self.visit(child)
272
273     def visit_match_case(self, node: Node) -> Iterator[Line]:
274         """Visit either a match or case statement."""
275         normalize_invisible_parens(
276             node, parens_after=set(), mode=self.mode, features=self.features
277         )
278
279         yield from self.line()
280         for child in node.children:
281             yield from self.visit(child)
282
283     def visit_suite(self, node: Node) -> Iterator[Line]:
284         """Visit a suite."""
285         if (
286             self.mode.is_pyi or Preview.dummy_implementations in self.mode
287         ) and is_stub_suite(node):
288             yield from self.visit(node.children[2])
289         else:
290             yield from self.visit_default(node)
291
292     def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
293         """Visit a statement without nested statements."""
294         prev_type: Optional[int] = None
295         for child in node.children:
296             if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
297                 wrap_in_parentheses(node, child, visible=False)
298             prev_type = child.type
299
300         is_suite_like = node.parent and node.parent.type in STATEMENT
301         if is_suite_like:
302             if (
303                 self.mode.is_pyi or Preview.dummy_implementations in self.mode
304             ) and is_stub_body(node):
305                 yield from self.visit_default(node)
306             else:
307                 yield from self.line(+1)
308                 yield from self.visit_default(node)
309                 yield from self.line(-1)
310
311         else:
312             if (
313                 not (self.mode.is_pyi or Preview.dummy_implementations in self.mode)
314                 or not node.parent
315                 or not is_stub_suite(node.parent)
316             ):
317                 yield from self.line()
318             yield from self.visit_default(node)
319
320     def visit_async_stmt(self, node: Node) -> Iterator[Line]:
321         """Visit `async def`, `async for`, `async with`."""
322         yield from self.line()
323
324         children = iter(node.children)
325         for child in children:
326             yield from self.visit(child)
327
328             if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
329                 # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
330                 # line.
331                 break
332
333         internal_stmt = next(children)
334         if Preview.improved_async_statements_handling in self.mode:
335             yield from self.visit(internal_stmt)
336         else:
337             for child in internal_stmt.children:
338                 yield from self.visit(child)
339
340     def visit_decorators(self, node: Node) -> Iterator[Line]:
341         """Visit decorators."""
342         for child in node.children:
343             yield from self.line()
344             yield from self.visit(child)
345
346     def visit_power(self, node: Node) -> Iterator[Line]:
347         for idx, leaf in enumerate(node.children[:-1]):
348             next_leaf = node.children[idx + 1]
349
350             if not isinstance(leaf, Leaf):
351                 continue
352
353             value = leaf.value.lower()
354             if (
355                 leaf.type == token.NUMBER
356                 and next_leaf.type == syms.trailer
357                 # Ensure that we are in an attribute trailer
358                 and next_leaf.children[0].type == token.DOT
359                 # It shouldn't wrap hexadecimal, binary and octal literals
360                 and not value.startswith(("0x", "0b", "0o"))
361                 # It shouldn't wrap complex literals
362                 and "j" not in value
363             ):
364                 wrap_in_parentheses(node, leaf)
365
366         remove_await_parens(node)
367
368         yield from self.visit_default(node)
369
370     def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
371         """Remove a semicolon and put the other statement on a separate line."""
372         yield from self.line()
373
374     def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
375         """End of file. Process outstanding comments and end with a newline."""
376         yield from self.visit_default(leaf)
377         yield from self.line()
378
379     def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
380         if not self.current_line.bracket_tracker.any_open_brackets():
381             yield from self.line()
382         yield from self.visit_default(leaf)
383
384     def visit_factor(self, node: Node) -> Iterator[Line]:
385         """Force parentheses between a unary op and a binary power:
386
387         -2 ** 8 -> -(2 ** 8)
388         """
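        # For illustration, through the public API this transformation shows up
        # roughly as ``black.format_str("-2 ** 8\n", mode=black.Mode())`` returning
        # "-(2**8)\n" (assuming the stable style, which also hugs the power
        # operator); the added parentheses are the ones this method inserts.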
389         _operator, operand = node.children
390         if (
391             operand.type == syms.power
392             and len(operand.children) == 3
393             and operand.children[1].type == token.DOUBLESTAR
394         ):
395             lpar = Leaf(token.LPAR, "(")
396             rpar = Leaf(token.RPAR, ")")
397             index = operand.remove() or 0
398             node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
399         yield from self.visit_default(node)
400
401     def visit_tname(self, node: Node) -> Iterator[Line]:
402         """
403         Add potential parentheses around types in function parameter lists to be made
404         into real parentheses in case the type hint is too long to fit on a line.
405         Examples:
406         def foo(a: int, b: float = 7): ...
407
408         ->
409
410         def foo(a: (int), b: (float) = 7): ...
411         """
412         if Preview.parenthesize_long_type_hints in self.mode:
413             assert len(node.children) == 3
414             if maybe_make_parens_invisible_in_atom(node.children[2], parent=node):
415                 wrap_in_parentheses(node, node.children[2], visible=False)
416
417         yield from self.visit_default(node)
418
419     def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
420         if Preview.hex_codes_in_unicode_sequences in self.mode:
421             normalize_unicode_escape_sequences(leaf)
422
423         if is_docstring(leaf) and "\\\n" not in leaf.value:
424             # We're ignoring docstrings with backslash newline escapes because changing
425             # indentation of those changes the AST representation of the code.
426             if self.mode.string_normalization:
427                 docstring = normalize_string_prefix(leaf.value)
428                 # visit_default() does handle string normalization for us, but
429                 # since this method acts differently depending on quote style (e.g.
430                 # see padding logic below), there's a possibility for unstable
431                 # formatting as visit_default() is called *after*. To avoid a
432                 # situation where this function formats a docstring differently on
433                 # the second pass, normalize it early.
434                 docstring = normalize_string_quotes(docstring)
435             else:
436                 docstring = leaf.value
437             prefix = get_string_prefix(docstring)
438             docstring = docstring[len(prefix) :]  # Remove the prefix
439             quote_char = docstring[0]
440             # A natural way to remove the outer quotes is to do:
441             #   docstring = docstring.strip(quote_char)
442             # but that breaks on """""x""" (which is '""x').
443             # So we actually need to remove the first character and the next two
444             # characters but only if they are the same as the first.
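            # For illustration: for '"""""x"""' the text after the prefix starts with
            # four quote characters, so quote_len ends up as 3 and the stripped body
            # is '""x'; for "'x'" the second character differs, so quote_len is 1.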
445             quote_len = 1 if docstring[1] != quote_char else 3
446             docstring = docstring[quote_len:-quote_len]
447             docstring_started_empty = not docstring
448             indent = " " * 4 * self.current_line.depth
449
450             if is_multiline_string(leaf):
451                 docstring = fix_docstring(docstring, indent)
452             else:
453                 docstring = docstring.strip()
454
455             has_trailing_backslash = False
456             if docstring:
457                 # Add some padding if the docstring starts / ends with a quote mark.
458                 if docstring[0] == quote_char:
459                     docstring = " " + docstring
460                 if docstring[-1] == quote_char:
461                     docstring += " "
462                 if docstring[-1] == "\\":
463                     backslash_count = len(docstring) - len(docstring.rstrip("\\"))
464                     if backslash_count % 2:
465                     # Odd number of trailing backslashes, add some padding to
466                         # avoid escaping the closing string quote.
467                         docstring += " "
468                         has_trailing_backslash = True
469             elif not docstring_started_empty:
470                 docstring = " "
471
472             # We could enforce triple quotes at this point.
473             quote = quote_char * quote_len
474
475             # It's invalid to put closing single-character quotes on a new line.
476             if self.mode and quote_len == 3:
477                 # We need to find the length of the last line of the docstring
478                 # to find if we can add the closing quotes to the line without
479                 # exceeding the maximum line length.
480                 # If docstring is one line, we don't put the closing quotes on a
481                 # separate line because it looks ugly (#3320).
482                 lines = docstring.splitlines()
483                 last_line_length = len(lines[-1]) if docstring else 0
484
485                 # If adding closing quotes would cause the last line to exceed
486                 # the maximum line length then put a line break before the
487                 # closing quotes
488                 if (
489                     len(lines) > 1
490                     and last_line_length + quote_len > self.mode.line_length
491                     and len(indent) + quote_len <= self.mode.line_length
492                     and not has_trailing_backslash
493                 ):
494                     leaf.value = prefix + quote + docstring + "\n" + indent + quote
495                 else:
496                     leaf.value = prefix + quote + docstring + quote
497             else:
498                 leaf.value = prefix + quote + docstring + quote
499
500         yield from self.visit_default(leaf)
501
502     def __post_init__(self) -> None:
503         """You are in a twisty little maze of passages."""
504         self.current_line = Line(mode=self.mode)
505
506         v = self.visit_stmt
507         Ø: Set[str] = set()
508         self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
509         self.visit_if_stmt = partial(
510             v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
511         )
512         self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
513         self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
514         self.visit_try_stmt = partial(
515             v, keywords={"try", "except", "else", "finally"}, parens=Ø
516         )
517         self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"})
518         self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
519         self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
520
521         # When this is moved out of preview, add ":" directly to ASSIGNMENTS in nodes.py
522         if Preview.parenthesize_long_type_hints in self.mode:
523             assignments = ASSIGNMENTS | {":"}
524         else:
525             assignments = ASSIGNMENTS
526         self.visit_expr_stmt = partial(v, keywords=Ø, parens=assignments)
527
528         self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
529         self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
530         self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
531         self.visit_async_funcdef = self.visit_async_stmt
532         self.visit_decorated = self.visit_decorators
533
534         # PEP 634
535         self.visit_match_stmt = self.visit_match_case
536         self.visit_case_block = self.visit_match_case
537
538
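# A minimal sketch of how this generator is typically driven (the parsing helper is
# assumed to live in black.parsing; only names defined in this module are otherwise
# relied upon):
#
#     node = lib2to3_parse(source_text)
#     for line in LineGenerator(mode=mode, features=features).visit(node):
#         ...  # each yielded `line` is then fed to transform_line() below
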
539 def _hugging_power_ops_line_to_string(
540     line: Line,
541     features: Collection[Feature],
542     mode: Mode,
543 ) -> Optional[str]:
544     try:
545         return line_to_string(next(hug_power_op(line, features, mode)))
546     except CannotTransform:
547         return None
548
549
550 def transform_line(
551     line: Line, mode: Mode, features: Collection[Feature] = ()
552 ) -> Iterator[Line]:
553     """Transform a `line`, potentially splitting it into many lines.
554
555     They should fit in the allotted `line_length` but might not be able to.
556
557     `features` are syntactical features that may be used in the output.
558     """
559     if line.is_comment:
560         yield line
561         return
562
563     line_str = line_to_string(line)
564
565     # We need the line string when power operators are hugging to determine if we should
566     # split the line. Default to line_str if no power operators are present on the line.
567     line_str_hugging_power_ops = (
568         (_hugging_power_ops_line_to_string(line, features, mode) or line_str)
569         if Preview.fix_power_op_line_length in mode
570         else line_str
571     )
572
573     ll = mode.line_length
574     sn = mode.string_normalization
575     string_merge = StringMerger(ll, sn)
576     string_paren_strip = StringParenStripper(ll, sn)
577     string_split = StringSplitter(ll, sn)
578     string_paren_wrap = StringParenWrapper(ll, sn)
579
580     transformers: List[Transformer]
581     if (
582         not line.contains_uncollapsable_type_comments()
583         and not line.should_split_rhs
584         and not line.magic_trailing_comma
585         and (
586             is_line_short_enough(line, mode=mode, line_str=line_str_hugging_power_ops)
587             or line.contains_unsplittable_type_ignore()
588         )
589         and not (line.inside_brackets and line.contains_standalone_comments())
590     ):
591         # Only apply basic string preprocessing, since lines shouldn't be split here.
592         if Preview.string_processing in mode:
593             transformers = [string_merge, string_paren_strip]
594         else:
595             transformers = []
596     elif line.is_def and not should_split_funcdef_with_rhs(line, mode):
597         transformers = [left_hand_split]
598     else:
599
600         def _rhs(
601             self: object, line: Line, features: Collection[Feature], mode: Mode
602         ) -> Iterator[Line]:
603             """Wraps calls to `right_hand_split`.
604
605             The calls increasingly `omit` right-hand trailers (bracket pairs with
606             content), meaning the trailers get glued together to split on another
607             bracket pair instead.
608             """
609             for omit in generate_trailers_to_omit(line, mode.line_length):
610                 lines = list(right_hand_split(line, mode, features, omit=omit))
611                 # Note: this check is only able to figure out if the first line of the
612                 # *current* transformation fits in the line length.  This is true only
613                 # for simple cases.  All others require running more transforms via
614                 # `transform_line()`.  This check doesn't know if those would succeed.
615                 if is_line_short_enough(lines[0], mode=mode):
616                     yield from lines
617                     return
618
619             # All splits failed, best effort split with no omits.
620             # This mostly happens to multiline strings that are by definition
621             # reported as not fitting a single line, as well as lines that contain
622             # trailing commas (those have to be exploded).
623             yield from right_hand_split(line, mode, features=features)
624
625         # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
626         # __name__ attribute which is needed in `run_transformer` further down.
627         # Unfortunately a nested class breaks mypyc too. So a class must be created
628         # via type ... https://github.com/mypyc/mypyc/issues/884
629         rhs = type("rhs", (), {"__call__": _rhs})()
630
631         if Preview.string_processing in mode:
632             if line.inside_brackets:
633                 transformers = [
634                     string_merge,
635                     string_paren_strip,
636                     string_split,
637                     delimiter_split,
638                     standalone_comment_split,
639                     string_paren_wrap,
640                     rhs,
641                 ]
642             else:
643                 transformers = [
644                     string_merge,
645                     string_paren_strip,
646                     string_split,
647                     string_paren_wrap,
648                     rhs,
649                 ]
650         else:
651             if line.inside_brackets:
652                 transformers = [delimiter_split, standalone_comment_split, rhs]
653             else:
654                 transformers = [rhs]
655     # It's always safe to attempt hugging of power operations and pretty much every line
656     # could match.
657     transformers.append(hug_power_op)
658
659     for transform in transformers:
660         # We are accumulating lines in `result` because we might want to abort
661         # mission and return the original line in the end, or attempt a different
662         # split altogether.
663         try:
664             result = run_transformer(line, transform, mode, features, line_str=line_str)
665         except CannotTransform:
666             continue
667         else:
668             yield from result
669             break
670
671     else:
672         yield line
673
674
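# A minimal, illustrative sketch of the effect transform_line drives end to end. It
# goes through Black's public `format_str` entry point rather than calling
# transform_line directly, assumes the `black` package is importable, and the helper
# name below is hypothetical rather than part of this module.
def _example_transform_line_effect() -> str:
    import black

    src = (
        "result = some_function(argument_one, argument_two, argument_three,"
        " argument_four, argument_five)\n"
    )
    # The assignment above exceeds the default 88-column limit, so the right-hand
    # split puts the call's arguments on their own indented line; a short line would
    # be yielded unchanged.
    return black.format_str(src, mode=black.Mode())

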
675 def should_split_funcdef_with_rhs(line: Line, mode: Mode) -> bool:
676     """If a funcdef has a magic trailing comma in the return type, then we should first
677     split the line with rhs to respect the comma.
678     """
679     if Preview.respect_magic_trailing_comma_in_return_type not in mode:
680         return False
681
682     return_type_leaves: List[Leaf] = []
683     in_return_type = False
684
685     for leaf in line.leaves:
686         if leaf.type == token.COLON:
687             in_return_type = False
688         if in_return_type:
689             return_type_leaves.append(leaf)
690         if leaf.type == token.RARROW:
691             in_return_type = True
692
693     # Using `bracket_split_build_line` will mess with whitespace, so we duplicate a
694     # couple of lines from it.
695     result = Line(mode=line.mode, depth=line.depth)
696     leaves_to_track = get_leaves_inside_matching_brackets(return_type_leaves)
697     for leaf in return_type_leaves:
698         result.append(
699             leaf,
700             preformatted=True,
701             track_bracket=id(leaf) in leaves_to_track,
702         )
703
704     # We could also return True if the line is too long, and the return type is longer
705     # than the param list, or if `should_split_rhs` returns True.
706     return result.magic_trailing_comma is not None
707
708
709 class _BracketSplitComponent(Enum):
710     head = auto()
711     body = auto()
712     tail = auto()
713
714
715 def left_hand_split(
716     line: Line, _features: Collection[Feature], mode: Mode
717 ) -> Iterator[Line]:
718     """Split line into many lines, starting with the first matching bracket pair.
719
720     Note: this usually looks weird, only use this for function definitions.
721     Prefer RHS otherwise.  This is why this function is not symmetrical with
722     :func:`right_hand_split` which also handles optional parentheses.
723     """
724     tail_leaves: List[Leaf] = []
725     body_leaves: List[Leaf] = []
726     head_leaves: List[Leaf] = []
727     current_leaves = head_leaves
728     matching_bracket: Optional[Leaf] = None
729     for leaf in line.leaves:
730         if (
731             current_leaves is body_leaves
732             and leaf.type in CLOSING_BRACKETS
733             and leaf.opening_bracket is matching_bracket
734             and isinstance(matching_bracket, Leaf)
735         ):
736             ensure_visible(leaf)
737             ensure_visible(matching_bracket)
738             current_leaves = tail_leaves if body_leaves else head_leaves
739         current_leaves.append(leaf)
740         if current_leaves is head_leaves:
741             if leaf.type in OPENING_BRACKETS:
742                 matching_bracket = leaf
743                 current_leaves = body_leaves
744     if not matching_bracket:
745         raise CannotSplit("No brackets found")
746
747     head = bracket_split_build_line(
748         head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
749     )
750     body = bracket_split_build_line(
751         body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
752     )
753     tail = bracket_split_build_line(
754         tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
755     )
756     bracket_split_succeeded_or_raise(head, body, tail)
757     for result in (head, body, tail):
758         if result:
759             yield result
760
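# Illustrative shape of a left-hand split on a function definition (assuming the
# signature exceeds the configured line length but its parameters fit on one
# indented line):
#
#     def process(self, data: bytes, *, chunk_size: int = 4096) -> None:
#
# becomes
#
#     def process(
#         self, data: bytes, *, chunk_size: int = 4096
#     ) -> None: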
761
762 def right_hand_split(
763     line: Line,
764     mode: Mode,
765     features: Collection[Feature] = (),
766     omit: Collection[LeafID] = (),
767 ) -> Iterator[Line]:
768     """Split line into many lines, starting with the last matching bracket pair.
769
770     If the split was by optional parentheses, attempt splitting without them, too.
771     `omit` is a collection of closing bracket IDs that shouldn't be considered for
772     this split.
773
774     Note: running this function modifies `bracket_depth` on the leaves of `line`.
775     """
776     rhs_result = _first_right_hand_split(line, omit=omit)
777     yield from _maybe_split_omitting_optional_parens(
778         rhs_result, line, mode, features=features, omit=omit
779     )
780
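# Illustrative shape of a right-hand split (assuming the assignment exceeds the
# configured line length and carries no magic trailing comma):
#
#     value = build_widget(primary_color, secondary_color, border_width, border_style)
#
# becomes
#
#     value = build_widget(
#         primary_color, secondary_color, border_width, border_style
#     )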
781
782 def _first_right_hand_split(
783     line: Line,
784     omit: Collection[LeafID] = (),
785 ) -> RHSResult:
786     """Split the line into head, body, tail starting with the last bracket pair.
787
788     Note: this function should not have side effects. It's relied upon by
789     _maybe_split_omitting_optional_parens to get an opinion whether to prefer
790     splitting on the right side of an assignment statement.
791     """
792     tail_leaves: List[Leaf] = []
793     body_leaves: List[Leaf] = []
794     head_leaves: List[Leaf] = []
795     current_leaves = tail_leaves
796     opening_bracket: Optional[Leaf] = None
797     closing_bracket: Optional[Leaf] = None
798     for leaf in reversed(line.leaves):
799         if current_leaves is body_leaves:
800             if leaf is opening_bracket:
801                 current_leaves = head_leaves if body_leaves else tail_leaves
802         current_leaves.append(leaf)
803         if current_leaves is tail_leaves:
804             if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
805                 opening_bracket = leaf.opening_bracket
806                 closing_bracket = leaf
807                 current_leaves = body_leaves
808     if not (opening_bracket and closing_bracket and head_leaves):
809         # If there is no opening or closing bracket, that means the split failed and
810         # all content is in the tail.  Otherwise, if `head_leaves` is empty, it means
811         # the matching `opening_bracket` wasn't available on `line` anymore.
812         raise CannotSplit("No brackets found")
813
814     tail_leaves.reverse()
815     body_leaves.reverse()
816     head_leaves.reverse()
817     head = bracket_split_build_line(
818         head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
819     )
820     body = bracket_split_build_line(
821         body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
822     )
823     tail = bracket_split_build_line(
824         tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
825     )
826     bracket_split_succeeded_or_raise(head, body, tail)
827     return RHSResult(head, body, tail, opening_bracket, closing_bracket)
828
829
830 def _maybe_split_omitting_optional_parens(
831     rhs: RHSResult,
832     line: Line,
833     mode: Mode,
834     features: Collection[Feature] = (),
835     omit: Collection[LeafID] = (),
836 ) -> Iterator[Line]:
837     if (
838         Feature.FORCE_OPTIONAL_PARENTHESES not in features
839         # the opening bracket is an optional paren
840         and rhs.opening_bracket.type == token.LPAR
841         and not rhs.opening_bracket.value
842         # the closing bracket is an optional paren
843         and rhs.closing_bracket.type == token.RPAR
844         and not rhs.closing_bracket.value
845         # it's not an import (optional parens are the only thing we can split on
846         # in this case; attempting a split without them is a waste of time)
847         and not line.is_import
848         # there are no standalone comments in the body
849         and not rhs.body.contains_standalone_comments(0)
850         # and we can actually remove the parens
851         and can_omit_invisible_parens(rhs, mode.line_length)
852     ):
853         omit = {id(rhs.closing_bracket), *omit}
854         try:
855             # The RHSResult Omitting Optional Parens.
856             rhs_oop = _first_right_hand_split(line, omit=omit)
857             if not (
858                 Preview.prefer_splitting_right_hand_side_of_assignments in line.mode
859                 # the split is right after `=`
860                 and len(rhs.head.leaves) >= 2
861                 and rhs.head.leaves[-2].type == token.EQUAL
862                 # the left side of assignment contains brackets
863                 and any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1])
864                 # the left side of assignment is short enough (the -1 is for the ending
865                 # optional paren)
866                 and is_line_short_enough(
867                     rhs.head, mode=replace(mode, line_length=mode.line_length - 1)
868                 )
869                 # the left side of assignment won't explode further because of magic
870                 # trailing comma
871                 and rhs.head.magic_trailing_comma is None
872                 # the split by omitting optional parens isn't preferred by some other
873                 # reason
874                 and not _prefer_split_rhs_oop(rhs_oop, mode)
875             ):
876                 yield from _maybe_split_omitting_optional_parens(
877                     rhs_oop, line, mode, features=features, omit=omit
878                 )
879                 return
880
881         except CannotSplit as e:
882             if not (
883                 can_be_split(rhs.body) or is_line_short_enough(rhs.body, mode=mode)
884             ):
885                 raise CannotSplit(
886                     "Splitting failed, body is still too long and can't be split."
887                 ) from e
888
889             elif (
890                 rhs.head.contains_multiline_strings()
891                 or rhs.tail.contains_multiline_strings()
892             ):
893                 raise CannotSplit(
894                     "The current optional pair of parentheses is bound to fail to"
895                     " satisfy the splitting algorithm because the head or the tail"
896                     " contains multiline strings which by definition never fit one"
897                     " line."
898                 ) from e
899
900     ensure_visible(rhs.opening_bracket)
901     ensure_visible(rhs.closing_bracket)
902     for result in (rhs.head, rhs.body, rhs.tail):
903         if result:
904             yield result
905
906
907 def _prefer_split_rhs_oop(rhs_oop: RHSResult, mode: Mode) -> bool:
908     """
909     Returns whether we should prefer the result from a split omitting optional parens.
910     """
911     has_closing_bracket_after_assign = False
912     for leaf in reversed(rhs_oop.head.leaves):
913         if leaf.type == token.EQUAL:
914             break
915         if leaf.type in CLOSING_BRACKETS:
916             has_closing_bracket_after_assign = True
917             break
918     return (
919         # contains matching brackets after the `=` (done by checking there is a
920         # closing bracket)
921         has_closing_bracket_after_assign
922         or (
923             # the split is actually from inside the optional parens (done by checking
924             # the first line still contains the `=`)
925             any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
926             # the first line is short enough
927             and is_line_short_enough(rhs_oop.head, mode=mode)
928         )
929         # contains unsplittable type ignore
930         or rhs_oop.head.contains_unsplittable_type_ignore()
931         or rhs_oop.body.contains_unsplittable_type_ignore()
932         or rhs_oop.tail.contains_unsplittable_type_ignore()
933     )
934
935
936 def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
937     """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.
938
939     Do nothing otherwise.
940
941     A left- or right-hand split is based on a pair of brackets. Content before
942     (and including) the opening bracket is left on one line, content inside the
943     brackets is put on a separate line, and finally content starting with and
944     following the closing bracket is put on a separate line.
945
946     Those are called `head`, `body`, and `tail`, respectively. If the split
947     produced the same line (all content in `head`) or ended up with an empty `body`
948     and the `tail` is just the closing bracket, then it's considered failed.
949     """
950     tail_len = len(str(tail).strip())
951     if not body:
952         if tail_len == 0:
953             raise CannotSplit("Splitting brackets produced the same line")
954
955         elif tail_len < 3:
956             raise CannotSplit(
957                 f"Splitting brackets on an empty body to save {tail_len} characters is"
958                 " not worth it"
959             )
960
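# Illustrative decomposition for `result = frobnicate(a, b)` split on the call's
# parentheses: head is "result = frobnicate(", body is "a, b", and tail is ")".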
961
962 def bracket_split_build_line(
963     leaves: List[Leaf],
964     original: Line,
965     opening_bracket: Leaf,
966     *,
967     component: _BracketSplitComponent,
968 ) -> Line:
969     """Return a new line with given `leaves` and respective comments from `original`.
970
971     If it's the head component, brackets will be tracked so trailing commas are
972     respected.
973
974     If it's the body component, the result line is one-indented inside brackets and as
975     such has its first leaf's prefix normalized and a trailing comma added when
976     expected.
977     """
978     result = Line(mode=original.mode, depth=original.depth)
979     if component is _BracketSplitComponent.body:
980         result.inside_brackets = True
981         result.depth += 1
982         if leaves:
983             # Since body is a new indent level, remove spurious leading whitespace.
984             normalize_prefix(leaves[0], inside_brackets=True)
985             # Ensure a trailing comma for imports and standalone function arguments, but
986             # be careful not to add one after any comments or within type annotations.
987             no_commas = (
988                 original.is_def
989                 and opening_bracket.value == "("
990                 and not any(leaf.type == token.COMMA for leaf in leaves)
991                 # In particular, don't add one within a parenthesized return annotation.
992                 # Unfortunately the indicator we're in a return annotation (RARROW) may
993                 # be defined directly in the parent node, the parent of the parent ...
994                 # and so on depending on how complex the return annotation is.
995                 # This isn't perfect and there are some false negatives, but they are in
996                 # contexts where a comma is actually fine.
997                 and not any(
998                     node.prev_sibling.type == RARROW
999                     for node in (
1000                         leaves[0].parent,
1001                         getattr(leaves[0].parent, "parent", None),
1002                     )
1003                     if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
1004                 )
1005                 # Except the false negatives above for PEP 604 unions where we
1006                 # can't add the comma.
1007                 and not (
1008                     leaves[0].parent
1009                     and leaves[0].parent.next_sibling
1010                     and leaves[0].parent.next_sibling.type == token.VBAR
1011                 )
1012             )
1013
1014             if original.is_import or no_commas:
1015                 for i in range(len(leaves) - 1, -1, -1):
1016                     if leaves[i].type == STANDALONE_COMMENT:
1017                         continue
1018
1019                     if leaves[i].type != token.COMMA:
1020                         new_comma = Leaf(token.COMMA, ",")
1021                         leaves.insert(i + 1, new_comma)
1022                     break
1023
1024     leaves_to_track: Set[LeafID] = set()
1025     if component is _BracketSplitComponent.head:
1026         leaves_to_track = get_leaves_inside_matching_brackets(leaves)
1027     # Populate the line
1028     for leaf in leaves:
1029         result.append(
1030             leaf,
1031             preformatted=True,
1032             track_bracket=id(leaf) in leaves_to_track,
1033         )
1034         for comment_after in original.comments_after(leaf):
1035             result.append(comment_after, preformatted=True)
1036     if component is _BracketSplitComponent.body and should_split_line(
1037         result, opening_bracket
1038     ):
1039         result.should_split_rhs = True
1040     return result
1041
1042
1043 def dont_increase_indentation(split_func: Transformer) -> Transformer:
1044     """Normalize prefix of the first leaf in every line returned by `split_func`.
1045
1046     This is a decorator over relevant split functions.
1047     """
1048
1049     @wraps(split_func)
1050     def split_wrapper(
1051         line: Line, features: Collection[Feature], mode: Mode
1052     ) -> Iterator[Line]:
1053         for split_line in split_func(line, features, mode):
1054             normalize_prefix(split_line.leaves[0], inside_brackets=True)
1055             yield split_line
1056
1057     return split_wrapper
1058
1059
1060 def _get_last_non_comment_leaf(line: Line) -> Optional[int]:
1061     for leaf_idx in range(len(line.leaves) - 1, 0, -1):
1062         if line.leaves[leaf_idx].type != STANDALONE_COMMENT:
1063             return leaf_idx
1064     return None
1065
1066
1067 def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line:
1068     if (
1069         safe
1070         and delimiter_priority == COMMA_PRIORITY
1071         and line.leaves[-1].type != token.COMMA
1072         and line.leaves[-1].type != STANDALONE_COMMENT
1073     ):
1074         new_comma = Leaf(token.COMMA, ",")
1075         line.append(new_comma)
1076     return line
1077
1078
1079 @dont_increase_indentation
1080 def delimiter_split(
1081     line: Line, features: Collection[Feature], mode: Mode
1082 ) -> Iterator[Line]:
1083     """Split according to delimiters of the highest priority.
1084
1085     If the appropriate Features are given, the split will add trailing commas
1086     also in function signatures and calls that contain `*` and `**`.
1087     """
1088     try:
1089         last_leaf = line.leaves[-1]
1090     except IndexError:
1091         raise CannotSplit("Line empty") from None
1092
1093     bt = line.bracket_tracker
1094     try:
1095         delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
1096     except ValueError:
1097         raise CannotSplit("No delimiters found") from None
1098
1099     if delimiter_priority == DOT_PRIORITY:
1100         if bt.delimiter_count_with_priority(delimiter_priority) == 1:
1101             raise CannotSplit("Splitting a single attribute from its owner looks wrong")
1102
1103     current_line = Line(
1104         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1105     )
1106     lowest_depth = sys.maxsize
1107     trailing_comma_safe = True
1108
1109     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1110         """Append `leaf` to current line or to new line if appending impossible."""
1111         nonlocal current_line
1112         try:
1113             current_line.append_safe(leaf, preformatted=True)
1114         except ValueError:
1115             yield current_line
1116
1117             current_line = Line(
1118                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1119             )
1120             current_line.append(leaf)
1121
1122     last_non_comment_leaf = _get_last_non_comment_leaf(line)
1123     for leaf_idx, leaf in enumerate(line.leaves):
1124         yield from append_to_line(leaf)
1125
1126         for comment_after in line.comments_after(leaf):
1127             yield from append_to_line(comment_after)
1128
1129         lowest_depth = min(lowest_depth, leaf.bracket_depth)
1130         if leaf.bracket_depth == lowest_depth:
1131             if is_vararg(leaf, within={syms.typedargslist}):
1132                 trailing_comma_safe = (
1133                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
1134                 )
1135             elif is_vararg(leaf, within={syms.arglist, syms.argument}):
1136                 trailing_comma_safe = (
1137                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
1138                 )
1139
1140         if (
1141             Preview.add_trailing_comma_consistently in mode
1142             and last_leaf.type == STANDALONE_COMMENT
1143             and leaf_idx == last_non_comment_leaf
1144         ):
1145             current_line = _safe_add_trailing_comma(
1146                 trailing_comma_safe, delimiter_priority, current_line
1147             )
1148
1149         leaf_priority = bt.delimiters.get(id(leaf))
1150         if leaf_priority == delimiter_priority:
1151             yield current_line
1152
1153             current_line = Line(
1154                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1155             )
1156     if current_line:
1157         current_line = _safe_add_trailing_comma(
1158             trailing_comma_safe, delimiter_priority, current_line
1159         )
1160         yield current_line
1161
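# Illustrative effect of a comma-priority delimiter split when the collection already
# carries a ("magic") trailing comma:
#
#     items = [1, 2, 3,]
#
# becomes
#
#     items = [
#         1,
#         2,
#         3,
#     ]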
1162
1163 @dont_increase_indentation
1164 def standalone_comment_split(
1165     line: Line, features: Collection[Feature], mode: Mode
1166 ) -> Iterator[Line]:
1167     """Split standalone comments from the rest of the line."""
1168     if not line.contains_standalone_comments(0):
1169         raise CannotSplit("Line does not have any standalone comments")
1170
1171     current_line = Line(
1172         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1173     )
1174
1175     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1176         """Append `leaf` to current line or to new line if appending impossible."""
1177         nonlocal current_line
1178         try:
1179             current_line.append_safe(leaf, preformatted=True)
1180         except ValueError:
1181             yield current_line
1182
1183             current_line = Line(
1184                 line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1185             )
1186             current_line.append(leaf)
1187
1188     for leaf in line.leaves:
1189         yield from append_to_line(leaf)
1190
1191         for comment_after in line.comments_after(leaf):
1192             yield from append_to_line(comment_after)
1193
1194     if current_line:
1195         yield current_line
1196
1197
1198 def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
1199     """Leave existing extra newlines if not `inside_brackets`. Remove everything
1200     else.
1201
1202     Note: don't use backslashes for formatting or you'll lose your voting rights.
1203     """
1204     if not inside_brackets:
1205         spl = leaf.prefix.split("#")
1206         if "\\" not in spl[0]:
1207             nl_count = spl[-1].count("\n")
1208             if len(spl) > 1:
1209                 nl_count -= 1
1210             leaf.prefix = "\n" * nl_count
1211             return
1212
1213     leaf.prefix = ""
1214
1215
1216 def normalize_invisible_parens(
1217     node: Node, parens_after: Set[str], *, mode: Mode, features: Collection[Feature]
1218 ) -> None:
1219     """Make existing optional parentheses invisible or create new ones.
1220
1221     `parens_after` is a set of string leaf values immediately after which parens
1222     should be put.
1223
1224     Standardizes on visible parentheses for single-element tuples, and keeps
1225     existing visible parentheses for other tuples and generator expressions.
1226     """
1227     for pc in list_comments(node.prefix, is_endmarker=False):
1228         if pc.value in FMT_OFF:
1229             # This `node` has a prefix with `# fmt: off`, don't mess with parens.
1230             return
1231
1232     # The multiple context managers grammar has a different pattern, thus this is
1233     # separate from the for-loop below. This possibly wraps them in invisible parens,
1234     # and those will later be removed in remove_with_parens when needed.
1235     if node.type == syms.with_stmt:
1236         _maybe_wrap_cms_in_parens(node, mode, features)
1237
1238     check_lpar = False
1239     for index, child in enumerate(list(node.children)):
1240         # Fixes a bug where invisible parens are not properly stripped from
1241         # assignment statements that contain type annotations.
1242         if isinstance(child, Node) and child.type == syms.annassign:
1243             normalize_invisible_parens(
1244                 child, parens_after=parens_after, mode=mode, features=features
1245             )
1246
1247         # Add parentheses around long tuple unpacking in assignments.
1248         if (
1249             index == 0
1250             and isinstance(child, Node)
1251             and child.type == syms.testlist_star_expr
1252         ):
1253             check_lpar = True
1254
1255         if check_lpar:
1256             if (
1257                 child.type == syms.atom
1258                 and node.type == syms.for_stmt
1259                 and isinstance(child.prev_sibling, Leaf)
1260                 and child.prev_sibling.type == token.NAME
1261                 and child.prev_sibling.value == "for"
1262             ):
1263                 if maybe_make_parens_invisible_in_atom(
1264                     child,
1265                     parent=node,
1266                     remove_brackets_around_comma=True,
1267                 ):
1268                     wrap_in_parentheses(node, child, visible=False)
1269             elif isinstance(child, Node) and node.type == syms.with_stmt:
1270                 remove_with_parens(child, node)
1271             elif child.type == syms.atom:
1272                 if maybe_make_parens_invisible_in_atom(
1273                     child,
1274                     parent=node,
1275                 ):
1276                     wrap_in_parentheses(node, child, visible=False)
1277             elif is_one_tuple(child):
1278                 wrap_in_parentheses(node, child, visible=True)
1279             elif node.type == syms.import_from:
1280                 _normalize_import_from(node, child, index)
1281                 break
1282             elif (
1283                 index == 1
1284                 and child.type == token.STAR
1285                 and node.type == syms.except_clause
1286             ):
1287                 # In except* (PEP 654), the star is actually part of
1288                 # the keyword. So we need to skip the insertion of
1289                 # invisible parentheses to work more precisely.
1290                 continue
1291
1292             elif not (isinstance(child, Leaf) and is_multiline_string(child)):
1293                 wrap_in_parentheses(node, child, visible=False)
1294
1295         comma_check = child.type == token.COMMA
1296
1297         check_lpar = isinstance(child, Leaf) and (
1298             child.value in parens_after or comma_check
1299         )
1300
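# Illustrative effects of the normalization above, as seen through the formatter as a
# whole:
#
#     t = 1,            ->   t = (1,)        (single-element tuples get visible parens)
#     return (result)   ->   return result   (redundant parens are made invisible)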
1301
1302 def _normalize_import_from(parent: Node, child: LN, index: int) -> None:
1303     # "import from" nodes store parentheses directly as part of
1304     # the statement
1305     if is_lpar_token(child):
1306         assert is_rpar_token(parent.children[-1])
1307         # make parentheses invisible
1308         child.value = ""
1309         parent.children[-1].value = ""
1310     elif child.type != token.STAR:
1311         # insert invisible parentheses
1312         parent.insert_child(index, Leaf(token.LPAR, ""))
1313         parent.append_child(Leaf(token.RPAR, ""))
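    # Hedged sketch of the effect:
    #     from pkg import (name_a, name_b)  ->  parens become invisible (value "")
    #     from pkg import name_a, name_b    ->  invisible parens are inserted
    #     from pkg import *                 ->  left untouched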
1314
1315
1316 def remove_await_parens(node: Node) -> None:
1317     if node.children[0].type == token.AWAIT and len(node.children) > 1:
1318         if (
1319             node.children[1].type == syms.atom
1320             and node.children[1].children[0].type == token.LPAR
1321         ):
1322             if maybe_make_parens_invisible_in_atom(
1323                 node.children[1],
1324                 parent=node,
1325                 remove_brackets_around_comma=True,
1326             ):
1327                 wrap_in_parentheses(node, node.children[1], visible=False)
1328
1329             # Since await is an expression, we shouldn't remove
1330             # brackets in cases where this would change
1331             # the AST due to operator precedence.
1332             # Therefore we only aim to remove brackets around
1333             # power nodes that aren't also await expressions themselves.
1334             # https://peps.python.org/pep-0492/#updated-operator-precedence-table
1335             # N.B. We've still removed any redundant nested brackets though :)
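            # A hedged sketch of the outcomes this aims for:
            #     await (some_name)          ->  await some_name
            #     await (client.fetch(url))  ->  await client.fetch(url)
            #     await (x + y)              ->  unchanged; unwrapping would bind as (await x) + y
            #     await (await inner())      ->  unchanged; `await await inner()` is a SyntaxError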
1336             opening_bracket = cast(Leaf, node.children[1].children[0])
1337             closing_bracket = cast(Leaf, node.children[1].children[-1])
1338             bracket_contents = node.children[1].children[1]
1339             if isinstance(bracket_contents, Node):
1340                 if bracket_contents.type != syms.power:
1341                     ensure_visible(opening_bracket)
1342                     ensure_visible(closing_bracket)
1343                 elif (
1344                     bracket_contents.type == syms.power
1345                     and bracket_contents.children[0].type == token.AWAIT
1346                 ):
1347                     ensure_visible(opening_bracket)
1348                     ensure_visible(closing_bracket)
1349                     # If we are in a nested await then recurse down.
1350                     remove_await_parens(bracket_contents)
1351
1352
1353 def _maybe_wrap_cms_in_parens(
1354     node: Node, mode: Mode, features: Collection[Feature]
1355 ) -> None:
1356     """When enabled and safe, wrap the multiple context managers in invisible parens.
1357
1358     It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS.
1359     """
1360     if (
1361         Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features
1362         or Preview.wrap_multiple_context_managers_in_parens not in mode
1363         or len(node.children) <= 2
1364         # If it's an atom, it's already wrapped in parens.
1365         or node.children[1].type == syms.atom
1366     ):
1367         return
1368     colon_index: Optional[int] = None
1369     for i in range(2, len(node.children)):
1370         if node.children[i].type == token.COLON:
1371             colon_index = i
1372             break
1373     if colon_index is not None:
1374         lpar = Leaf(token.LPAR, "")
1375         rpar = Leaf(token.RPAR, "")
1376         context_managers = node.children[1:colon_index]
1377         for child in context_managers:
1378             child.remove()
1379         # After wrapping, the with_stmt will look like this:
1380         #   with_stmt
1381         #     NAME 'with'
1382         #     atom
1383         #       LPAR ''
1384         #       testlist_gexp
1385         #         ... <-- context_managers
1386         #       /testlist_gexp
1387         #       RPAR ''
1388         #     /atom
1389         #     COLON ':'
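        # A rough source-level equivalent of the tree above (assuming all targets
        # support parenthesized context managers):
        #     with make_cm(1) as a, make_cm(2) as b:    # before
        #     with (make_cm(1) as a, make_cm(2) as b):  # after, parens invisible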
1390         new_child = Node(
1391             syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar]
1392         )
1393         node.insert_child(1, new_child)
1394
1395
1396 def remove_with_parens(node: Node, parent: Node) -> None:
1397     """Recursively hide optional parens in `with` statements."""
1398     # Removing all unnecessary parentheses in with statements in one pass is a tad
1399     # complex as different variations of bracketed statements result in pretty
1400     # different parse trees:
1401     #
1402     # with (open("file")) as f:                       # this is an asexpr_test
1403     #     ...
1404     #
1405     # with (open("file") as f):                       # this is an atom containing an
1406     #     ...                                         # asexpr_test
1407     #
1408     # with (open("file")) as f, (open("file")) as f:  # this is asexpr_test, COMMA,
1409     #     ...                                         # asexpr_test
1410     #
1411     # with (open("file") as f, open("file") as f):    # an atom containing a
1412     #     ...                                         # testlist_gexp which then
1413     #                                                 # contains multiple asexpr_test(s)
1414     if node.type == syms.atom:
1415         if maybe_make_parens_invisible_in_atom(
1416             node,
1417             parent=parent,
1418             remove_brackets_around_comma=True,
1419         ):
1420             wrap_in_parentheses(parent, node, visible=False)
1421         if isinstance(node.children[1], Node):
1422             remove_with_parens(node.children[1], node)
1423     elif node.type == syms.testlist_gexp:
1424         for child in node.children:
1425             if isinstance(child, Node):
1426                 remove_with_parens(child, node)
1427     elif node.type == syms.asexpr_test and not any(
1428         leaf.type == token.COLONEQUAL for leaf in node.leaves()
1429     ):
1430         if maybe_make_parens_invisible_in_atom(
1431             node.children[0],
1432             parent=node,
1433             remove_brackets_around_comma=True,
1434         ):
1435             wrap_in_parentheses(node, node.children[0], visible=False)
1436
1437
1438 def maybe_make_parens_invisible_in_atom(
1439     node: LN,
1440     parent: LN,
1441     remove_brackets_around_comma: bool = False,
1442 ) -> bool:
1443     """If it's safe, make the parens in the atom `node` invisible, recursively.
1444     Additionally, remove repeated, adjacent invisible parens from the atom `node`
1445     as they are redundant.
1446
1447     Returns whether the node should itself be wrapped in invisible parentheses.
1448     """
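    # Loosely speaking (illustrative, not normative): "invisible" parens are real
    # LPAR/RPAR leaves whose value is the empty string, so e.g. `return (x)` keeps
    # its bracket pair in the tree but renders as `return x`; doubled pairs such as
    # `((x))` are collapsed to a single invisible pair by the recursion below.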
1449     if (
1450         node.type not in (syms.atom, syms.expr)
1451         or is_empty_tuple(node)
1452         or is_one_tuple(node)
1453         or (is_yield(node) and parent.type != syms.expr_stmt)
1454         or (
1455             # This condition tries to prevent removing non-optional brackets
1456             # around a tuple; however, it can be a bit overzealous, so we provide
1457             # an option to skip this check for `for` and `with` statements.
1458             not remove_brackets_around_comma
1459             and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
1460         )
1461         or is_tuple_containing_walrus(node)
1462     ):
1463         return False
1464
1465     if is_walrus_assignment(node):
1466         if parent.type in [
1467             syms.annassign,
1468             syms.expr_stmt,
1469             syms.assert_stmt,
1470             syms.return_stmt,
1471             syms.except_clause,
1472             syms.funcdef,
1473             syms.with_stmt,
1474             syms.tname,
1475             # these ones aren't useful to end users, but they do please fuzzers
1476             syms.for_stmt,
1477             syms.del_stmt,
1479         ]:
1480             return False
1481
1482     first = node.children[0]
1483     last = node.children[-1]
1484     if is_lpar_token(first) and is_rpar_token(last):
1485         middle = node.children[1]
1486         # make parentheses invisible
1487         if (
1488             # If the prefix of `middle` includes a type comment with
1489             # ignore annotation, then we do not remove the parentheses
1490             not is_type_ignore_comment_string(middle.prefix.strip())
1491         ):
1492             first.value = ""
1493             last.value = ""
1494         maybe_make_parens_invisible_in_atom(
1495             middle,
1496             parent=parent,
1497             remove_brackets_around_comma=remove_brackets_around_comma,
1498         )
1499
1500         if is_atom_with_invisible_parens(middle):
1501             # Strip the invisible parens from `middle` by replacing
1502             # it with the child in-between the invisible parens
1503             middle.replace(middle.children[1])
1504
1505         return False
1506
1507     return True
1508
1509
1510 def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
1511     """Should `line` be immediately split with `delimiter_split()` after RHS?"""
1512
1513     if not (opening_bracket.parent and opening_bracket.value in "[{("):
1514         return False
1515
1516     # We're essentially checking if the body is delimited by commas and there's more
1517     # than one of them (we exclude the trailing comma, and if the delimiter priority
1518     # is still commas, that means there's more).
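    # Illustrative cases: `foo(a, b,)` (a magic trailing comma) and a parenthesized
    # `from pkg import (a, b)` should be exploded, one element per line, while
    # `foo(a)` or `foo(a + b)` should not.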
1519     exclude = set()
1520     trailing_comma = False
1521     try:
1522         last_leaf = line.leaves[-1]
1523         if last_leaf.type == token.COMMA:
1524             trailing_comma = True
1525             exclude.add(id(last_leaf))
1526         max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
1527     except (IndexError, ValueError):
1528         return False
1529
1530     return max_priority == COMMA_PRIORITY and (
1531         (line.mode.magic_trailing_comma and trailing_comma)
1532         # always explode imports
1533         or opening_bracket.parent.type in {syms.atom, syms.import_from}
1534     )
1535
1536
1537 def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
1538     """Generate sets of closing bracket IDs that should be omitted in a RHS.
1539
1540     Brackets can be omitted if the entire trailer up to and including
1541     a preceding closing bracket fits in one line.
1542
1543     Yielded sets are cumulative (contain results of previous yields, too).  First
1544     set is empty, unless the line should explode, in which case bracket pairs until
1545     the one that needs to explode are omitted.
1546     """
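    # Rough illustration: for `config = load(path).parse().validate()` this may
    # first yield an empty set (try splitting at the last bracket pair), then sets
    # that additionally omit `.validate()`'s brackets, then `.parse()`'s, letting
    # the right-hand-side splitter try progressively earlier split points.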
1547
1548     omit: Set[LeafID] = set()
1549     if not line.magic_trailing_comma:
1550         yield omit
1551
1552     length = 4 * line.depth
1553     opening_bracket: Optional[Leaf] = None
1554     closing_bracket: Optional[Leaf] = None
1555     inner_brackets: Set[LeafID] = set()
1556     for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
1557         length += leaf_length
1558         if length > line_length:
1559             break
1560
1561         has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
1562         if leaf.type == STANDALONE_COMMENT or has_inline_comment:
1563             break
1564
1565         if opening_bracket:
1566             if leaf is opening_bracket:
1567                 opening_bracket = None
1568             elif leaf.type in CLOSING_BRACKETS:
1569                 prev = line.leaves[index - 1] if index > 0 else None
1570                 if (
1571                     prev
1572                     and prev.type == token.COMMA
1573                     and leaf.opening_bracket is not None
1574                     and not is_one_sequence_between(
1575                         leaf.opening_bracket, leaf, line.leaves
1576                     )
1577                 ):
1578                     # Never omit bracket pairs with trailing commas.
1579                     # We need to explode on those.
1580                     break
1581
1582                 inner_brackets.add(id(leaf))
1583         elif leaf.type in CLOSING_BRACKETS:
1584             prev = line.leaves[index - 1] if index > 0 else None
1585             if prev and prev.type in OPENING_BRACKETS:
1586                 # Empty brackets would fail a split, so treat them as "inner"
1587                 # brackets (i.e. only add them to the `omit` set if another
1588                 # pair of brackets was good enough).
1589                 inner_brackets.add(id(leaf))
1590                 continue
1591
1592             if closing_bracket:
1593                 omit.add(id(closing_bracket))
1594                 omit.update(inner_brackets)
1595                 inner_brackets.clear()
1596                 yield omit
1597
1598             if (
1599                 prev
1600                 and prev.type == token.COMMA
1601                 and leaf.opening_bracket is not None
1602                 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
1603             ):
1604                 # Never omit bracket pairs with trailing commas.
1605                 # We need to explode on those.
1606                 break
1607
1608             if leaf.value:
1609                 opening_bracket = leaf.opening_bracket
1610                 closing_bracket = leaf
1611
1612
1613 def run_transformer(
1614     line: Line,
1615     transform: Transformer,
1616     mode: Mode,
1617     features: Collection[Feature],
1618     *,
1619     line_str: str = "",
1620 ) -> List[Line]:
1621     if not line_str:
1622         line_str = line_to_string(line)
1623     result: List[Line] = []
1624     for transformed_line in transform(line, features, mode):
1625         if str(transformed_line).strip("\n") == line_str:
1626             raise CannotTransform("Line transformer returned an unchanged result")
1627
1628         result.extend(transform_line(transformed_line, mode=mode, features=features))
1629
1630     features_set = set(features)
1631     if (
1632         Feature.FORCE_OPTIONAL_PARENTHESES in features_set
1633         or transform.__class__.__name__ != "rhs"
1634         or not line.bracket_tracker.invisible
1635         or any(bracket.value for bracket in line.bracket_tracker.invisible)
1636         or line.contains_multiline_strings()
1637         or result[0].contains_uncollapsable_type_comments()
1638         or result[0].contains_unsplittable_type_ignore()
1639         or is_line_short_enough(result[0], mode=mode)
1640         # If any leaves have no parents (which _can_ occur since
1641         # `transform(line)` potentially destroys the line's underlying node
1642         # structure), then we can't proceed. Doing so would cause the below
1643         # call to `append_leaves()` to fail.
1644         or any(leaf.parent is None for leaf in line.leaves)
1645     ):
1646         return result
1647
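    # Otherwise, get a second opinion (a hedged summary of the intent below): rerun
    # the same transformer on a copy of the line with FORCE_OPTIONAL_PARENTHESES
    # enabled, and prefer that result only if every resulting line then fits within
    # the configured line length.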
1648     line_copy = line.clone()
1649     append_leaves(line_copy, line, line.leaves)
1650     features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES}
1651     second_opinion = run_transformer(
1652         line_copy, transform, mode, features_fop, line_str=line_str
1653     )
1654     if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion):
1655         result = second_opinion
1656     return result