src/black/linegen.py
1 """
2 Generating lines of code.
3 """
4 import sys
5 from dataclasses import dataclass
6 from enum import Enum, auto
7 from functools import partial, wraps
8 from typing import Collection, Iterator, List, Optional, Set, Union, cast
9
10 from black.brackets import (
11     COMMA_PRIORITY,
12     DOT_PRIORITY,
13     get_leaves_inside_matching_brackets,
14     max_delimiter_priority_in_atom,
15 )
16 from black.comments import FMT_OFF, generate_comments, list_comments
17 from black.lines import (
18     Line,
19     append_leaves,
20     can_be_split,
21     can_omit_invisible_parens,
22     is_line_short_enough,
23     line_to_string,
24 )
25 from black.mode import Feature, Mode, Preview
26 from black.nodes import (
27     ASSIGNMENTS,
28     BRACKETS,
29     CLOSING_BRACKETS,
30     OPENING_BRACKETS,
31     RARROW,
32     STANDALONE_COMMENT,
33     STATEMENT,
34     WHITESPACE,
35     Visitor,
36     ensure_visible,
37     is_arith_like,
38     is_atom_with_invisible_parens,
39     is_docstring,
40     is_empty_tuple,
41     is_lpar_token,
42     is_multiline_string,
43     is_name_token,
44     is_one_sequence_between,
45     is_one_tuple,
46     is_rpar_token,
47     is_stub_body,
48     is_stub_suite,
49     is_tuple_containing_walrus,
50     is_vararg,
51     is_walrus_assignment,
52     is_yield,
53     syms,
54     wrap_in_parentheses,
55 )
56 from black.numerics import normalize_numeric_literal
57 from black.strings import (
58     fix_docstring,
59     get_string_prefix,
60     normalize_string_prefix,
61     normalize_string_quotes,
62     normalize_unicode_escape_sequences,
63 )
64 from black.trans import (
65     CannotTransform,
66     StringMerger,
67     StringParenStripper,
68     StringParenWrapper,
69     StringSplitter,
70     Transformer,
71     hug_power_op,
72 )
73 from blib2to3.pgen2 import token
74 from blib2to3.pytree import Leaf, Node
75
76 # types
77 LeafID = int
78 LN = Union[Leaf, Node]
79
80
81 class CannotSplit(CannotTransform):
82     """A readable split that fits the allotted line length is impossible."""
83
84
85 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
86 # See also https://github.com/mypyc/mypyc/issues/827.
87 class LineGenerator(Visitor[Line]):
88     """Generates reformatted Line objects.  Empty lines are not emitted.
89
90     Note: destroys the tree it's visiting by mutating prefixes of its leaves,
91     so the tree no longer stringifies back to valid Python code.
92     """
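    # NOTE (editorial illustration, not part of the original source): a typical
    # caller constructs the generator and iterates the lines it yields while
    # visiting a parsed source tree, roughly:
    #
    #     line_generator = LineGenerator(mode=mode, features=features)
    #     for current_line in line_generator.visit(src_node):
    #         ...
    #
    # The name `src_node` above is a placeholder for the parsed module node.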
93
94     def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
95         self.mode = mode
96         self.features = features
97         self.current_line: Line
98         self.__post_init__()
99
100     def line(self, indent: int = 0) -> Iterator[Line]:
101         """Generate a line.
102
103         If the line is empty, only emit if it makes sense.
104         If the line is too long, split it first and then generate.
105
106         If any lines were generated, set up a new current_line.
107         """
108         if not self.current_line:
109             self.current_line.depth += indent
110             return  # Line is empty, don't emit. Creating a new one is unnecessary.
111
112         complete_line = self.current_line
113         self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
114         yield complete_line
115
116     def visit_default(self, node: LN) -> Iterator[Line]:
117         """Default `visit_*()` implementation. Recurses to children of `node`."""
118         if isinstance(node, Leaf):
119             any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
120             for comment in generate_comments(node):
121                 if any_open_brackets:
122                     # any comment within brackets is subject to splitting
123                     self.current_line.append(comment)
124                 elif comment.type == token.COMMENT:
125                     # regular trailing comment
126                     self.current_line.append(comment)
127                     yield from self.line()
128
129                 else:
130                     # regular standalone comment
131                     yield from self.line()
132
133                     self.current_line.append(comment)
134                     yield from self.line()
135
136             normalize_prefix(node, inside_brackets=any_open_brackets)
137             if self.mode.string_normalization and node.type == token.STRING:
138                 node.value = normalize_string_prefix(node.value)
139                 node.value = normalize_string_quotes(node.value)
140             if node.type == token.NUMBER:
141                 normalize_numeric_literal(node)
142             if node.type not in WHITESPACE:
143                 self.current_line.append(node)
144         yield from super().visit_default(node)
145
146     def visit_test(self, node: Node) -> Iterator[Line]:
147         """Visit an `x if y else z` test"""
148
149         if Preview.parenthesize_conditional_expressions in self.mode:
150             already_parenthesized = (
151                 node.prev_sibling and node.prev_sibling.type == token.LPAR
152             )
153
154             if not already_parenthesized:
155                 lpar = Leaf(token.LPAR, "")
156                 rpar = Leaf(token.RPAR, "")
157                 node.insert_child(0, lpar)
158                 node.append_child(rpar)
159
160         yield from self.visit_default(node)
161
162     def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
163         """Increase indentation level, maybe yield a line."""
164         # In blib2to3 INDENT never holds comments.
165         yield from self.line(+1)
166         yield from self.visit_default(node)
167
168     def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
169         """Decrease indentation level, maybe yield a line."""
170         # The current line might still wait for trailing comments.  At DEDENT time
171         # there won't be any (they would be prefixes on the preceding NEWLINE).
172         # Emit the line then.
173         yield from self.line()
174
175         # While DEDENT has no value, its prefix may contain standalone comments
176         # that belong to the current indentation level.  Get 'em.
177         yield from self.visit_default(node)
178
179         # Finally, emit the dedent.
180         yield from self.line(-1)
181
182     def visit_stmt(
183         self, node: Node, keywords: Set[str], parens: Set[str]
184     ) -> Iterator[Line]:
185         """Visit a statement.
186
187         This implementation is shared for `if`, `while`, `for`, `try`, `except`,
188         `def`, `with`, `class`, `assert`, and assignments.
189
190         The relevant Python language `keywords` for a given statement will be
191         NAME leaves within it. This method puts those on a separate line.
192
193         `parens` holds a set of string leaf values immediately after which
194         invisible parens should be put.
195         """
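        # NOTE (editorial illustration, not part of the original source): for an
        # `if` statement this is called with keywords={"if", "elif", "else"} and
        # parens={"if", "elif"} (see __post_init__), so each keyword starts a new
        # line and invisible parens after `if`/`elif` let a long condition split
        # across lines without backslashes, e.g. (placeholder names):
        #
        #     if (
        #         first_long_condition
        #         and second_long_condition
        #     ):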
196         normalize_invisible_parens(
197             node, parens_after=parens, mode=self.mode, features=self.features
198         )
199         for child in node.children:
200             if is_name_token(child) and child.value in keywords:
201                 yield from self.line()
202
203             yield from self.visit(child)
204
205     def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
206         if Preview.wrap_long_dict_values_in_parens in self.mode:
207             for i, child in enumerate(node.children):
208                 if i == 0:
209                     continue
210                 if node.children[i - 1].type == token.COLON:
211                     if child.type == syms.atom and child.children[0].type == token.LPAR:
212                         if maybe_make_parens_invisible_in_atom(
213                             child,
214                             parent=node,
215                             remove_brackets_around_comma=False,
216                         ):
217                             wrap_in_parentheses(node, child, visible=False)
218                     else:
219                         wrap_in_parentheses(node, child, visible=False)
220         yield from self.visit_default(node)
221
222     def visit_funcdef(self, node: Node) -> Iterator[Line]:
223         """Visit function definition."""
224         yield from self.line()
225
226         # Remove redundant brackets around return type annotation.
227         is_return_annotation = False
228         for child in node.children:
229             if child.type == token.RARROW:
230                 is_return_annotation = True
231             elif is_return_annotation:
232                 if child.type == syms.atom and child.children[0].type == token.LPAR:
233                     if maybe_make_parens_invisible_in_atom(
234                         child,
235                         parent=node,
236                         remove_brackets_around_comma=False,
237                     ):
238                         wrap_in_parentheses(node, child, visible=False)
239                 else:
240                     wrap_in_parentheses(node, child, visible=False)
241                 is_return_annotation = False
242
243         for child in node.children:
244             yield from self.visit(child)
245
246     def visit_match_case(self, node: Node) -> Iterator[Line]:
247         """Visit either a match or case statement."""
248         normalize_invisible_parens(
249             node, parens_after=set(), mode=self.mode, features=self.features
250         )
251
252         yield from self.line()
253         for child in node.children:
254             yield from self.visit(child)
255
256     def visit_suite(self, node: Node) -> Iterator[Line]:
257         """Visit a suite."""
258         if self.mode.is_pyi and is_stub_suite(node):
259             yield from self.visit(node.children[2])
260         else:
261             yield from self.visit_default(node)
262
263     def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
264         """Visit a statement without nested statements."""
265         prev_type: Optional[int] = None
266         for child in node.children:
267             if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
268                 wrap_in_parentheses(node, child, visible=False)
269             prev_type = child.type
270
271         is_suite_like = node.parent and node.parent.type in STATEMENT
272         if is_suite_like:
273             if self.mode.is_pyi and is_stub_body(node):
274                 yield from self.visit_default(node)
275             else:
276                 yield from self.line(+1)
277                 yield from self.visit_default(node)
278                 yield from self.line(-1)
279
280         else:
281             if (
282                 not self.mode.is_pyi
283                 or not node.parent
284                 or not is_stub_suite(node.parent)
285             ):
286                 yield from self.line()
287             yield from self.visit_default(node)
288
289     def visit_async_stmt(self, node: Node) -> Iterator[Line]:
290         """Visit `async def`, `async for`, `async with`."""
291         yield from self.line()
292
293         children = iter(node.children)
294         for child in children:
295             yield from self.visit(child)
296
297             if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
298                 # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
299                 # line.
300                 break
301
302         internal_stmt = next(children)
303         for child in internal_stmt.children:
304             yield from self.visit(child)
305
306     def visit_decorators(self, node: Node) -> Iterator[Line]:
307         """Visit decorators."""
308         for child in node.children:
309             yield from self.line()
310             yield from self.visit(child)
311
312     def visit_power(self, node: Node) -> Iterator[Line]:
313         for idx, leaf in enumerate(node.children[:-1]):
314             next_leaf = node.children[idx + 1]
315
316             if not isinstance(leaf, Leaf):
317                 continue
318
319             value = leaf.value.lower()
320             if (
321                 leaf.type == token.NUMBER
322                 and next_leaf.type == syms.trailer
323                 # Ensure that we are in an attribute trailer
324                 and next_leaf.children[0].type == token.DOT
325                 # It shouldn't wrap hexadecimal, binary and octal literals
326                 and not value.startswith(("0x", "0b", "0o"))
327                 # It shouldn't wrap complex literals
328                 and "j" not in value
329             ):
330                 wrap_in_parentheses(node, leaf)
331
332         remove_await_parens(node)
333
334         yield from self.visit_default(node)
335
336     def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
337         """Remove a semicolon and put the other statement on a separate line."""
338         yield from self.line()
339
340     def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
341         """End of file. Process outstanding comments and end with a newline."""
342         yield from self.visit_default(leaf)
343         yield from self.line()
344
345     def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
346         if not self.current_line.bracket_tracker.any_open_brackets():
347             yield from self.line()
348         yield from self.visit_default(leaf)
349
350     def visit_factor(self, node: Node) -> Iterator[Line]:
351         """Force parentheses between a unary op and a binary power:
352
353         -2 ** 8 -> -(2 ** 8)
354         """
355         _operator, operand = node.children
356         if (
357             operand.type == syms.power
358             and len(operand.children) == 3
359             and operand.children[1].type == token.DOUBLESTAR
360         ):
361             lpar = Leaf(token.LPAR, "(")
362             rpar = Leaf(token.RPAR, ")")
363             index = operand.remove() or 0
364             node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
365         yield from self.visit_default(node)
366
367     def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
368         if Preview.hex_codes_in_unicode_sequences in self.mode:
369             normalize_unicode_escape_sequences(leaf)
370
371         if is_docstring(leaf) and "\\\n" not in leaf.value:
372             # We're ignoring docstrings with backslash newline escapes because changing
373             # indentation of those changes the AST representation of the code.
374             if self.mode.string_normalization:
375                 docstring = normalize_string_prefix(leaf.value)
376                 # visit_default() does handle string normalization for us, but
377                 # since this method acts differently depending on quote style (ex.
378                 # see padding logic below), there's a possibility for unstable
379                 # formatting as visit_default() is called *after*. To avoid a
380                 # situation where this function formats a docstring differently on
381                 # the second pass, normalize it early.
382                 docstring = normalize_string_quotes(docstring)
383             else:
384                 docstring = leaf.value
385             prefix = get_string_prefix(docstring)
386             docstring = docstring[len(prefix) :]  # Remove the prefix
387             quote_char = docstring[0]
388             # A natural way to remove the outer quotes is to do:
389             #   docstring = docstring.strip(quote_char)
390             # but that breaks on """""x""" (which is '""x').
391             # So we actually need to remove the first character and the next two
392             # characters but only if they are the same as the first.
393             quote_len = 1 if docstring[1] != quote_char else 3
394             docstring = docstring[quote_len:-quote_len]
395             docstring_started_empty = not docstring
396             indent = " " * 4 * self.current_line.depth
397
398             if is_multiline_string(leaf):
399                 docstring = fix_docstring(docstring, indent)
400             else:
401                 docstring = docstring.strip()
402
403             has_trailing_backslash = False
404             if docstring:
405                 # Add some padding if the docstring starts / ends with a quote mark.
406                 if docstring[0] == quote_char:
407                     docstring = " " + docstring
408                 if docstring[-1] == quote_char:
409                     docstring += " "
410                 if docstring[-1] == "\\":
411                     backslash_count = len(docstring) - len(docstring.rstrip("\\"))
412                     if backslash_count % 2:
413                         # Odd number of trailing backslashes, add some padding to
414                         # avoid escaping the closing string quote.
415                         docstring += " "
416                         has_trailing_backslash = True
417             elif not docstring_started_empty:
418                 docstring = " "
419
420             # We could enforce triple quotes at this point.
421             quote = quote_char * quote_len
422
423             # It's invalid to put closing single-character quotes on a new line.
424             if self.mode and quote_len == 3:
425                 # We need to find the length of the last line of the docstring
426                 # to find if we can add the closing quotes to the line without
427                 # exceeding the maximum line length.
428                 # If docstring is one line, we don't put the closing quotes on a
429                 # separate line because it looks ugly (#3320).
430                 lines = docstring.splitlines()
431                 last_line_length = len(lines[-1]) if docstring else 0
432
433                 # If adding closing quotes would cause the last line to exceed
434                 # the maximum line length then put a line break before the
435                 # closing quotes
436                 if (
437                     len(lines) > 1
438                     and last_line_length + quote_len > self.mode.line_length
439                     and len(indent) + quote_len <= self.mode.line_length
440                     and not has_trailing_backslash
441                 ):
442                     leaf.value = prefix + quote + docstring + "\n" + indent + quote
443                 else:
444                     leaf.value = prefix + quote + docstring + quote
445             else:
446                 leaf.value = prefix + quote + docstring + quote
447
448         yield from self.visit_default(leaf)
449
450     def __post_init__(self) -> None:
451         """You are in a twisty little maze of passages."""
452         self.current_line = Line(mode=self.mode)
453
454         v = self.visit_stmt
455         Ø: Set[str] = set()
456         self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
457         self.visit_if_stmt = partial(
458             v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
459         )
460         self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
461         self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
462         self.visit_try_stmt = partial(
463             v, keywords={"try", "except", "else", "finally"}, parens=Ø
464         )
465         self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"})
466         self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
467         self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
468         self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
469         self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
470         self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
471         self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
472         self.visit_async_funcdef = self.visit_async_stmt
473         self.visit_decorated = self.visit_decorators
474
475         # PEP 634
476         self.visit_match_stmt = self.visit_match_case
477         self.visit_case_block = self.visit_match_case
478
479
480 def transform_line(
481     line: Line, mode: Mode, features: Collection[Feature] = ()
482 ) -> Iterator[Line]:
483     """Transform a `line`, potentially splitting it into many lines.
484
485     They should fit in the allotted `line_length` but might not be able to.
486
487     `features` are syntactical features that may be used in the output.
488     """
489     if line.is_comment:
490         yield line
491         return
492
493     line_str = line_to_string(line)
494
495     ll = mode.line_length
496     sn = mode.string_normalization
497     string_merge = StringMerger(ll, sn)
498     string_paren_strip = StringParenStripper(ll, sn)
499     string_split = StringSplitter(ll, sn)
500     string_paren_wrap = StringParenWrapper(ll, sn)
501
502     transformers: List[Transformer]
503     if (
504         not line.contains_uncollapsable_type_comments()
505         and not line.should_split_rhs
506         and not line.magic_trailing_comma
507         and (
508             is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
509             or line.contains_unsplittable_type_ignore()
510         )
511         and not (line.inside_brackets and line.contains_standalone_comments())
512     ):
513         # Only apply basic string preprocessing, since lines shouldn't be split here.
514         if Preview.string_processing in mode:
515             transformers = [string_merge, string_paren_strip]
516         else:
517             transformers = []
518     elif line.is_def:
519         transformers = [left_hand_split]
520     else:
521
522         def _rhs(
523             self: object, line: Line, features: Collection[Feature], mode: Mode
524         ) -> Iterator[Line]:
525             """Wraps calls to `right_hand_split`.
526
527             The calls increasingly `omit` right-hand trailers (bracket pairs with
528             content), meaning the trailers get glued together to split on another
529             bracket pair instead.
530             """
531             for omit in generate_trailers_to_omit(line, mode.line_length):
532                 lines = list(
533                     right_hand_split(line, mode.line_length, features, omit=omit)
534                 )
535                 # Note: this check is only able to figure out if the first line of the
536                 # *current* transformation fits in the line length.  This is true only
537                 # for simple cases.  All others require running more transforms via
538                 # `transform_line()`.  This check doesn't know if those would succeed.
539                 if is_line_short_enough(lines[0], line_length=mode.line_length):
540                     yield from lines
541                     return
542
543             # All splits failed, best effort split with no omits.
544             # This mostly happens to multiline strings that are by definition
545             # reported as not fitting a single line, as well as lines that contain
546             # trailing commas (those have to be exploded).
547             yield from right_hand_split(
548                 line, line_length=mode.line_length, features=features
549             )
550
551         # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
552         # __name__ attribute which is needed in `run_transformer` further down.
553         # Unfortunately a nested class breaks mypyc too. So a class must be created
554         # via type ... https://github.com/mypyc/mypyc/issues/884
555         rhs = type("rhs", (), {"__call__": _rhs})()
556
557         if Preview.string_processing in mode:
558             if line.inside_brackets:
559                 transformers = [
560                     string_merge,
561                     string_paren_strip,
562                     string_split,
563                     delimiter_split,
564                     standalone_comment_split,
565                     string_paren_wrap,
566                     rhs,
567                 ]
568             else:
569                 transformers = [
570                     string_merge,
571                     string_paren_strip,
572                     string_split,
573                     string_paren_wrap,
574                     rhs,
575                 ]
576         else:
577             if line.inside_brackets:
578                 transformers = [delimiter_split, standalone_comment_split, rhs]
579             else:
580                 transformers = [rhs]
581     # It's always safe to attempt hugging of power operations and pretty much every line
582     # could match.
583     transformers.append(hug_power_op)
584
585     for transform in transformers:
586         # We are accumulating lines in `result` because we might want to abort
587         # mission and return the original line in the end, or attempt a different
588         # split altogether.
589         try:
590             result = run_transformer(line, transform, mode, features, line_str=line_str)
591         except CannotTransform:
592             continue
593         else:
594             yield from result
595             break
596
597     else:
598         yield line
599
600
601 class _BracketSplitComponent(Enum):
602     head = auto()
603     body = auto()
604     tail = auto()
605
606
607 def left_hand_split(
608     line: Line, _features: Collection[Feature], mode: Mode
609 ) -> Iterator[Line]:
610     """Split line into many lines, starting with the first matching bracket pair.
611
612     Note: this usually looks weird; only use it for function definitions.
613     Prefer RHS otherwise.  This is why this function is not symmetrical with
614     :func:`right_hand_split` which also handles optional parentheses.
615     """
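    # NOTE (editorial illustration, not part of the original source): on a long
    # `def`, splitting at the first bracket pair roughly yields (placeholder
    # names):
    #
    #     def some_function(                   # <- head
    #         first_argument, second_argument  # <- body
    #     ) -> SomeReturnType:                 # <- tail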
616     tail_leaves: List[Leaf] = []
617     body_leaves: List[Leaf] = []
618     head_leaves: List[Leaf] = []
619     current_leaves = head_leaves
620     matching_bracket: Optional[Leaf] = None
621     for leaf in line.leaves:
622         if (
623             current_leaves is body_leaves
624             and leaf.type in CLOSING_BRACKETS
625             and leaf.opening_bracket is matching_bracket
626             and isinstance(matching_bracket, Leaf)
627         ):
628             ensure_visible(leaf)
629             ensure_visible(matching_bracket)
630             current_leaves = tail_leaves if body_leaves else head_leaves
631         current_leaves.append(leaf)
632         if current_leaves is head_leaves:
633             if leaf.type in OPENING_BRACKETS:
634                 matching_bracket = leaf
635                 current_leaves = body_leaves
636     if not matching_bracket:
637         raise CannotSplit("No brackets found")
638
639     head = bracket_split_build_line(
640         head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
641     )
642     body = bracket_split_build_line(
643         body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
644     )
645     tail = bracket_split_build_line(
646         tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
647     )
648     bracket_split_succeeded_or_raise(head, body, tail)
649     for result in (head, body, tail):
650         if result:
651             yield result
652
653
654 @dataclass
655 class _RHSResult:
656     """Intermediate split result from a right hand split."""
657
658     head: Line
659     body: Line
660     tail: Line
661     opening_bracket: Leaf
662     closing_bracket: Leaf
663
664
665 def right_hand_split(
666     line: Line,
667     line_length: int,
668     features: Collection[Feature] = (),
669     omit: Collection[LeafID] = (),
670 ) -> Iterator[Line]:
671     """Split line into many lines, starting with the last matching bracket pair.
672
673     If the split was by optional parentheses, attempt splitting without them, too.
674     `omit` is a collection of closing bracket IDs that shouldn't be considered for
675     this split.
676
677     Note: running this function modifies `bracket_depth` on the leaves of `line`.
678     """
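    # NOTE (editorial illustration, not part of the original source): splitting
    # `result = some_function(first_arg, second_arg)` at its last bracket pair
    # roughly yields (placeholder names):
    #
    #     result = some_function(    # <- head
    #         first_arg, second_arg  # <- body
    #     )                          # <- tail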
679     rhs_result = _first_right_hand_split(line, omit=omit)
680     yield from _maybe_split_omitting_optional_parens(
681         rhs_result, line, line_length, features=features, omit=omit
682     )
683
684
685 def _first_right_hand_split(
686     line: Line,
687     omit: Collection[LeafID] = (),
688 ) -> _RHSResult:
689     """Split the line into head, body, tail starting with the last bracket pair.
690
691     Note: this function should not have side effects. It's relied upon by
692     _maybe_split_omitting_optional_parens to get an opinion on whether to prefer
693     splitting on the right side of an assignment statement.
694     """
695     tail_leaves: List[Leaf] = []
696     body_leaves: List[Leaf] = []
697     head_leaves: List[Leaf] = []
698     current_leaves = tail_leaves
699     opening_bracket: Optional[Leaf] = None
700     closing_bracket: Optional[Leaf] = None
701     for leaf in reversed(line.leaves):
702         if current_leaves is body_leaves:
703             if leaf is opening_bracket:
704                 current_leaves = head_leaves if body_leaves else tail_leaves
705         current_leaves.append(leaf)
706         if current_leaves is tail_leaves:
707             if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
708                 opening_bracket = leaf.opening_bracket
709                 closing_bracket = leaf
710                 current_leaves = body_leaves
711     if not (opening_bracket and closing_bracket and head_leaves):
712         # If there is no opening_bracket or closing_bracket, the split failed and
713         # all content is in the tail.  Otherwise, if `head_leaves` is empty, it means
714         # the matching `opening_bracket` wasn't available on `line` anymore.
715         raise CannotSplit("No brackets found")
716
717     tail_leaves.reverse()
718     body_leaves.reverse()
719     head_leaves.reverse()
720     head = bracket_split_build_line(
721         head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
722     )
723     body = bracket_split_build_line(
724         body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
725     )
726     tail = bracket_split_build_line(
727         tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
728     )
729     bracket_split_succeeded_or_raise(head, body, tail)
730     return _RHSResult(head, body, tail, opening_bracket, closing_bracket)
731
732
733 def _maybe_split_omitting_optional_parens(
734     rhs: _RHSResult,
735     line: Line,
736     line_length: int,
737     features: Collection[Feature] = (),
738     omit: Collection[LeafID] = (),
739 ) -> Iterator[Line]:
740     if (
741         Feature.FORCE_OPTIONAL_PARENTHESES not in features
742         # the opening bracket is an optional paren
743         and rhs.opening_bracket.type == token.LPAR
744         and not rhs.opening_bracket.value
745         # the closing bracket is an optional paren
746         and rhs.closing_bracket.type == token.RPAR
747         and not rhs.closing_bracket.value
748         # it's not an import (optional parens are the only thing we can split on
749         # in this case; attempting a split without them is a waste of time)
750         and not line.is_import
751         # there are no standalone comments in the body
752         and not rhs.body.contains_standalone_comments(0)
753         # and we can actually remove the parens
754         and can_omit_invisible_parens(rhs.body, line_length)
755     ):
756         omit = {id(rhs.closing_bracket), *omit}
757         try:
758             # The _RHSResult Omitting Optional Parens.
759             rhs_oop = _first_right_hand_split(line, omit=omit)
760             if not (
761                 Preview.prefer_splitting_right_hand_side_of_assignments in line.mode
762                 # the split is right after `=`
763                 and len(rhs.head.leaves) >= 2
764                 and rhs.head.leaves[-2].type == token.EQUAL
765                 # the left side of assignment contains brackets
766                 and any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1])
767                 # the left side of assignment is short enough (the -1 is for the ending
768                 # optional paren)
769                 and is_line_short_enough(rhs.head, line_length=line_length - 1)
770                 # the left side of assignment won't explode further because of magic
771                 # trailing comma
772                 and rhs.head.magic_trailing_comma is None
773                 # the split by omitting optional parens isn't preferred for some
774                 # other reason
775                 and not _prefer_split_rhs_oop(rhs_oop, line_length=line_length)
776             ):
777                 yield from _maybe_split_omitting_optional_parens(
778                     rhs_oop, line, line_length, features=features, omit=omit
779                 )
780                 return
781
782         except CannotSplit as e:
783             if not (
784                 can_be_split(rhs.body)
785                 or is_line_short_enough(rhs.body, line_length=line_length)
786             ):
787                 raise CannotSplit(
788                     "Splitting failed, body is still too long and can't be split."
789                 ) from e
790
791             elif (
792                 rhs.head.contains_multiline_strings()
793                 or rhs.tail.contains_multiline_strings()
794             ):
795                 raise CannotSplit(
796                     "The current optional pair of parentheses is bound to fail to"
797                     " satisfy the splitting algorithm because the head or the tail"
798                     " contains multiline strings which by definition never fit one"
799                     " line."
800                 ) from e
801
802     ensure_visible(rhs.opening_bracket)
803     ensure_visible(rhs.closing_bracket)
804     for result in (rhs.head, rhs.body, rhs.tail):
805         if result:
806             yield result
807
808
809 def _prefer_split_rhs_oop(rhs_oop: _RHSResult, line_length: int) -> bool:
810     """
811     Returns whether we should prefer the result from a split omitting optional parens.
812     """
813     has_closing_bracket_after_assign = False
814     for leaf in reversed(rhs_oop.head.leaves):
815         if leaf.type == token.EQUAL:
816             break
817         if leaf.type in CLOSING_BRACKETS:
818             has_closing_bracket_after_assign = True
819             break
820     return (
821         # contains matching brackets after the `=` (done by checking there is a
822         # closing bracket)
823         has_closing_bracket_after_assign
824         or (
825             # the split is actually from inside the optional parens (done by checking
826             # the first line still contains the `=`)
827             any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
828             # the first line is short enough
829             and is_line_short_enough(rhs_oop.head, line_length=line_length)
830         )
831         # contains unsplittable type ignore
832         or rhs_oop.head.contains_unsplittable_type_ignore()
833         or rhs_oop.body.contains_unsplittable_type_ignore()
834         or rhs_oop.tail.contains_unsplittable_type_ignore()
835     )
836
837
838 def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
839     """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.
840
841     Do nothing otherwise.
842
843     A left- or right-hand split is based on a pair of brackets. Content before
844     (and including) the opening bracket is left on one line, content inside the
845     brackets is put on a separate line, and finally content starting with and
846     following the closing bracket is put on a separate line.
847
848     Those are called `head`, `body`, and `tail`, respectively. If the split
849     produced the same line (all content in `head`) or ended up with an empty `body`
850     and the `tail` is just the closing bracket, then it's considered failed.
851     """
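    # NOTE (editorial illustration, not part of the original source): for
    # `value = compute(item)`, head is `value = compute(`, body is `item`, and
    # tail is `)`.  A split with an empty body and a tail that is only the
    # closing bracket is rejected below as not worth it (placeholder names).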
852     tail_len = len(str(tail).strip())
853     if not body:
854         if tail_len == 0:
855             raise CannotSplit("Splitting brackets produced the same line")
856
857         elif tail_len < 3:
858             raise CannotSplit(
859                 f"Splitting brackets on an empty body to save {tail_len} characters is"
860                 " not worth it"
861             )
862
863
864 def bracket_split_build_line(
865     leaves: List[Leaf],
866     original: Line,
867     opening_bracket: Leaf,
868     *,
869     component: _BracketSplitComponent,
870 ) -> Line:
871     """Return a new line with given `leaves` and respective comments from `original`.
872
873     If it's the head component, brackets will be tracked so trailing commas are
874     respected.
875
876     If it's the body component, the result line is one-indented inside brackets and as
877     such has its first leaf's prefix normalized and a trailing comma added when
878     expected.
879     """
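    # NOTE (editorial illustration, not part of the original source): the trailing
    # comma handling below is what makes an exploded import such as
    #
    #     from some_module import (
    #         first_name,
    #         second_name,
    #     )
    #
    # keep a comma after the last name (placeholder names).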
880     result = Line(mode=original.mode, depth=original.depth)
881     if component is _BracketSplitComponent.body:
882         result.inside_brackets = True
883         result.depth += 1
884         if leaves:
885             # Since body is a new indent level, remove spurious leading whitespace.
886             normalize_prefix(leaves[0], inside_brackets=True)
887             # Ensure a trailing comma for imports and standalone function arguments, but
888             # be careful not to add one after any comments or within type annotations.
889             no_commas = (
890                 original.is_def
891                 and opening_bracket.value == "("
892                 and not any(leaf.type == token.COMMA for leaf in leaves)
893                 # In particular, don't add one within a parenthesized return annotation.
894                 # Unfortunately the indicator we're in a return annotation (RARROW) may
895                 # be defined directly in the parent node, the parent of the parent ...
896                 # and so on depending on how complex the return annotation is.
897                 # This isn't perfect and there are some false negatives, but they are
898                 # in contexts where a comma is actually fine.
899                 and not any(
900                     node.prev_sibling.type == RARROW
901                     for node in (
902                         leaves[0].parent,
903                         getattr(leaves[0].parent, "parent", None),
904                     )
905                     if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
906                 )
907             )
908
909             if original.is_import or no_commas:
910                 for i in range(len(leaves) - 1, -1, -1):
911                     if leaves[i].type == STANDALONE_COMMENT:
912                         continue
913
914                     if leaves[i].type != token.COMMA:
915                         new_comma = Leaf(token.COMMA, ",")
916                         leaves.insert(i + 1, new_comma)
917                     break
918
919     leaves_to_track: Set[LeafID] = set()
920     if component is _BracketSplitComponent.head:
921         leaves_to_track = get_leaves_inside_matching_brackets(leaves)
922     # Populate the line
923     for leaf in leaves:
924         result.append(
925             leaf,
926             preformatted=True,
927             track_bracket=id(leaf) in leaves_to_track,
928         )
929         for comment_after in original.comments_after(leaf):
930             result.append(comment_after, preformatted=True)
931     if component is _BracketSplitComponent.body and should_split_line(
932         result, opening_bracket
933     ):
934         result.should_split_rhs = True
935     return result
936
937
938 def dont_increase_indentation(split_func: Transformer) -> Transformer:
939     """Normalize prefix of the first leaf in every line returned by `split_func`.
940
941     This is a decorator over relevant split functions.
942     """
943
944     @wraps(split_func)
945     def split_wrapper(
946         line: Line, features: Collection[Feature], mode: Mode
947     ) -> Iterator[Line]:
948         for split_line in split_func(line, features, mode):
949             normalize_prefix(split_line.leaves[0], inside_brackets=True)
950             yield split_line
951
952     return split_wrapper
953
954
955 def _get_last_non_comment_leaf(line: Line) -> Optional[int]:
956     for leaf_idx in range(len(line.leaves) - 1, 0, -1):
957         if line.leaves[leaf_idx].type != STANDALONE_COMMENT:
958             return leaf_idx
959     return None
960
961
962 def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line:
963     if (
964         safe
965         and delimiter_priority == COMMA_PRIORITY
966         and line.leaves[-1].type != token.COMMA
967         and line.leaves[-1].type != STANDALONE_COMMENT
968     ):
969         new_comma = Leaf(token.COMMA, ",")
970         line.append(new_comma)
971     return line
972
973
974 @dont_increase_indentation
975 def delimiter_split(
976     line: Line, features: Collection[Feature], mode: Mode
977 ) -> Iterator[Line]:
978     """Split according to delimiters of the highest priority.
979
980     If the appropriate Features are given, the split will also add trailing commas
981     in function signatures and calls that contain `*` and `**`.
982     """
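    # NOTE (editorial illustration, not part of the original source): commas have
    # the highest delimiter priority, so a bracketed body line such as
    # `first_arg, second_arg, third_arg` is emitted one element per line here,
    # with a trailing comma appended when that is safe (placeholder names).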
983     try:
984         last_leaf = line.leaves[-1]
985     except IndexError:
986         raise CannotSplit("Line empty") from None
987
988     bt = line.bracket_tracker
989     try:
990         delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
991     except ValueError:
992         raise CannotSplit("No delimiters found") from None
993
994     if delimiter_priority == DOT_PRIORITY:
995         if bt.delimiter_count_with_priority(delimiter_priority) == 1:
996             raise CannotSplit("Splitting a single attribute from its owner looks wrong")
997
998     current_line = Line(
999         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1000     )
1001     lowest_depth = sys.maxsize
1002     trailing_comma_safe = True
1003
1004     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1005         """Append `leaf` to current line or to new line if appending impossible."""
1006         nonlocal current_line
1007         try:
1008             current_line.append_safe(leaf, preformatted=True)
1009         except ValueError:
1010             yield current_line
1011
1012             current_line = Line(
1013                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1014             )
1015             current_line.append(leaf)
1016
1017     last_non_comment_leaf = _get_last_non_comment_leaf(line)
1018     for leaf_idx, leaf in enumerate(line.leaves):
1019         yield from append_to_line(leaf)
1020
1021         for comment_after in line.comments_after(leaf):
1022             yield from append_to_line(comment_after)
1023
1024         lowest_depth = min(lowest_depth, leaf.bracket_depth)
1025         if leaf.bracket_depth == lowest_depth:
1026             if is_vararg(leaf, within={syms.typedargslist}):
1027                 trailing_comma_safe = (
1028                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
1029                 )
1030             elif is_vararg(leaf, within={syms.arglist, syms.argument}):
1031                 trailing_comma_safe = (
1032                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
1033                 )
1034
1035         if (
1036             Preview.add_trailing_comma_consistently in mode
1037             and last_leaf.type == STANDALONE_COMMENT
1038             and leaf_idx == last_non_comment_leaf
1039         ):
1040             current_line = _safe_add_trailing_comma(
1041                 trailing_comma_safe, delimiter_priority, current_line
1042             )
1043
1044         leaf_priority = bt.delimiters.get(id(leaf))
1045         if leaf_priority == delimiter_priority:
1046             yield current_line
1047
1048             current_line = Line(
1049                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1050             )
1051     if current_line:
1052         current_line = _safe_add_trailing_comma(
1053             trailing_comma_safe, delimiter_priority, current_line
1054         )
1055         yield current_line
1056
1057
1058 @dont_increase_indentation
1059 def standalone_comment_split(
1060     line: Line, features: Collection[Feature], mode: Mode
1061 ) -> Iterator[Line]:
1062     """Split standalone comments from the rest of the line."""
1063     if not line.contains_standalone_comments(0):
1064         raise CannotSplit("Line does not have any standalone comments")
1065
1066     current_line = Line(
1067         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1068     )
1069
1070     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1071         """Append `leaf` to current line or to new line if appending impossible."""
1072         nonlocal current_line
1073         try:
1074             current_line.append_safe(leaf, preformatted=True)
1075         except ValueError:
1076             yield current_line
1077
1078             current_line = Line(
1079                 line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1080             )
1081             current_line.append(leaf)
1082
1083     for leaf in line.leaves:
1084         yield from append_to_line(leaf)
1085
1086         for comment_after in line.comments_after(leaf):
1087             yield from append_to_line(comment_after)
1088
1089     if current_line:
1090         yield current_line
1091
1092
1093 def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
1094     """Leave existing extra newlines if not `inside_brackets`. Remove everything
1095     else.
1096
1097     Note: don't use backslashes for formatting or you'll lose your voting rights.
1098     """
1099     if not inside_brackets:
1100         spl = leaf.prefix.split("#")
1101         if "\\" not in spl[0]:
1102             nl_count = spl[-1].count("\n")
1103             if len(spl) > 1:
1104                 nl_count -= 1
1105             leaf.prefix = "\n" * nl_count
1106             return
1107
1108     leaf.prefix = ""
1109
1110
1111 def normalize_invisible_parens(
1112     node: Node, parens_after: Set[str], *, mode: Mode, features: Collection[Feature]
1113 ) -> None:
1114     """Make existing optional parentheses invisible or create new ones.
1115
1116     `parens_after` is a set of string leaf values immediately after which parens
1117     should be put.
1118
1119     Standardizes on visible parentheses for single-element tuples, and keeps
1120     existing visible parentheses for other tuples and generator expressions.
1121     """
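    # NOTE (editorial illustration, not part of the original source): after
    # `return`, for example, this makes the redundant parens in `return (value)`
    # invisible so it prints as `return value`, while `return (value,)` keeps its
    # visible parens because it is a single-element tuple (placeholder name).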
1122     for pc in list_comments(node.prefix, is_endmarker=False):
1123         if pc.value in FMT_OFF:
1124             # This `node` has a prefix with `# fmt: off`, don't mess with parens.
1125             return
1126
1127     # The multiple context managers grammar has a different pattern, thus this is
1128     # separate from the for-loop below. This possibly wraps them in invisible parens,
1129     # which are later removed in remove_with_parens when needed.
1130     if node.type == syms.with_stmt:
1131         _maybe_wrap_cms_in_parens(node, mode, features)
1132
1133     check_lpar = False
1134     for index, child in enumerate(list(node.children)):
1135         # Fixes a bug where invisible parens are not properly stripped from
1136         # assignment statements that contain type annotations.
1137         if isinstance(child, Node) and child.type == syms.annassign:
1138             normalize_invisible_parens(
1139                 child, parens_after=parens_after, mode=mode, features=features
1140             )
1141
1142         # Add parentheses around long tuple unpacking in assignments.
1143         if (
1144             index == 0
1145             and isinstance(child, Node)
1146             and child.type == syms.testlist_star_expr
1147         ):
1148             check_lpar = True
1149
1150         if check_lpar:
1151             if (
1152                 child.type == syms.atom
1153                 and node.type == syms.for_stmt
1154                 and isinstance(child.prev_sibling, Leaf)
1155                 and child.prev_sibling.type == token.NAME
1156                 and child.prev_sibling.value == "for"
1157             ):
1158                 if maybe_make_parens_invisible_in_atom(
1159                     child,
1160                     parent=node,
1161                     remove_brackets_around_comma=True,
1162                 ):
1163                     wrap_in_parentheses(node, child, visible=False)
1164             elif isinstance(child, Node) and node.type == syms.with_stmt:
1165                 remove_with_parens(child, node)
1166             elif child.type == syms.atom:
1167                 if maybe_make_parens_invisible_in_atom(
1168                     child,
1169                     parent=node,
1170                 ):
1171                     wrap_in_parentheses(node, child, visible=False)
1172             elif is_one_tuple(child):
1173                 wrap_in_parentheses(node, child, visible=True)
1174             elif node.type == syms.import_from:
1175                 _normalize_import_from(node, child, index)
1176                 break
1177             elif (
1178                 index == 1
1179                 and child.type == token.STAR
1180                 and node.type == syms.except_clause
1181             ):
1182                 # In except* (PEP 654), the star is actually part of the keyword.
1183                 # So we need to skip the insertion of invisible parentheses to
1184                 # work more precisely.
1185                 continue
1186
1187             elif not (isinstance(child, Leaf) and is_multiline_string(child)):
1188                 wrap_in_parentheses(node, child, visible=False)
1189
1190         comma_check = child.type == token.COMMA
1191
1192         check_lpar = isinstance(child, Leaf) and (
1193             child.value in parens_after or comma_check
1194         )
1195
1196
1197 def _normalize_import_from(parent: Node, child: LN, index: int) -> None:
1198     # "import from" nodes store parentheses directly as part of
1199     # the statement
1200     if is_lpar_token(child):
1201         assert is_rpar_token(parent.children[-1])
1202         # make parentheses invisible
1203         child.value = ""
1204         parent.children[-1].value = ""
1205     elif child.type != token.STAR:
1206         # insert invisible parentheses
1207         parent.insert_child(index, Leaf(token.LPAR, ""))
1208         parent.append_child(Leaf(token.RPAR, ""))
1209
1210
1211 def remove_await_parens(node: Node) -> None:
1212     if node.children[0].type == token.AWAIT and len(node.children) > 1:
1213         if (
1214             node.children[1].type == syms.atom
1215             and node.children[1].children[0].type == token.LPAR
1216         ):
1217             if maybe_make_parens_invisible_in_atom(
1218                 node.children[1],
1219                 parent=node,
1220                 remove_brackets_around_comma=True,
1221             ):
1222                 wrap_in_parentheses(node, node.children[1], visible=False)
1223
1224             # Since await is an expression we shouldn't remove
1225             # brackets in cases where this would change
1226             # the AST due to operator precedence.
1227             # Therefore we only aim to remove brackets around
1228             # power nodes that aren't also await expressions themselves.
1229             # https://peps.python.org/pep-0492/#updated-operator-precedence-table
1230             # N.B. We've still removed any redundant nested brackets though :)
1231             opening_bracket = cast(Leaf, node.children[1].children[0])
1232             closing_bracket = cast(Leaf, node.children[1].children[-1])
1233             bracket_contents = node.children[1].children[1]
1234             if isinstance(bracket_contents, Node):
1235                 if bracket_contents.type != syms.power:
1236                     ensure_visible(opening_bracket)
1237                     ensure_visible(closing_bracket)
1238                 elif (
1239                     bracket_contents.type == syms.power
1240                     and bracket_contents.children[0].type == token.AWAIT
1241                 ):
1242                     ensure_visible(opening_bracket)
1243                     ensure_visible(closing_bracket)
1244                     # If we are in a nested await then recurse down.
1245                     remove_await_parens(bracket_contents)
1246
1247
1248 def _maybe_wrap_cms_in_parens(
1249     node: Node, mode: Mode, features: Collection[Feature]
1250 ) -> None:
1251     """When enabled and safe, wrap the multiple context managers in invisible parens.
1252
1253     It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS.
1254     """
1255     if (
1256         Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features
1257         or Preview.wrap_multiple_context_managers_in_parens not in mode
1258         or len(node.children) <= 2
1259         # If it's an atom, it's already wrapped in parens.
1260         or node.children[1].type == syms.atom
1261     ):
1262         return
1263     colon_index: Optional[int] = None
1264     for i in range(2, len(node.children)):
1265         if node.children[i].type == token.COLON:
1266             colon_index = i
1267             break
1268     if colon_index is not None:
1269         lpar = Leaf(token.LPAR, "")
1270         rpar = Leaf(token.RPAR, "")
1271         context_managers = node.children[1:colon_index]
1272         for child in context_managers:
1273             child.remove()
1274         # After wrapping, the with_stmt will look like this:
1275         #   with_stmt
1276         #     NAME 'with'
1277         #     atom
1278         #       LPAR ''
1279         #       testlist_gexp
1280         #         ... <-- context_managers
1281         #       /testlist_gexp
1282         #       RPAR ''
1283         #     /atom
1284         #     COLON ':'
1285         new_child = Node(
1286             syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar]
1287         )
1288         node.insert_child(1, new_child)
1289
1290
1291 def remove_with_parens(node: Node, parent: Node) -> None:
1292     """Recursively hide optional parens in `with` statements."""
1293     # Removing all unnecessary parentheses in with statements in one pass is a tad
1294     # complex as different variations of bracketed statements result in pretty
1295     # different parse trees:
1296     #
1297     # with (open("file")) as f:                       # this is an asexpr_test
1298     #     ...
1299     #
1300     # with (open("file") as f):                       # this is an atom containing an
1301     #     ...                                         # asexpr_test
1302     #
1303     # with (open("file")) as f, (open("file")) as f:  # this is asexpr_test, COMMA,
1304     #     ...                                         # asexpr_test
1305     #
1306     # with (open("file") as f, open("file") as f):    # an atom containing a
1307     #     ...                                         # testlist_gexp which then
1308     #                                                 # contains multiple asexpr_test(s)
1309     if node.type == syms.atom:
1310         if maybe_make_parens_invisible_in_atom(
1311             node,
1312             parent=parent,
1313             remove_brackets_around_comma=True,
1314         ):
1315             wrap_in_parentheses(parent, node, visible=False)
1316         if isinstance(node.children[1], Node):
1317             remove_with_parens(node.children[1], node)
1318     elif node.type == syms.testlist_gexp:
1319         for child in node.children:
1320             if isinstance(child, Node):
1321                 remove_with_parens(child, node)
1322     elif node.type == syms.asexpr_test and not any(
1323         leaf.type == token.COLONEQUAL for leaf in node.leaves()
1324     ):
1325         if maybe_make_parens_invisible_in_atom(
1326             node.children[0],
1327             parent=node,
1328             remove_brackets_around_comma=True,
1329         ):
1330             wrap_in_parentheses(node, node.children[0], visible=False)
1331
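# Editorial note (not part of the original source): a sketch of the outcome for the
# parse-tree shapes listed in the comment above, assuming the parentheses are
# genuinely redundant:
#
#   with (open("file")) as f: ...   ->  with open("file") as f: ...
#   with (open("file") as f): ...   ->  with open("file") as f: ...
#
# In both cases the parentheses are only made invisible, not deleted, so a later
# split can still render them if the line turns out to be too long.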
1332
1333 def maybe_make_parens_invisible_in_atom(
1334     node: LN,
1335     parent: LN,
1336     remove_brackets_around_comma: bool = False,
1337 ) -> bool:
1338     """If it's safe, make the parens in the atom `node` invisible, recursively.
1339     Additionally, remove repeated, adjacent invisible parens from the atom `node`
1340     as they are redundant.
1341
1342     Returns whether the node should itself be wrapped in invisible parentheses.
1343     """
1344     if (
1345         node.type != syms.atom
1346         or is_empty_tuple(node)
1347         or is_one_tuple(node)
1348         or (is_yield(node) and parent.type != syms.expr_stmt)
1349         or (
1350             # This condition tries to prevent removing non-optional brackets
1351             # around a tuple. However, it can be a bit overzealous, so we provide
1352             # an option to skip this check for `for` and `with` statements.
1353             not remove_brackets_around_comma
1354             and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
1355         )
1356         or is_tuple_containing_walrus(node)
1357     ):
1358         return False
1359
1360     if is_walrus_assignment(node):
1361         if parent.type in [
1362             syms.annassign,
1363             syms.expr_stmt,
1364             syms.assert_stmt,
1365             syms.return_stmt,
1366             syms.except_clause,
1367             syms.funcdef,
1368             syms.with_stmt,
1369             # these ones aren't useful to end users, but they do please fuzzers
1370             syms.for_stmt,
1371             syms.del_stmt,
1373         ]:
1374             return False
1375
1376     first = node.children[0]
1377     last = node.children[-1]
1378     if is_lpar_token(first) and is_rpar_token(last):
1379         middle = node.children[1]
1380         # make parentheses invisible
1381         first.value = ""
1382         last.value = ""
1383         maybe_make_parens_invisible_in_atom(
1384             middle,
1385             parent=parent,
1386             remove_brackets_around_comma=remove_brackets_around_comma,
1387         )
1388
1389         if is_atom_with_invisible_parens(middle):
1390             # Strip the invisible parens from `middle` by replacing
1391             # it with the child in-between the invisible parens
1392             middle.replace(middle.children[1])
1393
1394         return False
1395
1396     return True
1397
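# Editorial note (not part of the original source): a sketch of the contract of
# maybe_make_parens_invisible_in_atom above. For doubled optional parentheses such as
#
#   x = ((a + b))
#
# both pairs end up invisible and the redundant inner invisible pair is stripped,
# so the statement renders as `x = a + b`. The boolean result tells callers such as
# remove_with_parens whether they still need to wrap the node in their own invisible
# parentheses: True when the atom is not itself a `(`...`)` pair, False when the
# parens were handled here or must stay visible for safety.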
1398
1399 def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
1400     """Should `line` be immediately split with `delimiter_split()` after RHS?"""
1401
1402     if not (opening_bracket.parent and opening_bracket.value in "[{("):
1403         return False
1404
1405     # We're essentially checking if the body is delimited by commas and there's more
1406     # than one of them (we exclude the trailing comma, and if the delimiter priority
1407     # is still commas, that means there's more than one).
1408     exclude = set()
1409     trailing_comma = False
1410     try:
1411         last_leaf = line.leaves[-1]
1412         if last_leaf.type == token.COMMA:
1413             trailing_comma = True
1414             exclude.add(id(last_leaf))
1415         max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
1416     except (IndexError, ValueError):
1417         return False
1418
1419     return max_priority == COMMA_PRIORITY and (
1420         (line.mode.magic_trailing_comma and trailing_comma)
1421         # always explode imports
1422         or opening_bracket.parent.type in {syms.atom, syms.import_from}
1423     )
1424
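# Editorial note (not part of the original source): a sketch of when
# should_split_line answers True, assuming default settings. For
#
#   foo = [1, 2, 3,]
#
# the pre-existing ("magic") trailing comma leaves the remaining delimiters at
# COMMA_PRIORITY, so the helper reports that the body should be exploded:
#
#   foo = [
#       1,
#       2,
#       3,
#   ]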
1425
1426 def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
1427     """Generate sets of closing bracket IDs that should be omitted in a RHS.
1428
1429     Brackets can be omitted if the entire trailer up to and including
1430     a preceding closing bracket fits in one line.
1431
1432     Yielded sets are cumulative (contain results of previous yields, too).  First
1433     set is empty, unless the line should explode, in which case bracket pairs until
1434     the one that needs to explode are omitted.
1435     """
1436
1437     omit: Set[LeafID] = set()
1438     if not line.magic_trailing_comma:
1439         yield omit
1440
1441     length = 4 * line.depth
1442     opening_bracket: Optional[Leaf] = None
1443     closing_bracket: Optional[Leaf] = None
1444     inner_brackets: Set[LeafID] = set()
1445     for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
1446         length += leaf_length
1447         if length > line_length:
1448             break
1449
1450         has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
1451         if leaf.type == STANDALONE_COMMENT or has_inline_comment:
1452             break
1453
1454         if opening_bracket:
1455             if leaf is opening_bracket:
1456                 opening_bracket = None
1457             elif leaf.type in CLOSING_BRACKETS:
1458                 prev = line.leaves[index - 1] if index > 0 else None
1459                 if (
1460                     prev
1461                     and prev.type == token.COMMA
1462                     and leaf.opening_bracket is not None
1463                     and not is_one_sequence_between(
1464                         leaf.opening_bracket, leaf, line.leaves
1465                     )
1466                 ):
1467                     # Never omit bracket pairs with trailing commas.
1468                     # We need to explode on those.
1469                     break
1470
1471                 inner_brackets.add(id(leaf))
1472         elif leaf.type in CLOSING_BRACKETS:
1473             prev = line.leaves[index - 1] if index > 0 else None
1474             if prev and prev.type in OPENING_BRACKETS:
1475                 # Empty brackets would fail a split, so treat them as "inner"
1476                 # brackets (i.e. only add them to the `omit` set if another
1477                 # pair of brackets was good enough).
1478                 inner_brackets.add(id(leaf))
1479                 continue
1480
1481             if closing_bracket:
1482                 omit.add(id(closing_bracket))
1483                 omit.update(inner_brackets)
1484                 inner_brackets.clear()
1485                 yield omit
1486
1487             if (
1488                 prev
1489                 and prev.type == token.COMMA
1490                 and leaf.opening_bracket is not None
1491                 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
1492             ):
1493                 # Never omit bracket pairs with trailing commas.
1494                 # We need to explode on those.
1495                 break
1496
1497             if leaf.value:
1498                 opening_bracket = leaf.opening_bracket
1499                 closing_bracket = leaf
1500
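# Editorial note (not part of the original source): a sketch of what
# generate_trailers_to_omit yields for a chained trailer such as
#
#   result = some_object.method_one(arg).method_two(other).attribute
#
# Walking the line right to left, it yields cumulative sets of closing-bracket ids:
# first the empty set, then a set also covering the `)` of `.method_two(other)`,
# and so on. The caller can then try right-hand splits at progressively earlier
# bracket pairs while the omitted trailers, which already fit on one line, stay
# unsplit.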
1501
1502 def run_transformer(
1503     line: Line,
1504     transform: Transformer,
1505     mode: Mode,
1506     features: Collection[Feature],
1507     *,
1508     line_str: str = "",
1509 ) -> List[Line]:
1510     if not line_str:
1511         line_str = line_to_string(line)
1512     result: List[Line] = []
1513     for transformed_line in transform(line, features, mode):
1514         if str(transformed_line).strip("\n") == line_str:
1515             raise CannotTransform("Line transformer returned an unchanged result")
1516
1517         result.extend(transform_line(transformed_line, mode=mode, features=features))
1518
1519     features_set = set(features)
1520     if (
1521         Feature.FORCE_OPTIONAL_PARENTHESES in features_set
1522         or transform.__class__.__name__ != "rhs"
1523         or not line.bracket_tracker.invisible
1524         or any(bracket.value for bracket in line.bracket_tracker.invisible)
1525         or line.contains_multiline_strings()
1526         or result[0].contains_uncollapsable_type_comments()
1527         or result[0].contains_unsplittable_type_ignore()
1528         or is_line_short_enough(result[0], line_length=mode.line_length)
1529         # If any leaves have no parents (which _can_ occur since
1530         # `transform(line)` potentially destroys the line's underlying node
1531         # structure), then we can't proceed. Doing so would cause the below
1532         # call to `append_leaves()` to fail.
1533         or any(leaf.parent is None for leaf in line.leaves)
1534     ):
1535         return result
1536
1537     line_copy = line.clone()
1538     append_leaves(line_copy, line, line.leaves)
1539     features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES}
1540     second_opinion = run_transformer(
1541         line_copy, transform, mode, features_fop, line_str=line_str
1542     )
1543     if all(
1544         is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
1545     ):
1546         result = second_opinion
1547     return result
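# Editorial note (not part of the original source): a sketch of the "second opinion"
# logic at the end of run_transformer above. When an rhs split leaves the first
# resulting line too long even though optional parentheses were available, the same
# transformer is re-run with Feature.FORCE_OPTIONAL_PARENTHESES added, which roughly
# corresponds to preferring a wrapped form such as
#
#   value = (
#       some_function(argument_one, argument_two) + another_function()
#   )
#
# over a split inside one of the calls; the re-run's output is kept only if every
# resulting line then fits within the configured line length.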