src/black/linegen.py
1 """
2 Generating lines of code.
3 """
4 import sys
5 from dataclasses import dataclass
6 from enum import Enum, auto
7 from functools import partial, wraps
8 from typing import Collection, Iterator, List, Optional, Set, Union, cast
9
10 from black.brackets import (
11     COMMA_PRIORITY,
12     DOT_PRIORITY,
13     get_leaves_inside_matching_brackets,
14     max_delimiter_priority_in_atom,
15 )
16 from black.comments import FMT_OFF, generate_comments, list_comments
17 from black.lines import (
18     Line,
19     append_leaves,
20     can_be_split,
21     can_omit_invisible_parens,
22     is_line_short_enough,
23     line_to_string,
24 )
25 from black.mode import Feature, Mode, Preview
26 from black.nodes import (
27     ASSIGNMENTS,
28     BRACKETS,
29     CLOSING_BRACKETS,
30     OPENING_BRACKETS,
31     RARROW,
32     STANDALONE_COMMENT,
33     STATEMENT,
34     WHITESPACE,
35     Visitor,
36     ensure_visible,
37     is_arith_like,
38     is_atom_with_invisible_parens,
39     is_docstring,
40     is_empty_tuple,
41     is_lpar_token,
42     is_multiline_string,
43     is_name_token,
44     is_one_sequence_between,
45     is_one_tuple,
46     is_rpar_token,
47     is_stub_body,
48     is_stub_suite,
49     is_tuple_containing_walrus,
50     is_vararg,
51     is_walrus_assignment,
52     is_yield,
53     syms,
54     wrap_in_parentheses,
55 )
56 from black.numerics import normalize_numeric_literal
57 from black.strings import (
58     fix_docstring,
59     get_string_prefix,
60     normalize_string_prefix,
61     normalize_string_quotes,
62     normalize_unicode_escape_sequences,
63 )
64 from black.trans import (
65     CannotTransform,
66     StringMerger,
67     StringParenStripper,
68     StringParenWrapper,
69     StringSplitter,
70     Transformer,
71     hug_power_op,
72 )
73 from blib2to3.pgen2 import token
74 from blib2to3.pytree import Leaf, Node
75
76 # types
77 LeafID = int
78 LN = Union[Leaf, Node]
79
80
81 class CannotSplit(CannotTransform):
82     """A readable split that fits the allotted line length is impossible."""
83
84
85 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
86 # See also https://github.com/mypyc/mypyc/issues/827.
87 class LineGenerator(Visitor[Line]):
88     """Generates reformatted Line objects.  Empty lines are not emitted.
89
90     Note: destroys the tree it's visiting by mutating prefixes of its leaves,
91     so the tree will no longer stringify back to valid Python code.
92     """
93
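    # A hedged usage sketch (not part of this module): LineGenerator is normally
    # driven indirectly through Black's public API rather than instantiated by
    # hand.  Something like the following shows the end-to-end effect; exact
    # output can vary between Black versions:
    #
    #     >>> import black
    #     >>> print(black.format_str("def f( a ):\n  return a\n", mode=black.Mode()), end="")
    #     def f(a):
    #         return a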
94     def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
95         self.mode = mode
96         self.features = features
97         self.current_line: Line
98         self.__post_init__()
99
100     def line(self, indent: int = 0) -> Iterator[Line]:
101         """Generate a line.
102
103         If the line is empty, only emit if it makes sense.
104         If the line is too long, split it first and then generate.
105
106         If any lines were generated, set up a new current_line.
107         """
108         if not self.current_line:
109             self.current_line.depth += indent
110             return  # Line is empty, don't emit. Creating a new one is unnecessary.
111
112         complete_line = self.current_line
113         self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
114         yield complete_line
115
116     def visit_default(self, node: LN) -> Iterator[Line]:
117         """Default `visit_*()` implementation. Recurses to children of `node`."""
118         if isinstance(node, Leaf):
119             any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
120             for comment in generate_comments(node, preview=self.mode.preview):
121                 if any_open_brackets:
122                     # any comment within brackets is subject to splitting
123                     self.current_line.append(comment)
124                 elif comment.type == token.COMMENT:
125                     # regular trailing comment
126                     self.current_line.append(comment)
127                     yield from self.line()
128
129                 else:
130                     # regular standalone comment
131                     yield from self.line()
132
133                     self.current_line.append(comment)
134                     yield from self.line()
135
136             normalize_prefix(node, inside_brackets=any_open_brackets)
137             if self.mode.string_normalization and node.type == token.STRING:
138                 node.value = normalize_string_prefix(node.value)
139                 node.value = normalize_string_quotes(node.value)
140             if node.type == token.NUMBER:
141                 normalize_numeric_literal(node)
142             if node.type not in WHITESPACE:
143                 self.current_line.append(node)
144         yield from super().visit_default(node)
145
146     def visit_test(self, node: Node) -> Iterator[Line]:
147         """Visit an `x if y else z` test"""
148
149         if Preview.parenthesize_conditional_expressions in self.mode:
150             already_parenthesized = (
151                 node.prev_sibling and node.prev_sibling.type == token.LPAR
152             )
153
154             if not already_parenthesized:
155                 lpar = Leaf(token.LPAR, "")
156                 rpar = Leaf(token.RPAR, "")
157                 node.insert_child(0, lpar)
158                 node.append_child(rpar)
159
160         yield from self.visit_default(node)
161
162     def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
163         """Increase indentation level, maybe yield a line."""
164         # In blib2to3 INDENT never holds comments.
165         yield from self.line(+1)
166         yield from self.visit_default(node)
167
168     def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
169         """Decrease indentation level, maybe yield a line."""
170         # The current line might still wait for trailing comments.  At DEDENT time
171         # there won't be any (they would be prefixes on the preceding NEWLINE).
172         # Emit the line then.
173         yield from self.line()
174
175         # While DEDENT has no value, its prefix may contain standalone comments
176         # that belong to the current indentation level.  Get 'em.
177         yield from self.visit_default(node)
178
179         # Finally, emit the dedent.
180         yield from self.line(-1)
181
182     def visit_stmt(
183         self, node: Node, keywords: Set[str], parens: Set[str]
184     ) -> Iterator[Line]:
185         """Visit a statement.
186
187         This implementation is shared for `if`, `while`, `for`, `try`, `except`,
188         `def`, `with`, `class`, `assert`, and assignments.
189
190         The relevant Python language `keywords` for a given statement will be
191     NAME leaves within it. This method puts those on a separate line.
192
193         `parens` holds a set of string leaf values immediately after which
194         invisible parens should be put.
195         """
196         normalize_invisible_parens(
197             node, parens_after=parens, mode=self.mode, features=self.features
198         )
199         for child in node.children:
200             if is_name_token(child) and child.value in keywords:
201                 yield from self.line()
202
203             yield from self.visit(child)
204
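    # A hedged illustration of the shared statement handling above (default
    # stable style assumed): redundant parentheses after statement keywords such
    # as `if` are made invisible, so they disappear from the output:
    #
    #     >>> import black
    #     >>> src = "if (x):\n    pass\nelse:\n    pass\n"
    #     >>> print(black.format_str(src, mode=black.Mode()), end="")
    #     if x:
    #         pass
    #     else:
    #         pass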
205     def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
206         if Preview.wrap_long_dict_values_in_parens in self.mode:
207             for i, child in enumerate(node.children):
208                 if i == 0:
209                     continue
210                 if node.children[i - 1].type == token.COLON:
211                     if child.type == syms.atom and child.children[0].type == token.LPAR:
212                         if maybe_make_parens_invisible_in_atom(
213                             child,
214                             parent=node,
215                             remove_brackets_around_comma=False,
216                         ):
217                             wrap_in_parentheses(node, child, visible=False)
218                     else:
219                         wrap_in_parentheses(node, child, visible=False)
220         yield from self.visit_default(node)
221
222     def visit_funcdef(self, node: Node) -> Iterator[Line]:
223         """Visit function definition."""
224         if Preview.annotation_parens not in self.mode:
225             yield from self.visit_stmt(node, keywords={"def"}, parens=set())
226         else:
227             yield from self.line()
228
229             # Remove redundant brackets around return type annotation.
230             is_return_annotation = False
231             for child in node.children:
232                 if child.type == token.RARROW:
233                     is_return_annotation = True
234                 elif is_return_annotation:
235                     if child.type == syms.atom and child.children[0].type == token.LPAR:
236                         if maybe_make_parens_invisible_in_atom(
237                             child,
238                             parent=node,
239                             remove_brackets_around_comma=False,
240                         ):
241                             wrap_in_parentheses(node, child, visible=False)
242                     else:
243                         wrap_in_parentheses(node, child, visible=False)
244                     is_return_annotation = False
245
246             for child in node.children:
247                 yield from self.visit(child)
248
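    # A hedged example of the return-annotation handling above.  At this
    # revision it is gated behind Preview.annotation_parens, so preview mode is
    # assumed here; the gating may differ in other Black versions:
    #
    #     >>> import black
    #     >>> src = "def f() -> (int):\n    ...\n"
    #     >>> print(black.format_str(src, mode=black.Mode(preview=True)), end="")
    #     def f() -> int:
    #         ...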
249     def visit_match_case(self, node: Node) -> Iterator[Line]:
250         """Visit either a match or case statement."""
251         normalize_invisible_parens(
252             node, parens_after=set(), mode=self.mode, features=self.features
253         )
254
255         yield from self.line()
256         for child in node.children:
257             yield from self.visit(child)
258
259     def visit_suite(self, node: Node) -> Iterator[Line]:
260         """Visit a suite."""
261         if self.mode.is_pyi and is_stub_suite(node):
262             yield from self.visit(node.children[2])
263         else:
264             yield from self.visit_default(node)
265
266     def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
267         """Visit a statement without nested statements."""
268         prev_type: Optional[int] = None
269         for child in node.children:
270             if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
271                 wrap_in_parentheses(node, child, visible=False)
272             prev_type = child.type
273
274         is_suite_like = node.parent and node.parent.type in STATEMENT
275         if is_suite_like:
276             if self.mode.is_pyi and is_stub_body(node):
277                 yield from self.visit_default(node)
278             else:
279                 yield from self.line(+1)
280                 yield from self.visit_default(node)
281                 yield from self.line(-1)
282
283         else:
284             if (
285                 not self.mode.is_pyi
286                 or not node.parent
287                 or not is_stub_suite(node.parent)
288             ):
289                 yield from self.line()
290             yield from self.visit_default(node)
291
292     def visit_async_stmt(self, node: Node) -> Iterator[Line]:
293         """Visit `async def`, `async for`, `async with`."""
294         yield from self.line()
295
296         children = iter(node.children)
297         for child in children:
298             yield from self.visit(child)
299
300             if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
301                 # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
302                 # line.
303                 break
304
305         internal_stmt = next(children)
306         for child in internal_stmt.children:
307             yield from self.visit(child)
308
309     def visit_decorators(self, node: Node) -> Iterator[Line]:
310         """Visit decorators."""
311         for child in node.children:
312             yield from self.line()
313             yield from self.visit(child)
314
315     def visit_power(self, node: Node) -> Iterator[Line]:
316         for idx, leaf in enumerate(node.children[:-1]):
317             next_leaf = node.children[idx + 1]
318
319             if not isinstance(leaf, Leaf):
320                 continue
321
322             value = leaf.value.lower()
323             if (
324                 leaf.type == token.NUMBER
325                 and next_leaf.type == syms.trailer
326                 # Ensure that we are in an attribute trailer
327                 and next_leaf.children[0].type == token.DOT
328                 # It shouldn't wrap hexadecimal, binary and octal literals
329                 and not value.startswith(("0x", "0b", "0o"))
330                 # It shouldn't wrap complex literals
331                 and "j" not in value
332             ):
333                 wrap_in_parentheses(node, leaf)
334
335         if Preview.remove_redundant_parens in self.mode:
336             remove_await_parens(node)
337
338         yield from self.visit_default(node)
339
340     def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
341         """Remove a semicolon and put the other statement on a separate line."""
342         yield from self.line()
343
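    # A small hedged example of the semicolon handling above:
    #
    #     >>> import black
    #     >>> black.format_str("a = 1; b = 2\n", mode=black.Mode())
    #     'a = 1\nb = 2\n'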
344     def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
345         """End of file. Process outstanding comments and end with a newline."""
346         yield from self.visit_default(leaf)
347         yield from self.line()
348
349     def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
350         if not self.current_line.bracket_tracker.any_open_brackets():
351             yield from self.line()
352         yield from self.visit_default(leaf)
353
354     def visit_factor(self, node: Node) -> Iterator[Line]:
355         """Force parentheses between a unary op and a binary power:
356
357         -2 ** 8 -> -(2 ** 8)
358         """
359         _operator, operand = node.children
360         if (
361             operand.type == syms.power
362             and len(operand.children) == 3
363             and operand.children[1].type == token.DOUBLESTAR
364         ):
365             lpar = Leaf(token.LPAR, "(")
366             rpar = Leaf(token.RPAR, ")")
367             index = operand.remove() or 0
368             node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
369         yield from self.visit_default(node)
370
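    # A hedged example of visit_factor combined with power-operator hugging;
    # the exact spacing depends on the Black version in use:
    #
    #     >>> import black
    #     >>> black.format_str("-2 ** 8\n", mode=black.Mode())
    #     '-(2**8)\n'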
371     def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
372         if Preview.hex_codes_in_unicode_sequences in self.mode:
373             normalize_unicode_escape_sequences(leaf)
374
375         if is_docstring(leaf) and "\\\n" not in leaf.value:
376             # We're ignoring docstrings with backslash newline escapes because changing
377             # indentation of those changes the AST representation of the code.
378             if Preview.normalize_docstring_quotes_and_prefixes_properly in self.mode:
379                 # There was a bug where --skip-string-normalization wouldn't stop us
380                 # from normalizing docstring prefixes. To maintain stability, we can
381                 # only address this buggy behaviour while the preview style is enabled.
382                 if self.mode.string_normalization:
383                     docstring = normalize_string_prefix(leaf.value)
384                     # visit_default() does handle string normalization for us, but
385                     # since this method acts differently depending on quote style (ex.
386                     # see padding logic below), there's a possibility for unstable
387                     # formatting as visit_default() is called *after*. To avoid a
388                     # situation where this function formats a docstring differently on
389                     # the second pass, normalize it early.
390                     docstring = normalize_string_quotes(docstring)
391                 else:
392                     docstring = leaf.value
393             else:
394                 # ... otherwise, we'll keep the buggy behaviour >.<
395                 docstring = normalize_string_prefix(leaf.value)
396             prefix = get_string_prefix(docstring)
397             docstring = docstring[len(prefix) :]  # Remove the prefix
398             quote_char = docstring[0]
399             # A natural way to remove the outer quotes is to do:
400             #   docstring = docstring.strip(quote_char)
401             # but that breaks on """""x""" (which is '""x').
402             # So we actually need to remove the first character and the next two
403             # characters but only if they are the same as the first.
404             quote_len = 1 if docstring[1] != quote_char else 3
405             docstring = docstring[quote_len:-quote_len]
406             docstring_started_empty = not docstring
407             indent = " " * 4 * self.current_line.depth
408
409             if is_multiline_string(leaf):
410                 docstring = fix_docstring(docstring, indent)
411             else:
412                 docstring = docstring.strip()
413
414             has_trailing_backslash = False
415             if docstring:
416                 # Add some padding if the docstring starts / ends with a quote mark.
417                 if docstring[0] == quote_char:
418                     docstring = " " + docstring
419                 if docstring[-1] == quote_char:
420                     docstring += " "
421                 if docstring[-1] == "\\":
422                     backslash_count = len(docstring) - len(docstring.rstrip("\\"))
423                     if backslash_count % 2:
424                         # Odd number of trailing backslashes, add some padding to
425                         # avoid escaping the closing string quote.
426                         docstring += " "
427                         has_trailing_backslash = True
428             elif not docstring_started_empty:
429                 docstring = " "
430
431             # We could enforce triple quotes at this point.
432             quote = quote_char * quote_len
433
434             # It's invalid to put closing single-character quotes on a new line.
435             if Preview.long_docstring_quotes_on_newline in self.mode and quote_len == 3:
436                 # We need to find the length of the last line of the docstring
437                 # to find if we can add the closing quotes to the line without
438                 # exceeding the maximum line length.
439                 # If docstring is one line, we don't put the closing quotes on a
440                 # separate line because it looks ugly (#3320).
441                 lines = docstring.splitlines()
442                 last_line_length = len(lines[-1]) if docstring else 0
443
444                 # If adding closing quotes would cause the last line to exceed
445                 # the maximum line length then put a line break before the
446                 # closing quotes
447                 if (
448                     len(lines) > 1
449                     and last_line_length + quote_len > self.mode.line_length
450                     and len(indent) + quote_len <= self.mode.line_length
451                     and not has_trailing_backslash
452                 ):
453                     leaf.value = prefix + quote + docstring + "\n" + indent + quote
454                 else:
455                     leaf.value = prefix + quote + docstring + quote
456             else:
457                 leaf.value = prefix + quote + docstring + quote
458
459         yield from self.visit_default(leaf)
460
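    # A hedged sketch of the docstring handling above: the prefix and quotes are
    # normalized and surrounding whitespace is cleaned up (default mode; details
    # may vary slightly between versions):
    #
    #     >>> import black
    #     >>> src = "def f():\n    '''Docstring.   '''\n"
    #     >>> print(black.format_str(src, mode=black.Mode()), end="")
    #     def f():
    #         """Docstring."""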
461     def __post_init__(self) -> None:
462         """You are in a twisty little maze of passages."""
463         self.current_line = Line(mode=self.mode)
464
465         v = self.visit_stmt
466         Ø: Set[str] = set()
467         self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
468         self.visit_if_stmt = partial(
469             v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
470         )
471         self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
472         self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
473         self.visit_try_stmt = partial(
474             v, keywords={"try", "except", "else", "finally"}, parens=Ø
475         )
476         if self.mode.preview:
477             self.visit_except_clause = partial(
478                 v, keywords={"except"}, parens={"except"}
479             )
480             self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
481         else:
482             self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
483             self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
484         self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
485         self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
486         self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
487         self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
488         self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
489         self.visit_async_funcdef = self.visit_async_stmt
490         self.visit_decorated = self.visit_decorators
491
492         # PEP 634
493         self.visit_match_stmt = self.visit_match_case
494         self.visit_case_block = self.visit_match_case
495
496
497 def transform_line(
498     line: Line, mode: Mode, features: Collection[Feature] = ()
499 ) -> Iterator[Line]:
500     """Transform a `line`, potentially splitting it into many lines.
501
502     They should fit in the allotted `line_length` but might not be able to.
503
504     `features` are syntactical features that may be used in the output.
505     """
506     if line.is_comment:
507         yield line
508         return
509
510     line_str = line_to_string(line)
511
512     ll = mode.line_length
513     sn = mode.string_normalization
514     string_merge = StringMerger(ll, sn)
515     string_paren_strip = StringParenStripper(ll, sn)
516     string_split = StringSplitter(ll, sn)
517     string_paren_wrap = StringParenWrapper(ll, sn)
518
519     transformers: List[Transformer]
520     if (
521         not line.contains_uncollapsable_type_comments()
522         and not line.should_split_rhs
523         and not line.magic_trailing_comma
524         and (
525             is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
526             or line.contains_unsplittable_type_ignore()
527         )
528         and not (line.inside_brackets and line.contains_standalone_comments())
529     ):
530         # Only apply basic string preprocessing, since lines shouldn't be split here.
531         if Preview.string_processing in mode:
532             transformers = [string_merge, string_paren_strip]
533         else:
534             transformers = []
535     elif line.is_def:
536         transformers = [left_hand_split]
537     else:
538
539         def _rhs(
540             self: object, line: Line, features: Collection[Feature]
541         ) -> Iterator[Line]:
542             """Wraps calls to `right_hand_split`.
543
544             The calls increasingly `omit` right-hand trailers (bracket pairs with
545             content), meaning the trailers get glued together to split on another
546             bracket pair instead.
547             """
548             for omit in generate_trailers_to_omit(line, mode.line_length):
549                 lines = list(
550                     right_hand_split(line, mode.line_length, features, omit=omit)
551                 )
552                 # Note: this check is only able to figure out if the first line of the
553                 # *current* transformation fits in the line length.  This is true only
554                 # for simple cases.  All others require running more transforms via
555                 # `transform_line()`.  This check doesn't know if those would succeed.
556                 if is_line_short_enough(lines[0], line_length=mode.line_length):
557                     yield from lines
558                     return
559
560             # All splits failed, best effort split with no omits.
561             # This mostly happens to multiline strings that are by definition
562             # reported as not fitting a single line, as well as lines that contain
563             # trailing commas (those have to be exploded).
564             yield from right_hand_split(
565                 line, line_length=mode.line_length, features=features
566             )
567
568         # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
569         # __name__ attribute which is needed in `run_transformer` further down.
570         # Unfortunately a nested class breaks mypyc too. So a class must be created
571         # via type ... https://github.com/mypyc/mypyc/issues/884
572         rhs = type("rhs", (), {"__call__": _rhs})()
573
574         if Preview.string_processing in mode:
575             if line.inside_brackets:
576                 transformers = [
577                     string_merge,
578                     string_paren_strip,
579                     string_split,
580                     delimiter_split,
581                     standalone_comment_split,
582                     string_paren_wrap,
583                     rhs,
584                 ]
585             else:
586                 transformers = [
587                     string_merge,
588                     string_paren_strip,
589                     string_split,
590                     string_paren_wrap,
591                     rhs,
592                 ]
593         else:
594             if line.inside_brackets:
595                 transformers = [delimiter_split, standalone_comment_split, rhs]
596             else:
597                 transformers = [rhs]
598     # It's always safe to attempt hugging of power operations and pretty much every line
599     # could match.
600     transformers.append(hug_power_op)
601
602     for transform in transformers:
603         # We are accumulating lines in `result` because we might want to abort
604         # mission and return the original line in the end, or attempt a different
605         # split altogether.
606         try:
607             result = run_transformer(line, transform, mode, features, line_str=line_str)
608         except CannotTransform:
609             continue
610         else:
611             yield from result
612             break
613
614     else:
615         yield line
616
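# A hedged end-to-end illustration of the transformer cascade above; it mirrors
# the first example in Black's README (default 88-character line length).  A
# call that no longer fits is split at its last bracket pair by the right-hand
# split transformer:
#
#     ImportantClass.important_method(exc, limit, lookup_lines, capture_locals, extra_argument)
#
# becomes
#
#     ImportantClass.important_method(
#         exc, limit, lookup_lines, capture_locals, extra_argument
#     )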
617
618 class _BracketSplitComponent(Enum):
619     head = auto()
620     body = auto()
621     tail = auto()
622
623
624 def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
625     """Split line into many lines, starting with the first matching bracket pair.
626
627     Note: this usually looks weird, only use this for function definitions.
628     Prefer RHS otherwise.  This is why this function is not symmetrical with
629     :func:`right_hand_split` which also handles optional parentheses.
630     """
631     tail_leaves: List[Leaf] = []
632     body_leaves: List[Leaf] = []
633     head_leaves: List[Leaf] = []
634     current_leaves = head_leaves
635     matching_bracket: Optional[Leaf] = None
636     for leaf in line.leaves:
637         if (
638             current_leaves is body_leaves
639             and leaf.type in CLOSING_BRACKETS
640             and leaf.opening_bracket is matching_bracket
641             and isinstance(matching_bracket, Leaf)
642         ):
643             ensure_visible(leaf)
644             ensure_visible(matching_bracket)
645             current_leaves = tail_leaves if body_leaves else head_leaves
646         current_leaves.append(leaf)
647         if current_leaves is head_leaves:
648             if leaf.type in OPENING_BRACKETS:
649                 matching_bracket = leaf
650                 current_leaves = body_leaves
651     if not matching_bracket:
652         raise CannotSplit("No brackets found")
653
654     head = bracket_split_build_line(
655         head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
656     )
657     body = bracket_split_build_line(
658         body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
659     )
660     tail = bracket_split_build_line(
661         tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
662     )
663     bracket_split_succeeded_or_raise(head, body, tail)
664     for result in (head, body, tail):
665         if result:
666             yield result
667
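# A hedged sketch of what a left-hand split produces for a function definition:
# the head keeps everything up to and including the first opening bracket, the
# body is indented one level, and the tail starts at the matching closing
# bracket.  Assuming the original line exceeds the configured line length:
#
#     def process_record(self, first_argument, second_argument, third_argument) -> ProcessingResult:
#
# becomes
#
#     def process_record(
#         self, first_argument, second_argument, third_argument
#     ) -> ProcessingResult: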
668
669 @dataclass
670 class _RHSResult:
671     """Intermediate split result from a right hand split."""
672
673     head: Line
674     body: Line
675     tail: Line
676     opening_bracket: Leaf
677     closing_bracket: Leaf
678
679
680 def right_hand_split(
681     line: Line,
682     line_length: int,
683     features: Collection[Feature] = (),
684     omit: Collection[LeafID] = (),
685 ) -> Iterator[Line]:
686     """Split line into many lines, starting with the last matching bracket pair.
687
688     If the split was by optional parentheses, attempt splitting without them, too.
689     `omit` is a collection of closing bracket IDs that shouldn't be considered for
690     this split.
691
692     Note: running this function modifies `bracket_depth` on the leaves of `line`.
693     """
694     rhs_result = _first_right_hand_split(line, omit=omit)
695     yield from _maybe_split_omitting_optional_parens(
696         rhs_result, line, line_length, features=features, omit=omit
697     )
698
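# A hedged sketch of a right-hand split driven by optional (invisible) parens:
# when the right-hand side of an assignment has no brackets of its own and the
# line is too long, the invisible parentheses added earlier become visible and
# serve as the split point.  Roughly:
#
#     accumulated_result_value = first_operand + second_operand + third_operand + fourth_operand
#
# becomes
#
#     accumulated_result_value = (
#         first_operand + second_operand + third_operand + fourth_operand
#     )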
699
700 def _first_right_hand_split(
701     line: Line,
702     omit: Collection[LeafID] = (),
703 ) -> _RHSResult:
704     """Split the line into head, body, tail starting with the last bracket pair.
705
706     Note: this function should not have side effects. It's relied upon by
707     _maybe_split_omitting_optional_parens to decide whether to prefer
708     splitting on the right side of an assignment statement.
709     """
710     tail_leaves: List[Leaf] = []
711     body_leaves: List[Leaf] = []
712     head_leaves: List[Leaf] = []
713     current_leaves = tail_leaves
714     opening_bracket: Optional[Leaf] = None
715     closing_bracket: Optional[Leaf] = None
716     for leaf in reversed(line.leaves):
717         if current_leaves is body_leaves:
718             if leaf is opening_bracket:
719                 current_leaves = head_leaves if body_leaves else tail_leaves
720         current_leaves.append(leaf)
721         if current_leaves is tail_leaves:
722             if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
723                 opening_bracket = leaf.opening_bracket
724                 closing_bracket = leaf
725                 current_leaves = body_leaves
726     if not (opening_bracket and closing_bracket and head_leaves):
727         # If there is no opening or closing bracket, that means the split failed and
728         # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
729         # the matching `opening_bracket` wasn't available on `line` anymore.
730         raise CannotSplit("No brackets found")
731
732     tail_leaves.reverse()
733     body_leaves.reverse()
734     head_leaves.reverse()
735     head = bracket_split_build_line(
736         head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
737     )
738     body = bracket_split_build_line(
739         body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
740     )
741     tail = bracket_split_build_line(
742         tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
743     )
744     bracket_split_succeeded_or_raise(head, body, tail)
745     return _RHSResult(head, body, tail, opening_bracket, closing_bracket)
746
747
748 def _maybe_split_omitting_optional_parens(
749     rhs: _RHSResult,
750     line: Line,
751     line_length: int,
752     features: Collection[Feature] = (),
753     omit: Collection[LeafID] = (),
754 ) -> Iterator[Line]:
755     if (
756         Feature.FORCE_OPTIONAL_PARENTHESES not in features
757         # the opening bracket is an optional paren
758         and rhs.opening_bracket.type == token.LPAR
759         and not rhs.opening_bracket.value
760         # the closing bracket is an optional paren
761         and rhs.closing_bracket.type == token.RPAR
762         and not rhs.closing_bracket.value
763         # it's not an import (optional parens are the only thing we can split on
764         # in this case; attempting a split without them is a waste of time)
765         and not line.is_import
766         # there are no standalone comments in the body
767         and not rhs.body.contains_standalone_comments(0)
768         # and we can actually remove the parens
769         and can_omit_invisible_parens(rhs.body, line_length)
770     ):
771         omit = {id(rhs.closing_bracket), *omit}
772         try:
773             # The _RHSResult Omitting Optional Parens.
774             rhs_oop = _first_right_hand_split(line, omit=omit)
775             if not (
776                 Preview.prefer_splitting_right_hand_side_of_assignments in line.mode
777                 # the split is right after `=`
778                 and len(rhs.head.leaves) >= 2
779                 and rhs.head.leaves[-2].type == token.EQUAL
780                 # the left side of assignment contains brackets
781                 and any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1])
782                 # the left side of assignment is short enough (the -1 is for the ending
783                 # optional paren)
784                 and is_line_short_enough(rhs.head, line_length=line_length - 1)
785                 # the left side of assignment won't explode further because of magic
786                 # trailing comma
787                 and rhs.head.magic_trailing_comma is None
788                 # the split by omitting optional parens isn't preferred for some other
789                 # reason
790                 and not _prefer_split_rhs_oop(rhs_oop, line_length=line_length)
791             ):
792                 yield from _maybe_split_omitting_optional_parens(
793                     rhs_oop, line, line_length, features=features, omit=omit
794                 )
795                 return
796
797         except CannotSplit as e:
798             if not (
799                 can_be_split(rhs.body)
800                 or is_line_short_enough(rhs.body, line_length=line_length)
801             ):
802                 raise CannotSplit(
803                     "Splitting failed, body is still too long and can't be split."
804                 ) from e
805
806             elif (
807                 rhs.head.contains_multiline_strings()
808                 or rhs.tail.contains_multiline_strings()
809             ):
810                 raise CannotSplit(
811                     "The current optional pair of parentheses is bound to fail to"
812                     " satisfy the splitting algorithm because the head or the tail"
813                     " contains multiline strings which by definition never fit one"
814                     " line."
815                 ) from e
816
817     ensure_visible(rhs.opening_bracket)
818     ensure_visible(rhs.closing_bracket)
819     for result in (rhs.head, rhs.body, rhs.tail):
820         if result:
821             yield result
822
823
824 def _prefer_split_rhs_oop(rhs_oop: _RHSResult, line_length: int) -> bool:
825     """
826     Returns whether we should prefer the result from a split omitting optional parens.
827     """
828     has_closing_bracket_after_assign = False
829     for leaf in reversed(rhs_oop.head.leaves):
830         if leaf.type == token.EQUAL:
831             break
832         if leaf.type in CLOSING_BRACKETS:
833             has_closing_bracket_after_assign = True
834             break
835     return (
836         # contains matching brackets after the `=` (done by checking there is a
837         # closing bracket)
838         has_closing_bracket_after_assign
839         or (
840             # the split is actually from inside the optional parens (done by checking
841             # the first line still contains the `=`)
842             any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
843             # the first line is short enough
844             and is_line_short_enough(rhs_oop.head, line_length=line_length)
845         )
846         # contains unsplittable type ignore
847         or rhs_oop.head.contains_unsplittable_type_ignore()
848         or rhs_oop.body.contains_unsplittable_type_ignore()
849         or rhs_oop.tail.contains_unsplittable_type_ignore()
850     )
851
852
853 def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
854     """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.
855
856     Do nothing otherwise.
857
858     A left- or right-hand split is based on a pair of brackets. Content before
859     (and including) the opening bracket is left on one line, content inside the
860     brackets is put on a separate line, and finally content starting with and
861     following the closing bracket is put on a separate line.
862
863     Those are called `head`, `body`, and `tail`, respectively. If the split
864     produced the same line (all content in `head`) or ended up with an empty `body`
865     and the `tail` is just the closing bracket, then it's considered failed.
866     """
867     tail_len = len(str(tail).strip())
868     if not body:
869         if tail_len == 0:
870             raise CannotSplit("Splitting brackets produced the same line")
871
872         elif tail_len < 3:
873             raise CannotSplit(
874                 f"Splitting brackets on an empty body to save {tail_len} characters is"
875                 " not worth it"
876             )
877
878
879 def bracket_split_build_line(
880     leaves: List[Leaf],
881     original: Line,
882     opening_bracket: Leaf,
883     *,
884     component: _BracketSplitComponent,
885 ) -> Line:
886     """Return a new line with given `leaves` and respective comments from `original`.
887
888     If it's the head component, brackets will be tracked so trailing commas are
889     respected.
890
891     If it's the body component, the result line is one-indented inside brackets and as
892     such has its first leaf's prefix normalized and a trailing comma added when
893     expected.
894     """
895     result = Line(mode=original.mode, depth=original.depth)
896     if component is _BracketSplitComponent.body:
897         result.inside_brackets = True
898         result.depth += 1
899         if leaves:
900             # Since body is a new indent level, remove spurious leading whitespace.
901             normalize_prefix(leaves[0], inside_brackets=True)
902             # Ensure a trailing comma for imports and standalone function arguments, but
903             # be careful not to add one after any comments or within type annotations.
904             no_commas = (
905                 original.is_def
906                 and opening_bracket.value == "("
907                 and not any(leaf.type == token.COMMA for leaf in leaves)
908                 # In particular, don't add one within a parenthesized return annotation.
909                 # Unfortunately the indicator we're in a return annotation (RARROW) may
910                 # be defined directly in the parent node, the parent of the parent ...
911                 # and so on depending on how complex the return annotation is.
912                 # This isn't perfect and there are some false negatives, but they occur
913                 # in contexts where a comma is actually fine.
914                 and not any(
915                     node.prev_sibling.type == RARROW
916                     for node in (
917                         leaves[0].parent,
918                         getattr(leaves[0].parent, "parent", None),
919                     )
920                     if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
921                 )
922             )
923
924             if original.is_import or no_commas:
925                 for i in range(len(leaves) - 1, -1, -1):
926                     if leaves[i].type == STANDALONE_COMMENT:
927                         continue
928
929                     if leaves[i].type != token.COMMA:
930                         new_comma = Leaf(token.COMMA, ",")
931                         leaves.insert(i + 1, new_comma)
932                     break
933
934     leaves_to_track: Set[LeafID] = set()
935     if (
936         Preview.handle_trailing_commas_in_head in original.mode
937         and component is _BracketSplitComponent.head
938     ):
939         leaves_to_track = get_leaves_inside_matching_brackets(leaves)
940     # Populate the line
941     for leaf in leaves:
942         result.append(
943             leaf,
944             preformatted=True,
945             track_bracket=id(leaf) in leaves_to_track,
946         )
947         for comment_after in original.comments_after(leaf):
948             result.append(comment_after, preformatted=True)
949     if component is _BracketSplitComponent.body and should_split_line(
950         result, opening_bracket
951     ):
952         result.should_split_rhs = True
953     return result
954
955
956 def dont_increase_indentation(split_func: Transformer) -> Transformer:
957     """Normalize prefix of the first leaf in every line returned by `split_func`.
958
959     This is a decorator over relevant split functions.
960     """
961
962     @wraps(split_func)
963     def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
964         for split_line in split_func(line, features):
965             normalize_prefix(split_line.leaves[0], inside_brackets=True)
966             yield split_line
967
968     return split_wrapper
969
970
971 @dont_increase_indentation
972 def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
973     """Split according to delimiters of the highest priority.
974
975     If the appropriate Features are given, the split will add trailing commas
976     also in function signatures and calls that contain `*` and `**`.
977     """
978     try:
979         last_leaf = line.leaves[-1]
980     except IndexError:
981         raise CannotSplit("Line empty") from None
982
983     bt = line.bracket_tracker
984     try:
985         delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
986     except ValueError:
987         raise CannotSplit("No delimiters found") from None
988
989     if delimiter_priority == DOT_PRIORITY:
990         if bt.delimiter_count_with_priority(delimiter_priority) == 1:
991             raise CannotSplit("Splitting a single attribute from its owner looks wrong")
992
993     current_line = Line(
994         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
995     )
996     lowest_depth = sys.maxsize
997     trailing_comma_safe = True
998
999     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1000         """Append `leaf` to current line or to new line if appending impossible."""
1001         nonlocal current_line
1002         try:
1003             current_line.append_safe(leaf, preformatted=True)
1004         except ValueError:
1005             yield current_line
1006
1007             current_line = Line(
1008                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1009             )
1010             current_line.append(leaf)
1011
1012     for leaf in line.leaves:
1013         yield from append_to_line(leaf)
1014
1015         for comment_after in line.comments_after(leaf):
1016             yield from append_to_line(comment_after)
1017
1018         lowest_depth = min(lowest_depth, leaf.bracket_depth)
1019         if leaf.bracket_depth == lowest_depth:
1020             if is_vararg(leaf, within={syms.typedargslist}):
1021                 trailing_comma_safe = (
1022                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
1023                 )
1024             elif is_vararg(leaf, within={syms.arglist, syms.argument}):
1025                 trailing_comma_safe = (
1026                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
1027                 )
1028
1029         leaf_priority = bt.delimiters.get(id(leaf))
1030         if leaf_priority == delimiter_priority:
1031             yield current_line
1032
1033             current_line = Line(
1034                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1035             )
1036     if current_line:
1037         if (
1038             trailing_comma_safe
1039             and delimiter_priority == COMMA_PRIORITY
1040             and current_line.leaves[-1].type != token.COMMA
1041             and current_line.leaves[-1].type != STANDALONE_COMMENT
1042         ):
1043             new_comma = Leaf(token.COMMA, ",")
1044             current_line.append(new_comma)
1045         yield current_line
1046
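# A hedged illustration of a delimiter split at COMMA_PRIORITY, which also adds
# a trailing comma; this mirrors the second example in Black's README:
#
#     def very_important_function(template: str, *variables, file: os.PathLike, engine: str, header: bool = True, align: bool = True):
#         ...
#
# becomes
#
#     def very_important_function(
#         template: str,
#         *variables,
#         file: os.PathLike,
#         engine: str,
#         header: bool = True,
#         align: bool = True,
#     ):
#         ...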
1047
1048 @dont_increase_indentation
1049 def standalone_comment_split(
1050     line: Line, features: Collection[Feature] = ()
1051 ) -> Iterator[Line]:
1052     """Split standalone comments from the rest of the line."""
1053     if not line.contains_standalone_comments(0):
1054         raise CannotSplit("Line does not have any standalone comments")
1055
1056     current_line = Line(
1057         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1058     )
1059
1060     def append_to_line(leaf: Leaf) -> Iterator[Line]:
1061         """Append `leaf` to current line or to new line if appending impossible."""
1062         nonlocal current_line
1063         try:
1064             current_line.append_safe(leaf, preformatted=True)
1065         except ValueError:
1066             yield current_line
1067
1068             current_line = Line(
1069                 line.mode, depth=line.depth, inside_brackets=line.inside_brackets
1070             )
1071             current_line.append(leaf)
1072
1073     for leaf in line.leaves:
1074         yield from append_to_line(leaf)
1075
1076         for comment_after in line.comments_after(leaf):
1077             yield from append_to_line(comment_after)
1078
1079     if current_line:
1080         yield current_line
1081
1082
1083 def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
1084     """Leave existing extra newlines if not `inside_brackets`. Remove everything
1085     else.
1086
1087     Note: don't use backslashes for formatting or you'll lose your voting rights.
1088     """
1089     if not inside_brackets:
1090         spl = leaf.prefix.split("#")
1091         if "\\" not in spl[0]:
1092             nl_count = spl[-1].count("\n")
1093             if len(spl) > 1:
1094                 nl_count -= 1
1095             leaf.prefix = "\n" * nl_count
1096             return
1097
1098     leaf.prefix = ""
1099
1100
1101 def normalize_invisible_parens(
1102     node: Node, parens_after: Set[str], *, mode: Mode, features: Collection[Feature]
1103 ) -> None:
1104     """Make existing optional parentheses invisible or create new ones.
1105
1106     `parens_after` is a set of string leaf values immediately after which parens
1107     should be put.
1108
1109     Standardizes on visible parentheses for single-element tuples, and keeps
1110     existing visible parentheses for other tuples and generator expressions.
1111     """
1112     for pc in list_comments(node.prefix, is_endmarker=False, preview=mode.preview):
1113         if pc.value in FMT_OFF:
1114             # This `node` has a prefix with `# fmt: off`, don't mess with parens.
1115             return
1116
1117     # The multiple context managers grammar has a different pattern, thus this is
1118     # separate from the for-loop below. This possibly wraps them in invisible parens,
1119     # which remove_with_parens will later remove when needed.
1120     if node.type == syms.with_stmt:
1121         _maybe_wrap_cms_in_parens(node, mode, features)
1122
1123     check_lpar = False
1124     for index, child in enumerate(list(node.children)):
1125         # Fixes a bug where invisible parens are not properly stripped from
1126         # assignment statements that contain type annotations.
1127         if isinstance(child, Node) and child.type == syms.annassign:
1128             normalize_invisible_parens(
1129                 child, parens_after=parens_after, mode=mode, features=features
1130             )
1131
1132         # Add parentheses around long tuple unpacking in assignments.
1133         if (
1134             index == 0
1135             and isinstance(child, Node)
1136             and child.type == syms.testlist_star_expr
1137         ):
1138             check_lpar = True
1139
1140         if check_lpar:
1141             if (
1142                 mode.preview
1143                 and child.type == syms.atom
1144                 and node.type == syms.for_stmt
1145                 and isinstance(child.prev_sibling, Leaf)
1146                 and child.prev_sibling.type == token.NAME
1147                 and child.prev_sibling.value == "for"
1148             ):
1149                 if maybe_make_parens_invisible_in_atom(
1150                     child,
1151                     parent=node,
1152                     remove_brackets_around_comma=True,
1153                 ):
1154                     wrap_in_parentheses(node, child, visible=False)
1155             elif (
1156                 mode.preview and isinstance(child, Node) and node.type == syms.with_stmt
1157             ):
1158                 remove_with_parens(child, node)
1159             elif child.type == syms.atom:
1160                 if maybe_make_parens_invisible_in_atom(
1161                     child,
1162                     parent=node,
1163                 ):
1164                     wrap_in_parentheses(node, child, visible=False)
1165             elif is_one_tuple(child):
1166                 wrap_in_parentheses(node, child, visible=True)
1167             elif node.type == syms.import_from:
1168                 _normalize_import_from(node, child, index)
1169                 break
1170             elif (
1171                 index == 1
1172                 and child.type == token.STAR
1173                 and node.type == syms.except_clause
1174             ):
1175                 # In except* (PEP 654), the star is actually part of
1176                 # the keyword. So we need to skip the insertion of
1177                 # invisible parentheses to work more precisely.
1178                 continue
1179
1180             elif not (isinstance(child, Leaf) and is_multiline_string(child)):
1181                 wrap_in_parentheses(node, child, visible=False)
1182
1183         comma_check = child.type == token.COMMA if mode.preview else False
1184
1185         check_lpar = isinstance(child, Leaf) and (
1186             child.value in parens_after or comma_check
1187         )
1188
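# Hedged examples of the normalization above: single-element tuples gain
# visible parentheses, while redundant parentheses after keywords such as
# `return` are made invisible (default stable style assumed):
#
#     >>> import black
#     >>> black.format_str("t = 1,\n", mode=black.Mode())
#     't = (1,)\n'
#     >>> black.format_str("def f():\n    return (1)\n", mode=black.Mode())
#     'def f():\n    return 1\n'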
1189
1190 def _normalize_import_from(parent: Node, child: LN, index: int) -> None:
1191     # "import from" nodes store parentheses directly as part of
1192     # the statement
1193     if is_lpar_token(child):
1194         assert is_rpar_token(parent.children[-1])
1195         # make parentheses invisible
1196         child.value = ""
1197         parent.children[-1].value = ""
1198     elif child.type != token.STAR:
1199         # insert invisible parentheses
1200         parent.insert_child(index, Leaf(token.LPAR, ""))
1201         parent.append_child(Leaf(token.RPAR, ""))
1202
1203
1204 def remove_await_parens(node: Node) -> None:
1205     if node.children[0].type == token.AWAIT and len(node.children) > 1:
1206         if (
1207             node.children[1].type == syms.atom
1208             and node.children[1].children[0].type == token.LPAR
1209         ):
1210             if maybe_make_parens_invisible_in_atom(
1211                 node.children[1],
1212                 parent=node,
1213                 remove_brackets_around_comma=True,
1214             ):
1215                 wrap_in_parentheses(node, node.children[1], visible=False)
1216
1217             # Since await is an expression we shouldn't remove
1218             # brackets in cases where this would change
1219             # the AST due to operator precedence.
1220             # Therefore we only aim to remove brackets around
1221             # power nodes that aren't also await expressions themselves.
1222             # https://peps.python.org/pep-0492/#updated-operator-precedence-table
1223             # N.B. We've still removed any redundant nested brackets though :)
1224             opening_bracket = cast(Leaf, node.children[1].children[0])
1225             closing_bracket = cast(Leaf, node.children[1].children[-1])
1226             bracket_contents = cast(Node, node.children[1].children[1])
1227             if bracket_contents.type != syms.power:
1228                 ensure_visible(opening_bracket)
1229                 ensure_visible(closing_bracket)
1230             elif (
1231                 bracket_contents.type == syms.power
1232                 and bracket_contents.children[0].type == token.AWAIT
1233             ):
1234                 ensure_visible(opening_bracket)
1235                 ensure_visible(closing_bracket)
1236                 # If we are in a nested await then recurse down.
1237                 remove_await_parens(bracket_contents)
1238
1239
1240 def _maybe_wrap_cms_in_parens(
1241     node: Node, mode: Mode, features: Collection[Feature]
1242 ) -> None:
1243     """When enabled and safe, wrap the multiple context managers in invisible parens.
1244
1245     It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS.
1246     """
1247     if (
1248         Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features
1249         or Preview.wrap_multiple_context_managers_in_parens not in mode
1250         or len(node.children) <= 2
1251         # If it's an atom, it's already wrapped in parens.
1252         or node.children[1].type == syms.atom
1253     ):
1254         return
1255     colon_index: Optional[int] = None
1256     for i in range(2, len(node.children)):
1257         if node.children[i].type == token.COLON:
1258             colon_index = i
1259             break
1260     if colon_index is not None:
1261         lpar = Leaf(token.LPAR, "")
1262         rpar = Leaf(token.RPAR, "")
1263         context_managers = node.children[1:colon_index]
1264         for child in context_managers:
1265             child.remove()
1266         # After wrapping, the with_stmt will look like this:
1267         #   with_stmt
1268         #     NAME 'with'
1269         #     atom
1270         #       LPAR ''
1271         #       testlist_gexp
1272         #         ... <-- context_managers
1273         #       /testlist_gexp
1274         #       RPAR ''
1275         #     /atom
1276         #     COLON ':'
1277         new_child = Node(
1278             syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar]
1279         )
1280         node.insert_child(1, new_child)
1281
1282
def remove_with_parens(node: Node, parent: Node) -> None:
    """Recursively hide optional parens in `with` statements."""
    # Removing all unnecessary parentheses in with statements in one pass is a tad
    # complex as different variations of bracketed statements result in pretty
    # different parse trees:
    #
    # with (open("file")) as f:                       # this is an asexpr_test
    #     ...
    #
    # with (open("file") as f):                       # this is an atom containing an
    #     ...                                         # asexpr_test
    #
    # with (open("file")) as f, (open("file")) as f:  # this is asexpr_test, COMMA,
    #     ...                                         # asexpr_test
    #
    # with (open("file") as f, open("file") as f):    # an atom containing a
    #     ...                                         # testlist_gexp which then
    #                                                 # contains multiple asexpr_test(s)
    if node.type == syms.atom:
        if maybe_make_parens_invisible_in_atom(
            node,
            parent=parent,
            remove_brackets_around_comma=True,
        ):
            wrap_in_parentheses(parent, node, visible=False)
        if isinstance(node.children[1], Node):
            remove_with_parens(node.children[1], node)
    elif node.type == syms.testlist_gexp:
        for child in node.children:
            if isinstance(child, Node):
                remove_with_parens(child, node)
    elif node.type == syms.asexpr_test and not any(
        leaf.type == token.COLONEQUAL for leaf in node.leaves()
    ):
        if maybe_make_parens_invisible_in_atom(
            node.children[0],
            parent=node,
            remove_brackets_around_comma=True,
        ):
            wrap_in_parentheses(node, node.children[0], visible=False)


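# Roughly, the net effect of the above (informal example): optional parentheses
# as in `with (open("file")) as f:` are made invisible, so the statement is
# normally emitted as `with open("file") as f:`, while the parentheses remain in
# the tree as invisible brackets that later splitting passes may still use.

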
def maybe_make_parens_invisible_in_atom(
    node: LN,
    parent: LN,
    remove_brackets_around_comma: bool = False,
) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.
    """
    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or (
            # This condition tries to prevent removing non-optional brackets
            # around a tuple. However, it can be a bit overzealous, so we provide
            # an option to skip this check for `for` and `with` statements.
            not remove_brackets_around_comma
            and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
        )
        or is_tuple_containing_walrus(node)
    ):
        return False

    if is_walrus_assignment(node):
        if parent.type in [
            syms.annassign,
            syms.expr_stmt,
            syms.assert_stmt,
            syms.return_stmt,
            syms.except_clause,
            syms.funcdef,
            syms.with_stmt,
            # these ones aren't useful to end users, but they do please fuzzers
            syms.for_stmt,
            syms.del_stmt,
        ]:
            return False

    first = node.children[0]
    last = node.children[-1]
    if is_lpar_token(first) and is_rpar_token(last):
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""
        last.value = ""
        maybe_make_parens_invisible_in_atom(
            middle,
            parent=parent,
            remove_brackets_around_comma=remove_brackets_around_comma,
        )

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        return False

    return True


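# Informal examples of the contract above (a sketch, not exercised here): for
# `return (a + b)` the atom's parentheses are made invisible and the line is
# emitted as `return a + b`; an atom like `(1, 2)` keeps its parentheses when
# `remove_brackets_around_comma` is False, because the COMMA_PRIORITY check
# bails out early; and a walrus atom such as `(x := f())` keeps its parentheses
# when its parent is one of the statement types listed above.

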
def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""

    if not (opening_bracket.parent and opening_bracket.value in "[{("):
        return False

    # We're essentially checking if the body is delimited by commas and there's
    # more than one of them (we're excluding the trailing comma; if the delimiter
    # priority is still commas after that, there must be more).
    exclude = set()
    trailing_comma = False
    try:
        last_leaf = line.leaves[-1]
        if last_leaf.type == token.COMMA:
            trailing_comma = True
            exclude.add(id(last_leaf))
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
    except (IndexError, ValueError):
        return False

    return max_priority == COMMA_PRIORITY and (
        (line.mode.magic_trailing_comma and trailing_comma)
        # always explode imports
        or opening_bracket.parent.type in {syms.atom, syms.import_from}
    )


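# A rough sketch of what a True result leads to (informal example): with the
# magic trailing comma, a collection that already ends in a comma, e.g.
#
#     numbers = [1, 2, 3,]
#
# is exploded onto one element per line:
#
#     numbers = [
#         1,
#         2,
#         3,
#     ]
#
# Over-long parenthesized imports are split the same way, one name per line.

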
def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in an RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too).  First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """

    omit: Set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    length = 4 * line.depth
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
        length += leaf_length
        if length > line_length:
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and leaf.opening_bracket is not None
                    and not is_one_sequence_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (i.e. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and leaf.opening_bracket is not None
                and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf


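# Roughly, for a right-hand side built from chained trailers (made-up example):
#
#     result = some_object.filter(a).exclude(b).annotate(c)
#
# the generator first yields an empty set (try splitting on everything), then
# progressively larger sets of closing-bracket IDs starting from the rightmost
# trailers, letting the caller try right-hand splits in which short trailing
# trailers such as `.annotate(c)` stay attached to the closing line.

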
def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> List[Line]:
    if not line_str:
        line_str = line_to_string(line)
    result: List[Line] = []
    for transformed_line in transform(line, features):
        if str(transformed_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")

        result.extend(transform_line(transformed_line, mode=mode, features=features))

    features_set = set(features)
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES in features_set
        or transform.__class__.__name__ != "rhs"
        or not line.bracket_tracker.invisible
        or any(bracket.value for bracket in line.bracket_tracker.invisible)
        or line.contains_multiline_strings()
        or result[0].contains_uncollapsable_type_comments()
        or result[0].contains_unsplittable_type_ignore()
        or is_line_short_enough(result[0], line_length=mode.line_length)
        # If any leaves have no parents (which _can_ occur since
        # `transform(line)` potentially destroys the line's underlying node
        # structure), then we can't proceed. Doing so would cause the below
        # call to `append_leaves()` to fail.
        or any(leaf.parent is None for leaf in line.leaves)
    ):
        return result

    line_copy = line.clone()
    append_leaves(line_copy, line, line.leaves)
    features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES}
    second_opinion = run_transformer(
        line_copy, transform, mode, features_fop, line_str=line_str
    )
    if all(
        is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
    ):
        result = second_opinion
    return result
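
# A rough illustration of the "second opinion" above (made-up example): a first
# attempt at
#
#     result = some_function(argument_one, argument_two) or default_value
#
# may split inside the call's brackets and still leave an over-long line; the
# retry with FORCE_OPTIONAL_PARENTHESES instead favors wrapping the whole
# right-hand side:
#
#     result = (
#         some_function(argument_one, argument_two) or default_value
#     )
#
# and that result is preferred only when every produced line fits within
# mode.line_length.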