Remove unnecessary parentheses from `except` clauses (#2939)
[etc/vim.git] / src / black / linegen.py
1 """
2 Generating lines of code.
3 """
4 from functools import partial, wraps
5 import sys
6 from typing import Collection, Iterator, List, Optional, Set, Union
7
8 from black.nodes import WHITESPACE, RARROW, STATEMENT, STANDALONE_COMMENT
9 from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
10 from black.nodes import Visitor, syms, is_arith_like, ensure_visible
11 from black.nodes import (
12     is_docstring,
13     is_empty_tuple,
14     is_one_tuple,
15     is_one_sequence_between,
16 )
17 from black.nodes import is_name_token, is_lpar_token, is_rpar_token
18 from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
19 from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
20 from black.nodes import wrap_in_parentheses
21 from black.brackets import max_delimiter_priority_in_atom
22 from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
23 from black.lines import Line, line_to_string, is_line_short_enough
24 from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
25 from black.comments import generate_comments, list_comments, FMT_OFF
26 from black.numerics import normalize_numeric_literal
27 from black.strings import get_string_prefix, fix_docstring
28 from black.strings import normalize_string_prefix, normalize_string_quotes
29 from black.trans import Transformer, CannotTransform, StringMerger, StringSplitter
30 from black.trans import StringParenWrapper, StringParenStripper, hug_power_op
31 from black.mode import Mode, Feature, Preview
32
33 from blib2to3.pytree import Node, Leaf
34 from blib2to3.pgen2 import token
35
36
37 # types
38 LeafID = int
39 LN = Union[Leaf, Node]
40
41
42 class CannotSplit(CannotTransform):
43     """A readable split that fits the allotted line length is impossible."""
44
45
46 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
47 # See also https://github.com/mypyc/mypyc/issues/827.
48 class LineGenerator(Visitor[Line]):
49     """Generates reformatted Line objects.  Empty lines are not emitted.
50
51     Note: destroys the tree it's visiting by mutating prefixes of its leaves
52     in ways that mean the tree will no longer stringify to valid Python code.
53     """
54
55     def __init__(self, mode: Mode) -> None:
56         self.mode = mode
57         self.current_line: Line
58         self.__post_init__()
59
60     def line(self, indent: int = 0) -> Iterator[Line]:
61         """Generate a line.
62
63         If the line is empty, only emit if it makes sense.
64         If the line is too long, split it first and then generate.
65
66         If any lines were generated, set up a new current_line.
67         """
68         if not self.current_line:
69             self.current_line.depth += indent
70             return  # Line is empty, don't emit. Creating a new one is unnecessary.
71
72         complete_line = self.current_line
73         self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
74         yield complete_line
75
76     def visit_default(self, node: LN) -> Iterator[Line]:
77         """Default `visit_*()` implementation. Recurses to children of `node`."""
78         if isinstance(node, Leaf):
79             any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
80             for comment in generate_comments(node, preview=self.mode.preview):
81                 if any_open_brackets:
82                     # any comment within brackets is subject to splitting
83                     self.current_line.append(comment)
84                 elif comment.type == token.COMMENT:
85                     # regular trailing comment
86                     self.current_line.append(comment)
87                     yield from self.line()
88
89                 else:
90                     # regular standalone comment
91                     yield from self.line()
92
93                     self.current_line.append(comment)
94                     yield from self.line()
95
96             normalize_prefix(node, inside_brackets=any_open_brackets)
97             if self.mode.string_normalization and node.type == token.STRING:
98                 node.value = normalize_string_prefix(node.value)
99                 node.value = normalize_string_quotes(node.value)
100             if node.type == token.NUMBER:
101                 normalize_numeric_literal(node)
102             if node.type not in WHITESPACE:
103                 self.current_line.append(node)
104         yield from super().visit_default(node)
105
106     def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
107         """Increase indentation level, maybe yield a line."""
108         # In blib2to3 INDENT never holds comments.
109         yield from self.line(+1)
110         yield from self.visit_default(node)
111
112     def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
113         """Decrease indentation level, maybe yield a line."""
114         # The current line might still wait for trailing comments.  At DEDENT time
115         # there won't be any (they would be prefixes on the preceding NEWLINE).
116         # Emit the line then.
117         yield from self.line()
118
119         # While DEDENT has no value, its prefix may contain standalone comments
120         # that belong to the current indentation level.  Get 'em.
121         yield from self.visit_default(node)
122
123         # Finally, emit the dedent.
124         yield from self.line(-1)
125
126     def visit_stmt(
127         self, node: Node, keywords: Set[str], parens: Set[str]
128     ) -> Iterator[Line]:
129         """Visit a statement.
130
131         This implementation is shared for `if`, `while`, `for`, `try`, `except`,
132         `def`, `with`, `class`, `assert`, and assignments.
133
134         The relevant Python language `keywords` for a given statement will be
135         NAME leaves within it. This method puts those on a separate line.
136
137         `parens` holds a set of string leaf values immediately after which
138         invisible parens should be put.
139         """
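        # Illustrative example: for `if (x == 1): ...`, both `if` and `elif` are in
        # `keywords` and `parens`, so each keyword starts a new line and the
        # parentheses right after `if` are made invisible, typically rendering the
        # header as `if x == 1:`.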
140         normalize_invisible_parens(node, parens_after=parens, preview=self.mode.preview)
141         for child in node.children:
142             if is_name_token(child) and child.value in keywords:
143                 yield from self.line()
144
145             yield from self.visit(child)
146
147     def visit_match_case(self, node: Node) -> Iterator[Line]:
148         """Visit either a match or case statement."""
149         normalize_invisible_parens(node, parens_after=set(), preview=self.mode.preview)
150
151         yield from self.line()
152         for child in node.children:
153             yield from self.visit(child)
154
155     def visit_suite(self, node: Node) -> Iterator[Line]:
156         """Visit a suite."""
157         if self.mode.is_pyi and is_stub_suite(node):
158             yield from self.visit(node.children[2])
159         else:
160             yield from self.visit_default(node)
161
162     def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
163         """Visit a statement without nested statements."""
164         prev_type: Optional[int] = None
165         for child in node.children:
166             if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
167                 wrap_in_parentheses(node, child, visible=False)
168             prev_type = child.type
169
170         is_suite_like = node.parent and node.parent.type in STATEMENT
171         if is_suite_like:
172             if self.mode.is_pyi and is_stub_body(node):
173                 yield from self.visit_default(node)
174             else:
175                 yield from self.line(+1)
176                 yield from self.visit_default(node)
177                 yield from self.line(-1)
178
179         else:
180             if (
181                 not self.mode.is_pyi
182                 or not node.parent
183                 or not is_stub_suite(node.parent)
184             ):
185                 yield from self.line()
186             yield from self.visit_default(node)
187
188     def visit_async_stmt(self, node: Node) -> Iterator[Line]:
189         """Visit `async def`, `async for`, `async with`."""
190         yield from self.line()
191
192         children = iter(node.children)
193         for child in children:
194             yield from self.visit(child)
195
196             if child.type == token.ASYNC:
197                 break
198
199         internal_stmt = next(children)
200         for child in internal_stmt.children:
201             yield from self.visit(child)
202
203     def visit_decorators(self, node: Node) -> Iterator[Line]:
204         """Visit decorators."""
205         for child in node.children:
206             yield from self.line()
207             yield from self.visit(child)
208
209     def visit_power(self, node: Node) -> Iterator[Line]:
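        # Rough illustration: a NUMBER leaf immediately followed by an attribute
        # trailer gets wrapped in parentheses, so e.g. `1.5.hex()` would typically
        # come out as `(1.5).hex()`; hex, binary, octal, and complex literals are
        # left alone.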
210         for idx, leaf in enumerate(node.children[:-1]):
211             next_leaf = node.children[idx + 1]
212
213             if not isinstance(leaf, Leaf):
214                 continue
215
216             value = leaf.value.lower()
217             if (
218                 leaf.type == token.NUMBER
219                 and next_leaf.type == syms.trailer
220                 # Ensure that we are in an attribute trailer
221                 and next_leaf.children[0].type == token.DOT
222                 # It shouldn't wrap hexadecimal, binary, or octal literals
223                 and not value.startswith(("0x", "0b", "0o"))
224                 # It shouldn't wrap complex literals
225                 and "j" not in value
226             ):
227                 wrap_in_parentheses(node, leaf)
228
229         yield from self.visit_default(node)
230
231     def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
232         """Remove a semicolon and put the other statement on a separate line."""
233         yield from self.line()
234
235     def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
236         """End of file. Process outstanding comments and end with a newline."""
237         yield from self.visit_default(leaf)
238         yield from self.line()
239
240     def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
241         if not self.current_line.bracket_tracker.any_open_brackets():
242             yield from self.line()
243         yield from self.visit_default(leaf)
244
245     def visit_factor(self, node: Node) -> Iterator[Line]:
246         """Force parentheses between a unary op and a binary power:
247
248         -2 ** 8 -> -(2 ** 8)
249         """
250         _operator, operand = node.children
251         if (
252             operand.type == syms.power
253             and len(operand.children) == 3
254             and operand.children[1].type == token.DOUBLESTAR
255         ):
256             lpar = Leaf(token.LPAR, "(")
257             rpar = Leaf(token.RPAR, ")")
258             index = operand.remove() or 0
259             node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
260         yield from self.visit_default(node)
261
262     def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
263         if is_docstring(leaf) and "\\\n" not in leaf.value:
264             # We're ignoring docstrings with backslash newline escapes because changing
265             # indentation of those changes the AST representation of the code.
266             docstring = normalize_string_prefix(leaf.value)
267             prefix = get_string_prefix(docstring)
268             docstring = docstring[len(prefix) :]  # Remove the prefix
269             quote_char = docstring[0]
270             # A natural way to remove the outer quotes is to do:
271             #   docstring = docstring.strip(quote_char)
272             # but that breaks on """""x""" (which is '""x').
273             # So we actually need to remove the first character and the next two
274             # characters but only if they are the same as the first.
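            # For example: in `"""abc"""` the character after the first quote is
            # another quote, so quote_len is 3; in `'abc'` it is not, so quote_len is 1.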
275             quote_len = 1 if docstring[1] != quote_char else 3
276             docstring = docstring[quote_len:-quote_len]
277             docstring_started_empty = not docstring
278
279             if is_multiline_string(leaf):
280                 indent = " " * 4 * self.current_line.depth
281                 docstring = fix_docstring(docstring, indent)
282             else:
283                 docstring = docstring.strip()
284
285             if docstring:
286                 # Add some padding if the docstring starts / ends with a quote mark.
287                 if docstring[0] == quote_char:
288                     docstring = " " + docstring
289                 if docstring[-1] == quote_char:
290                     docstring += " "
291                 if docstring[-1] == "\\":
292                     backslash_count = len(docstring) - len(docstring.rstrip("\\"))
293                     if backslash_count % 2:
294                         # Odd number of trailing backslashes, add some padding to
295                         # avoid escaping the closing string quote.
296                         docstring += " "
297             elif not docstring_started_empty:
298                 docstring = " "
299
300             # We could enforce triple quotes at this point.
301             quote = quote_char * quote_len
302             leaf.value = prefix + quote + docstring + quote
303
304         yield from self.visit_default(leaf)
305
306     def __post_init__(self) -> None:
307         """You are in a twisty little maze of passages."""
308         self.current_line = Line(mode=self.mode)
309
310         v = self.visit_stmt
311         Ø: Set[str] = set()
312         self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
313         self.visit_if_stmt = partial(
314             v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
315         )
316         self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
317         self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
318         self.visit_try_stmt = partial(
319             v, keywords={"try", "except", "else", "finally"}, parens=Ø
320         )
321         if self.mode.preview:
322             self.visit_except_clause = partial(
323                 v, keywords={"except"}, parens={"except"}
324             )
325         else:
326             self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
327         self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
328         self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
329         self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
330         self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
331         self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
332         self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
333         self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
334         self.visit_async_funcdef = self.visit_async_stmt
335         self.visit_decorated = self.visit_decorators
336
337         # PEP 634
338         self.visit_match_stmt = self.visit_match_case
339         self.visit_case_block = self.visit_match_case
340
341
342 def transform_line(
343     line: Line, mode: Mode, features: Collection[Feature] = ()
344 ) -> Iterator[Line]:
345     """Transform a `line`, potentially splitting it into many lines.
346
347     The resulting lines should fit in the allotted `line_length` but might not.
348
349     `features` are syntactical features that may be used in the output.
350     """
351     if line.is_comment:
352         yield line
353         return
354
355     line_str = line_to_string(line)
356
357     ll = mode.line_length
358     sn = mode.string_normalization
359     string_merge = StringMerger(ll, sn)
360     string_paren_strip = StringParenStripper(ll, sn)
361     string_split = StringSplitter(ll, sn)
362     string_paren_wrap = StringParenWrapper(ll, sn)
363
364     transformers: List[Transformer]
365     if (
366         not line.contains_uncollapsable_type_comments()
367         and not line.should_split_rhs
368         and not line.magic_trailing_comma
369         and (
370             is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
371             or line.contains_unsplittable_type_ignore()
372         )
373         and not (line.inside_brackets and line.contains_standalone_comments())
374     ):
375         # Only apply basic string preprocessing, since lines shouldn't be split here.
376         if Preview.string_processing in mode:
377             transformers = [string_merge, string_paren_strip]
378         else:
379             transformers = []
380     elif line.is_def:
381         transformers = [left_hand_split]
382     else:
383
384         def _rhs(
385             self: object, line: Line, features: Collection[Feature]
386         ) -> Iterator[Line]:
387             """Wraps calls to `right_hand_split`.
388
389             The calls increasingly `omit` right-hand trailers (bracket pairs with
390             content), meaning the trailers get glued together to split on another
391             bracket pair instead.
392             """
393             for omit in generate_trailers_to_omit(line, mode.line_length):
394                 lines = list(
395                     right_hand_split(line, mode.line_length, features, omit=omit)
396                 )
397                 # Note: this check is only able to figure out if the first line of the
398                 # *current* transformation fits in the line length.  This is true only
399                 # for simple cases.  All others require running more transforms via
400                 # `transform_line()`.  This check doesn't know if those would succeed.
401                 if is_line_short_enough(lines[0], line_length=mode.line_length):
402                     yield from lines
403                     return
404
405             # All splits failed, best effort split with no omits.
406             # This mostly happens to multiline strings that are by definition
407             # reported as not fitting a single line, as well as lines that contain
408             # trailing commas (those have to be exploded).
409             yield from right_hand_split(
410                 line, line_length=mode.line_length, features=features
411             )
412
413         # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
414         # __name__ attribute which is needed in `run_transformer` further down.
415         # Unfortunately a nested class breaks mypyc too. So a class must be created
416         # via type ... https://github.com/mypyc/mypyc/issues/884
417         rhs = type("rhs", (), {"__call__": _rhs})()
418
419         if Preview.string_processing in mode:
420             if line.inside_brackets:
421                 transformers = [
422                     string_merge,
423                     string_paren_strip,
424                     string_split,
425                     delimiter_split,
426                     standalone_comment_split,
427                     string_paren_wrap,
428                     rhs,
429                 ]
430             else:
431                 transformers = [
432                     string_merge,
433                     string_paren_strip,
434                     string_split,
435                     string_paren_wrap,
436                     rhs,
437                 ]
438         else:
439             if line.inside_brackets:
440                 transformers = [delimiter_split, standalone_comment_split, rhs]
441             else:
442                 transformers = [rhs]
443     # It's always safe to attempt hugging of power operations and pretty much every line
444     # could match.
445     transformers.append(hug_power_op)
446
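    # Roughly, the chains chosen above mean: lines that are already short enough get
    # at most string preprocessing, `def` lines get `left_hand_split`, and everything
    # else falls back to the right-hand split (preceded by delimiter and standalone
    # comment splits when inside brackets, plus the string transformers in preview
    # mode), with `hug_power_op` always appended last.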
447     for transform in transformers:
448         # We are accumulating lines in `result` because we might want to abort
449         # mission and return the original line in the end, or attempt a different
450         # split altogether.
451         try:
452             result = run_transformer(line, transform, mode, features, line_str=line_str)
453         except CannotTransform:
454             continue
455         else:
456             yield from result
457             break
458
459     else:
460         yield line
461
462
463 def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
464     """Split line into many lines, starting with the first matching bracket pair.
465
466     Note: this usually looks weird, only use this for function definitions.
467     Prefer RHS otherwise.  This is why this function is not symmetrical with
468     :func:`right_hand_split` which also handles optional parentheses.
469     """
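    # Illustrative example: `def process(data, *, verbose=False) -> int:` splits on
    # the first bracket pair into head `def process(`, body `data, *, verbose=False`,
    # and tail `) -> int:`.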
470     tail_leaves: List[Leaf] = []
471     body_leaves: List[Leaf] = []
472     head_leaves: List[Leaf] = []
473     current_leaves = head_leaves
474     matching_bracket: Optional[Leaf] = None
475     for leaf in line.leaves:
476         if (
477             current_leaves is body_leaves
478             and leaf.type in CLOSING_BRACKETS
479             and leaf.opening_bracket is matching_bracket
480         ):
481             current_leaves = tail_leaves if body_leaves else head_leaves
482         current_leaves.append(leaf)
483         if current_leaves is head_leaves:
484             if leaf.type in OPENING_BRACKETS:
485                 matching_bracket = leaf
486                 current_leaves = body_leaves
487     if not matching_bracket:
488         raise CannotSplit("No brackets found")
489
490     head = bracket_split_build_line(head_leaves, line, matching_bracket)
491     body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
492     tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
493     bracket_split_succeeded_or_raise(head, body, tail)
494     for result in (head, body, tail):
495         if result:
496             yield result
497
498
499 def right_hand_split(
500     line: Line,
501     line_length: int,
502     features: Collection[Feature] = (),
503     omit: Collection[LeafID] = (),
504 ) -> Iterator[Line]:
505     """Split line into many lines, starting with the last matching bracket pair.
506
507     If the split was by optional parentheses, attempt splitting without them, too.
508     `omit` is a collection of closing bracket IDs that shouldn't be considered for
509     this split.
510
511     Note: running this function modifies `bracket_depth` on the leaves of `line`.
512     """
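    # Illustrative example: for `result = build_tree(left_child, right_child)` the
    # last bracket pair is the call's parentheses, giving head `result = build_tree(`,
    # body `left_child, right_child`, and tail `)`.  If that pair turns out to be
    # optional (invisible) parentheses, the code below retries with the pair omitted.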
513     tail_leaves: List[Leaf] = []
514     body_leaves: List[Leaf] = []
515     head_leaves: List[Leaf] = []
516     current_leaves = tail_leaves
517     opening_bracket: Optional[Leaf] = None
518     closing_bracket: Optional[Leaf] = None
519     for leaf in reversed(line.leaves):
520         if current_leaves is body_leaves:
521             if leaf is opening_bracket:
522                 current_leaves = head_leaves if body_leaves else tail_leaves
523         current_leaves.append(leaf)
524         if current_leaves is tail_leaves:
525             if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
526                 opening_bracket = leaf.opening_bracket
527                 closing_bracket = leaf
528                 current_leaves = body_leaves
529     if not (opening_bracket and closing_bracket and head_leaves):
530         # If there is no opening or closing_bracket that means the split failed and
531         # all content is in the tail.  Otherwise, if `head_leaves` is empty, it means
532         # the matching `opening_bracket` wasn't available on `line` anymore.
533         raise CannotSplit("No brackets found")
534
535     tail_leaves.reverse()
536     body_leaves.reverse()
537     head_leaves.reverse()
538     head = bracket_split_build_line(head_leaves, line, opening_bracket)
539     body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
540     tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
541     bracket_split_succeeded_or_raise(head, body, tail)
542     if (
543         Feature.FORCE_OPTIONAL_PARENTHESES not in features
544         # the opening bracket is an optional paren
545         and opening_bracket.type == token.LPAR
546         and not opening_bracket.value
547         # the closing bracket is an optional paren
548         and closing_bracket.type == token.RPAR
549         and not closing_bracket.value
550         # it's not an import (optional parens are the only thing we can split on
551         # in this case; attempting a split without them is a waste of time)
552         and not line.is_import
553         # there are no standalone comments in the body
554         and not body.contains_standalone_comments(0)
555         # and we can actually remove the parens
556         and can_omit_invisible_parens(body, line_length)
557     ):
558         omit = {id(closing_bracket), *omit}
559         try:
560             yield from right_hand_split(line, line_length, features=features, omit=omit)
561             return
562
563         except CannotSplit as e:
564             if not (
565                 can_be_split(body)
566                 or is_line_short_enough(body, line_length=line_length)
567             ):
568                 raise CannotSplit(
569                     "Splitting failed, body is still too long and can't be split."
570                 ) from e
571
572             elif head.contains_multiline_strings() or tail.contains_multiline_strings():
573                 raise CannotSplit(
574                     "The current optional pair of parentheses is bound to fail to"
575                     " satisfy the splitting algorithm because the head or the tail"
576                     " contains multiline strings which by definition never fit one"
577                     " line."
578                 ) from e
579
580     ensure_visible(opening_bracket)
581     ensure_visible(closing_bracket)
582     for result in (head, body, tail):
583         if result:
584             yield result
585
586
587 def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
588     """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.
589
590     Do nothing otherwise.
591
592     A left- or right-hand split is based on a pair of brackets. Content before
593     (and including) the opening bracket is left on one line, content inside the
594     brackets is put on a separate line, and finally content starting with and
595     following the closing bracket is put on a separate line.
596
597     Those are called `head`, `body`, and `tail`, respectively. If the split
598     produced the same line (all content in `head`) or ended up with an empty `body`
599     and the `tail` is just the closing bracket, then it's considered failed.
600     """
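    # Illustrative failure cases: splitting `foo()` leaves an empty body and a
    # one-character tail `)`, which is rejected as not worth it; a split that puts all
    # content back into the head reproduces the same line and is rejected outright.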
601     tail_len = len(str(tail).strip())
602     if not body:
603         if tail_len == 0:
604             raise CannotSplit("Splitting brackets produced the same line")
605
606         elif tail_len < 3:
607             raise CannotSplit(
608                 f"Splitting brackets on an empty body to save {tail_len} characters is"
609                 " not worth it"
610             )
611
612
613 def bracket_split_build_line(
614     leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
615 ) -> Line:
616     """Return a new line with given `leaves` and respective comments from `original`.
617
618     If `is_body` is True, the result line is one-indented inside brackets and as such
619     has its first leaf's prefix normalized and a trailing comma added when expected.
620     """
621     result = Line(mode=original.mode, depth=original.depth)
622     if is_body:
623         result.inside_brackets = True
624         result.depth += 1
625         if leaves:
626             # Since body is a new indent level, remove spurious leading whitespace.
627             normalize_prefix(leaves[0], inside_brackets=True)
628             # Ensure a trailing comma for imports and standalone function arguments, but
629             # be careful not to add one after any comments or within type annotations.
630             no_commas = (
631                 original.is_def
632                 and opening_bracket.value == "("
633                 and not any(leaf.type == token.COMMA for leaf in leaves)
634                 # In particular, don't add one within a parenthesized return annotation.
635                 # Unfortunately the indicator we're in a return annotation (RARROW) may
636                 # be defined directly in the parent node, the parent of the parent ...
637                 # and so on depending on how complex the return annotation is.
638                 # This isn't perfect and there are some false negatives, but they are
639                 # in contexts where a comma is actually fine.
640                 and not any(
641                     node.prev_sibling.type == RARROW
642                     for node in (
643                         leaves[0].parent,
644                         getattr(leaves[0].parent, "parent", None),
645                     )
646                     if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
647                 )
648             )
649
650             if original.is_import or no_commas:
651                 for i in range(len(leaves) - 1, -1, -1):
652                     if leaves[i].type == STANDALONE_COMMENT:
653                         continue
654
655                     if leaves[i].type != token.COMMA:
656                         new_comma = Leaf(token.COMMA, ",")
657                         leaves.insert(i + 1, new_comma)
658                     break
659
660     # Populate the line
661     for leaf in leaves:
662         result.append(leaf, preformatted=True)
663         for comment_after in original.comments_after(leaf):
664             result.append(comment_after, preformatted=True)
665     if is_body and should_split_line(result, opening_bracket):
666         result.should_split_rhs = True
667     return result
668
669
670 def dont_increase_indentation(split_func: Transformer) -> Transformer:
671     """Normalize prefix of the first leaf in every line returned by `split_func`.
672
673     This is a decorator over relevant split functions.
674     """
675
676     @wraps(split_func)
677     def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
678         for split_line in split_func(line, features):
679             normalize_prefix(split_line.leaves[0], inside_brackets=True)
680             yield split_line
681
682     return split_wrapper
683
684
685 @dont_increase_indentation
686 def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
687     """Split according to delimiters of the highest priority.
688
689     If the appropriate Features are given, the split will add trailing commas
690     also in function signatures and calls that contain `*` and `**`.
691     """
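    # Illustrative example: a bracketed line whose highest-priority delimiter is the
    # comma, e.g. the body `alpha, beta, gamma`, is emitted one element per line, and
    # a trailing comma is appended to the last element when that is safe for the
    # given target `features`.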
692     try:
693         last_leaf = line.leaves[-1]
694     except IndexError:
695         raise CannotSplit("Line empty") from None
696
697     bt = line.bracket_tracker
698     try:
699         delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
700     except ValueError:
701         raise CannotSplit("No delimiters found") from None
702
703     if delimiter_priority == DOT_PRIORITY:
704         if bt.delimiter_count_with_priority(delimiter_priority) == 1:
705             raise CannotSplit("Splitting a single attribute from its owner looks wrong")
706
707     current_line = Line(
708         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
709     )
710     lowest_depth = sys.maxsize
711     trailing_comma_safe = True
712
713     def append_to_line(leaf: Leaf) -> Iterator[Line]:
714         """Append `leaf` to current line or to new line if appending impossible."""
715         nonlocal current_line
716         try:
717             current_line.append_safe(leaf, preformatted=True)
718         except ValueError:
719             yield current_line
720
721             current_line = Line(
722                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
723             )
724             current_line.append(leaf)
725
726     for leaf in line.leaves:
727         yield from append_to_line(leaf)
728
729         for comment_after in line.comments_after(leaf):
730             yield from append_to_line(comment_after)
731
732         lowest_depth = min(lowest_depth, leaf.bracket_depth)
733         if leaf.bracket_depth == lowest_depth:
734             if is_vararg(leaf, within={syms.typedargslist}):
735                 trailing_comma_safe = (
736                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
737                 )
738             elif is_vararg(leaf, within={syms.arglist, syms.argument}):
739                 trailing_comma_safe = (
740                     trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
741                 )
742
743         leaf_priority = bt.delimiters.get(id(leaf))
744         if leaf_priority == delimiter_priority:
745             yield current_line
746
747             current_line = Line(
748                 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
749             )
750     if current_line:
751         if (
752             trailing_comma_safe
753             and delimiter_priority == COMMA_PRIORITY
754             and current_line.leaves[-1].type != token.COMMA
755             and current_line.leaves[-1].type != STANDALONE_COMMENT
756         ):
757             new_comma = Leaf(token.COMMA, ",")
758             current_line.append(new_comma)
759         yield current_line
760
761
762 @dont_increase_indentation
763 def standalone_comment_split(
764     line: Line, features: Collection[Feature] = ()
765 ) -> Iterator[Line]:
766     """Split standalone comments from the rest of the line."""
767     if not line.contains_standalone_comments(0):
768         raise CannotSplit("Line does not have any standalone comments")
769
770     current_line = Line(
771         mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
772     )
773
774     def append_to_line(leaf: Leaf) -> Iterator[Line]:
775         """Append `leaf` to current line or to new line if appending impossible."""
776         nonlocal current_line
777         try:
778             current_line.append_safe(leaf, preformatted=True)
779         except ValueError:
780             yield current_line
781
782             current_line = Line(
783                 line.mode, depth=line.depth, inside_brackets=line.inside_brackets
784             )
785             current_line.append(leaf)
786
787     for leaf in line.leaves:
788         yield from append_to_line(leaf)
789
790         for comment_after in line.comments_after(leaf):
791             yield from append_to_line(comment_after)
792
793     if current_line:
794         yield current_line
795
796
797 def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
798     """Leave existing extra newlines if not `inside_brackets`. Remove everything
799     else.
800
801     Note: don't use backslashes for formatting or you'll lose your voting rights.
802     """
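    # For example: outside brackets a prefix of "\n\n    " keeps its two newlines
    # (indentation is reconstructed from the line depth later), while inside brackets
    # the prefix is cleared entirely.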
803     if not inside_brackets:
804         spl = leaf.prefix.split("#")
805         if "\\" not in spl[0]:
806             nl_count = spl[-1].count("\n")
807             if len(spl) > 1:
808                 nl_count -= 1
809             leaf.prefix = "\n" * nl_count
810             return
811
812     leaf.prefix = ""
813
814
815 def normalize_invisible_parens(
816     node: Node, parens_after: Set[str], *, preview: bool
817 ) -> None:
818     """Make existing optional parentheses invisible or create new ones.
819
820     `parens_after` is a set of string leaf values immediately after which parens
821     should be put.
822
823     Standardizes on visible parentheses for single-element tuples, and keeps
824     existing visible parentheses for other tuples and generator expressions.
825     """
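    # Illustrative examples: in `return (x + y)` the parentheses after `return` are
    # made invisible, so the statement typically renders as `return x + y`; a
    # one-element tuple such as `return x,` instead gets visible parentheses,
    # `return (x,)`.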
826     for pc in list_comments(node.prefix, is_endmarker=False, preview=preview):
827         if pc.value in FMT_OFF:
828             # This `node` has a prefix with `# fmt: off`, don't mess with parens.
829             return
830     check_lpar = False
831     for index, child in enumerate(list(node.children)):
832         # Fixes a bug where invisible parens are not properly stripped from
833         # assignment statements that contain type annotations.
834         if isinstance(child, Node) and child.type == syms.annassign:
835             normalize_invisible_parens(
836                 child, parens_after=parens_after, preview=preview
837             )
838
839         # Add parentheses around long tuple unpacking in assignments.
840         if (
841             index == 0
842             and isinstance(child, Node)
843             and child.type == syms.testlist_star_expr
844         ):
845             check_lpar = True
846
847         if check_lpar:
848             if child.type == syms.atom:
849                 if maybe_make_parens_invisible_in_atom(
850                     child,
851                     parent=node,
852                     preview=preview,
853                 ):
854                     wrap_in_parentheses(node, child, visible=False)
855             elif is_one_tuple(child):
856                 wrap_in_parentheses(node, child, visible=True)
857             elif node.type == syms.import_from:
858                 # "import from" nodes store parentheses directly as part of
859                 # the statement
860                 if is_lpar_token(child):
861                     assert is_rpar_token(node.children[-1])
862                     # make parentheses invisible
863                     child.value = ""
864                     node.children[-1].value = ""
865                 elif child.type != token.STAR:
866                     # insert invisible parentheses
867                     node.insert_child(index, Leaf(token.LPAR, ""))
868                     node.append_child(Leaf(token.RPAR, ""))
869                 break
870
871             elif not (isinstance(child, Leaf) and is_multiline_string(child)):
872                 wrap_in_parentheses(node, child, visible=False)
873
874         check_lpar = isinstance(child, Leaf) and child.value in parens_after
875
876
877 def maybe_make_parens_invisible_in_atom(
878     node: LN,
879     parent: LN,
880     preview: bool = False,
881 ) -> bool:
882     """If it's safe, make the parens in the atom `node` invisible, recursively.
883     Additionally, remove repeated, adjacent invisible parens from the atom `node`
884     as they are redundant.
885
886     Returns whether the node should itself be wrapped in invisible parentheses.
887
888     """
889     if (
890         preview
891         and parent.type == syms.for_stmt
892         and isinstance(node.prev_sibling, Leaf)
893         and node.prev_sibling.type == token.NAME
894         and node.prev_sibling.value == "for"
895     ):
896         for_stmt_check = False
897     else:
898         for_stmt_check = True
899
900     if (
901         node.type != syms.atom
902         or is_empty_tuple(node)
903         or is_one_tuple(node)
904         or (is_yield(node) and parent.type != syms.expr_stmt)
905         or (max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY and for_stmt_check)
906     ):
907         return False
908
909     if is_walrus_assignment(node):
910         if parent.type in [
911             syms.annassign,
912             syms.expr_stmt,
913             syms.assert_stmt,
914             syms.return_stmt,
915             # these ones aren't useful to end users, but they do please fuzzers
916             syms.for_stmt,
917             syms.del_stmt,
918         ]:
919             return False
920
921     first = node.children[0]
922     last = node.children[-1]
923     if is_lpar_token(first) and is_rpar_token(last):
924         middle = node.children[1]
925         # make parentheses invisible
926         first.value = ""
927         last.value = ""
928         maybe_make_parens_invisible_in_atom(middle, parent=parent, preview=preview)
929
930         if is_atom_with_invisible_parens(middle):
931             # Strip the invisible parens from `middle` by replacing
932             # it with the child in-between the invisible parens
933             middle.replace(middle.children[1])
934
935         return False
936
937     return True
938
939
940 def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
941     """Should `line` be immediately split with `delimiter_split()` after RHS?"""
942
943     if not (opening_bracket.parent and opening_bracket.value in "[{("):
944         return False
945
946     # We're essentially checking whether the body is delimited by commas and there is
947     # more than one of them (we exclude the trailing comma; if the delimiter priority
948     # is still commas after that, there must be more than one).
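    # Illustrative example: with the magic trailing comma enabled, a call written as
    # `foo(a, b,)` is exploded to one argument per line; brackets whose parent is an
    # atom or an `import_from` node (e.g. `from module import (...)`) qualify
    # regardless of a trailing comma, as long as commas are the top delimiter.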
949     exclude = set()
950     trailing_comma = False
951     try:
952         last_leaf = line.leaves[-1]
953         if last_leaf.type == token.COMMA:
954             trailing_comma = True
955             exclude.add(id(last_leaf))
956         max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
957     except (IndexError, ValueError):
958         return False
959
960     return max_priority == COMMA_PRIORITY and (
961         (line.mode.magic_trailing_comma and trailing_comma)
962         # always explode imports
963         or opening_bracket.parent.type in {syms.atom, syms.import_from}
964     )
965
966
967 def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
968     """Generate sets of closing bracket IDs that should be omitted in a RHS.
969
970     Brackets can be omitted if the entire trailer up to and including
971     a preceding closing bracket fits in one line.
972
973     Yielded sets are cumulative (contain results of previous yields, too).  First
974     set is empty, unless the line should explode, in which case bracket pairs until
975     the one that needs to explode are omitted.
976     """
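    # Illustrative example: for a chained call such as
    # `settings.load("config").validate().apply()`, successive yields add the closing
    # parentheses of the rightmost trailers to `omit`, so `right_hand_split` can try
    # splitting on an earlier bracket pair instead.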
977
978     omit: Set[LeafID] = set()
979     if not line.magic_trailing_comma:
980         yield omit
981
982     length = 4 * line.depth
983     opening_bracket: Optional[Leaf] = None
984     closing_bracket: Optional[Leaf] = None
985     inner_brackets: Set[LeafID] = set()
986     for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
987         length += leaf_length
988         if length > line_length:
989             break
990
991         has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
992         if leaf.type == STANDALONE_COMMENT or has_inline_comment:
993             break
994
995         if opening_bracket:
996             if leaf is opening_bracket:
997                 opening_bracket = None
998             elif leaf.type in CLOSING_BRACKETS:
999                 prev = line.leaves[index - 1] if index > 0 else None
1000                 if (
1001                     prev
1002                     and prev.type == token.COMMA
1003                     and leaf.opening_bracket is not None
1004                     and not is_one_sequence_between(
1005                         leaf.opening_bracket, leaf, line.leaves
1006                     )
1007                 ):
1008                     # Never omit bracket pairs with trailing commas.
1009                     # We need to explode on those.
1010                     break
1011
1012                 inner_brackets.add(id(leaf))
1013         elif leaf.type in CLOSING_BRACKETS:
1014             prev = line.leaves[index - 1] if index > 0 else None
1015             if prev and prev.type in OPENING_BRACKETS:
1016                 # Empty brackets would fail a split so treat them as "inner"
1017                 # brackets (e.g. only add them to the `omit` set if another
1018                 # pair of brackets was good enough).
1019                 inner_brackets.add(id(leaf))
1020                 continue
1021
1022             if closing_bracket:
1023                 omit.add(id(closing_bracket))
1024                 omit.update(inner_brackets)
1025                 inner_brackets.clear()
1026                 yield omit
1027
1028             if (
1029                 prev
1030                 and prev.type == token.COMMA
1031                 and leaf.opening_bracket is not None
1032                 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
1033             ):
1034                 # Never omit bracket pairs with trailing commas.
1035                 # We need to explode on those.
1036                 break
1037
1038             if leaf.value:
1039                 opening_bracket = leaf.opening_bracket
1040                 closing_bracket = leaf
1041
1042
1043 def run_transformer(
1044     line: Line,
1045     transform: Transformer,
1046     mode: Mode,
1047     features: Collection[Feature],
1048     *,
1049     line_str: str = "",
1050 ) -> List[Line]:
1051     if not line_str:
1052         line_str = line_to_string(line)
1053     result: List[Line] = []
1054     for transformed_line in transform(line, features):
1055         if str(transformed_line).strip("\n") == line_str:
1056             raise CannotTransform("Line transformer returned an unchanged result")
1057
1058         result.extend(transform_line(transformed_line, mode=mode, features=features))
1059
1060     if (
1061         transform.__class__.__name__ != "rhs"
1062         or not line.bracket_tracker.invisible
1063         or any(bracket.value for bracket in line.bracket_tracker.invisible)
1064         or line.contains_multiline_strings()
1065         or result[0].contains_uncollapsable_type_comments()
1066         or result[0].contains_unsplittable_type_ignore()
1067         or is_line_short_enough(result[0], line_length=mode.line_length)
1068         # If any leaves have no parents (which _can_ occur since
1069         # `transform(line)` potentially destroys the line's underlying node
1070         # structure), then we can't proceed. Doing so would cause the below
1071         # call to `append_leaves()` to fail.
1072         or any(leaf.parent is None for leaf in line.leaves)
1073     ):
1074         return result
1075
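    # Otherwise, get a second opinion: rerun the transformer on a copy of the line
    # with FORCE_OPTIONAL_PARENTHESES enabled, and prefer that result if every
    # resulting line fits within the configured line length.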
1076     line_copy = line.clone()
1077     append_leaves(line_copy, line, line.leaves)
1078     features_fop = set(features) | {Feature.FORCE_OPTIONAL_PARENTHESES}
1079     second_opinion = run_transformer(
1080         line_copy, transform, mode, features_fop, line_str=line_str
1081     )
1082     if all(
1083         is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
1084     ):
1085         result = second_opinion
1086     return result