# src/black/linegen.py
1 """
2 Generating lines of code.
3 """
4 from functools import partial, wraps
5 import sys
6 from typing import Collection, Iterator, List, Optional, Set, Union
7
8 from black.nodes import WHITESPACE, RARROW, STATEMENT, STANDALONE_COMMENT
9 from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
10 from black.nodes import Visitor, syms, is_arith_like, ensure_visible
11 from black.nodes import is_docstring, is_empty_tuple, is_one_tuple, is_one_tuple_between
12 from black.nodes import is_name_token, is_lpar_token, is_rpar_token
13 from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
14 from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
15 from black.nodes import wrap_in_parentheses
16 from black.brackets import max_delimiter_priority_in_atom
17 from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
18 from black.lines import Line, line_to_string, is_line_short_enough
19 from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
20 from black.comments import generate_comments, list_comments, FMT_OFF
21 from black.numerics import normalize_numeric_literal
22 from black.strings import get_string_prefix, fix_docstring
23 from black.strings import normalize_string_prefix, normalize_string_quotes
24 from black.trans import Transformer, CannotTransform, StringMerger, StringSplitter
25 from black.trans import StringParenWrapper, StringParenStripper, hug_power_op
26 from black.mode import Mode, Feature, Preview
27
28 from blib2to3.pytree import Node, Leaf
29 from blib2to3.pgen2 import token
30
31
32 # types
33 LeafID = int
34 LN = Union[Leaf, Node]
35
36
37 class CannotSplit(CannotTransform):
38     """A readable split that fits the allotted line length is impossible."""
39
40
41 # This isn't a dataclass because @dataclass + Generic breaks mypyc.
42 # See also https://github.com/mypyc/mypyc/issues/827.
43 class LineGenerator(Visitor[Line]):
44     """Generates reformatted Line objects.  Empty lines are not emitted.
45
46     Note: destroys the tree it's visiting by mutating prefixes of its leaves
47     in ways that will no longer stringify to valid Python code on the tree.
48     """
49
50     def __init__(self, mode: Mode) -> None:
51         self.mode = mode
52         self.current_line: Line
53         self.__post_init__()
54
55     def line(self, indent: int = 0) -> Iterator[Line]:
56         """Generate a line.
57
58         If the line is empty, only emit if it makes sense.
59         If the line is too long, split it first and then generate.
60
61         If any lines were generated, set up a new current_line.
62         """
63         if not self.current_line:
64             self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one is unnecessary.

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line

    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.mode.string_normalization and node.type == token.STRING:
                node.value = normalize_string_prefix(node.value)
                node.value = normalize_string_quotes(node.value)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)

    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)

    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments.  At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level.  Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)

    def visit_stmt(
        self, node: Node, keywords: Set[str], parens: Set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert`, and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
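        # For example (illustrative): in an `if ... elif ... else ...` statement,
        # `elif` and `else` are in `keywords`, so a fresh line is started before
        # each of them; with parens={"if", "elif"}, invisible parentheses are put
        # around the condition following `if`/`elif` so it can be split if long.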
        normalize_invisible_parens(node, parens_after=parens)
        for child in node.children:
            if is_name_token(child) and child.value in keywords:
                yield from self.line()

            yield from self.visit(child)

    def visit_match_case(self, node: Node) -> Iterator[Line]:
        """Visit either a match or case statement."""
        normalize_invisible_parens(node, parens_after=set())

        yield from self.line()
        for child in node.children:
            yield from self.visit(child)

    def visit_suite(self, node: Node) -> Iterator[Line]:
        """Visit a suite."""
        if self.mode.is_pyi and is_stub_suite(node):
            yield from self.visit(node.children[2])
        else:
            yield from self.visit_default(node)

    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        prev_type: Optional[int] = None
        for child in node.children:
            if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
                wrap_in_parentheses(node, child, visible=False)
            prev_type = child.type

        is_suite_like = node.parent and node.parent.type in STATEMENT
        if is_suite_like:
            if self.mode.is_pyi and is_stub_body(node):
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if (
                not self.mode.is_pyi
                or not node.parent
                or not is_stub_suite(node.parent)
            ):
                yield from self.line()
            yield from self.visit_default(node)

    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC:
                break

        internal_stmt = next(children)
        for child in internal_stmt.children:
            yield from self.visit(child)

    def visit_decorators(self, node: Node) -> Iterator[Line]:
        """Visit decorators."""
        for child in node.children:
            yield from self.line()
            yield from self.visit(child)

    def visit_power(self, node: Node) -> Iterator[Line]:
        for idx, leaf in enumerate(node.children[:-1]):
            next_leaf = node.children[idx + 1]

            if not isinstance(leaf, Leaf):
                continue

            value = leaf.value.lower()
            if (
                leaf.type == token.NUMBER
                and next_leaf.type == syms.trailer
                # Ensure that we are in an attribute trailer
                and next_leaf.children[0].type == token.DOT
                # It shouldn't wrap hexadecimal, binary and octal literals
                and not value.startswith(("0x", "0b", "0o"))
                # It shouldn't wrap complex literals
                and "j" not in value
            ):
                wrap_in_parentheses(node, leaf)

        yield from self.visit_default(node)

    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        yield from self.line()

    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()

    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)

    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)

    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        if is_docstring(leaf) and "\\\n" not in leaf.value:
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            docstring = normalize_string_prefix(leaf.value)
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring

            if is_multiline_string(leaf):
                indent = " " * 4 * self.current_line.depth
                docstring = fix_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
            elif not docstring_started_empty:
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len
            leaf.value = prefix + quote + docstring + quote

        yield from self.visit_default(leaf)

    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
        Ø: Set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
        self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
        self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators

        # PEP 634
        self.visit_match_stmt = self.visit_match_case
        self.visit_case_block = self.visit_match_case


def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
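    # Roughly, the dispatch below: comment-only lines are yielded unchanged; lines
    # that already fit (and carry no magic trailing comma) get at most string
    # preprocessing; `def` headers use left_hand_split; everything else goes
    # through the rhs wrapper (plus delimiter/standalone-comment splits when
    # inside brackets), with hug_power_op appended to every transformer list.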
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: List[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if Preview.string_processing in mode:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def:
        transformers = [left_hand_split]
    else:

        def _rhs(
            self: object, line: Line, features: Collection[Feature]
        ) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(
                    right_hand_split(line, mode.line_length, features, omit=omit)
                )
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length.  This is true only
                # for simple cases.  All others require running more transforms via
                # `transform_line()`.  This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], line_length=mode.line_length):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(
                line, line_length=mode.line_length, features=features
            )

        # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
        # __name__ attribute which is needed in `run_transformer` further down.
        # Unfortunately a nested class breaks mypyc too. So a class must be created
        # via type ... https://github.com/mypyc/mypyc/issues/884
        rhs = type("rhs", (), {"__call__": _rhs})()

        if Preview.string_processing in mode:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]
    # It's always safe to attempt hugging of power operations and pretty much every line
    # could match.
    transformers.append(hug_power_op)

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        yield line


def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise.  This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
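    # Illustrative example: for a long header such as
    #     def process(self, data: bytes, *, verbose: bool = False) -> Result:
    # the head is "def process(", the body is the parameter list, and the tail is
    # ") -> Result:" (the names here are invented for illustration).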
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = head_leaves
    matching_bracket: Optional[Leaf] = None
    for leaf in line.leaves:
        if (
            current_leaves is body_leaves
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is matching_bracket
        ):
            current_leaves = tail_leaves if body_leaves else head_leaves
        current_leaves.append(leaf)
        if current_leaves is head_leaves:
            if leaf.type in OPENING_BRACKETS:
                matching_bracket = leaf
                current_leaves = body_leaves
    if not matching_bracket:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(head_leaves, line, matching_bracket)
    body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result


def right_hand_split(
    line: Line,
    line_length: int,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
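    # Illustrative example: in `result = some_call(arg_one, arg_two)` the last
    # bracket pair is the call's parentheses, giving head "result = some_call(",
    # body "arg_one, arg_two", and tail ")".  When the pair found is an invisible
    # optional paren, the split is retried without it, subject to the checks
    # below (names invented for illustration).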
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()
    head = bracket_split_build_line(head_leaves, line, opening_bracket)
    body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and opening_bracket.type == token.LPAR
        and not opening_bracket.value
        # the closing bracket is an optional paren
        and closing_bracket.type == token.RPAR
        and not closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(body, line_length)
    ):
        omit = {id(closing_bracket), *omit}
        try:
            yield from right_hand_split(line, line_length, features=features, omit=omit)
            return

        except CannotSplit as e:
            if not (
                can_be_split(body)
                or is_line_short_enough(body, line_length=line_length)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                ) from e

            elif head.contains_multiline_strings() or tail.contains_multiline_strings():
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                ) from e

    ensure_visible(opening_bracket)
    ensure_visible(closing_bracket)
    for result in (head, body, tail):
        if result:
            yield result


def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
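    # Illustrative example: splitting `func()` would yield head "func(", an empty
    # body, and tail ")"; a split that saves fewer than 3 characters is rejected.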
    tail_len = len(str(tail).strip())
    if not body:
        if tail_len == 0:
            raise CannotSplit("Splitting brackets produced the same line")

        elif tail_len < 3:
            raise CannotSplit(
                f"Splitting brackets on an empty body to save {tail_len} characters is"
                " not worth it"
            )


def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
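    # For example (illustrative): the body built for
    #     from pkg import (name_one, name_two)
    # gets a trailing comma after "name_two" because the original is an import;
    # a single-argument `def` body with no comma is treated the same way.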
    result = Line(mode=original.mode, depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (
                original.is_def
                and opening_bracket.value == "("
                and not any(leaf.type == token.COMMA for leaf in leaves)
                # In particular, don't add one within a parenthesized return annotation.
                # Unfortunately the indicator we're in a return annotation (RARROW) may
                # be defined directly in the parent node, the parent of the parent ...
                # and so on depending on how complex the return annotation is.
                # This isn't perfect and there are some false negatives, but they
                # are in contexts where a comma is actually fine.
                and not any(
                    node.prev_sibling.type == RARROW
                    for node in (
                        leaves[0].parent,
                        getattr(leaves[0].parent, "parent", None),
                    )
                    if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
                )
            )

            if original.is_import or no_commas:
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body and should_split_line(result, opening_bracket):
        result.should_split_rhs = True
    return result


def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
        for line in split_func(line, features):
            normalize_prefix(line.leaves[0], inside_brackets=True)
            yield line

    return split_wrapper


@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
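    # Illustrative example: if the highest-priority delimiter in the body
    # `arg_one, arg_two, arg_three` is the comma, each element (with its trailing
    # comments) is yielded on its own line, and a trailing comma is appended to
    # the last one when that is safe for the requested target versions.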
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty") from None

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            new_comma = Leaf(token.COMMA, ",")
            current_line.append(new_comma)
        yield current_line


@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line


def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
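    # For example (illustrative): outside brackets a prefix of "\n\n\n" is kept as
    # three newlines, while "  # note\n" loses the newline that terminated the
    # comment; inside brackets the prefix is always cleared.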
    if not inside_brackets:
        spl = leaf.prefix.split("#")
        if "\\" not in spl[0]:
            nl_count = spl[-1].count("\n")
            if len(spl) > 1:
                nl_count -= 1
            leaf.prefix = "\n" * nl_count
            return

    leaf.prefix = ""


def normalize_invisible_parens(node: Node, parens_after: Set[str]) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
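    # Illustrative examples: in `return (x)` the redundant parentheses become
    # invisible, so the statement prints as `return x`; in `return x,` the
    # one-element tuple gains visible parentheses and prints as `return (x,)`.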
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return
    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(child, parens_after=parens_after)

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(child, parent=node):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement
                if is_lpar_token(child):
                    assert is_rpar_token(node.children[-1])
                    # make parentheses invisible
                    child.value = ""
                    node.children[-1].value = ""
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                wrap_in_parentheses(node, child, visible=False)

        check_lpar = isinstance(child, Leaf) and child.value in parens_after


def maybe_make_parens_invisible_in_atom(node: LN, parent: LN) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.
    """
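    # Illustrative example: for `x = (a + b)` the parentheses around `a + b` are
    # emptied (made invisible) and False is returned; a one-tuple such as `(x,)`
    # is left untouched and also returns False, so the caller keeps it visible.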

    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
    ):
        return False

    if is_walrus_assignment(node):
        if parent.type in [
            syms.annassign,
            syms.expr_stmt,
            syms.assert_stmt,
            syms.return_stmt,
            # these ones aren't useful to end users, but they do please fuzzers
            syms.for_stmt,
            syms.del_stmt,
        ]:
            return False

    first = node.children[0]
    last = node.children[-1]
    if is_lpar_token(first) and is_rpar_token(last):
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""
        last.value = ""
        maybe_make_parens_invisible_in_atom(middle, parent=parent)

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        return False

    return True


def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""
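    # Illustrative example: the check passes for an import body such as
    # `from pkg import (a, b)` (commas are the top-priority delimiter and the
    # bracket's parent is an import_from node) and for a bracketed body ending in
    # a magic trailing comma; a body whose only comma is the trailing one fails.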

    if not (opening_bracket.parent and opening_bracket.value in "[{("):
        return False

    # We're essentially checking if the body is delimited by commas and there's more
    # than one of them (we're excluding the trailing comma and if the delimiter priority
    # is still commas, that means there's more).
    exclude = set()
    trailing_comma = False
    try:
        last_leaf = line.leaves[-1]
        if last_leaf.type == token.COMMA:
            trailing_comma = True
            exclude.add(id(last_leaf))
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
    except (IndexError, ValueError):
        return False

    return max_priority == COMMA_PRIORITY and (
        (line.mode.magic_trailing_comma and trailing_comma)
        # always explode imports
        or opening_bracket.parent.type in {syms.atom, syms.import_from}
    )


def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too).  First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """
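    # Illustrative example: for a chained call like `obj.first(a).second(b).third(c)`
    # the yielded sets let right_hand_split try the last bracket pair first, then
    # progressively earlier ones, roughly as long as the trailer to the right of
    # the omitted pair still fits on one line (names invented for illustration).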

    omit: Set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    length = 4 * line.depth
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
        length += leaf_length
        if length > line_length:
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and leaf.opening_bracket is not None
                    and not is_one_tuple_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (e.g. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and leaf.opening_bracket is not None
                and not is_one_tuple_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf


def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> List[Line]:
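    # Apply `transform` to `line`, recursively re-formatting every produced line.
    # When an "rhs" transform on a line with unused (still invisible) optional
    # parens leaves the first result line too long, retry once with
    # FORCE_OPTIONAL_PARENTHESES and keep that "second opinion" only if all of
    # its lines fit the configured line length.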
    if not line_str:
        line_str = line_to_string(line)
    result: List[Line] = []
    for transformed_line in transform(line, features):
        if str(transformed_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")

        result.extend(transform_line(transformed_line, mode=mode, features=features))

    if (
        transform.__class__.__name__ != "rhs"
        or not line.bracket_tracker.invisible
        or any(bracket.value for bracket in line.bracket_tracker.invisible)
        or line.contains_multiline_strings()
        or result[0].contains_uncollapsable_type_comments()
        or result[0].contains_unsplittable_type_ignore()
        or is_line_short_enough(result[0], line_length=mode.line_length)
        # If any leaves have no parents (which _can_ occur since
        # `transform(line)` potentially destroys the line's underlying node
        # structure), then we can't proceed. Doing so would cause the below
        # call to `append_leaves()` to fail.
        or any(leaf.parent is None for leaf in line.leaves)
    ):
        return result

    line_copy = line.clone()
    append_leaves(line_copy, line, line.leaves)
    features_fop = set(features) | {Feature.FORCE_OPTIONAL_PARENTHESES}
    second_opinion = run_transformer(
        line_copy, transform, mode, features_fop, line_str=line_str
    )
    if all(
        is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
    ):
        result = second_opinion
    return result