# src/black/linegen.py

1 """
2 Generating lines of code.
3 """
4 from functools import partial, wraps
5 import sys
6 from typing import Collection, Iterator, List, Optional, Set, Union
7
8 from dataclasses import dataclass, field
9
10 from black.nodes import WHITESPACE, STATEMENT, STANDALONE_COMMENT
11 from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
12 from black.nodes import Visitor, syms, first_child_is_arith, ensure_visible
13 from black.nodes import is_docstring, is_empty_tuple, is_one_tuple, is_one_tuple_between
14 from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
15 from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
16 from black.nodes import wrap_in_parentheses
17 from black.brackets import max_delimiter_priority_in_atom
18 from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
19 from black.lines import Line, line_to_string, is_line_short_enough
20 from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
21 from black.comments import generate_comments, list_comments, FMT_OFF
22 from black.numerics import normalize_numeric_literal
23 from black.strings import get_string_prefix, fix_docstring
24 from black.strings import normalize_string_prefix, normalize_string_quotes
25 from black.trans import Transformer, CannotTransform, StringMerger
26 from black.trans import StringSplitter, StringParenWrapper, StringParenStripper
27 from black.mode import Mode
28 from black.mode import Feature
29
30 from blib2to3.pytree import Node, Leaf
31 from blib2to3.pgen2 import token
32
33
34 # types
35 LeafID = int
36 LN = Union[Leaf, Node]
37
38
39 class CannotSplit(CannotTransform):
40     """A readable split that fits the allotted line length is impossible."""
41
42
43 @dataclass
44 class LineGenerator(Visitor[Line]):
45     """Generates reformatted Line objects.  Empty lines are not emitted.
46
47     Note: destroys the tree it's visiting by mutating prefixes of its leaves
48     in ways that will no longer stringify to valid Python code on the tree.
49     """
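    # Usage sketch (hedged; the call site lives outside this module and is an
    # assumption, not taken from this file):
    #
    #     line_generator = LineGenerator(mode=mode)
    #     for line in line_generator.visit(parsed_tree):
    #         ...  # hand each Line to transform_line() below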

    mode: Mode
    remove_u_prefix: bool = False
    current_line: Line = field(init=False)

    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one is unnecessary.

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line

    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.mode.string_normalization and node.type == token.STRING:
                node.value = normalize_string_prefix(
                    node.value, remove_u_prefix=self.remove_u_prefix
                )
                node.value = normalize_string_quotes(node.value)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)

    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)

    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments.  At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level.  Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)

    def visit_stmt(
        self, node: Node, keywords: Set[str], parens: Set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert` and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
        normalize_invisible_parens(node, parens_after=parens)
        for child in node.children:
            if child.type == token.NAME and child.value in keywords:  # type: ignore
                yield from self.line()

            yield from self.visit(child)

    def visit_suite(self, node: Node) -> Iterator[Line]:
        """Visit a suite."""
        if self.mode.is_pyi and is_stub_suite(node):
            yield from self.visit(node.children[2])
        else:
            yield from self.visit_default(node)

    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        if first_child_is_arith(node):
            wrap_in_parentheses(node, node.children[0], visible=False)
        is_suite_like = node.parent and node.parent.type in STATEMENT
        if is_suite_like:
            if self.mode.is_pyi and is_stub_body(node):
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if (
                not self.mode.is_pyi
                or not node.parent
                or not is_stub_suite(node.parent)
            ):
                yield from self.line()
            yield from self.visit_default(node)

    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC:
                break

        internal_stmt = next(children)
        for child in internal_stmt.children:
            yield from self.visit(child)

    def visit_decorators(self, node: Node) -> Iterator[Line]:
        """Visit decorators."""
        for child in node.children:
            yield from self.line()
            yield from self.visit(child)

    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        yield from self.line()

    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()

    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
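        # Standalone comments outside any open brackets start a fresh line of their
        # own; inside brackets they stay on the current line so later splits can
        # position them.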
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)

    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)

    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
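        # Docstrings get their prefix normalized, their indentation rewritten via
        # fix_docstring(), and padding added so the body never touches the closing
        # quotes; all strings then continue through visit_default() as usual.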
        if is_docstring(leaf) and "\\\n" not in leaf.value:
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            docstring = normalize_string_prefix(leaf.value, self.remove_u_prefix)
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring

            if is_multiline_string(leaf):
                indent = " " * 4 * self.current_line.depth
                docstring = fix_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
            elif not docstring_started_empty:
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len
            leaf.value = prefix + quote + docstring + quote

        yield from self.visit_default(leaf)

    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
        Ø: Set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
        self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
        self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators


def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
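    # Illustration (a sketch of the intended effect, not an excerpt from a test):
    # a call such as `print("a", "b", "c")` that no longer fits the line length is
    # re-emitted by the right-hand-split transformer as
    #
    #     print(
    #         "a", "b", "c"
    #     )
    #
    # and, if the body still does not fit, delimiter_split() puts each argument on
    # its own line with a trailing comma.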
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: List[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if mode.experimental_string_processing:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def:
        transformers = [left_hand_split]
    else:

        def rhs(line: Line, features: Collection[Feature]) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(
                    right_hand_split(line, mode.line_length, features, omit=omit)
                )
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length.  This is true only
                # for simple cases.  All others require running more transforms via
                # `transform_line()`.  This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], line_length=mode.line_length):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(
                line, line_length=mode.line_length, features=features
            )

        if mode.experimental_string_processing:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        yield line


def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird; only use this for function definitions.
    Prefer RHS otherwise.  This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
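    # Illustrative sketch with invented names (not from a test case): for a long
    # definition such as
    #
    #     def process(self, first_argument, second_argument) -> Result:
    #
    # the head is `def process(`, the body is `self, first_argument, second_argument`
    # placed one indent deeper, and the tail is `) -> Result:`.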
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = head_leaves
    matching_bracket: Optional[Leaf] = None
    for leaf in line.leaves:
        if (
            current_leaves is body_leaves
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is matching_bracket
        ):
            current_leaves = tail_leaves if body_leaves else head_leaves
        current_leaves.append(leaf)
        if current_leaves is head_leaves:
            if leaf.type in OPENING_BRACKETS:
                matching_bracket = leaf
                current_leaves = body_leaves
    if not matching_bracket:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(head_leaves, line, matching_bracket)
    body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result


def right_hand_split(
    line: Line,
    line_length: int,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
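    # Illustrative sketch with invented names (not from a test case): for
    #
    #     result = some_object.method(first_argument, second_argument)
    #
    # the last bracket pair is the call's parentheses, so the head becomes
    # `result = some_object.method(`, the body holds the arguments one indent
    # deeper, and the tail is `)`.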
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()
    head = bracket_split_build_line(head_leaves, line, opening_bracket)
    body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and opening_bracket.type == token.LPAR
        and not opening_bracket.value
        # the closing bracket is an optional paren
        and closing_bracket.type == token.RPAR
        and not closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(body, line_length, omit_on_explode=omit)
    ):
        omit = {id(closing_bracket), *omit}
        try:
            yield from right_hand_split(line, line_length, features=features, omit=omit)
            return

        except CannotSplit:
            if not (
                can_be_split(body)
                or is_line_short_enough(body, line_length=line_length)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                )

            elif head.contains_multiline_strings() or tail.contains_multiline_strings():
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                )

    ensure_visible(opening_bracket)
    ensure_visible(closing_bracket)
    for result in (head, body, tail):
        if result:
            yield result


def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
    tail_len = len(str(tail).strip())
    if not body:
        if tail_len == 0:
            raise CannotSplit("Splitting brackets produced the same line")

        elif tail_len < 3:
            raise CannotSplit(
                f"Splitting brackets on an empty body to save {tail_len} characters is"
                " not worth it"
            )


def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
    result = Line(mode=original.mode, depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (
                original.is_def
                and opening_bracket.value == "("
                and not any(leaf.type == token.COMMA for leaf in leaves)
            )

            if original.is_import or no_commas:
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body and should_split_line(result, opening_bracket):
        result.should_split_rhs = True
    return result


def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
        for line in split_func(line, features):
            normalize_prefix(line.leaves[0], inside_brackets=True)
            yield line

    return split_wrapper


@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
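    # Illustrative sketch (not from a test case): a bracketed body such as
    #
    #     first_item, second_item, third_item
    #
    # whose highest-priority delimiter is the comma is emitted as one line per
    # item, with a trailing comma appended when it is safe to do so.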
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty")

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found")

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            new_comma = Leaf(token.COMMA, ",")
            current_line.append(new_comma)
        yield current_line


@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line


def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if not inside_brackets:
        spl = leaf.prefix.split("#")
        if "\\" not in spl[0]:
            nl_count = spl[-1].count("\n")
            if len(spl) > 1:
                nl_count -= 1
            leaf.prefix = "\n" * nl_count
            return

    leaf.prefix = ""


def normalize_invisible_parens(node: Node, parens_after: Set[str]) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
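    # Illustrative sketch (not from a test case): in `return (x)` the redundant
    # parentheses become invisible (empty-valued LPAR/RPAR leaves), `return x + y`
    # gains a pair of invisible parentheses so later splits have a bracket to use,
    # and a one-tuple such as `return 1,` gets visible parentheses: `return (1,)`.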
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return
    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(child, parens_after=parens_after)

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(child, parent=node):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement
                if child.type == token.LPAR:
                    # make parentheses invisible
                    child.value = ""  # type: ignore
                    node.children[-1].value = ""  # type: ignore
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                wrap_in_parentheses(node, child, visible=False)

        check_lpar = isinstance(child, Leaf) and child.value in parens_after


def maybe_make_parens_invisible_in_atom(node: LN, parent: LN) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.

    """

    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
    ):
        return False

    if is_walrus_assignment(node):
        if parent.type in [
            syms.annassign,
            syms.expr_stmt,
            syms.assert_stmt,
            syms.return_stmt,
            # these ones aren't useful to end users, but they do please fuzzers
            syms.for_stmt,
            syms.del_stmt,
        ]:
            return False

    first = node.children[0]
    last = node.children[-1]
    if first.type == token.LPAR and last.type == token.RPAR:
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""  # type: ignore
        last.value = ""  # type: ignore
        maybe_make_parens_invisible_in_atom(middle, parent=parent)

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        return False

    return True


def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""

    if not (opening_bracket.parent and opening_bracket.value in "[{("):
        return False

    # We're essentially checking if the body is delimited by commas and there's more
    # than one of them (we're excluding the trailing comma and if the delimiter priority
    # is still commas, that means there's more).
    exclude = set()
    trailing_comma = False
    try:
        last_leaf = line.leaves[-1]
        if last_leaf.type == token.COMMA:
            trailing_comma = True
            exclude.add(id(last_leaf))
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
    except (IndexError, ValueError):
        return False

    return max_priority == COMMA_PRIORITY and (
        (line.mode.magic_trailing_comma and trailing_comma)
        # always explode imports
        or opening_bracket.parent.type in {syms.atom, syms.import_from}
    )


def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too).  First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """
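    # Illustrative sketch with invented names (not from a test case): for a chain
    # like
    #
    #     result = settings.load(path).validate(schema).apply(defaults)
    #
    # the yielded sets progressively omit the trailing `(defaults)` and `(schema)`
    # pairs so right_hand_split() can try splitting on an earlier bracket pair.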

    omit: Set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    length = 4 * line.depth
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
        length += leaf_length
        if length > line_length:
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and not is_one_tuple_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (e.g. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and not is_one_tuple_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf


def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> List[Line]:
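    # Apply `transform` to `line`, recursively feeding each produced line back
    # through transform_line().  For the rhs transformer, when the line carries
    # genuinely invisible parentheses and the first attempt still yields an
    # over-long first line, a second pass with FORCE_OPTIONAL_PARENTHESES is run
    # and preferred if every line it produces fits the line length.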
    if not line_str:
        line_str = line_to_string(line)
    result: List[Line] = []
    for transformed_line in transform(line, features):
        if str(transformed_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")

        result.extend(transform_line(transformed_line, mode=mode, features=features))

    if (
        transform.__name__ != "rhs"
        or not line.bracket_tracker.invisible
        or any(bracket.value for bracket in line.bracket_tracker.invisible)
        or line.contains_multiline_strings()
        or result[0].contains_uncollapsable_type_comments()
        or result[0].contains_unsplittable_type_ignore()
        or is_line_short_enough(result[0], line_length=mode.line_length)
        # If any leaves have no parents (which _can_ occur since
        # `transform(line)` potentially destroys the line's underlying node
        # structure), then we can't proceed. Doing so would cause the below
        # call to `append_leaves()` to fail.
        or any(leaf.parent is None for leaf in line.leaves)
    ):
        return result

    line_copy = line.clone()
    append_leaves(line_copy, line, line.leaves)
    features_fop = set(features) | {Feature.FORCE_OPTIONAL_PARENTHESES}
    second_opinion = run_transformer(
        line_copy, transform, mode, features_fop, line_str=line_str
    )
    if all(
        is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
    ):
        result = second_opinion
    return result