src/black/linegen.py
1 """
2 Generating lines of code.
3 """
4 from functools import partial, wraps
5 import sys
6 from typing import Collection, Iterator, List, Optional, Set, Union
7
8 from dataclasses import dataclass, field
9
10 from black.nodes import WHITESPACE, STATEMENT, STANDALONE_COMMENT
11 from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
12 from black.nodes import Visitor, syms, first_child_is_arith, ensure_visible
13 from black.nodes import is_docstring, is_empty_tuple, is_one_tuple, is_one_tuple_between
14 from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
15 from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
16 from black.nodes import wrap_in_parentheses
17 from black.brackets import max_delimiter_priority_in_atom
18 from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
19 from black.lines import Line, line_to_string, is_line_short_enough
20 from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
21 from black.comments import generate_comments, list_comments, FMT_OFF
22 from black.numerics import normalize_numeric_literal
23 from black.strings import get_string_prefix, fix_docstring
24 from black.strings import normalize_string_prefix, normalize_string_quotes
25 from black.trans import Transformer, CannotTransform, StringMerger
26 from black.trans import StringSplitter, StringParenWrapper, StringParenStripper
27 from black.mode import Mode
28 from black.mode import Feature
29
30 from blib2to3.pytree import Node, Leaf
31 from blib2to3.pgen2 import token
32
33
34 # types
35 LeafID = int
36 LN = Union[Leaf, Node]
37
38
39 class CannotSplit(CannotTransform):
40     """A readable split that fits the allotted line length is impossible."""
41
42
43 @dataclass
44 class LineGenerator(Visitor[Line]):
45     """Generates reformatted Line objects.  Empty lines are not emitted.
46
47     Note: destroys the tree it's visiting by mutating prefixes of its leaves
48     in ways that will no longer stringify to valid Python code on the tree.
49     """
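    # Rough usage sketch (illustrative, not part of the original module): the
    # formatter drives this visitor along the lines of
    #
    #     line_generator = LineGenerator(mode=mode)
    #     for line in line_generator.visit(syntax_tree_root):
    #         ...  # each Line is then passed through transform_line()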

    mode: Mode
    remove_u_prefix: bool = False
    current_line: Line = field(init=False)

    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one is unnecessary.

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line

    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.mode.string_normalization and node.type == token.STRING:
                node.value = normalize_string_prefix(
                    node.value, remove_u_prefix=self.remove_u_prefix
                )
                node.value = normalize_string_quotes(node.value)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)

    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)

    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments.  At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level.  Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)

    def visit_stmt(
        self, node: Node, keywords: Set[str], parens: Set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert` and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
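        # Illustrative example (not from the original source): for an if/elif
        # chain, each keyword listed in `keywords` starts a fresh Line, and
        # because "if" and "elif" are in `parens`, the condition following each
        # of them gets invisible parentheses so it can be split later if needed.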
        normalize_invisible_parens(node, parens_after=parens)
        for child in node.children:
            if child.type == token.NAME and child.value in keywords:  # type: ignore
                yield from self.line()

            yield from self.visit(child)

    def visit_suite(self, node: Node) -> Iterator[Line]:
        """Visit a suite."""
        if self.mode.is_pyi and is_stub_suite(node):
            yield from self.visit(node.children[2])
        else:
            yield from self.visit_default(node)

    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        if first_child_is_arith(node):
            wrap_in_parentheses(node, node.children[0], visible=False)
        is_suite_like = node.parent and node.parent.type in STATEMENT
        if is_suite_like:
            if self.mode.is_pyi and is_stub_body(node):
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if (
                not self.mode.is_pyi
                or not node.parent
                or not is_stub_suite(node.parent)
            ):
                yield from self.line()
            yield from self.visit_default(node)

    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC:
                break

        internal_stmt = next(children)
        for child in internal_stmt.children:
            yield from self.visit(child)

    def visit_decorators(self, node: Node) -> Iterator[Line]:
        """Visit decorators."""
        for child in node.children:
            yield from self.line()
            yield from self.visit(child)

    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        yield from self.line()

    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()

    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)

    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)

    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        if is_docstring(leaf) and "\\\n" not in leaf.value:
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            prefix = get_string_prefix(leaf.value)
            docstring = leaf.value[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
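            # Worked examples (illustrative): for '"""abc"""' the second
            # character is also '"', so quote_len is 3 and the content is 'abc';
            # for "'x'" the second character is 'x', so quote_len is 1.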
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]

            if is_multiline_string(leaf):
                indent = " " * 4 * self.current_line.depth
                docstring = fix_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
            else:
                # Add some padding if the docstring is empty.
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len
            leaf.value = prefix + quote + docstring + quote

        yield from self.visit_default(leaf)

    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
        Ø: Set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
        self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
        self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators


def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    The resulting lines should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
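    # Descriptive note (not in the original source): a list of transformers is
    # chosen below based on the line's shape; run_transformer() tries each one
    # in order, the first that succeeds wins, and if every transformer raises
    # CannotTransform the original line is yielded unchanged.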
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: List[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if mode.experimental_string_processing:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def:
        transformers = [left_hand_split]
    else:

        def rhs(line: Line, features: Collection[Feature]) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(
                    right_hand_split(line, mode.line_length, features, omit=omit)
                )
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length.  This is true only
                # for simple cases.  All others require running more transforms via
                # `transform_line()`.  This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], line_length=mode.line_length):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(
                line, line_length=mode.line_length, features=features
            )

        if mode.experimental_string_processing:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        yield line


def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird; only use this for function definitions.
    Prefer RHS otherwise.  This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
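    # Illustrative example (not from the original source): for
    #     def process(items: List[str], *, limit: int = 10) -> None:
    # the head is "def process(", the body is the parameter list, and the tail
    # is ") -> None:".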
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = head_leaves
    matching_bracket: Optional[Leaf] = None
    for leaf in line.leaves:
        if (
            current_leaves is body_leaves
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is matching_bracket
        ):
            current_leaves = tail_leaves if body_leaves else head_leaves
        current_leaves.append(leaf)
        if current_leaves is head_leaves:
            if leaf.type in OPENING_BRACKETS:
                matching_bracket = leaf
                current_leaves = body_leaves
    if not matching_bracket:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(head_leaves, line, matching_bracket)
    body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result


def right_hand_split(
    line: Line,
    line_length: int,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
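    # Illustrative example (not from the original source): for
    #     result = compute_totals(first_value, second_value)
    # the last bracket pair is the call's parentheses, so the head is
    # "result = compute_totals(", the body is the argument list, and the tail
    # is ")".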
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()
    head = bracket_split_build_line(head_leaves, line, opening_bracket)
    body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and opening_bracket.type == token.LPAR
        and not opening_bracket.value
        # the closing bracket is an optional paren
        and closing_bracket.type == token.RPAR
        and not closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(body, line_length, omit_on_explode=omit)
    ):
        omit = {id(closing_bracket), *omit}
        try:
            yield from right_hand_split(line, line_length, features=features, omit=omit)
            return

        except CannotSplit:
            if not (
                can_be_split(body)
                or is_line_short_enough(body, line_length=line_length)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                )

            elif head.contains_multiline_strings() or tail.contains_multiline_strings():
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                )

    ensure_visible(opening_bracket)
    ensure_visible(closing_bracket)
    for result in (head, body, tail):
        if result:
            yield result


def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
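    # Illustrative example (not from the original source): splitting `x = foo()`
    # on its call brackets gives head "x = foo(", an empty body, and tail ")";
    # the tail is shorter than 3 characters, so CannotSplit is raised below.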
    tail_len = len(str(tail).strip())
    if not body:
        if tail_len == 0:
            raise CannotSplit("Splitting brackets produced the same line")

        elif tail_len < 3:
            raise CannotSplit(
                f"Splitting brackets on an empty body to save {tail_len} characters is"
                " not worth it"
            )


def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
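    # Illustrative example (not from the original source): for the body of
    #     from some_module import (first_name, second_name)
    # `original.is_import` is true, so a trailing comma is appended after
    # `second_name` below.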
    result = Line(mode=original.mode, depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (
                original.is_def
                and opening_bracket.value == "("
                and not any(leaf.type == token.COMMA for leaf in leaves)
            )

            if original.is_import or no_commas:
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body and should_split_line(result, opening_bracket):
        result.should_split_rhs = True
    return result


def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
        for line in split_func(line, features):
            normalize_prefix(line.leaves[0], inside_brackets=True)
            yield line

    return split_wrapper


@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
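    # Illustrative example (not from the original source): a bracketed body such
    # as "first_arg, second_arg, third_arg" has commas as its highest-priority
    # delimiters, so it is split into one element per line, with a trailing
    # comma appended after "third_arg" when that is safe.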
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty")

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found")

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            new_comma = Leaf(token.COMMA, ",")
            current_line.append(new_comma)
        yield current_line


@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line


def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if not inside_brackets:
        spl = leaf.prefix.split("#")
        if "\\" not in spl[0]:
            nl_count = spl[-1].count("\n")
            if len(spl) > 1:
                nl_count -= 1
            leaf.prefix = "\n" * nl_count
            return

    leaf.prefix = ""


def normalize_invisible_parens(node: Node, parens_after: Set[str]) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
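    # Illustrative examples (not from the original source): `x = 1,` gains
    # visible parentheses and becomes `x = (1,)`, while redundant parentheses
    # such as the ones in `return (x)` are made invisible and render as
    # `return x`.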
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return
    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(child, parens_after=parens_after)

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(child, parent=node):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement
                if child.type == token.LPAR:
                    # make parentheses invisible
                    child.value = ""  # type: ignore
                    node.children[-1].value = ""  # type: ignore
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                wrap_in_parentheses(node, child, visible=False)

        check_lpar = isinstance(child, Leaf) and child.value in parens_after


def maybe_make_parens_invisible_in_atom(node: LN, parent: LN) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.

    """
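    # Illustrative example (not from the original source): in `x = (1 + 2)` the
    # parentheses are made invisible, so the line renders as `x = 1 + 2`; empty
    # tuples, one-tuples, and comma-delimited atoms keep their visible parens.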

    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
    ):
        return False

    if is_walrus_assignment(node):
        if parent.type in [
            syms.annassign,
            syms.expr_stmt,
            syms.assert_stmt,
            syms.return_stmt,
            # these ones aren't useful to end users, but they do please fuzzers
            syms.for_stmt,
            syms.del_stmt,
        ]:
            return False

    first = node.children[0]
    last = node.children[-1]
    if first.type == token.LPAR and last.type == token.RPAR:
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""  # type: ignore
        last.value = ""  # type: ignore
        maybe_make_parens_invisible_in_atom(middle, parent=parent)

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        return False

    return True


def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""

    if not (opening_bracket.parent and opening_bracket.value in "[{("):
        return False

    # We're essentially checking if the body is delimited by commas and there's more
    # than one of them (we exclude the trailing comma; if the delimiter priority
    # is still commas, that means there's more).
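    # Illustrative example (not from the original source): with the magic
    # trailing comma enabled, a pre-existing trailing comma as in `foo(a, b,)`
    # makes this return True, so the call is exploded onto one element per line
    # even though it would fit on a single line.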
    exclude = set()
    trailing_comma = False
    try:
        last_leaf = line.leaves[-1]
        if last_leaf.type == token.COMMA:
            trailing_comma = True
            exclude.add(id(last_leaf))
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
    except (IndexError, ValueError):
        return False

    return max_priority == COMMA_PRIORITY and (
        (line.mode.magic_trailing_comma and trailing_comma)
        # always explode imports
        or opening_bracket.parent.type in {syms.atom, syms.import_from}
    )


def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too).  First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """
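    # Illustrative example (not from the original source): for
    #     value = config.get("key", {}).resolve(option)
    # the first yielded set is empty (try splitting on the last bracket pair);
    # later sets add the closing brackets of the rightmost trailers so
    # right_hand_split() can try an earlier bracket pair instead.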

    omit: Set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    length = 4 * line.depth
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
        length += leaf_length
        if length > line_length:
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and not is_one_tuple_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (e.g. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and not is_one_tuple_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf


def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> List[Line]:
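    # Descriptive note (not in the original source): run `transform` on `line`
    # and recurse through transform_line() on each result.  If an "rhs"
    # transform only succeeded thanks to invisible optional parentheses and the
    # first result is still too long, retry with FORCE_OPTIONAL_PARENTHESES and
    # keep the second opinion when every resulting line fits.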
    if not line_str:
        line_str = line_to_string(line)
    result: List[Line] = []
    for transformed_line in transform(line, features):
        if str(transformed_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")

        result.extend(transform_line(transformed_line, mode=mode, features=features))

    if not (
        transform.__name__ == "rhs"
        and line.bracket_tracker.invisible
        and not any(bracket.value for bracket in line.bracket_tracker.invisible)
        and not line.contains_multiline_strings()
        and not result[0].contains_uncollapsable_type_comments()
        and not result[0].contains_unsplittable_type_ignore()
        and not is_line_short_enough(result[0], line_length=mode.line_length)
    ):
        return result

    line_copy = line.clone()
    append_leaves(line_copy, line, line.leaves)
    features_fop = set(features) | {Feature.FORCE_OPTIONAL_PARENTHESES}
    second_opinion = run_transformer(
        line_copy, transform, mode, features_fop, line_str=line_str
    )
    if all(
        is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
    ):
        result = second_opinion
    return result