black/parser: partial support for pattern matching (#2586)
src/black/linegen.py
1 """
2 Generating lines of code.
3 """
4 from functools import partial, wraps
5 import sys
6 from typing import Collection, Iterator, List, Optional, Set, Union
7
8 from dataclasses import dataclass, field
9
10 from black.nodes import WHITESPACE, RARROW, STATEMENT, STANDALONE_COMMENT
11 from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
12 from black.nodes import Visitor, syms, first_child_is_arith, ensure_visible
13 from black.nodes import is_docstring, is_empty_tuple, is_one_tuple, is_one_tuple_between
14 from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
15 from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
16 from black.nodes import wrap_in_parentheses
17 from black.brackets import max_delimiter_priority_in_atom
18 from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
19 from black.lines import Line, line_to_string, is_line_short_enough
20 from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
21 from black.comments import generate_comments, list_comments, FMT_OFF
22 from black.numerics import normalize_numeric_literal
23 from black.strings import get_string_prefix, fix_docstring
24 from black.strings import normalize_string_prefix, normalize_string_quotes
25 from black.trans import Transformer, CannotTransform, StringMerger
26 from black.trans import StringSplitter, StringParenWrapper, StringParenStripper
27 from black.mode import Mode
28 from black.mode import Feature
29
30 from blib2to3.pytree import Node, Leaf
31 from blib2to3.pgen2 import token
32
33
34 # types
35 LeafID = int
36 LN = Union[Leaf, Node]
37
38
39 class CannotSplit(CannotTransform):
40     """A readable split that fits the allotted line length is impossible."""
41
42
@dataclass
class LineGenerator(Visitor[Line]):
    """Generates reformatted Line objects.  Empty lines are not emitted.

    Note: destroys the tree it's visiting by mutating prefixes of its leaves
    in ways that will no longer stringify to valid Python code on the tree.
    """

    mode: Mode
    remove_u_prefix: bool = False
    current_line: Line = field(init=False)

    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one is unnecessary.

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line

    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            normalize_prefix(node, inside_brackets=any_open_brackets)
            if self.mode.string_normalization and node.type == token.STRING:
                node.value = normalize_string_prefix(
                    node.value, remove_u_prefix=self.remove_u_prefix
                )
                node.value = normalize_string_quotes(node.value)
            if node.type == token.NUMBER:
                normalize_numeric_literal(node)
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)

    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)

    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments.  At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level.  Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)

    def visit_stmt(
        self, node: Node, keywords: Set[str], parens: Set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert`, `match`, `case` and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
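        # For example (roughly): in `if x and y:` the NAME leaf `if` is one of
        # the `keywords`, so the statement starts on a fresh line, and because
        # "if" is in `parens`, normalize_invisible_parens() below puts invisible
        # parentheses around the condition that follows it.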
        normalize_invisible_parens(node, parens_after=parens)
        for child in node.children:
            if child.type == token.NAME and child.value in keywords:  # type: ignore
                yield from self.line()

            yield from self.visit(child)

    def visit_suite(self, node: Node) -> Iterator[Line]:
        """Visit a suite."""
        if self.mode.is_pyi and is_stub_suite(node):
            yield from self.visit(node.children[2])
        else:
            yield from self.visit_default(node)

    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        if first_child_is_arith(node):
            wrap_in_parentheses(node, node.children[0], visible=False)
        is_suite_like = node.parent and node.parent.type in STATEMENT
        if is_suite_like:
            if self.mode.is_pyi and is_stub_body(node):
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if (
                not self.mode.is_pyi
                or not node.parent
                or not is_stub_suite(node.parent)
            ):
                yield from self.line()
            yield from self.visit_default(node)

    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC:
                break

        internal_stmt = next(children)
        for child in internal_stmt.children:
            yield from self.visit(child)

    def visit_decorators(self, node: Node) -> Iterator[Line]:
        """Visit decorators."""
        for child in node.children:
            yield from self.line()
            yield from self.visit(child)

    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        yield from self.line()

    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()

    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)

    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)

    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        if is_docstring(leaf) and "\\\n" not in leaf.value:
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            docstring = normalize_string_prefix(leaf.value, self.remove_u_prefix)
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring

            if is_multiline_string(leaf):
                indent = " " * 4 * self.current_line.depth
                docstring = fix_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
            elif not docstring_started_empty:
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len
            leaf.value = prefix + quote + docstring + quote

        yield from self.visit_default(leaf)

    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
        Ø: Set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
        self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
        self.visit_funcdef = partial(v, keywords={"def"}, parens=Ø)
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators

        # PEP 634
        self.visit_match_stmt = partial(v, keywords={"match"}, parens=Ø)
        self.visit_case_block = partial(v, keywords={"case"}, parens=Ø)


def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: List[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if mode.experimental_string_processing:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def:
        transformers = [left_hand_split]
    else:

        def rhs(line: Line, features: Collection[Feature]) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(
                    right_hand_split(line, mode.line_length, features, omit=omit)
                )
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length.  This is true only
                # for simple cases.  All others require running more transforms via
                # `transform_line()`.  This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], line_length=mode.line_length):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(
                line, line_length=mode.line_length, features=features
            )

        if mode.experimental_string_processing:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        yield line


def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise.  This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
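    # Roughly, an over-long `def f(argument_one, argument_two) -> int:` splits as
    # (illustrative):
    #     def f(
    #         argument_one, argument_two
    #     ) -> int:
    # i.e. head up to and including the first opening bracket, body inside the
    # brackets, tail from the matching closing bracket onwards.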
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = head_leaves
    matching_bracket: Optional[Leaf] = None
    for leaf in line.leaves:
        if (
            current_leaves is body_leaves
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket is matching_bracket
        ):
            current_leaves = tail_leaves if body_leaves else head_leaves
        current_leaves.append(leaf)
        if current_leaves is head_leaves:
            if leaf.type in OPENING_BRACKETS:
                matching_bracket = leaf
                current_leaves = body_leaves
    if not matching_bracket:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(head_leaves, line, matching_bracket)
    body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result


def right_hand_split(
    line: Line,
    line_length: int,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
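    # Roughly, an over-long `result = some_call(argument_one, argument_two)` ends
    # up split on the last bracket pair not listed in `omit` (illustrative):
    #     result = some_call(
    #         argument_one, argument_two
    #     )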
    tail_leaves: List[Leaf] = []
    body_leaves: List[Leaf] = []
    head_leaves: List[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail.  Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()
    head = bracket_split_build_line(head_leaves, line, opening_bracket)
    body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
    tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
    bracket_split_succeeded_or_raise(head, body, tail)
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and opening_bracket.type == token.LPAR
        and not opening_bracket.value
        # the closing bracket is an optional paren
        and closing_bracket.type == token.RPAR
        and not closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # there are no standalone comments in the body
        and not body.contains_standalone_comments(0)
        # and we can actually remove the parens
        and can_omit_invisible_parens(body, line_length, omit_on_explode=omit)
    ):
        omit = {id(closing_bracket), *omit}
        try:
            yield from right_hand_split(line, line_length, features=features, omit=omit)
            return

        except CannotSplit as e:
            if not (
                can_be_split(body)
                or is_line_short_enough(body, line_length=line_length)
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                ) from e

            elif head.contains_multiline_strings() or tail.contains_multiline_strings():
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                ) from e

    ensure_visible(opening_bracket)
    ensure_visible(closing_bracket)
    for result in (head, body, tail):
        if result:
            yield result


def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
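    # For example (roughly): splitting `foo()` on its brackets would leave an
    # empty body and a tail of just `)`, so the split is rejected below.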
    tail_len = len(str(tail).strip())
    if not body:
        if tail_len == 0:
            raise CannotSplit("Splitting brackets produced the same line")

        elif tail_len < 3:
            raise CannotSplit(
                f"Splitting brackets on an empty body to save {tail_len} characters is"
                " not worth it"
            )


def bracket_split_build_line(
    leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If `is_body` is True, the result line is one-indented inside brackets and as such
    has its first leaf's prefix normalized and a trailing comma added when expected.
    """
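    # For example (roughly): when building the body of an exploded
    # `from module import (first_name, second_name)`, a trailing comma is added
    # after `second_name` below.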
    result = Line(mode=original.mode, depth=original.depth)
    if is_body:
        result.inside_brackets = True
        result.depth += 1
        if leaves:
            # Since body is a new indent level, remove spurious leading whitespace.
            normalize_prefix(leaves[0], inside_brackets=True)
            # Ensure a trailing comma for imports and standalone function arguments, but
            # be careful not to add one after any comments or within type annotations.
            no_commas = (
                original.is_def
                and opening_bracket.value == "("
                and not any(leaf.type == token.COMMA for leaf in leaves)
                # In particular, don't add one within a parenthesized return annotation.
                # Unfortunately the indicator we're in a return annotation (RARROW) may
                # be defined directly in the parent node, the parent of the parent ...
                # and so on depending on how complex the return annotation is.
                # This isn't perfect and there are some false negatives, but they
                # are in contexts where a comma is actually fine.
                and not any(
                    node.prev_sibling.type == RARROW
                    for node in (
                        leaves[0].parent,
                        getattr(leaves[0].parent, "parent", None),
                    )
                    if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
                )
            )

            if original.is_import or no_commas:
                for i in range(len(leaves) - 1, -1, -1):
                    if leaves[i].type == STANDALONE_COMMENT:
                        continue

                    if leaves[i].type != token.COMMA:
                        new_comma = Leaf(token.COMMA, ",")
                        leaves.insert(i + 1, new_comma)
                    break

    # Populate the line
    for leaf in leaves:
        result.append(leaf, preformatted=True)
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if is_body and should_split_line(result, opening_bracket):
        result.should_split_rhs = True
    return result


def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
        for line in split_func(line, features):
            normalize_prefix(line.leaves[0], inside_brackets=True)
            yield line

    return split_wrapper


@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
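    # For example (roughly): if commas are the highest-priority delimiters in the
    # bracketed body `first_item, second_item, third_item`, each element ends up
    # on its own line, with a trailing comma appended when that is safe.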
    try:
        last_leaf = line.leaves[-1]
    except IndexError:
        raise CannotSplit("Line empty") from None

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if delimiter_priority == DOT_PRIORITY:
        if bt.delimiter_count_with_priority(delimiter_priority) == 1:
            raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if leaf.bracket_depth == lowest_depth:
            if is_vararg(leaf, within={syms.typedargslist}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
                )
            elif is_vararg(leaf, within={syms.arglist, syms.argument}):
                trailing_comma_safe = (
                    trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
                )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
    if current_line:
        if (
            trailing_comma_safe
            and delimiter_priority == COMMA_PRIORITY
            and current_line.leaves[-1].type != token.COMMA
            and current_line.leaves[-1].type != STANDALONE_COMMENT
        ):
            new_comma = Leaf(token.COMMA, ",")
            current_line.append(new_comma)
        yield current_line


@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments(0):
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line


def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
    """Leave existing extra newlines if not `inside_brackets`. Remove everything
    else.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    if not inside_brackets:
        spl = leaf.prefix.split("#")
        if "\\" not in spl[0]:
            nl_count = spl[-1].count("\n")
            if len(spl) > 1:
                nl_count -= 1
            leaf.prefix = "\n" * nl_count
            return

    leaf.prefix = ""


def normalize_invisible_parens(node: Node, parens_after: Set[str]) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
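    # For example (roughly), with parens_after={"return"}:
    #     `return (a + b)` -> the parens become invisible: `return a + b`
    #     `return (1,)`    -> a one-tuple keeps its visible parens.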
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return
    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(child, parens_after=parens_after)

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(child, parent=node):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                # "import from" nodes store parentheses directly as part of
                # the statement
                if child.type == token.LPAR:
                    # make parentheses invisible
                    child.value = ""  # type: ignore
                    node.children[-1].value = ""  # type: ignore
                elif child.type != token.STAR:
                    # insert invisible parentheses
                    node.insert_child(index, Leaf(token.LPAR, ""))
                    node.append_child(Leaf(token.RPAR, ""))
                break

            elif not (isinstance(child, Leaf) and is_multiline_string(child)):
                wrap_in_parentheses(node, child, visible=False)

        check_lpar = isinstance(child, Leaf) and child.value in parens_after


def maybe_make_parens_invisible_in_atom(node: LN, parent: LN) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.

    """

    if (
        node.type != syms.atom
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
    ):
        return False

    if is_walrus_assignment(node):
        if parent.type in [
            syms.annassign,
            syms.expr_stmt,
            syms.assert_stmt,
            syms.return_stmt,
            # these ones aren't useful to end users, but they do please fuzzers
            syms.for_stmt,
            syms.del_stmt,
        ]:
            return False

    first = node.children[0]
    last = node.children[-1]
    if first.type == token.LPAR and last.type == token.RPAR:
        middle = node.children[1]
        # make parentheses invisible
        first.value = ""  # type: ignore
        last.value = ""  # type: ignore
        maybe_make_parens_invisible_in_atom(middle, parent=parent)

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

        return False

    return True


def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""

    if not (opening_bracket.parent and opening_bracket.value in "[{("):
        return False

    # We're essentially checking if the body is delimited by commas and there's more
    # than one of them (we're excluding the trailing comma and if the delimiter priority
    # is still commas, that means there's more).
    exclude = set()
    trailing_comma = False
    try:
        last_leaf = line.leaves[-1]
        if last_leaf.type == token.COMMA:
            trailing_comma = True
            exclude.add(id(last_leaf))
        max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude)
    except (IndexError, ValueError):
        return False

    return max_priority == COMMA_PRIORITY and (
        (line.mode.magic_trailing_comma and trailing_comma)
        # always explode imports
        or opening_bracket.parent.type in {syms.atom, syms.import_from}
    )


def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too).  First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """

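    # For example (roughly), for `value = config.get("a").get("b")` this yields
    # an empty set first (try splitting on the trailing `("b")` pair), then a
    # set omitting that pair's closing bracket so right_hand_split falls back to
    # the `("a")` pair.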
    omit: Set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    length = 4 * line.depth
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    inner_brackets: Set[LeafID] = set()
    for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):
        length += leaf_length
        if length > line_length:
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and not is_one_tuple_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (e.g. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and not is_one_tuple_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf


def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> List[Line]:
    if not line_str:
        line_str = line_to_string(line)
    result: List[Line] = []
    for transformed_line in transform(line, features):
        if str(transformed_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")

        result.extend(transform_line(transformed_line, mode=mode, features=features))

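    # Below: if the transform was `rhs` working on invisible parens and the
    # result isn't already clearly acceptable, get a second opinion with
    # Feature.FORCE_OPTIONAL_PARENTHESES and prefer it when every resulting
    # line fits the configured line length.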
    if (
        transform.__name__ != "rhs"
        or not line.bracket_tracker.invisible
        or any(bracket.value for bracket in line.bracket_tracker.invisible)
        or line.contains_multiline_strings()
        or result[0].contains_uncollapsable_type_comments()
        or result[0].contains_unsplittable_type_ignore()
        or is_line_short_enough(result[0], line_length=mode.line_length)
        # If any leaves have no parents (which _can_ occur since
        # `transform(line)` potentially destroys the line's underlying node
        # structure), then we can't proceed. Doing so would cause the below
        # call to `append_leaves()` to fail.
        or any(leaf.parent is None for leaf in line.leaves)
    ):
        return result

    line_copy = line.clone()
    append_leaves(line_copy, line, line.leaves)
    features_fop = set(features) | {Feature.FORCE_OPTIONAL_PARENTHESES}
    second_opinion = run_transformer(
        line_copy, transform, mode, features_fop, line_str=line_str
    )
    if all(
        is_line_short_enough(ln, line_length=mode.line_length) for ln in second_opinion
    ):
        result = second_opinion
    return result