-#!/usr/bin/env python3
-
import asyncio
import pickle
from asyncio.base_events import BaseEventLoop
from blib2to3.pgen2 import driver, token
from blib2to3.pgen2.parse import ParseError
-__version__ = "18.4a2"
+__version__ = "18.4a4"
DEFAULT_LINE_LENGTH = 88
+
# types
syms = pygram.python_symbols
FileContent = str
self.consumed = consumed
def trim_prefix(self, leaf: Leaf) -> None:
- leaf.prefix = leaf.prefix[self.consumed:]
+ leaf.prefix = leaf.prefix[self.consumed :]
def leaf_from_consumed(self, leaf: Leaf) -> Leaf:
"""Returns a new Leaf from the consumed part of the prefix."""
- unformatted_prefix = leaf.prefix[:self.consumed]
+ unformatted_prefix = leaf.prefix[: self.consumed]
return Leaf(token.NEWLINE, unformatted_prefix)
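# Illustrative note: the two reformatted slices above follow the new "complex
# subscript" rule -- the slice colon is treated like a low-priority binary
# operator, so an operand that is itself an expression (here the attribute
# access self.consumed) gets a space next to the colon, while an omitted bound
# gets none:
#   prefix[self.consumed :]   prefix[: self.consumed]   but ham[lower:upper]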
else:
cache: Cache = {}
if write_back != WriteBack.DIFF:
- cache = read_cache()
+ cache = read_cache(line_length)
src = src.resolve()
if src in cache and cache[src] == get_cache_info(src):
changed = Changed.CACHED
)
):
changed = Changed.YES
- if write_back != WriteBack.DIFF and changed is not Changed.NO:
- write_cache(cache, [src])
+ if write_back == WriteBack.YES and changed is not Changed.NO:
+ write_cache(cache, [src], line_length)
report.done(src, changed)
except Exception as exc:
report.failed(src, str(exc))
"""
cache: Cache = {}
if write_back != WriteBack.DIFF:
- cache = read_cache()
+ cache = read_cache(line_length)
sources, cached = filter_cached(cache, sources)
for src in cached:
report.done(src, Changed.CACHED)
if cancelled:
await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
- if write_back != WriteBack.DIFF and formatted:
- write_cache(cache, formatted)
+ if write_back == WriteBack.YES and formatted:
+ write_cache(cache, formatted, line_length)
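# In short: every mode except --diff consults the cache (so files already known
# to be formatted are skipped), but only WriteBack.YES runs write it back out.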
def format_file_in_place(
with open(src, "w", encoding=src_buffer.encoding) as f:
f.write(dst_contents)
elif write_back == WriteBack.DIFF:
- src_name = f"{src.name} (original)"
- dst_name = f"{src.name} (formatted)"
+ src_name = f"{src} (original)"
+ dst_name = f"{src} (formatted)"
diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
if lock:
lock.acquire()
GRAMMARS = [
pygram.python_grammar_no_print_statement_no_exec_statement,
pygram.python_grammar_no_print_statement,
- pygram.python_grammar_no_exec_statement,
pygram.python_grammar,
]
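# The grammars are tried in this order when parsing: the Python 3-only grammar
# first, then progressively more permissive ones, so that sources still using
# Python 2 print statements can be parsed by the final fallback.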
syms.listmaker,
syms.testlist_gexp,
}
+TEST_DESCENDANTS = {
+ syms.test,
+ syms.lambdef,
+ syms.or_test,
+ syms.and_test,
+ syms.not_test,
+ syms.comparison,
+ syms.star_expr,
+ syms.expr,
+ syms.xor_expr,
+ syms.and_expr,
+ syms.shift_expr,
+ syms.arith_expr,
+ syms.trailer,
+ syms.term,
+ syms.power,
+}
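# Membership in this set is what makes a subscript "complex": any of these node
# types appearing inside a slice (an arithmetic expression, a call's trailer,
# a ternary, ...) triggers the spaced slice colon in whitespace() below.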
COMPREHENSION_PRIORITY = 20
COMMA_PRIORITY = 10
+TERNARY_PRIORITY = 7
LOGIC_PRIORITY = 5
STRING_PRIORITY = 4
COMPARATOR_PRIORITY = 3
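# Roughly: higher numbers win, so delimiter_split() breaks a line at the
# highest-priority delimiter it contains; a ternary "a if cond else b" is only
# split at "if"/"else" when no higher-priority delimiter (e.g. a comma) exists.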
return False
+ def get_open_lsqb(self) -> Optional[Leaf]:
+ """Return the most recent opening square bracket (if any)."""
+ return self.bracket_match.get((self.depth - 1, token.RSQB))
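# Roughly: mark() records an opening "[" under (depth, token.RSQB) before
# bumping the depth, so while appending leaves inside the brackets the lookup
# at depth - 1 above finds that same opening bracket.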
+
@dataclass
class Line:
if self.leaves and not preformatted:
# Note: at this point leaf.prefix should be empty except for
# imports, for which we only preserve newlines.
- leaf.prefix += whitespace(leaf)
+ leaf.prefix += whitespace(
+ leaf, complex_subscript=self.is_complex_subscript(leaf)
+ )
if self.inside_brackets or not preformatted:
self.bracket_tracker.mark(leaf)
self.maybe_remove_trailing_comma(leaf)
else:
return False
- for leaf in self.leaves[_opening_index + 1:]:
+ for leaf in self.leaves[_opening_index + 1 :]:
if leaf is closing:
break
self.comments[i] = (comma_index - 1, comment)
self.leaves.pop()
+ def is_complex_subscript(self, leaf: Leaf) -> bool:
+ """Return True iff `leaf` is part of a slice with non-trivial exprs."""
+ open_lsqb = (
+ leaf if leaf.type == token.LSQB else self.bracket_tracker.get_open_lsqb()
+ )
+ if open_lsqb is None:
+ return False
+
+ subscript_start = open_lsqb.next_sibling
+ if (
+ isinstance(subscript_start, Node)
+ and subscript_start.type == syms.subscriptlist
+ ):
+ subscript_start = child_towards(subscript_start, leaf)
+ return subscript_start is not None and any(
+ n.type in TEST_DESCENDANTS for n in subscript_start.pre_order()
+ )
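# E.g. for the "j" in x[i + 1 : j] the subtree holding "i + 1 : j" contains an
# arith_expr, so the slice is complex and whitespace() puts spaces around the
# colon; in x[i:j] no TEST_DESCENDANTS node is found and the colon stays tight.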
+
def __str__(self) -> str:
"""Render the line."""
if not self:
# Don't insert empty lines before the first line in the file.
return 0, 0
- if self.previous_line and self.previous_line.is_decorator:
- # Don't insert empty lines between decorators.
+ if self.previous_line.is_decorator:
+ return 0, 0
+
+ if (
+ self.previous_line.is_comment
+ and self.previous_line.depth == current_line.depth
+ and before == 0
+ ):
return 0, 0
newlines = 2
newlines -= 1
return newlines, 0
- if current_line.is_flow_control:
- return before, 1
-
if (
self.previous_line
and self.previous_line.is_import
):
return (before or 1), 0
- if (
- self.previous_line
- and self.previous_line.is_yield
- and (not current_line.is_yield or depth != self.previous_line.depth)
- ):
- return (before or 1), 0
-
return before, 0
def visit_DEDENT(self, node: Node) -> Iterator[Line]:
"""Decrease indentation level, maybe yield a line."""
- # DEDENT has no value. Additionally, in blib2to3 it never holds comments.
+ # The current line might still wait for trailing comments. At DEDENT time
+ # there won't be any (they would be prefixes on the preceding NEWLINE).
+ # Emit the line then.
+ yield from self.line()
+
+ # While DEDENT has no value, its prefix may contain standalone comments
+ # that belong to the current indentation level. Get 'em.
+ yield from self.visit_default(node)
+
+ # Finally, emit the dedent.
yield from self.line(-1)
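# E.g. with
#     if x:
#         pass
#     # comment back at the outer level
#     print(x)
# the comment travels in the DEDENT's prefix; visit_default() above emits it as
# a standalone comment at the outer indentation, after the current line has
# already been flushed.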
def visit_stmt(
ALWAYS_NO_SPACE = CLOSING_BRACKETS | {token.COMMA, STANDALONE_COMMENT}
-def whitespace(leaf: Leaf) -> str: # noqa C901
- """Return whitespace prefix if needed for the given `leaf`."""
+def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa C901
+ """Return whitespace prefix if needed for the given `leaf`.
+
+ `complex_subscript` signals whether the given leaf is part of a subscription
+ which has non-trivial arguments, like arithmetic expressions or function calls.
+ """
NO = ""
SPACE = " "
DOUBLESPACE = " "
return DOUBLESPACE
assert p is not None, f"INTERNAL ERROR: hand-made leaf without parent: {leaf!r}"
- if t == token.COLON and p.type not in {syms.subscript, syms.subscriptlist}:
+ if (
+ t == token.COLON
+ and p.type not in {syms.subscript, syms.subscriptlist, syms.sliceop}
+ ):
return NO
prev = leaf.prev_sibling
return NO
if t == token.COLON:
- return SPACE if prevp.type == token.COMMA else NO
+ if prevp.type == token.COLON:
+ return NO
+
+ elif prevp.type != token.COMMA and not complex_subscript:
+ return NO
+
+ return SPACE
if prevp.type == token.EQUAL:
if prevp.parent:
elif prevp.type == token.COLON:
if prevp.parent and prevp.parent.type in {syms.subscript, syms.sliceop}:
- return NO
+ return SPACE if complex_subscript else NO
elif (
prevp.parent
if prev and prev.type == token.LPAR:
return NO
- elif p.type == syms.subscript:
+ elif p.type in {syms.subscript, syms.sliceop}:
# indexing
if not prev:
assert p.parent is not None, "subscripts are always parented"
return NO
- else:
+ elif not complex_subscript:
return NO
elif p.type == syms.atom:
return None
+def child_towards(ancestor: Node, descendant: LN) -> Optional[LN]:
+ """Return the child of `ancestor` that contains `descendant`."""
+ node: Optional[LN] = descendant
+ while node and node.parent != ancestor:
+ node = node.parent
+ return node
+
+
def is_split_after_delimiter(leaf: Leaf, previous: Optional[Leaf] = None) -> int:
"""Return the priority of the `leaf` delimiter, given a line break after it.
):
return COMPREHENSION_PRIORITY
+ if (
+ leaf.type == token.NAME
+ and leaf.value in {"if", "else"}
+ and leaf.parent
+ and leaf.parent.type == syms.test
+ ):
+ return TERNARY_PRIORITY
+
if leaf.type == token.NAME and leaf.value in LOGIC_OPERATORS and leaf.parent:
return LOGIC_PRIORITY
split_funcs: List[SplitFunc]
if line.is_def:
split_funcs = [left_hand_split]
+ elif line.is_import:
+ split_funcs = [explode_split]
elif line.inside_brackets:
split_funcs = [delimiter_split, standalone_comment_split, right_hand_split]
else:
yield current_line
+def explode_split(
+ line: Line, py36: bool = False, omit: Collection[LeafID] = ()
+) -> Iterator[Line]:
+ """Split by rightmost bracket and immediately split contents by a delimiter."""
+ new_lines = list(right_hand_split(line, py36, omit))
+ if len(new_lines) != 3:
+ yield from new_lines
+ return
+
+ yield new_lines[0]
+
+ try:
+ yield from delimiter_split(new_lines[1], py36)
+
+ except CannotSplit:
+ yield new_lines[1]
+
+ yield new_lines[2]
+
+
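# Illustrative effect on a too-long import (imports now use explode_split):
#   from package import (alpha, beta, gamma)
# right_hand_split() first isolates the parenthesized body, then
# delimiter_split() puts one name per line:
#   from package import (
#       alpha,
#       beta,
#       gamma,
#   )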
def is_import(leaf: Leaf) -> bool:
"""Return True if the given leaf starts an import statement."""
p = leaf.parent
unescaped_new_quote = re.compile(rf"(([^\\]|^)(\\\\)*){new_quote}")
escaped_new_quote = re.compile(rf"([^\\]|^)\\(\\\\)*{new_quote}")
escaped_orig_quote = re.compile(rf"([^\\]|^)\\(\\\\)*{orig_quote}")
- body = leaf.value[first_quote_pos + len(orig_quote):-len(orig_quote)]
+ body = leaf.value[first_quote_pos + len(orig_quote) : -len(orig_quote)]
if "r" in prefix.casefold():
if unescaped_new_quote.search(body):
# There's at least one unescaped new_quote in this raw string
CACHE_DIR = Path(user_cache_dir("black", version=__version__))
-CACHE_FILE = CACHE_DIR / "cache.pickle"
-def read_cache() -> Cache:
+def get_cache_file(line_length: int) -> Path:
+ return CACHE_DIR / f"cache.{line_length}.pickle"
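# e.g. ~/.cache/black/18.4a4/cache.88.pickle on Linux; keying the cache file on
# line length keeps runs with different --line-length settings from clobbering
# each other's entries.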
+
+
+def read_cache(line_length: int) -> Cache:
"""Read the cache if it exists and is well formed.
If it is not well formed, the call to write_cache later should resolve the issue.
"""
- if not CACHE_FILE.exists():
+ cache_file = get_cache_file(line_length)
+ if not cache_file.exists():
return {}
- with CACHE_FILE.open("rb") as fobj:
+ with cache_file.open("rb") as fobj:
try:
cache: Cache = pickle.load(fobj)
except pickle.UnpicklingError:
return todo, done
-def write_cache(cache: Cache, sources: List[Path]) -> None:
+def write_cache(cache: Cache, sources: List[Path], line_length: int) -> None:
"""Update the cache file."""
+ cache_file = get_cache_file(line_length)
try:
if not CACHE_DIR.exists():
CACHE_DIR.mkdir(parents=True)
new_cache = {**cache, **{src.resolve(): get_cache_info(src) for src in sources}}
- with CACHE_FILE.open("wb") as fobj:
+ with cache_file.open("wb") as fobj:
pickle.dump(new_cache, fobj, protocol=pickle.HIGHEST_PROTOCOL)
except OSError:
pass