X-Git-Url: https://git.madduck.net/etc/vim.git/blobdiff_plain/e269f44b25737360e0dc65379f889dfa931dc68a..27c05e1e24e02c62d0c2de2a1ab0673b2c2ca653:/src/black/__init__.py

diff --git a/src/black/__init__.py b/src/black/__init__.py
index afc76e1..188a4f7 100644
--- a/src/black/__init__.py
+++ b/src/black/__init__.py
@@ -1,6 +1,5 @@
 import io
 import json
-import os
 import platform
 import re
 import sys
@@ -8,7 +7,7 @@ import tokenize
 import traceback
 from contextlib import contextmanager
 from dataclasses import replace
-from datetime import datetime
+from datetime import datetime, timezone
 from enum import Enum
 from json.decoder import JSONDecodeError
 from pathlib import Path
@@ -28,18 +27,14 @@ from typing import (
     Union,
 )
 
-if sys.version_info >= (3, 8):
-    from typing import Final
-else:
-    from typing_extensions import Final
-
 import click
 from click.core import ParameterSource
 from mypy_extensions import mypyc_attr
+from pathspec import PathSpec
 from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
 
 from _black_version import version as __version__
-from black.cache import Cache, get_cache_info, read_cache, write_cache
+from black.cache import Cache
 from black.comments import normalize_fmt_off
 from black.const import (
     DEFAULT_EXCLUDES,
@@ -67,15 +62,10 @@ from black.handle_ipynb_magics import (
     unmask_cell,
 )
 from black.linegen import LN, LineGenerator, transform_line
-from black.lines import EmptyLineTracker, Line
-from black.mode import (
-    FUTURE_FLAG_TO_FEATURE,
-    VERSION_TO_FEATURES,
-    Feature,
-    Mode,
-    TargetVersion,
-    supports_feature,
-)
+from black.lines import EmptyLineTracker, LinesBlock
+from black.mode import FUTURE_FLAG_TO_FEATURE, VERSION_TO_FEATURES, Feature
+from black.mode import Mode as Mode  # re-exported
+from black.mode import TargetVersion, supports_feature
 from black.nodes import (
     STARS,
     is_number_token,
@@ -92,7 +82,6 @@ from blib2to3.pgen2 import token
 from blib2to3.pytree import Leaf, Node
 
 COMPILED = Path(__file__).suffix in (".pyd", ".so")
-DEFAULT_WORKERS: Final = os.cpu_count()
 
 # types
 FileContent = str
@@ -133,7 +122,9 @@ def read_pyproject_toml(
     otherwise.
     """
     if not value:
-        value = find_pyproject_toml(ctx.params.get("src", ()))
+        value = find_pyproject_toml(
+            ctx.params.get("src", ()), ctx.params.get("stdin_filename", None)
+        )
 
     if value is None:
         return None
@@ -161,6 +152,16 @@ def read_pyproject_toml(
             "target-version", "Config key target-version must be a list"
         )
 
+    exclude = config.get("exclude")
+    if exclude is not None and not isinstance(exclude, str):
+        raise click.BadOptionUsage("exclude", "Config key exclude must be a string")
+
+    extend_exclude = config.get("extend_exclude")
+    if extend_exclude is not None and not isinstance(extend_exclude, str):
+        raise click.BadOptionUsage(
+            "extend-exclude", "Config key extend-exclude must be a string"
+        )
+
     default_map: Dict[str, Any] = {}
     if ctx.default_map:
         default_map.update(ctx.default_map)
@@ -225,8 +226,9 @@ def validate_regex(
     callback=target_version_option_callback,
     multiple=True,
     help=(
-        "Python versions that should be supported by Black's output. [default: per-file"
-        " auto-detection]"
+        "Python versions that should be supported by Black's output. By default, Black"
+        " will try to infer this from the project metadata in pyproject.toml. If this"
+        " does not yield conclusive results, Black will use per-file auto-detection."
     ),
 )
 @click.option(
@@ -250,11 +252,17 @@ def validate_regex(
     multiple=True,
     help=(
         "When processing Jupyter Notebooks, add the given magic to the list"
-        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
+        f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})."
         " Useful for formatting cells with custom python magics."
     ),
     default=[],
 )
+@click.option(
+    "-x",
+    "--skip-source-first-line",
+    is_flag=True,
+    help="Skip the first line of the source code.",
+)
 @click.option(
     "-S",
     "--skip-string-normalization",
@@ -361,6 +369,7 @@ def validate_regex(
 @click.option(
     "--stdin-filename",
     type=str,
+    is_eager=True,
     help=(
         "The name of the file when passing it through stdin. Useful to make "
         "sure Black will respect --force-exclude option on some "
@@ -371,9 +380,11 @@ def validate_regex(
     "-W",
     "--workers",
     type=click.IntRange(min=1),
-    default=DEFAULT_WORKERS,
-    show_default=True,
-    help="Number of parallel workers",
+    default=None,
+    help=(
+        "Number of parallel workers [default: BLACK_NUM_WORKERS environment variable "
+        "or number of CPUs in the system]"
+    ),
 )
 @click.option(
     "-q",
@@ -436,6 +447,7 @@ def main(  # noqa: C901
     pyi: bool,
     ipynb: bool,
     python_cell_magics: Sequence[str],
+    skip_source_first_line: bool,
     skip_string_normalization: bool,
     skip_magic_trailing_comma: bool,
     experimental_string_processing: bool,
@@ -448,7 +460,7 @@ def main(  # noqa: C901
     extend_exclude: Optional[Pattern[str]],
     force_exclude: Optional[Pattern[str]],
     stdin_filename: Optional[str],
-    workers: int,
+    workers: Optional[int],
     src: Tuple[str, ...],
     config: Optional[str],
 ) -> None:
@@ -477,22 +489,6 @@ def main(  # noqa: C901
             fg="blue",
         )
 
-        normalized = [
-            (source, source)
-            if source == "-"
-            else (normalize_path_maybe_ignore(Path(source), root), source)
-            for source in src
-        ]
-        srcs_string = ", ".join(
-            [
-                f'"{_norm}"'
-                if _norm
-                else f'\033[31m"{source} (skipping - invalid)"\033[34m'
-                for _norm, source in normalized
-            ]
-        )
-        out(f"Sources to be formatted: {srcs_string}", fg="blue")
-
         if config:
             config_source = ctx.get_parameter_source("config")
             user_level_config = str(find_user_pyproject_toml())
@@ -509,6 +505,9 @@ def main(  # noqa: C901
                 out("Using configuration from project root.", fg="blue")
             else:
                 out(f"Using configuration in '{config}'.", fg="blue")
+            if ctx.default_map:
+                for param, value in ctx.default_map.items():
+                    out(f"{param}: {value}")
 
     error_msg = "Oh no! 💥 💔 💥"
     if (
@@ -536,6 +535,7 @@ def main(  # noqa: C901
         line_length=line_length,
         is_pyi=pyi,
         is_ipynb=ipynb,
+        skip_source_first_line=skip_source_first_line,
         string_normalization=not skip_string_normalization,
         magic_trailing_comma=not skip_magic_trailing_comma,
         experimental_string_processing=experimental_string_processing,
@@ -555,9 +555,10 @@ def main(  # noqa: C901
             content=code, fast=fast, write_back=write_back, mode=mode, report=report
         )
     else:
+        assert root is not None  # root is only None if code is not None
        try:
             sources = get_sources(
-                ctx=ctx,
+                root=root,
                 src=src,
                 quiet=quiet,
                 verbose=verbose,
@@ -610,7 +611,7 @@ def main(  # noqa: C901
 
 def get_sources(
     *,
-    ctx: click.Context,
+    root: Path,
     src: Tuple[str, ...],
     quiet: bool,
     verbose: bool,
@@ -624,11 +625,10 @@ def get_sources(
     """Compute the set of files to be formatted."""
     sources: Set[Path] = set()
 
-    if exclude is None:
-        exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
-        gitignore = get_gitignore(ctx.obj["root"])
-    else:
-        gitignore = None
+    using_default_exclude = exclude is None
+    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
+    gitignore: Optional[Dict[Path, PathSpec]] = None
+    root_gitignore = get_gitignore(root)
 
     for s in src:
         if s == "-" and stdin_filename:
@@ -639,9 +639,15 @@ def get_sources(
             is_stdin = False
 
         if is_stdin or p.is_file():
-            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
+            normalized_path: Optional[str] = normalize_path_maybe_ignore(
+                p, root, report
+            )
             if normalized_path is None:
+                if verbose:
+                    out(f'Skipping invalid source: "{normalized_path}"', fg="red")
                 continue
+            if verbose:
+                out(f'Found input source: "{normalized_path}"', fg="blue")
 
             normalized_path = "/" + normalized_path
             # Hard-exclude any files that matches the `--force-exclude` regex.
@@ -657,16 +663,27 @@ def get_sources(
                 p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
 
             if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
-                verbose=verbose, quiet=quiet
+                warn=verbose or not quiet
             ):
                 continue
 
             sources.add(p)
         elif p.is_dir():
+            p_relative = normalize_path_maybe_ignore(p, root, report)
+            assert p_relative is not None
+            p = root / p_relative
+            if verbose:
+                out(f'Found input source directory: "{p}"', fg="blue")
+
+            if using_default_exclude:
+                gitignore = {
+                    root: root_gitignore,
+                    p: get_gitignore(p),
+                }
             sources.update(
                 gen_python_files(
                     p.iterdir(),
-                    ctx.obj["root"],
+                    root,
                     include,
                     exclude,
                     extend_exclude,
@@ -678,9 +695,12 @@ def get_sources(
                 )
             )
         elif s == "-":
+            if verbose:
+                out("Found input source stdin", fg="blue")
             sources.add(p)
         else:
             err(f"invalid path: {s}")
+
     return sources
 
 
@@ -752,12 +772,9 @@ def reformat_one(
             if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                 changed = Changed.YES
         else:
-            cache: Cache = {}
+            cache = Cache.read(mode)
             if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
-                cache = read_cache(mode)
-                res_src = src.resolve()
-                res_src_s = str(res_src)
-                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
+                if not cache.is_changed(src):
                     changed = Changed.CACHED
             if changed is not Changed.CACHED and format_file_in_place(
                 src, fast=fast, write_back=write_back, mode=mode
@@ -766,7 +783,7 @@ def reformat_one(
             if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                 write_back is WriteBack.CHECK and changed is Changed.NO
             ):
-                write_cache(cache, [src], mode)
+                cache.write([src])
         report.done(src, changed)
     except Exception as exc:
         if report.verbose:
@@ -792,8 +809,11 @@ def format_file_in_place(
     elif src.suffix == ".ipynb":
         mode = replace(mode, is_ipynb=True)
 
-    then = datetime.utcfromtimestamp(src.stat().st_mtime)
+    then = datetime.fromtimestamp(src.stat().st_mtime, timezone.utc)
+    header = b""
     with open(src, "rb") as buf:
+        if mode.skip_source_first_line:
+            header = buf.readline()
         src_contents, encoding, newline = decode_bytes(buf.read())
     try:
         dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
@@ -803,14 +823,16 @@ def format_file_in_place(
         raise ValueError(
             f"File '{src}' cannot be parsed as valid Jupyter notebook."
         ) from None
+    src_contents = header.decode(encoding) + src_contents
+    dst_contents = header.decode(encoding) + dst_contents
 
     if write_back == WriteBack.YES:
         with open(src, "w", encoding=encoding, newline=newline) as f:
             f.write(dst_contents)
     elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
-        now = datetime.utcnow()
-        src_name = f"{src}\t{then} +0000"
-        dst_name = f"{src}\t{now} +0000"
+        now = datetime.now(timezone.utc)
+        src_name = f"{src}\t{then}"
+        dst_name = f"{src}\t{now}"
         if mode.is_ipynb:
             diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
         else:
@@ -848,7 +870,7 @@ def format_stdin_to_stdout(
     write a diff to stdout. The `mode` argument is passed to
     :func:`format_file_contents`.
     """
-    then = datetime.utcnow()
+    then = datetime.now(timezone.utc)
 
     if content is None:
         src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
@@ -873,9 +895,9 @@ def format_stdin_to_stdout(
                 dst += "\n"
             f.write(dst)
         elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
-            now = datetime.utcnow()
-            src_name = f"STDIN\t{then} +0000"
-            dst_name = f"STDOUT\t{now} +0000"
+            now = datetime.now(timezone.utc)
+            src_name = f"STDIN\t{then}"
+            dst_name = f"STDOUT\t{now}"
             d = diff(src, dst, src_name, dst_name)
             if write_back == WriteBack.COLOR_DIFF:
                 d = color_diff(d)
@@ -904,9 +926,6 @@ def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileCo
     valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
     `mode` is passed to :func:`format_str`.
     """
-    if not src_contents.strip():
-        raise NothingChanged
-
     if mode.is_ipynb:
         dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
     else:
@@ -1001,6 +1020,9 @@ def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileCon
     Operate cell-by-cell, only on code cells, only for Python notebooks.
     If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
     """
+    if not src_contents:
+        raise NothingChanged
+
     trailing_newline = src_contents[-1] == "\n"
     modified = False
     nb = json.loads(src_contents)
@@ -1065,31 +1087,46 @@ def format_str(src_contents: str, *, mode: Mode) -> str:
 
 def _format_str_once(src_contents: str, *, mode: Mode) -> str:
     src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
-    dst_contents = []
+    dst_blocks: List[LinesBlock] = []
     if mode.target_versions:
         versions = mode.target_versions
     else:
         future_imports = get_future_imports(src_node)
         versions = detect_target_versions(src_node, future_imports=future_imports)
 
-    normalize_fmt_off(src_node, preview=mode.preview)
-    lines = LineGenerator(mode=mode)
-    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
-    empty_line = Line(mode=mode)
-    after = 0
+    context_manager_features = {
+        feature
+        for feature in {Feature.PARENTHESIZED_CONTEXT_MANAGERS}
+        if supports_feature(versions, feature)
+    }
+    normalize_fmt_off(src_node)
+    lines = LineGenerator(mode=mode, features=context_manager_features)
+    elt = EmptyLineTracker(mode=mode)
     split_line_features = {
         feature
         for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
         if supports_feature(versions, feature)
     }
+    block: Optional[LinesBlock] = None
     for current_line in lines.visit(src_node):
-        dst_contents.append(str(empty_line) * after)
-        before, after = elt.maybe_empty_lines(current_line)
-        dst_contents.append(str(empty_line) * before)
+        block = elt.maybe_empty_lines(current_line)
+        dst_blocks.append(block)
         for line in transform_line(
             current_line, mode=mode, features=split_line_features
         ):
-            dst_contents.append(str(line))
+            block.content_lines.append(str(line))
+    if dst_blocks:
+        dst_blocks[-1].after = 0
+    dst_contents = []
+    for block in dst_blocks:
+        dst_contents.extend(block.all_lines())
+    if not dst_contents:
+        # Use decode_bytes to retrieve the correct source newline (CRLF or LF),
+        # and check if normalized_content has more than one line
+        normalized_content, _, newline = decode_bytes(src_contents.encode("utf-8"))
+        if "\n" in normalized_content:
+            return newline
+        return ""
     return "".join(dst_contents)
 
 
@@ -1125,6 +1162,10 @@ def get_features_used(  # noqa: C901
     - relaxed decorator syntax;
     - usage of __future__ flags (annotations);
     - print / exec statements;
+    - parenthesized context managers;
+    - match statements;
+    - except* clause;
+    - variadic generics;
     """
     features: Set[Feature] = set()
     if future_imports:
@@ -1200,6 +1241,23 @@ def get_features_used(  # noqa: C901
         ):
             features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
 
+        elif (
+            n.type == syms.with_stmt
+            and len(n.children) > 2
+            and n.children[1].type == syms.atom
+        ):
+            atom_children = n.children[1].children
+            if (
+                len(atom_children) == 3
+                and atom_children[0].type == token.LPAR
+                and atom_children[1].type == syms.testlist_gexp
+                and atom_children[2].type == token.RPAR
+            ):
+                features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS)
+
+        elif n.type == syms.match_stmt:
+            features.add(Feature.PATTERN_MATCHING)
+
         elif (
             n.type == syms.except_clause
             and len(n.children) >= 2
@@ -1219,6 +1277,9 @@ def get_features_used(  # noqa: C901
         ):
             features.add(Feature.VARIADIC_GENERICS)
 
+        elif n.type in (syms.type_stmt, syms.typeparams):
+            features.add(Feature.TYPE_PARAMS)
+
     return features
 
 
@@ -1343,47 +1404,14 @@ def nullcontext() -> Iterator[None]:
     yield
 
 
-def patch_click() -> None:
-    """Make Click not crash on Python 3.6 with LANG=C.
-
-    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
-    default which restricts paths that it can access during the lifetime of the
-    application. Click refuses to work in this scenario by raising a RuntimeError.
-
-    In case of Black the likelihood that non-ASCII characters are going to be used in
-    file paths is minimal since it's Python source code. Moreover, this crash was
-    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
-    """
-    modules: List[Any] = []
-    try:
-        from click import core
-    except ImportError:
-        pass
-    else:
-        modules.append(core)
-    try:
-        # Removed in Click 8.1.0 and newer; we keep this around for users who have
-        # older versions installed.
-        from click import _unicodefun  # type: ignore
-    except ImportError:
-        pass
-    else:
-        modules.append(_unicodefun)
-
-    for module in modules:
-        if hasattr(module, "_verify_python3_env"):
-            module._verify_python3_env = lambda: None  # type: ignore
-        if hasattr(module, "_verify_python_env"):
-            module._verify_python_env = lambda: None  # type: ignore
-
-
 def patched_main() -> None:
-    if sys.platform == "win32" and getattr(sys, "frozen", False):
+    # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
+    # environments so just assume we always need to call it if frozen.
+    if getattr(sys, "frozen", False):
         from multiprocessing import freeze_support
 
         freeze_support()
 
-    patch_click()
     main()
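
A note on the `from black.mode import Mode as Mode  # re-exported` hunk above: the redundant `X as X` spelling is the explicit re-export form that type checkers such as mypy honour under --no-implicit-reexport, so Mode stays importable from the top-level package. A minimal consumer-side sketch, assuming nothing beyond the public format_str/Mode API:

# Minimal sketch: Mode keeps working as a top-level import because of the
# explicit "Mode as Mode" re-export shown in the diff.
import black

print(black.format_str("x = {'a':37,'b':42,}", mode=black.Mode()))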
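
The new --workers help text says the default is taken from the BLACK_NUM_WORKERS environment variable, falling back to the number of CPUs. That resolution is not part of this file (the old module-level DEFAULT_WORKERS constant is simply deleted here), so the snippet below is only a hedged sketch of the documented fallback order, not Black's actual implementation; resolve_workers is a hypothetical helper.

import os
from typing import Optional


def resolve_workers(workers: Optional[int]) -> int:
    """Hypothetical sketch of the documented default: -W flag, then env var, then CPUs."""
    if workers is not None:  # value given via -W / --workers
        return workers
    env = os.environ.get("BLACK_NUM_WORKERS")
    if env:  # documented environment variable
        return int(env)
    return os.cpu_count() or 1  # last resort: number of CPUs, at least 1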
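
The --skip-source-first-line handling above splits off the first line as a raw `header` before formatting and prepends it to both the original and the formatted text, so the diff and the written file keep that line verbatim. A stripped-down sketch of the same flow, assuming UTF-8 input (format_fn stands in for black.format_file_contents; the real code also tracks the encoding and newline style via decode_bytes):

from pathlib import Path
from typing import Callable


def format_keeping_first_line(src: Path, format_fn: Callable[[str], str]) -> str:
    """Sketch: leave line 1 untouched, format the rest, glue the pieces back together."""
    with open(src, "rb") as buf:
        header = buf.readline()  # the first line is passed through unchanged
        src_contents = buf.read().decode("utf-8")
    dst_contents = format_fn(src_contents)  # only the remainder is reformatted
    return header.decode("utf-8") + dst_contents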
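
reformat_one above now talks to an object-based cache instead of the old read_cache/write_cache dict helpers. The sketch below wires together only the three methods visible in this diff (Cache.read, is_changed, write); the real function also handles stdin and the diff write-back modes, so treat this as an illustration, not Black's code.

from pathlib import Path

from black import Mode, WriteBack, format_file_in_place
from black.cache import Cache


def format_with_cache(src: Path, mode: Mode) -> bool:
    """Sketch of the cached path in reformat_one: skip files the cache marks unchanged."""
    cache = Cache.read(mode)  # load (or create) the cache keyed by this Mode
    if not cache.is_changed(src):  # stored metadata still matches the file on disk
        return False
    changed = format_file_in_place(
        src, fast=False, write_back=WriteBack.YES, mode=mode
    )
    cache.write([src])  # remember the file so the next run can skip it
    return changed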
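
Several hunks above swap naive UTC timestamps (datetime.utcnow / datetime.utcfromtimestamp plus a hand-appended " +0000") for timezone-aware ones. For reference, the difference in plain stdlib terms:

from datetime import datetime, timezone

# Old style: a naive datetime that merely happens to hold UTC wall-clock time,
# so the diff headers had to append " +0000" by hand.
naive = datetime.utcnow()
print(naive.tzinfo)  # None

# New style: an aware datetime carries its offset, so str() already includes it.
aware = datetime.now(timezone.utc)
print(aware.tzinfo)  # UTC
print(f"STDIN\t{aware}")  # e.g. "STDIN<TAB>2023-01-01 12:00:00.000000+00:00"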
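
The new with_stmt branch in get_features_used detects parenthesized context managers, i.e. several `with` items wrapped in parentheses so they can span multiple lines (accepted by CPython's parser since 3.9 and documented in 3.10). For reference, this is the construct being detected; the file names are just placeholders:

# The shape the new with_stmt/atom/testlist_gexp check looks for:
# an LPAR, a comma-separated list of context managers, and an RPAR.
with (
    open("pyproject.toml", encoding="utf-8") as cfg,
    open("README.md", encoding="utf-8") as readme,
):
    print(len(cfg.read()), len(readme.read()))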