from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime
from json.decoder import JSONDecodeError
from pathlib import Path

from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError

from _black_version import version as __version__
from black.cache import Cache, get_cache_info, read_cache, write_cache
from black.comments import normalize_fmt_off
from black.const import (
from black.files import (
    find_user_pyproject_toml,
    normalize_path_maybe_ignore,
    wrap_stream_for_windows,
from black.handle_ipynb_magics import (
    jupyter_dependencies_are_installed,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, Line
from black.mode import (
    FUTURE_FLAG_TO_FEATURE,
from black.nodes import (
    is_simple_decorator_expression,
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.report import Changed, NothingChanged, Report
from black.trans import iter_fexpr_spans
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

COMPILED = Path(__file__).suffix in (".pyd", ".so")


class WriteBack(Enum):
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
        if check and not diff:
            return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES
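    # Example (illustrative sketch, not part of the original file): how the
    # --check/--diff/--color CLI flags map onto WriteBack members.
    #
    #   >>> WriteBack.from_configuration(check=True, diff=False) is WriteBack.CHECK
    #   True
    #   >>> WriteBack.from_configuration(check=False, diff=True, color=True) is WriteBack.COLOR_DIFF
    #   True
    #   >>> WriteBack.from_configuration(check=False, diff=False) is WriteBack.YES
    #   True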

# Legacy name, left for integrations.


def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
        value = find_pyproject_toml(ctx.params.get("src", ()))
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
    # Sanitize the values to be Click friendly. For more information please see:
    # https://github.com/psf/black/issues/1458
    # https://github.com/pallets/click/issues/1567
        k: str(v) if not isinstance(v, (list, dict)) else v
        for k, v in config.items()
    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
    default_map: Dict[str, Any] = {}
        default_map.update(ctx.default_map)
    default_map.update(config)
    ctx.default_map = default_map
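    # Illustrative sketch (not part of the original file): given a pyproject.toml with
    #
    #   [tool.black]
    #   line-length = 100
    #   target-version = ["py38"]
    #
    # the sanitized mapping merged into ctx.default_map looks roughly like
    # {"line_length": "100", "target_version": ["py38"]}: scalar values are
    # stringified for Click, while lists and dicts are passed through unchanged
    # (hyphen-to-underscore key normalization is assumed to happen in
    # parse_pyproject_toml).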

def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    return [TargetVersion[val.upper()] for val in v]
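# Example (illustrative sketch, not part of the original file); `ctx` and `param` are
# ignored by the callback, so any placeholder objects would do here:
#
#   >>> [tv.name for tv in target_version_option_callback(ctx, param, ("py37", "py310"))]
#   ['PY37', 'PY310']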

def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
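# Example (illustrative sketch, not part of the original file): a pattern containing a
# newline is compiled in verbose mode, so inline whitespace and "# ..." comments inside
# the pattern are ignored.
#
#   >>> pattern = re_compile_maybe_verbose("foo  # first alternative\n| bar")
#   >>> bool(pattern.match("bar"))
#   True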

    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. [default: per-file"
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--python-cell-magics",
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
        " Useful for formatting cells with custom python magics."
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    type=click.IntRange(min=1),
    help="Number of parallel workers [default: number of CPUs in the system]",
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
def main(  # noqa: C901
    target_version: List[TargetVersion],
    python_cell_magics: Sequence[str],
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    workers: Optional[int],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    if src and code is not None:
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")

        find_project_root(src, stdin_filename) if code is None else (None, None)
    ctx.obj["root"] = root

                f"Identified `{root}` as project root containing a {method}.",
                else (normalize_path_maybe_ignore(Path(source), root), source)
            srcs_string = ", ".join(
                    else f'\033[31m"{source} (skipping - invalid)"\033[34m'
                    for _norm, source in normalized
            out(f"Sources to be formatted: {srcs_string}", fg="blue")

            config_source = ctx.get_parameter_source("config")
            user_level_config = str(find_user_pyproject_toml())
            if config == user_level_config:
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'.",
            elif config_source in (
                ParameterSource.DEFAULT,
                ParameterSource.DEFAULT_MAP,
                out("Using configuration from project root.", fg="blue")
                out(f"Using configuration in '{config}'.", fg="blue")

    error_msg = "Oh no! 💥 💔 💥"
        and required_version != __version__
        and required_version != __version__.split(".")[0]
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        err("Cannot pass both `pyi` and `ipynb` flags!")

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
        versions = set(target_version)
        # We'll autodetect later.
        target_versions=versions,
        line_length=line_length,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        python_cell_magics=set(python_cell_magics),

        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)

            content=code, fast=fast, write_back=write_back, mode=mode, report=report
            sources = get_sources(
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                stdin_filename=stdin_filename,
        except GitWildMatchPatternError:
            "No Python files are present to be formatted. Nothing to do 😴",
        if len(sources) == 1:
                write_back=write_back,
            from black.concurrency import reformat_many
                write_back=write_back,

    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        click.echo(str(report), err=True)
    ctx.exit(report.return_code)
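# Usage sketch (illustrative, not part of the original file): typical invocations of
# the CLI entry point defined above.
#
#   $ black src/                                   # reformat files in place
#   $ black --check --diff src/                    # don't write; exit 1 and show a diff if needed
#   $ black --line-length 100 --target-version py38 -   # format stdin to stdout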

    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()
    root = ctx.obj["root"]

        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:
            normalized_path = "/" + normalized_path
            # Hard-exclude any files that matches the `--force-exclude` regex.
                force_exclude_match = force_exclude.search(normalized_path)
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
                exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
                gitignore = get_gitignore(root)
                p_gitignore = get_gitignore(p)
                # No need to use p's gitignore if it is identical to root's gitignore
                # (i.e. root and p point to the same directory).
                if gitignore != p_gitignore:
                    gitignore += p_gitignore
            err(f"invalid path: {s}")


def path_empty(
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    Exit if there is no `src` provided for formatting
    if verbose or not quiet:


def reformat_code(
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    path = Path("<string>")
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(path, str(exc))


# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
        elif str(src).startswith(STDIN_PLACEHOLDER):
            # Use the original name again in case we want to print something
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(src, str(exc))
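# Illustrative note (not part of the original file): with `--stdin-filename`,
# get_sources wraps the path in STDIN_PLACEHOLDER, so the branch above reads the code
# from stdin while reporting results (and choosing .pyi/.ipynb handling) under the
# original file name, e.g.
#
#   $ cat stubs/foo.pyi | black --stdin-filename stubs/foo.pyi -
#
# formats the piped content with is_pyi=True and writes the result to stdout.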

def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
    except JSONDecodeError:
            f"File '{src}' cannot be parsed as valid Jupyter notebook."

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)


def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    then = datetime.utcnow()
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
        src, encoding, newline = content, "utf-8", ""

        dst = format_file_contents(src, fast=fast, mode=mode)
    except NothingChanged:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                f = wrap_stream_for_windows(f)


def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)


def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    if not src_contents.strip():
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
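# Example (illustrative sketch, not part of the original file):
#
#   >>> format_file_contents("x   = 1\n", fast=True, mode=Mode())
#   'x = 1\n'
#
# whereas contents that are already formatted raise NothingChanged instead of
# returning a value.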

def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics


def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    dst = dst.rstrip("\n")
        raise NothingChanged from None
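    # Example (illustrative sketch, not part of the original file; the exact output is
    # approximate): the trailing semicolon is stripped before formatting and put back
    # afterwards, so a cell like
    #
    #   >>> format_cell("x   =1;", fast=True, mode=Mode())
    #   'x = 1;'
    #
    # comes back formatted with its semicolon intact and trailing newlines removed.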

def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None
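# Example (illustrative, not part of the original file):
#
#   >>> validate_metadata({"metadata": {"language_info": {"name": "R"}}})  # raises NothingChanged
#   >>> validate_metadata({})  # empty metadata passes, so formatting proceeds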

def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                cell["source"] = dst.splitlines(keepends=True)
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        raise NothingChanged


def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:

    A more complex example:

    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       string_normalization=False,
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)


def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    if mode.target_versions:
        versions = mode.target_versions
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node, preview=mode.preview)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    split_line_features = {
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
            dst_contents.append(str(line))
    return "".join(dst_contents)


def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
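# Example (illustrative, not part of the original file): the original newline style is
# reported separately, while the decoded contents always use "\n".
#
#   >>> decode_bytes(b"x = 1\r\ny = 2\r\n")
#   ('x = 1\ny = 2\n', 'utf-8', '\r\n')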

def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    features: Set[Feature] = set()
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
                if Feature.DEBUG_F_STRINGS not in features:
                    for span_beg, span_end in iter_fexpr_spans(n.value):
                        if n.value[span_beg : span_end - 1].rstrip().endswith("="):
                            features.add(Feature.DEBUG_F_STRINGS)

        elif is_number_token(n):
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)

            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
            features.add(Feature.UNPACKING_ON_FLOW)

            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
            features.add(Feature.EXCEPT_STAR)

        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
            features.add(Feature.VARIADIC_GENERICS)

            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
            features.add(Feature.VARIADIC_GENERICS)


def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]


def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
            imports |= set(get_imports_from_children(first_child.children[3:]))


def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."

        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"


def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    newdst = _format_str_once(dst, mode=mode)
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"


def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.


def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    modules: List[Any] = []
        from click import core
        modules.append(core)
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
        modules.append(_unicodefun)

    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore


def patched_main() -> None:
    # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
    # environments so just assume we always need to call it if frozen.
    if getattr(sys, "frozen", False):
        from multiprocessing import freeze_support


if __name__ == "__main__":