import io
import json
import os
import platform
import re
import sys
import tokenize
import traceback
from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime
from enum import Enum
from json.decoder import JSONDecodeError
from pathlib import Path
from typing import (
    Any, Dict, Generator, Iterator, List, MutableMapping, Optional,
    Pattern, Sequence, Set, Sized, Tuple, Union,
)

if sys.version_info >= (3, 8):
    from typing import Final
else:
    from typing_extensions import Final

import click
from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError

from _black_version import version as __version__
from black.cache import Cache, get_cache_info, read_cache, write_cache
from black.comments import normalize_fmt_off
from black.const import (
    DEFAULT_EXCLUDES,
    DEFAULT_INCLUDES,
    DEFAULT_LINE_LENGTH,
    STDIN_PLACEHOLDER,
)
from black.files import (
    find_project_root,
    find_pyproject_toml,
    find_user_pyproject_toml,
    gen_python_files,
    get_gitignore,
    normalize_path_maybe_ignore,
    parse_pyproject_toml,
    wrap_stream_for_windows,
)
from black.handle_ipynb_magics import (
    PYTHON_CELL_MAGICS,
    TRANSFORMED_MAGICS,
    jupyter_dependencies_are_installed,
    mask_cell,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
    unmask_cell,
)
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, Line
from black.mode import (
    FUTURE_FLAG_TO_FEATURE,
    VERSION_TO_FEATURES,
    Feature,
    Mode,
    TargetVersion,
    supports_feature,
)
from black.nodes import (
    STARS,
    is_number_token,
    is_simple_decorator_expression,
    is_string_token,
    syms,
)
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.report import Changed, NothingChanged, Report
from black.trans import iter_fexpr_spans
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

COMPILED = Path(__file__).suffix in (".pyd", ".so")
DEFAULT_WORKERS: Final = os.cpu_count()

# types
FileContent = str
Encoding = str
NewLine = str


class WriteBack(Enum):
    NO = 0
    YES = 1
    DIFF = 2
    CHECK = 3
    COLOR_DIFF = 4

    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        if check and not diff:
            return cls.CHECK

        if diff and color:
            return cls.COLOR_DIFF

        return cls.DIFF if diff else cls.YES


# Legacy name, left for integrations.
FileMode = Mode
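

# Illustrative sketch, not part of Black's API: how the CLI flags map onto a
# write-back strategy. The helper below is hypothetical and exists only to document
# the expected mapping of WriteBack.from_configuration.
def _demo_write_back_selection() -> None:
    assert WriteBack.from_configuration(check=True, diff=False) is WriteBack.CHECK
    assert WriteBack.from_configuration(check=False, diff=True) is WriteBack.DIFF
    assert (
        WriteBack.from_configuration(check=False, diff=True, color=True)
        is WriteBack.COLOR_DIFF
    )
    # No flags at all means "write the reformatted file back in place".
    assert WriteBack.from_configuration(check=False, diff=False) is WriteBack.YES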


def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
) -> Optional[str]:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    if not value:
        value = find_pyproject_toml(ctx.params.get("src", ()))
        if value is None:
            return None

    try:
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        ) from None

    if not config:
        return None
    else:
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
        config = {
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()
        }

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
        )

    default_map: Dict[str, Any] = {}
    if ctx.default_map:
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
    return value
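

# Illustrative sketch, not part of Black's API: the sanitization performed by
# read_pyproject_toml before handing a [tool.black] table to Click. Scalars are
# stringified so Click can re-parse them; lists and dicts pass through untouched.
# The helper and the sample values below are hypothetical.
def _demo_sanitize_config() -> None:
    raw = {
        "line_length": 100,
        "skip_string_normalization": True,
        "target_version": ["py38"],
    }
    sanitized = {
        k: str(v) if not isinstance(v, (list, dict)) else v for k, v in raw.items()
    }
    assert sanitized == {
        "line_length": "100",
        "skip_string_normalization": "True",
        "target_version": ["py38"],
    }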


def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    return [TargetVersion[val.upper()] for val in v]


def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    if "\n" in regex:
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    return compiled


def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
    try:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
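

# Illustrative sketch, not part of Black's API: a multi-line value (e.g. for
# --exclude in pyproject.toml) is compiled in verbose mode, so whitespace and
# comments inside the pattern are ignored. The pattern below is hypothetical.
def _demo_verbose_exclude_pattern() -> None:
    pattern = re_compile_maybe_verbose(
        r"""
        (
            \.eggs    # generated by setuptools
          | \.tox     # tox environments
          | build
        )/
        """
    )
    assert pattern.search("/project/.tox/py38/lib/thing.py")
    assert pattern.search("/project/src/app.py") is None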


@click.command(
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
)
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
@click.option(
    "-l",
    "--line-length",
    type=int,
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    show_default=True,
)
@click.option(
    "-t",
    "--target-version",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
    multiple=True,
    help=(
        "Python versions that should be supported by Black's output. [default: per-file"
        " auto-detection]"
    ),
)
@click.option(
    "--pyi",
    is_flag=True,
    help=(
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
    ),
)
@click.option(
    "--ipynb",
    is_flag=True,
    help=(
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    ),
)
@click.option(
    "--python-cell-magics",
    multiple=True,
    help=(
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
        " Useful for formatting cells with custom python magics."
    ),
    default=[],
)
@click.option(
    "-S",
    "--skip-string-normalization",
    is_flag=True,
    help="Don't normalize string quotes or prefixes.",
)
@click.option(
    "-C",
    "--skip-magic-trailing-comma",
    is_flag=True,
    help="Don't use trailing commas as a reason to split lines.",
)
@click.option(
    "--experimental-string-processing",
    is_flag=True,
    hidden=True,
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
)
@click.option(
    "--preview",
    is_flag=True,
    help=(
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
    ),
)
@click.option(
    "--check",
    is_flag=True,
    help=(
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    ),
)
@click.option(
    "--diff",
    is_flag=True,
    help="Don't write the files back, just output a diff for each file on stdout.",
)
@click.option(
    "--color/--no-color",
    is_flag=True,
    help="Show colored diff. Only applies when `--diff` is given.",
)
@click.option(
    "--fast/--safe",
    is_flag=True,
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
)
@click.option(
    "--required-version",
    type=str,
    help=(
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    ),
)
@click.option(
    "--include",
    type=str,
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
    help=(
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    ),
    show_default=True,
)
@click.option(
    "--exclude",
    type=str,
    callback=validate_regex,
    help=(
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    ),
)
@click.option(
    "--extend-exclude",
    type=str,
    callback=validate_regex,
    help=(
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    ),
)
@click.option(
    "--force-exclude",
    type=str,
    callback=validate_regex,
    help=(
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
    ),
)
@click.option(
    "--stdin-filename",
    type=str,
    help=(
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    ),
)
@click.option(
    "-W",
    "--workers",
    type=click.IntRange(min=1),
    default=DEFAULT_WORKERS,
    show_default=True,
    help="Number of parallel workers",
)
@click.option(
    "-q",
    "--quiet",
    is_flag=True,
    help=(
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
    ),
)
@click.option(
    "-v",
    "--verbose",
    is_flag=True,
    help=(
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
    ),
)
@click.version_option(
    version=__version__,
    message=(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
    ),
)
@click.argument(
    "src",
    nargs=-1,
    type=click.Path(
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    ),
    is_eager=True,
    metavar="SRC ...",
)
@click.option(
    "--config",
    type=click.Path(
        exists=True,
        file_okay=True,
        dir_okay=False,
        readable=True,
        allow_dash=False,
        path_type=str,
    ),
    is_eager=True,
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
)
@click.pass_context
def main(  # noqa: C901
    ctx: click.Context,
    code: Optional[str],
    line_length: int,
    target_version: List[TargetVersion],
    check: bool,
    diff: bool,
    color: bool,
    fast: bool,
    pyi: bool,
    ipynb: bool,
    python_cell_magics: Sequence[str],
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    preview: bool,
    quiet: bool,
    verbose: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    workers: int,
    src: Tuple[str, ...],
    config: Optional[str],
) -> None:
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    if src and code is not None:
        out(
            main.get_usage(ctx)
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."
        )
        ctx.exit(1)
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
        ctx.exit(1)

    root, method = (
        find_project_root(src, stdin_filename) if code is None else (None, None)
    )
    ctx.obj["root"] = root

    if verbose:
        if root:
            out(
                f"Identified `{root}` as project root containing a {method}.",
                fg="blue",
            )

            normalized = [
                (source, source)
                if source == "-"
                else (normalize_path_maybe_ignore(Path(source), root), source)
                for source in src
            ]
            srcs_string = ", ".join(
                [
                    f'"{_norm}"'
                    if _norm
                    else f'\033[31m"{source} (skipping - invalid)"\033[34m'
                    for _norm, source in normalized
                ]
            )
            out(f"Sources to be formatted: {srcs_string}", fg="blue")

        if config:
            config_source = ctx.get_parameter_source("config")
            user_level_config = str(find_user_pyproject_toml())
            if config == user_level_config:
                out(
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'.",
                    fg="blue",
                )
            elif config_source in (
                ParameterSource.DEFAULT,
                ParameterSource.DEFAULT_MAP,
            ):
                out("Using configuration from project root.", fg="blue")
            else:
                out(f"Using configuration in '{config}'.", fg="blue")

    error_msg = "Oh no! 💥 💔 💥"
    if (
        required_version
        and required_version != __version__
        and required_version != __version__.split(".")[0]
    ):
        err(
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        )
        ctx.exit(1)
    if ipynb and pyi:
        err("Cannot pass both `pyi` and `ipynb` flags!")
        ctx.exit(1)

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
    if target_version:
        versions = set(target_version)
    else:
        # We'll autodetect later.
        versions = set()
    mode = Mode(
        target_versions=versions,
        line_length=line_length,
        is_pyi=pyi,
        is_ipynb=ipynb,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        preview=preview,
        python_cell_magics=set(python_cell_magics),
    )

    if code is not None:
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
        quiet = True

    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)

    if code is not None:
        reformat_code(
            content=code, fast=fast, write_back=write_back, mode=mode, report=report
        )
    else:
        try:
            sources = get_sources(
                ctx=ctx,
                src=src,
                quiet=quiet,
                verbose=verbose,
                include=include,
                exclude=exclude,
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                report=report,
                stdin_filename=stdin_filename,
            )
        except GitWildMatchPatternError:
            ctx.exit(1)

        path_empty(
            sources,
            "No Python files are present to be formatted. Nothing to do 😴",
            quiet,
            verbose,
            ctx,
        )

        if len(sources) == 1:
            reformat_one(
                src=sources.pop(),
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
            )
        else:
            from black.concurrency import reformat_many

            reformat_many(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                workers=workers,
            )

    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out()
        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        if code is None:
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)
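

# Illustrative sketch, not part of Black's API: building the same Mode that the CLI
# above assembles, for callers that drive Black programmatically. The keyword values
# below are hypothetical examples, not CLI defaults.
def _demo_equivalent_mode() -> None:
    mode = Mode(
        target_versions={TargetVersion.PY38},
        line_length=88,
        string_normalization=True,
        magic_trailing_comma=True,
        is_pyi=False,
    )
    assert format_str("x = {'a':1}", mode=mode) == 'x = {"a": 1}\n'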


def get_sources(
    *,
    ctx: click.Context,
    src: Tuple[str, ...],
    quiet: bool,
    verbose: bool,
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    report: "Report",
    stdin_filename: Optional[str],
) -> Set[Path]:
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()
    root = ctx.obj["root"]

    for s in src:
        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
            is_stdin = True
        else:
            p = Path(s)
            is_stdin = False

        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:
                continue

            normalized_path = "/" + normalized_path
            # Hard-exclude any files that match the `--force-exclude` regex.
            if force_exclude:
                force_exclude_match = force_exclude.search(normalized_path)
            else:
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                continue

            if is_stdin:
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")

            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            ):
                continue

            sources.add(p)
        elif p.is_dir():
            if exclude is None:
                exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
                gitignore = get_gitignore(root)
            else:
                gitignore = None
            sources.update(
                gen_python_files(
                    p.iterdir(),
                    ctx.obj["root"],
                    include,
                    exclude,
                    extend_exclude,
                    force_exclude,
                    report,
                    gitignore,
                    verbose=verbose,
                    quiet=quiet,
                )
            )
        elif s == "-":
            sources.add(p)
        else:
            err(f"invalid path: {s}")
    return sources
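

# Illustrative sketch, not part of Black's API: when "-" is combined with
# --stdin-filename, the fake path is prefixed with STDIN_PLACEHOLDER so that
# reformat_one can tell it apart from a real file while still applying path-based
# logic (the .pyi/.ipynb suffix checks and --force-exclude). Paths are hypothetical.
def _demo_stdin_placeholder() -> None:
    fake = Path(f"{STDIN_PLACEHOLDER}src/module.pyi")
    assert str(fake).startswith(STDIN_PLACEHOLDER)
    assert fake.suffix == ".pyi"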


def path_empty(
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
) -> None:
    """
    Exit if there is no `src` provided for formatting
    """
    if not src:
        if verbose or not quiet:
            out(msg)
        ctx.exit(0)


def reformat_code(
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
) -> None:
    """
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    path = Path("<string>")
    try:
        changed = Changed.NO
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
        ):
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(path, str(exc))


# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_one(
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO

        if str(src) == "-":
            is_stdin = True
        elif str(src).startswith(STDIN_PLACEHOLDER):
            is_stdin = True
            # Use the original name again in case we want to print something
            # to the user
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
        else:
            is_stdin = False

        if is_stdin:
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
        else:
            cache: Cache = {}
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
            ):
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
            ):
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(src, str(exc))


def format_file_in_place(
    src: Path,
    fast: bool,
    mode: Mode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
    try:
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
        return False
    except JSONDecodeError:
        raise ValueError(
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
        ) from None

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
            f.detach()

    return True
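

# Illustrative sketch, not part of Black's API: formatting a temporary file in place
# versus only printing a diff. The file name and contents are hypothetical.
def _demo_format_file_in_place() -> None:
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        path = Path(tmp) / "example.py"
        path.write_text("x=1\n", encoding="utf-8")
        # WriteBack.DIFF only prints a diff and leaves the file untouched.
        assert format_file_in_place(
            path, fast=True, mode=Mode(), write_back=WriteBack.DIFF
        )
        assert path.read_text(encoding="utf-8") == "x=1\n"
        # WriteBack.YES rewrites the file with the formatted contents.
        assert format_file_in_place(
            path, fast=True, mode=Mode(), write_back=WriteBack.YES
        )
        assert path.read_text(encoding="utf-8") == "x = 1\n"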


def format_stdin_to_stdout(
    fast: bool,
    *,
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    mode: Mode,
) -> bool:
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    else:
        src, encoding, newline = content, "utf-8", ""

    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode)
        return True

    except NothingChanged:
        return False

    finally:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
                dst += "\n"
            f.write(dst)
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                d = color_diff(d)
                f = wrap_stream_for_windows(f)
            f.write(d)
        f.detach()


def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)


def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    if not src_contents.strip():
        raise NothingChanged

    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
    return dst_contents
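

# Illustrative sketch, not part of Black's API: format_file_contents signals
# "already formatted" by raising NothingChanged instead of returning its input.
def _demo_nothing_changed() -> None:
    assert format_file_contents("x=1\n", fast=True, mode=Mode()) == "x = 1\n"
    try:
        format_file_contents("x = 1\n", fast=True, mode=Mode())
    except NothingChanged:
        pass
    else:
        raise AssertionError("expected NothingChanged for already-formatted input")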


def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of
    indentations.

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    """
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
    if (
        src[:2] == "%%"
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
    ):
        raise NothingChanged


def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    General idea is:

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - format cell;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        src
    )
    try:
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
    except SyntaxError:
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    )
    dst = dst.rstrip("\n")
    if dst == src:
        raise NothingChanged from None
    return dst
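

# Illustrative sketch, not part of Black's API: a cell's trailing output-suppressing
# semicolon survives formatting while the code in between is reformatted. The cell
# source is hypothetical; formatting notebook cells assumes the `black[jupyter]`
# extras (tokenize-rt, and IPython for cells containing magics) are installed.
def _demo_format_cell() -> None:
    assert format_cell("df.head( );", fast=True, mode=Mode()) == "df.head();"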


def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None


def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    trailing_newline = src_contents[-1] == "\n"
    modified = False
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            try:
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                pass
            else:
                cell["source"] = dst.splitlines(keepends=True)
                modified = True
    if modified:
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        return dst_contents
    else:
        raise NothingChanged


def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed.  Example:

    >>> import black
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    >>> print(
    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       line_length=10,
    ...       string_normalization=False,
    ...       is_pyi=False,
    ...     ),
    ...   ),
    ... )
    def f(
        arg: str = '',
    ) -> None:
        hey

    """
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)
    return dst_contents
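

# Illustrative sketch, not part of Black's API: why format_str runs a second pass.
# A magic trailing comma introduced on the first pass can change how lines are split,
# so format_str only returns output that is a fixed point of _format_str_once.
# The input below is hypothetical.
def _demo_second_pass_is_stable() -> None:
    final = format_str("f(a,)\n", mode=Mode())
    # Applying the single-pass formatter again leaves the output unchanged.
    assert _format_str_once(final, mode=Mode()) == final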


def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_contents = []
    if mode.target_versions:
        versions = mode.target_versions
    else:
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node, preview=mode.preview)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    after = 0
    split_line_features = {
        feature
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    }
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            dst_contents.append(str(line))
    return "".join(dst_contents)


def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
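

# Illustrative sketch, not part of Black's API: decode_bytes normalizes text to LF
# internally but remembers the original newline so writers can round-trip CRLF
# files. The byte string below is hypothetical.
def _demo_decode_bytes() -> None:
    contents, encoding, newline = decode_bytes(b"x = 1\r\ny = 2\r\n")
    assert contents == "x = 1\ny = 2\n"
    assert newline == "\r\n"
    assert encoding == "utf-8"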


def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    """
    features: Set[Feature] = set()
    if future_imports:
        features |= {
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
        }

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
                if Feature.DEBUG_F_STRINGS not in features:
                    for span_beg, span_end in iter_fexpr_spans(n.value):
                        if n.value[span_beg : span_end - 1].rstrip().endswith("="):
                            features.add(Feature.DEBUG_F_STRINGS)
                            break

        elif is_number_token(n):
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                syms.typedargslist,
                syms.arglist,
                syms.varargslist,
            }:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                n.children[1]
            ):
                features.add(Feature.RELAXED_DECORATORS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

        elif (
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
        ):
            features.add(Feature.UNPACKING_ON_FLOW)

        elif (
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
        ):
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

        elif (
            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
        ):
            features.add(Feature.EXCEPT_STAR)

        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
        ):
            features.add(Feature.VARIADIC_GENERICS)

        elif (
            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
        ):
            features.add(Feature.VARIADIC_GENERICS)

    return features


def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
    return {
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
    }
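

# Illustrative sketch, not part of Black's API: an assignment expression in the
# source restricts the detected targets to Python 3.8+. The snippet is hypothetical.
def _demo_detect_target_versions() -> None:
    node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
    versions = detect_target_versions(node)
    assert TargetVersion.PY38 in versions
    assert TargetVersion.PY37 not in versions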


def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

            else:
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
            break

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue

            break

        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break

            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break

    return imports


def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            f"{exc}\n"
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."
        ) from exc

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"
        ) from None


def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    # versions.
    newdst = _format_str_once(dst, mode=mode)
    if dst != newdst:
        log = dump_to_file(
            str(mode),
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        )
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
        ) from None


@contextmanager
def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.
    """
    yield


def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    modules: List[Any] = []
    try:
        from click import core
    except ImportError:
        pass
    else:
        modules.append(core)
    try:
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
    except ImportError:
        pass
    else:
        modules.append(_unicodefun)

    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore


def patched_main() -> None:
    if sys.platform == "win32" and getattr(sys, "frozen", False):
        from multiprocessing import freeze_support

        freeze_support()

    patch_click()
    main()


if __name__ == "__main__":
    patched_main()