from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime
from json.decoder import JSONDecodeError
from pathlib import Path

from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec import PathSpec
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError

from _black_version import version as __version__
from black.cache import Cache, get_cache_info, read_cache, write_cache
from black.comments import normalize_fmt_off
from black.const import (
from black.files import (
    find_user_pyproject_toml,
    normalize_path_maybe_ignore,
    wrap_stream_for_windows,
from black.handle_ipynb_magics import (
    jupyter_dependencies_are_installed,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, LinesBlock
from black.mode import (
    FUTURE_FLAG_TO_FEATURE,
from black.nodes import (
    is_simple_decorator_expression,
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.report import Changed, NothingChanged, Report
from black.trans import iter_fexpr_spans
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

COMPILED = Path(__file__).suffix in (".pyd", ".so")


class WriteBack(Enum):
    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        if check and not diff:
            return cls.CHECK

        if diff and color:
            return cls.COLOR_DIFF

        return cls.DIFF if diff else cls.YES
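

# Illustrative mapping (an editor's sketch, assuming the usual NO/YES/DIFF/CHECK/
# COLOR_DIFF members) of how the CLI flags land on this enum via the classmethod above:
#
#   check=True,  diff=False              -> WriteBack.CHECK
#   check=False, diff=False              -> WriteBack.YES
#   check=False, diff=True               -> WriteBack.DIFF
#   check=False, diff=True,  color=True  -> WriteBack.COLOR_DIFF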


# Legacy name, left for integrations.
FileMode = Mode


def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    if not value:
        value = find_pyproject_toml(ctx.params.get("src", ()))

    try:
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"

        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"

    default_map: Dict[str, Any] = {}
    if ctx.default_map:
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
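

# An illustrative walk-through (editor's sketch, not part of this module): given a
# pyproject.toml containing
#
#   [tool.black]
#   line-length = 100
#   target-version = ["py38", "py310"]
#
# parse_pyproject_toml() yields
# {"line_length": 100, "target_version": ["py38", "py310"]} (dashes become
# underscores), and the sanitization above turns that into
# {"line_length": "100", "target_version": ["py38", "py310"]} before it is merged
# into ctx.default_map, where Click treats the values as defaults for main()'s options.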


def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    return [TargetVersion[val.upper()] for val in v]
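

# For instance (illustrative): `--target-version py38 --target-version py310` reaches
# this callback as v == ("py38", "py310") and comes back as
# [TargetVersion.PY38, TargetVersion.PY310].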


def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    if "\n" in regex:
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    return compiled
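

# Illustrative behaviour (editor's note): a pattern without newlines is compiled
# verbatim, while a multi-line pattern (e.g. from a multi-line string in
# pyproject.toml's exclude settings) is prefixed with "(?x)" so the whitespace and
# comments inside it are ignored:
#
#   re_compile_maybe_verbose(r"\.pyi?$")                            # compiled as-is
#   re_compile_maybe_verbose("(\n  tests/data\n  | profiling\n)")   # verbose mode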


def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
    try:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None


    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. [default: per-file"
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--python-cell-magics",
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
        " Useful for formatting cells with custom python magics."
    "--skip-source-first-line",
    help="Skip the first line of the source code.",
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    type=click.IntRange(min=1),
    help="Number of parallel workers [default: number of CPUs in the system]",
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
def main(  # noqa: C901
    target_version: List[TargetVersion],
    python_cell_magics: Sequence[str],
    skip_source_first_line: bool,
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    workers: Optional[int],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    if src and code is not None:
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."

    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")

        find_project_root(src, stdin_filename) if code is None else (None, None)
    ctx.obj["root"] = root

            f"Identified `{root}` as project root containing a {method}.",

            else (normalize_path_maybe_ignore(Path(source), root), source)
        srcs_string = ", ".join(
                else f'\033[31m"{source} (skipping - invalid)"\033[34m'
                for _norm, source in normalized
        out(f"Sources to be formatted: {srcs_string}", fg="blue")

        config_source = ctx.get_parameter_source("config")
        user_level_config = str(find_user_pyproject_toml())
        if config == user_level_config:
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'."
        elif config_source in (
            ParameterSource.DEFAULT,
            ParameterSource.DEFAULT_MAP,
                out("Using configuration from project root.", fg="blue")
            out(f"Using configuration in '{config}'.", fg="blue")

    error_msg = "Oh no! 💥 💔 💥"
        and required_version != __version__
        and required_version != __version__.split(".")[0]
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"

        err("Cannot pass both `pyi` and `ipynb` flags!")

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)

        versions = set(target_version)
        # We'll autodetect later.

        target_versions=versions,
        line_length=line_length,
        skip_source_first_line=skip_source_first_line,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        python_cell_magics=set(python_cell_magics),

        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.

    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)

            content=code, fast=fast, write_back=write_back, mode=mode, report=report
        try:
            sources = get_sources(
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                stdin_filename=stdin_filename,
        except GitWildMatchPatternError:

            "No Python files are present to be formatted. Nothing to do 😴",

        if len(sources) == 1:
                write_back=write_back,
            from black.concurrency import reformat_many

                write_back=write_back,

    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):

        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        click.echo(str(report), err=True)
    ctx.exit(report.return_code)
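

# Editor's notes on the command above (illustrative, not exhaustive):
#
# * The --required-version check compares against both the full version string and its
#   major component, so with a hypothetical running version "23.1.0" either
#   `--required-version 23` or `--required-version 23.1.0` passes, while anything else
#   aborts with the message built from error_msg.
# * Typical invocations and their exit codes, per the option help texts above:
#
#     black src/                 # reformat files in place
#     black --check src/         # 0 = nothing to change, 1 = would reformat, 123 = error
#     black --diff --color src/  # print a colored diff instead of writing files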


def get_sources(
    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()
    root = ctx.obj["root"]

    exclude_is_None = exclude is None
    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
    gitignore = None  # type: Optional[PathSpec]
    root_gitignore = get_gitignore(root)

    for s in src:
        if s == "-" and stdin_filename:
            p = Path(stdin_filename)

        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:
                continue

            normalized_path = "/" + normalized_path
            # Hard-exclude any files that matches the `--force-exclude` regex.
            if force_exclude:
                force_exclude_match = force_exclude.search(normalized_path)
            else:
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")

            if is_stdin:
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")

            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet

                p_gitignore = get_gitignore(p)
                # No need to use p's gitignore if it is identical to root's gitignore
                # (i.e. root and p point to the same directory).
                if root_gitignore == p_gitignore:
                    gitignore = root_gitignore
                else:
                    gitignore = root_gitignore + p_gitignore

            err(f"invalid path: {s}")
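

# A sketch of the gitignore handling above (editor's illustration): get_gitignore()
# wraps a directory's .gitignore in a pathspec PathSpec, and the spec decides which
# paths the recursive search skips. Roughly:
#
#   from pathspec import PathSpec
#
#   spec = PathSpec.from_lines("gitwildmatch", ["build/", "*.pyc"])
#   spec.match_file("build/lib/foo.py")   # True  -> skipped
#   spec.match_file("src/foo.py")         # False -> considered for formatting
#
# When a target directory has its own .gitignore, its spec is combined with the project
# root's (the `root_gitignore + p_gitignore` above) so both sets of rules apply.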


def path_empty(
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    """
    Exit if there is no `src` provided for formatting
    """
        if verbose or not quiet:


def reformat_code(
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
) -> None:
    """
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    path = Path("<string>")
    try:
        changed = Changed.NO
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
        ):
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(path, str(exc))


# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_one(
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
        elif str(src).startswith(STDIN_PLACEHOLDER):
            # Use the original name again in case we want to print something
            # to the user
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])

            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES

            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(src, str(exc))


def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        if mode.skip_source_first_line:
            header = buf.readline()
        src_contents, encoding, newline = decode_bytes(buf.read())
    try:
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
        return False
    except JSONDecodeError:
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
    src_contents = header.decode(encoding) + src_contents
    dst_contents = header.decode(encoding) + dst_contents

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)


def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    else:
        src, encoding, newline = content, "utf-8", ""

    try:
        dst = format_file_contents(src, fast=fast, mode=mode)

    except NothingChanged:
        return False

    finally:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":

        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                f = wrap_stream_for_windows(f)


def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)


def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    if not src_contents.strip():
        raise NothingChanged

    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
    return dst_contents
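

# An illustrative contract check (editor's sketch): format_file_contents() either
# returns new content or raises NothingChanged - it never hands back the input
# unchanged:
#
#   >>> format_file_contents("x = {'a':37}\n", fast=True, mode=Mode())
#   'x = {"a": 37}\n'
#   >>> format_file_contents('x = {"a": 37}\n', fast=True, mode=Mode())
#   Traceback (most recent call last):
#     ...
#   black.report.NothingChanged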


def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of
    indentations.

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    """
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
    if (
        src[:2] == "%%"
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
    ):
        raise NothingChanged


def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        src
    )
    try:
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
    except SyntaxError:
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    )
    dst = dst.rstrip("\n")
    if dst == src:
        raise NothingChanged from None
    return dst
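

# A worked example (editor's illustration) of the round trip described above: the
# trailing semicolon is removed before formatting and put back afterwards, so
#
#   >>> format_cell("df = pd.read_csv('t.csv');", fast=True, mode=Mode())
#   'df = pd.read_csv("t.csv");'
#
# while a cell that is already formatted raises NothingChanged instead.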


def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None


def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            try:
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                pass
            else:
                cell["source"] = dst.splitlines(keepends=True)

        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"

        raise NothingChanged


def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed.

    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       string_normalization=False,

    """
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)
    return dst_contents
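

# One more illustrative case (editor's note), the "magic trailing comma" behaviour that
# the second-pass comment above alludes to - a pre-existing trailing comma forces the
# collection to stay exploded even when it would fit on one line:
#
#   >>> print(black.format_str("f(a,)", mode=black.Mode()))
#   f(
#       a,
#   )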


def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_blocks: List[LinesBlock] = []
    if mode.target_versions:
        versions = mode.target_versions
    else:
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node, preview=mode.preview)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(mode=mode)
    split_line_features = {
        feature
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    }
    block: Optional[LinesBlock] = None
    for current_line in lines.visit(src_node):
        block = elt.maybe_empty_lines(current_line)
        dst_blocks.append(block)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            block.content_lines.append(str(line))
    if dst_blocks:
        dst_blocks[-1].after = 0
    dst_contents = []
    for block in dst_blocks:
        dst_contents.extend(block.all_lines())
    return "".join(dst_contents)


def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
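

# Editor's illustration: CRLF input is reported as CRLF but decoded with universal
# newlines, so the rest of the formatter only ever sees "\n":
#
#   >>> decode_bytes(b"x = 1\r\ny = 2\r\n")
#   ('x = 1\ny = 2\n', 'utf-8', '\r\n')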


def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    """
    features: Set[Feature] = set()
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
                if Feature.DEBUG_F_STRINGS not in features:
                    for span_beg, span_end in iter_fexpr_spans(n.value):
                        if n.value[span_beg : span_end - 1].rstrip().endswith("="):
                            features.add(Feature.DEBUG_F_STRINGS)

        elif is_number_token(n):
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)

            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA

            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
            features.add(Feature.UNPACKING_ON_FLOW)

            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
            features.add(Feature.EXCEPT_STAR)

        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
            features.add(Feature.VARIADIC_GENERICS)

            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
            features.add(Feature.VARIADIC_GENERICS)

    return features


def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
    return {
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
    }
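

# How the two functions above interact (an editor's sketch, using only calls defined
# in this module):
#
#   >>> node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
#   >>> get_features_used(node) == {Feature.ASSIGNMENT_EXPRESSIONS}
#   True
#   >>> TargetVersion.PY37 in detect_target_versions(node)
#   False
#   >>> TargetVersion.PY38 in detect_target_versions(node)
#   True
#
# The walrus operator maps to ASSIGNMENT_EXPRESSIONS, which rules out every target
# before Python 3.8.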


def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

            else:
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
            break

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue

            break

        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break

            imports |= set(get_imports_from_children(first_child.children[3:]))

    return imports
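

# Editor's illustration: only a leading block of __future__ imports (optionally after
# the module docstring) is collected; the scan stops at the first other statement.
#
#   >>> src = '"""Docstring."""\nfrom __future__ import annotations\n'
#   >>> get_future_imports(lib2to3_parse(src))
#   {'annotations'}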


def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"


def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    # versions.
    newdst = _format_str_once(dst, mode=mode)
    if dst != newdst:
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"


def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.
    """


def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    modules: List[Any] = []
        from click import core
        modules.append(core)
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
        modules.append(_unicodefun)

    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None


def patched_main() -> None:
    # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
    # environments so just assume we always need to call it if frozen.
    if getattr(sys, "frozen", False):
        from multiprocessing import freeze_support

        freeze_support()


if __name__ == "__main__":
    patched_main()