from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime, timezone
from json.decoder import JSONDecodeError
from pathlib import Path
from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec import PathSpec
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
from _black_version import version as __version__
from black.cache import Cache, get_cache_info, read_cache, write_cache
from black.comments import normalize_fmt_off
from black.const import (
from black.files import (
    find_user_pyproject_toml,
    normalize_path_maybe_ignore,
    wrap_stream_for_windows,
from black.handle_ipynb_magics import (
    jupyter_dependencies_are_installed,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, LinesBlock
from black.mode import (
    FUTURE_FLAG_TO_FEATURE,
from black.nodes import (
    is_simple_decorator_expression,
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.report import Changed, NothingChanged, Report
from black.trans import iter_fexpr_spans
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

COMPILED = Path(__file__).suffix in (".pyd", ".so")


class WriteBack(Enum):
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
        if check and not diff:
            return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES
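# Illustrative note (not part of the original source): from_configuration() maps the
# CLI flags onto WriteBack values. From the branches visible above, diff + color
# selects COLOR_DIFF, diff alone selects DIFF, and the fallback is YES (write files
# in place); the plain --check mapping to WriteBack.CHECK is assumed from the elided
# branch.
#
#     >>> WriteBack.from_configuration(check=False, diff=True, color=True) is WriteBack.COLOR_DIFF
#     True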
# Legacy name, left for integrations.


def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
        value = find_pyproject_toml(
            ctx.params.get("src", ()), ctx.params.get("stdin_filename", None)
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"

    default_map: Dict[str, Any] = {}
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
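# Illustrative example (not part of the original source): a minimal pyproject.toml
# snippet that this callback would pick up and inject into Click's default_map.
# The section and keys mirror the command-line options defined below.
#
#     [tool.black]
#     line-length = 88
#     target-version = ["py38", "py39"]
#     skip-string-normalization = false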
def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    return [TargetVersion[val.upper()] for val in v]


def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)


    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
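# Illustrative sketch (not part of the original source): re_compile_maybe_verbose()
# switches to verbose mode only when the pattern spans multiple lines, so the
# commented, multi-line default include/exclude patterns still compile while plain
# one-line patterns behave as usual.
#
#     >>> bool(re_compile_maybe_verbose(r"\.pyi?$").search("module.py"))
#     True
#     >>> verbose_pattern = "(\n    \\.py   # Python source\n  | \\.pyi  # stub file\n)$\n"
#     >>> bool(re_compile_maybe_verbose(verbose_pattern).search("stub.pyi"))
#     True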
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. By default, Black"
        " will try to infer this from the project metadata in pyproject.toml. If this"
        " does not yield conclusive results, Black will use per-file auto-detection."
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--python-cell-magics",
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})."
        " Useful for formatting cells with custom python magics."
    "--skip-source-first-line",
    help="Skip the first line of the source code.",
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    type=click.IntRange(min=1),
        "Number of parallel workers [default: BLACK_NUM_WORKERS environment variable "
        "or number of CPUs in the system]"
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
def main(  # noqa: C901
    target_version: List[TargetVersion],
    python_cell_magics: Sequence[str],
    skip_source_first_line: bool,
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    workers: Optional[int],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    if src and code is not None:
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."

    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")

        find_project_root(src, stdin_filename) if code is None else (None, None)
    ctx.obj["root"] = root

                f"Identified `{root}` as project root containing a {method}.",
                else (normalize_path_maybe_ignore(Path(source), root), source)
            srcs_string = ", ".join(
                    else f'\033[31m"{source} (skipping - invalid)"\033[34m'
                    for _norm, source in normalized
            out(f"Sources to be formatted: {srcs_string}", fg="blue")

            config_source = ctx.get_parameter_source("config")
            user_level_config = str(find_user_pyproject_toml())
            if config == user_level_config:
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'.",
            elif config_source in (
                ParameterSource.DEFAULT,
                ParameterSource.DEFAULT_MAP,
                out("Using configuration from project root.", fg="blue")
                out(f"Using configuration in '{config}'.", fg="blue")
                for param, value in ctx.default_map.items():
                    out(f"{param}: {value}")

    error_msg = "Oh no! 💥 💔 💥"
        and required_version != __version__
        and required_version != __version__.split(".")[0]
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        err("Cannot pass both `pyi` and `ipynb` flags!")

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
        versions = set(target_version)
        # We'll autodetect later.
        target_versions=versions,
        line_length=line_length,
        skip_source_first_line=skip_source_first_line,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        python_cell_magics=set(python_cell_magics),

        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.

    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)

            content=code, fast=fast, write_back=write_back, mode=mode, report=report
            sources = get_sources(
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                stdin_filename=stdin_filename,
        except GitWildMatchPatternError:
            "No Python files are present to be formatted. Nothing to do 😴",
        if len(sources) == 1:
                write_back=write_back,
            from black.concurrency import reformat_many
                write_back=write_back,

    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)
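# Illustrative usage (not part of the original source): the options wired up above
# translate into invocations like the following. --check/--diff report without
# writing anything back; per the help text, exit code 1 signals files that would be
# reformatted and 123 signals an internal error.
#
#     black src/ tests/
#     black --check --diff --color src/
#     black --target-version py38 --line-length 100 src/
#     echo 'x=1' | black -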
    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()
    root = ctx.obj["root"]

    using_default_exclude = exclude is None
    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
    gitignore: Optional[Dict[Path, PathSpec]] = None
    root_gitignore = get_gitignore(root)

        if s == "-" and stdin_filename:
            p = Path(stdin_filename)

        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:

            normalized_path = "/" + normalized_path
            # Hard-exclude any files that match the `--force-exclude` regex.
                force_exclude_match = force_exclude.search(normalized_path)
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")

                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")

            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            p = root / normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if using_default_exclude:
                    root: root_gitignore,
            err(f"invalid path: {s}")


def path_empty(
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    Exit if there is no `src` provided for formatting
        if verbose or not quiet:


def reformat_code(
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    path = Path("<string>")
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(path, str(exc))


# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
        elif str(src).startswith(STDIN_PLACEHOLDER):
            # Use the original name again in case we want to print something
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])

            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(src, str(exc))
def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.fromtimestamp(src.stat().st_mtime, timezone.utc)
    with open(src, "rb") as buf:
        if mode.skip_source_first_line:
            header = buf.readline()
        src_contents, encoding, newline = decode_bytes(buf.read())
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
    except JSONDecodeError:
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
    src_contents = header.decode(encoding) + src_contents
    dst_contents = header.decode(encoding) + dst_contents

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.now(timezone.utc)
        src_name = f"{src}\t{then}"
        dst_name = f"{src}\t{now}"
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)


def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    then = datetime.now(timezone.utc)

        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
        src, encoding, newline = content, "utf-8", ""

        dst = format_file_contents(src, fast=fast, mode=mode)

    except NothingChanged:

        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.now(timezone.utc)
            src_name = f"STDIN\t{then}"
            dst_name = f"STDOUT\t{now}"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                f = wrap_stream_for_windows(f)


def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)


def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
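# Illustrative sketch (not part of the original source) of the round trip this
# function performs:
#
#     >>> format_file_contents("x=1\n", fast=False, mode=Mode())
#     'x = 1\n'
#
# and it raises NothingChanged when the input is already formatted:
#
#     >>> format_file_contents("x = 1\n", fast=False, mode=Mode())  # raises NothingChanged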
def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics


def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    dst = dst.rstrip("\n")
        raise NothingChanged from None


def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None
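# Illustrative sketch (not part of the original source): only the language name is
# inspected, so a missing or empty "metadata" block is still formatted.
#
#     >>> validate_metadata({"metadata": {"language_info": {"name": "python"}}})  # formatted
#     >>> validate_metadata({})                                                   # also formatted
#     >>> validate_metadata({"metadata": {"language_info": {"name": "R"}}})       # raises NothingChanged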
def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    if not src_contents:
        raise NothingChanged

    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                cell["source"] = dst.splitlines(keepends=True)
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        raise NothingChanged


def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are

    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:

    A more complex example:

    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       string_normalization=False,

    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)


def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_blocks: List[LinesBlock] = []
    if mode.target_versions:
        versions = mode.target_versions
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    context_manager_features = {
        for feature in {Feature.PARENTHESIZED_CONTEXT_MANAGERS}
        if supports_feature(versions, feature)
    normalize_fmt_off(src_node)
    lines = LineGenerator(mode=mode, features=context_manager_features)
    elt = EmptyLineTracker(mode=mode)
    split_line_features = {
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    block: Optional[LinesBlock] = None
    for current_line in lines.visit(src_node):
        block = elt.maybe_empty_lines(current_line)
        dst_blocks.append(block)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
            block.content_lines.append(str(line))
        dst_blocks[-1].after = 0
    for block in dst_blocks:
        dst_contents.extend(block.all_lines())
    if not dst_contents:
        # Use decode_bytes to retrieve the correct source newline (CRLF or LF),
        # and check if normalized_content has more than one line
        normalized_content, _, newline = decode_bytes(src_contents.encode("utf-8"))
        if "\n" in normalized_content:
    return "".join(dst_contents)


def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
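# Illustrative sketch (not part of the original source): CRLF input is reported via
# the `newline` element while the decoded text itself uses universal newlines, which
# is what lets format_file_in_place write the result back with the original line
# endings.
#
#     >>> decode_bytes(b"x = 1\r\ny = 2\r\n")
#     ('x = 1\ny = 2\n', 'utf-8', '\r\n')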
def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    - parenthesized context managers;
    - variadic generics;
    features: Set[Feature] = set()
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
                if Feature.DEBUG_F_STRINGS not in features:
                    for span_beg, span_end in iter_fexpr_spans(n.value):
                        if n.value[span_beg : span_end - 1].rstrip().endswith("="):
                            features.add(Feature.DEBUG_F_STRINGS)

        elif is_number_token(n):
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)

            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
            features.add(Feature.UNPACKING_ON_FLOW)

            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

            n.type == syms.with_stmt
            and len(n.children) > 2
            and n.children[1].type == syms.atom
            atom_children = n.children[1].children
                len(atom_children) == 3
                and atom_children[0].type == token.LPAR
                and atom_children[1].type == syms.testlist_gexp
                and atom_children[2].type == token.RPAR
                features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS)

        elif n.type == syms.match_stmt:
            features.add(Feature.PATTERN_MATCHING)

            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
            features.add(Feature.EXCEPT_STAR)

        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
            features.add(Feature.VARIADIC_GENERICS)

            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
            features.add(Feature.VARIADIC_GENERICS)

        elif n.type in (syms.type_stmt, syms.typeparams):
            features.add(Feature.TYPE_PARAMS)


def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
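# Illustrative sketch (not part of the original source): the detected feature set
# shrinks the candidate targets. For example, a walrus operator (token.COLONEQUAL
# above) adds Feature.ASSIGNMENT_EXPRESSIONS, which rules out 3.7 and older targets:
#
#     >>> node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
#     >>> Feature.ASSIGNMENT_EXPRESSIONS in get_features_used(node)
#     True
#     >>> TargetVersion.PY37 in detect_target_versions(node)
#     False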
def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE

        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":

            imports |= set(get_imports_from_children(first_child.children[3:]))
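# Illustrative sketch (not part of the original source): only leading simple
# statements (after an optional docstring) are scanned, so a __future__ import that
# follows other code is ignored.
#
#     >>> get_future_imports(lib2to3_parse('"""doc"""\nfrom __future__ import annotations\n'))
#     {'annotations'}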
def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."

        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"


def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    newdst = _format_str_once(dst, mode=mode)
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"


def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.


def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    In certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default, which restricts the paths it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In the case of Black, the likelihood that non-ASCII characters are going to be
    used in file paths is minimal since it's Python source code. Moreover, this crash
    was spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    modules: List[Any] = []
        from click import core
        modules.append(core)
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
        modules.append(_unicodefun)

    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None


def patched_main() -> None:
    # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
    # environments, so just assume we always need to call it if frozen.
    if getattr(sys, "frozen", False):
        from multiprocessing import freeze_support


if __name__ == "__main__":