from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime
from json.decoder import JSONDecodeError
from pathlib import Path
from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec import PathSpec
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
from _black_version import version as __version__
from black.cache import Cache, get_cache_info, read_cache, write_cache
from black.comments import normalize_fmt_off
from black.const import (
from black.files import (
    find_user_pyproject_toml,
    normalize_path_maybe_ignore,
    wrap_stream_for_windows,
from black.handle_ipynb_magics import (
    jupyter_dependencies_are_installed,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, LinesBlock
from black.mode import (
    FUTURE_FLAG_TO_FEATURE,
from black.nodes import (
    is_simple_decorator_expression,
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.report import Changed, NothingChanged, Report
from black.trans import iter_fexpr_spans
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node
COMPILED = Path(__file__).suffix in (".pyd", ".so")


class WriteBack(Enum):
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
        if check and not diff:
            return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES
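
# Illustrative usage sketch (not part of the original file): how the CLI flags map
# onto WriteBack members, assuming the elided branches above return cls.CHECK for
# --check and reach COLOR_DIFF only when both --diff and --color are given.
#
#     WriteBack.from_configuration(check=True, diff=False)   # -> WriteBack.CHECK
#     WriteBack.from_configuration(check=False, diff=True)   # -> WriteBack.DIFF
#     WriteBack.from_configuration(check=False, diff=False)  # -> WriteBack.YES
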
# Legacy name, left for integrations.


def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
    Returns the path to a successfully found and read configuration file, None
        value = find_pyproject_toml(ctx.params.get("src", ()))
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()
    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
    default_map: Dict[str, Any] = {}
        default_map.update(ctx.default_map)
    default_map.update(config)
    ctx.default_map = default_map
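
# Illustrative sketch (not part of the original file): given a pyproject.toml with
#
#     [tool.black]
#     line-length = 100
#     target-version = ["py38", "py39"]
#
# parse_pyproject_toml() is expected to yield keys with underscores, and the
# sanitizing comprehension above stringifies scalars only, so ctx.default_map ends
# up roughly as {"line_length": "100", "target_version": ["py38", "py39"]}.
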
def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.
    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    return [TargetVersion[val.upper()] for val in v]


def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.
    If it contains newlines, use verbose mode.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
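
# Illustrative usage sketch (not part of the original file): a multi-line value is
# compiled in verbose mode, so whitespace and "#" comments inside it are ignored.
#
#     pattern = re_compile_maybe_verbose("foo/  # generated\n|bar/")
#     assert pattern.match("foo/") and pattern.match("bar/")
#
# A single-line value such as "foo/|bar/" is compiled as a plain pattern.
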
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. By default, Black"
        " will try to infer this from the project metadata in pyproject.toml. If this"
        " does not yield conclusive results, Black will use per-file auto-detection."
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--python-cell-magics",
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})."
        " Useful for formatting cells with custom python magics."
    "--skip-source-first-line",
    help="Skip the first line of the source code.",
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    type=click.IntRange(min=1),
    help="Number of parallel workers [default: number of CPUs in the system]",
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
def main(  # noqa: C901
    target_version: List[TargetVersion],
    python_cell_magics: Sequence[str],
    skip_source_first_line: bool,
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    workers: Optional[int],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)
    if src and code is not None:
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
        find_project_root(src, stdin_filename) if code is None else (None, None)
    ctx.obj["root"] = root
            f"Identified `{root}` as project root containing a {method}.",
            else (normalize_path_maybe_ignore(Path(source), root), source)
        srcs_string = ", ".join(
                else f'\033[31m"{source} (skipping - invalid)"\033[34m'
                for _norm, source in normalized
        out(f"Sources to be formatted: {srcs_string}", fg="blue")
            config_source = ctx.get_parameter_source("config")
            user_level_config = str(find_user_pyproject_toml())
            if config == user_level_config:
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'.",
            elif config_source in (
                ParameterSource.DEFAULT,
                ParameterSource.DEFAULT_MAP,
                out("Using configuration from project root.", fg="blue")
                out(f"Using configuration in '{config}'.", fg="blue")
            for param, value in ctx.default_map.items():
                out(f"{param}: {value}")
    error_msg = "Oh no! 💥 💔 💥"
        and required_version != __version__
        and required_version != __version__.split(".")[0]
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        err("Cannot pass both `pyi` and `ipynb` flags!")
    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
        versions = set(target_version)
        # We'll autodetect later.
        target_versions=versions,
        line_length=line_length,
        skip_source_first_line=skip_source_first_line,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        python_cell_magics=set(python_cell_magics),
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
            content=code, fast=fast, write_back=write_back, mode=mode, report=report
            sources = get_sources(
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                stdin_filename=stdin_filename,
        except GitWildMatchPatternError:
            "No Python files are present to be formatted. Nothing to do 😴",
        if len(sources) == 1:
                write_back=write_back,
            from black.concurrency import reformat_many
                write_back=write_back,
    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        click.echo(str(report), err=True)
    ctx.exit(report.return_code)
    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()
    root = ctx.obj["root"]
    using_default_exclude = exclude is None
    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
    gitignore: Optional[Dict[Path, PathSpec]] = None
    root_gitignore = get_gitignore(root)
        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:
            normalized_path = "/" + normalized_path
            # Hard-exclude any files that matches the `--force-exclude` regex.
                force_exclude_match = force_exclude.search(normalized_path)
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            p = root / normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if using_default_exclude:
                root: root_gitignore,
            err(f"invalid path: {s}")
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    Exit if there is no `src` provided for formatting
        if verbose or not quiet:
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.
    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    path = Path("<string>")
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(path, str(exc))
# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat a single file under `src` without spawning child processes.
    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
        elif str(src).startswith(STDIN_PLACEHOLDER):
            # Use the original name again in case we want to print something
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(src, str(exc))
def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.
    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)
    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        if mode.skip_source_first_line:
            header = buf.readline()
        src_contents, encoding, newline = decode_bytes(buf.read())
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
    except JSONDecodeError:
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
    src_contents = header.decode(encoding) + src_contents
    dst_contents = header.decode(encoding) + dst_contents
    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)
        with lock or nullcontext():
            f = io.TextIOWrapper(
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
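
# Illustrative usage sketch (not part of the original file; "example.py" is a
# hypothetical path): reformat one file on disk and write the result back.
#
#     changed = format_file_in_place(
#         Path("example.py"), fast=False, mode=Mode(), write_back=WriteBack.YES
#     )
#     # `changed` is True when the file was rewritten, False when nothing changed.
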
def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.
    If content is None, it's read from sys.stdin.
    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    then = datetime.utcnow()
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
        src, encoding, newline = content, "utf-8", ""
        dst = format_file_contents(src, fast=fast, mode=mode)
    except NothingChanged:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                f = wrap_stream_for_windows(f)
def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
    """Perform stability and equivalence checks.
    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)


def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.
    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
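
# Illustrative usage sketch (not part of the original file): format a source string
# and fall back to the original when Black would not change it.
#
#     try:
#         formatted = format_file_contents("x=1\n", fast=False, mode=Mode())
#     except NothingChanged:
#         formatted = "x=1\n"
#     # formatted == "x = 1\n"
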
def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of
    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:
        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"
    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics


def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.
      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.
    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    dst = dst.rstrip("\n")
        raise NothingChanged from None
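
# Illustrative usage sketch (not part of the original file): the cell magic is
# masked before formatting and reinstated afterwards, and a trailing semicolon is
# preserved; an already-formatted cell raises NothingChanged.
#
#     format_cell("%%time\nx =1;", fast=True, mode=Mode())  # -> "%%time\nx = 1;"
#     format_cell("x = 1", fast=True, mode=Mode())          # raises NothingChanged
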
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.
    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None


def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.
    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    if not src_contents:
        raise NothingChanged
    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                cell["source"] = dst.splitlines(keepends=True)
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        raise NothingChanged
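
# Illustrative usage sketch (not part of the original file): only code cells are
# reformatted, and a notebook with no changes raises NothingChanged.
#
#     nb_json = (
#         '{"cells": [{"cell_type": "code", "source": ["x=1"]}],'
#         ' "metadata": {}, "nbformat": 4, "nbformat_minor": 5}'
#     )
#     format_ipynb_string(nb_json, fast=True, mode=Mode())
#     # -> the same JSON with "source": ["x = 1"]; no trailing newline is added
#     #    because the input had none.
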
def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.
    `mode` determines formatting options, such as how many characters per line are
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
    A more complex example:
    ...     black.format_str(
    ...         "def f(arg:str='')->None: hey",
    ...         mode=black.Mode(
    ...             target_versions={black.TargetVersion.PY36},
    ...             string_normalization=False,
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)


def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_blocks: List[LinesBlock] = []
    if mode.target_versions:
        versions = mode.target_versions
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)
    context_manager_features = {
        for feature in {Feature.PARENTHESIZED_CONTEXT_MANAGERS}
        if supports_feature(versions, feature)
    normalize_fmt_off(src_node)
    lines = LineGenerator(mode=mode, features=context_manager_features)
    elt = EmptyLineTracker(mode=mode)
    split_line_features = {
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    block: Optional[LinesBlock] = None
    for current_line in lines.visit(src_node):
        block = elt.maybe_empty_lines(current_line)
        dst_blocks.append(block)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
            block.content_lines.append(str(line))
        dst_blocks[-1].after = 0
    for block in dst_blocks:
        dst_contents.extend(block.all_lines())
    if not dst_contents:
        # Use decode_bytes to retrieve the correct source newline (CRLF or LF),
        # and check if normalized_content has more than one line
        normalized_content, _, newline = decode_bytes(src_contents.encode("utf-8"))
        if "\n" in normalized_content:
    return "".join(dst_contents)
def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).
    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
        return "", encoding, "\n"
    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
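
# Illustrative usage sketch (not part of the original file): the newline style is
# detected from the first line, while the decoded contents always use plain LF.
#
#     contents, encoding, newline = decode_bytes(b"x = 1\r\ny = 2\r\n")
#     # contents == "x = 1\ny = 2\n", encoding == "utf-8", newline == "\r\n"
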
def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
    """Return a set of (relatively) new Python features used in this file.
    Currently looking for:
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    - parenthesized context managers;
    - variadic generics;
    features: Set[Feature] = set()
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
                if Feature.DEBUG_F_STRINGS not in features:
                    for span_beg, span_end in iter_fexpr_spans(n.value):
                        if n.value[span_beg : span_end - 1].rstrip().endswith("="):
                            features.add(Feature.DEBUG_F_STRINGS)
        elif is_number_token(n):
                features.add(Feature.NUMERIC_UNDERSCORES)
        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                features.add(Feature.POS_ONLY_ARGUMENTS)
        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)
        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)
            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
                feature = Feature.TRAILING_COMMA_IN_CALL
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)
                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
            features.add(Feature.UNPACKING_ON_FLOW)
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
            n.type == syms.with_stmt
            and len(n.children) > 2
            and n.children[1].type == syms.atom
            atom_children = n.children[1].children
                len(atom_children) == 3
                and atom_children[0].type == token.LPAR
                and atom_children[1].type == syms.testlist_gexp
                and atom_children[2].type == token.RPAR
                features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS)
        elif n.type == syms.match_stmt:
            features.add(Feature.PATTERN_MATCHING)
            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
            features.add(Feature.EXCEPT_STAR)
        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
            features.add(Feature.VARIADIC_GENERICS)
            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
            features.add(Feature.VARIADIC_GENERICS)
        elif n.type in (syms.type_stmt, syms.typeparams):
            features.add(Feature.TYPE_PARAMS)
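
# Illustrative usage sketch (not part of the original file): feature detection is
# what drives per-file target-version inference when --target-version is omitted.
#
#     node = lib2to3_parse("if (n := 10) > 5:\n    print(f'{n=}')\n")
#     get_features_used(node)
#     # -> {Feature.ASSIGNMENT_EXPRESSIONS, Feature.F_STRINGS, Feature.DEBUG_F_STRINGS}
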
def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]


def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()
    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
                raise AssertionError("Invalid syntax parsing imports")
    for child in node.children:
        if child.type != syms.simple_stmt:
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
            imports |= set(get_imports_from_children(first_child.children[3:]))
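
# Illustrative usage sketch (not part of the original file): __future__ imports are
# only collected from the top of the module, before any non-docstring statement.
#
#     node = lib2to3_parse('"""doc"""\nfrom __future__ import annotations\n')
#     get_future_imports(node)  # -> {"annotations"}
#     detect_target_versions(node, future_imports={"annotations"})
#     # -> every TargetVersion whose feature set covers the features used above
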
def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"


def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    newdst = _format_str_once(dst, mode=mode)
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
def nullcontext() -> Iterator[None]:
    """Return an empty context manager.
    To be used like `nullcontext` in Python 3.7.


def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.
    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.
    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    modules: List[Any] = []
        from click import core
        modules.append(core)
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
        modules.append(_unicodefun)
    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None


def patched_main() -> None:
    # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
    # environments so just assume we always need to call it if frozen.
    if getattr(sys, "frozen", False):
        from multiprocessing import freeze_support


if __name__ == "__main__":