All patches and comments are welcome. Please squash your changes into logical
commits before using git-format-patch and git-send-email to send them to
patches@git.madduck.net.
If you would read over the Git project's submission guidelines and adhere to
them, I'd be especially grateful.
8 from contextlib import contextmanager
9 from dataclasses import replace
10 from datetime import datetime
12 from json.decoder import JSONDecodeError
13 from pathlib import Path
31 from click.core import ParameterSource
32 from mypy_extensions import mypyc_attr
33 from pathspec import PathSpec
34 from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
36 from _black_version import version as __version__
37 from black.cache import Cache, get_cache_info, read_cache, write_cache
38 from black.comments import normalize_fmt_off
39 from black.const import (
45 from black.files import (
48 find_user_pyproject_toml,
51 normalize_path_maybe_ignore,
53 wrap_stream_for_windows,
55 from black.handle_ipynb_magics import (
58 jupyter_dependencies_are_installed,
60 put_trailing_semicolon_back,
61 remove_trailing_semicolon,
64 from black.linegen import LN, LineGenerator, transform_line
65 from black.lines import EmptyLineTracker, LinesBlock
66 from black.mode import (
67 FUTURE_FLAG_TO_FEATURE,
74 from black.nodes import (
77 is_simple_decorator_expression,
81 from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
82 from black.parsing import InvalidInput # noqa F401
83 from black.parsing import lib2to3_parse, parse_ast, stringify_ast
84 from black.report import Changed, NothingChanged, Report
85 from black.trans import iter_fexpr_spans
86 from blib2to3.pgen2 import token
87 from blib2to3.pytree import Leaf, Node
# True when this module is running as a compiled extension (mypyc produces
# .so on POSIX / .pyd on Windows); reported in the --version output below.
89 COMPILED = Path(__file__).suffix in (".pyd", ".so")
# Enum describing what to do with formatting results. NOTE(review): this
# listing is elided; the enum members themselves are not visible here.
97 class WriteBack(Enum):
# Classmethod mapping the --check / --diff / --color CLI flags onto a member.
105 def from_configuration(
106 cls, *, check: bool, diff: bool, color: bool = False
# --check without --diff: report-only mode (presumably returns cls.CHECK on
# the elided line below — TODO confirm against upstream).
108 if check and not diff:
# Colored diff wins when both diff and color were requested.
112 return cls.COLOR_DIFF
# Otherwise: plain diff when --diff was given, else write files in place.
114 return cls.DIFF if diff else cls.YES
117 # Legacy name, left for integrations.
# Click callback: locate and parse pyproject.toml, then merge its [tool.black]
# values into ctx.default_map so file configuration acts as CLI defaults.
# NOTE(review): this listing is elided; several interior lines are missing.
121 def read_pyproject_toml(
122 ctx: click.Context, param: click.Parameter, value: Optional[str]
124 """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
126 Returns the path to a successfully found and read configuration file, None
# No explicit --config given: discover pyproject.toml from the src paths.
130 value = find_pyproject_toml(ctx.params.get("src", ()))
135 config = parse_pyproject_toml(value)
# Surface read/parse failures as a Click-friendly FileError.
136 except (OSError, ValueError) as e:
137 raise click.FileError(
138 filename=value, hint=f"Error reading configuration file: {e}"
144 # Sanitize the values to be Click friendly. For more information please see:
145 # https://github.com/psf/black/issues/1458
146 # https://github.com/pallets/click/issues/1567
# Scalars are stringified for Click; lists/dicts pass through unchanged.
148 k: str(v) if not isinstance(v, (list, dict)) else v
149 for k, v in config.items()
# target-version must be a list in the config file, not a bare string.
152 target_version = config.get("target_version")
153 if target_version is not None and not isinstance(target_version, list):
154 raise click.BadOptionUsage(
155 "target-version", "Config key target-version must be a list"
# Merge config over any pre-existing default_map and install it on the ctx.
158 default_map: Dict[str, Any] = {}
160 default_map.update(ctx.default_map)
161 default_map.update(config)
163 ctx.default_map = default_map
def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    versions: List[TargetVersion] = []
    for name in v:
        # Flag values are lower-case on the CLI; enum members are upper-case.
        versions.append(TargetVersion[name.upper()])
    return versions
def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    if "\n" in regex:
        # Multi-line patterns are assumed to rely on whitespace/comments for
        # readability, which requires verbose mode ("(?x)") to compile.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    return compiled
def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
    """Click callback: compile `value` as a regex, passing None through.

    Raises click.BadParameter with a readable message when the pattern is
    invalid; the original re.error context is suppressed deliberately.
    """
    try:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
# Click command definition for the `black` CLI entry point, followed by the
# main() implementation. NOTE(review): this listing is heavily elided; many
# @click.option decorators and statements are missing interior lines.
201 context_settings={"help_option_names": ["-h", "--help"]},
202 # While Click does set this field automatically using the docstring, mypyc
203 # (annoyingly) strips 'em so we need to set it here too.
204 help="The uncompromising code formatter.",
206 @click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
211 default=DEFAULT_LINE_LENGTH,
212 help="How many characters per line to allow.",
218 type=click.Choice([v.name.lower() for v in TargetVersion]),
219 callback=target_version_option_callback,
222 "Python versions that should be supported by Black's output. By default, Black"
223 " will try to infer this from the project metadata in pyproject.toml. If this"
224 " does not yield conclusive results, Black will use per-file auto-detection."
231 "Format all input files like typing stubs regardless of file extension (useful"
232 " when piping source on standard input)."
239 "Format all input files like Jupyter Notebooks regardless of file extension "
240 "(useful when piping source on standard input)."
244 "--python-cell-magics",
247 "When processing Jupyter Notebooks, add the given magic to the list"
248 f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})."
249 " Useful for formatting cells with custom python magics."
255 "--skip-source-first-line",
257 help="Skip the first line of the source code.",
261 "--skip-string-normalization",
263 help="Don't normalize string quotes or prefixes.",
267 "--skip-magic-trailing-comma",
269 help="Don't use trailing commas as a reason to split lines.",
272 "--experimental-string-processing",
275 help="(DEPRECATED and now included in --preview) Normalize string literals.",
281 "Enable potentially disruptive style changes that may be added to Black's main"
282 " functionality in the next major release."
289 "Don't write the files back, just return the status. Return code 0 means"
290 " nothing would change. Return code 1 means some files would be reformatted."
291 " Return code 123 means there was an internal error."
297 help="Don't write the files back, just output a diff for each file on stdout.",
300 "--color/--no-color",
302 help="Show colored diff. Only applies when `--diff` is given.",
307 help="If --fast given, skip temporary sanity checks. [default: --safe]",
310 "--required-version",
313 "Require a specific version of Black to be running (useful for unifying results"
314 " across many environments e.g. with a pyproject.toml file). It can be"
315 " either a major version number or an exact version."
321 default=DEFAULT_INCLUDES,
322 callback=validate_regex,
324 "A regular expression that matches files and directories that should be"
325 " included on recursive searches. An empty value means all files are included"
326 " regardless of the name. Use forward slashes for directories on all platforms"
327 " (Windows, too). Exclusions are calculated first, inclusions later."
334 callback=validate_regex,
336 "A regular expression that matches files and directories that should be"
337 " excluded on recursive searches. An empty value means no paths are excluded."
338 " Use forward slashes for directories on all platforms (Windows, too)."
339 " Exclusions are calculated first, inclusions later. [default:"
340 f" {DEFAULT_EXCLUDES}]"
347 callback=validate_regex,
349 "Like --exclude, but adds additional files and directories on top of the"
350 " excluded ones. (Useful if you simply want to add to the default)"
356 callback=validate_regex,
358 "Like --exclude, but files and directories matching this regex will be "
359 "excluded even when they are passed explicitly as arguments."
366 "The name of the file when passing it through stdin. Useful to make "
367 "sure Black will respect --force-exclude option on some "
368 "editors that rely on using stdin."
374 type=click.IntRange(min=1),
376 help="Number of parallel workers [default: number of CPUs in the system]",
383 "Don't emit non-error messages to stderr. Errors are still emitted; silence"
384 " those with 2>/dev/null."
392 "Also emit messages to stderr about files that were not changed or were ignored"
393 " due to exclusion patterns."
396 @click.version_option(
# Version banner reports whether this is the mypyc-compiled build (COMPILED).
399 f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
400 f"Python ({platform.python_implementation()}) {platform.python_version()}"
407 exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
423 callback=read_pyproject_toml,
424 help="Read configuration from FILE path.",
# Entry point proper. The noqa acknowledges the function's high complexity.
427 def main( # noqa: C901
431 target_version: List[TargetVersion],
438 python_cell_magics: Sequence[str],
439 skip_source_first_line: bool,
440 skip_string_normalization: bool,
441 skip_magic_trailing_comma: bool,
442 experimental_string_processing: bool,
446 required_version: Optional[str],
447 include: Pattern[str],
448 exclude: Optional[Pattern[str]],
449 extend_exclude: Optional[Pattern[str]],
450 force_exclude: Optional[Pattern[str]],
451 stdin_filename: Optional[str],
452 workers: Optional[int],
453 src: Tuple[str, ...],
454 config: Optional[str],
456 """The uncompromising code formatter."""
457 ctx.ensure_object(dict)
# SRC paths and -c/--code are mutually exclusive inputs.
459 if src and code is not None:
462 + "\n\n'SRC' and 'code' cannot be passed simultaneously."
465 if not src and code is None:
466 out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
# Project root discovery is skipped entirely in -c/--code mode.
470 find_project_root(src, stdin_filename) if code is None else (None, None)
472 ctx.obj["root"] = root
477 f"Identified `{root}` as project root containing a {method}.",
485 else (normalize_path_maybe_ignore(Path(source), root), source)
489 srcs_string = ", ".join(
# ANSI escapes: red for skipped-invalid sources, back to blue for the rest.
494 else f'\033[31m"{source} (skipping - invalid)"\033[34m'
496 for _norm, source in normalized
499 out(f"Sources to be formatted: {srcs_string}", fg="blue")
# Verbose reporting of which configuration file was actually used.
502 config_source = ctx.get_parameter_source("config")
503 user_level_config = str(find_user_pyproject_toml())
504 if config == user_level_config:
507 "Using configuration from user-level config at "
508 f"'{user_level_config}'."
512 elif config_source in (
513 ParameterSource.DEFAULT,
514 ParameterSource.DEFAULT_MAP,
516 out("Using configuration from project root.", fg="blue")
518 out(f"Using configuration in '{config}'.", fg="blue")
520 for param, value in ctx.default_map.items():
521 out(f"{param}: {value}")
523 error_msg = "Oh no! 💥 💔 💥"
# --required-version accepts either an exact version or a bare major version.
526 and required_version != __version__
527 and required_version != __version__.split(".")[0]
530 f"{error_msg} The required version `{required_version}` does not match"
531 f" the running version `{__version__}`!"
535 err("Cannot pass both `pyi` and `ipynb` flags!")
538 write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
540 versions = set(target_version)
542 # We'll autodetect later.
# Build the immutable Mode object that drives all formatting decisions.
545 target_versions=versions,
546 line_length=line_length,
549 skip_source_first_line=skip_source_first_line,
550 string_normalization=not skip_string_normalization,
551 magic_trailing_comma=not skip_magic_trailing_comma,
552 experimental_string_processing=experimental_string_processing,
554 python_cell_magics=set(python_cell_magics),
558 # Run in quiet mode by default with -c; the extra output isn't useful.
559 # You can still pass -v to get verbose output.
562 report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
566 content=code, fast=fast, write_back=write_back, mode=mode, report=report
570 sources = get_sources(
577 extend_exclude=extend_exclude,
578 force_exclude=force_exclude,
580 stdin_filename=stdin_filename,
582 except GitWildMatchPatternError:
587 "No Python files are present to be formatted. Nothing to do 😴",
# Single file: format in-process; multiple files: delegate to the
# multiprocessing implementation (imported lazily to keep startup fast).
593 if len(sources) == 1:
597 write_back=write_back,
602 from black.concurrency import reformat_many
607 write_back=write_back,
613 if verbose or not quiet:
614 if code is None and (verbose or report.change_count or report.failure_count):
616 out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
618 click.echo(str(report), err=True)
619 ctx.exit(report.return_code)
# get_sources: expand the user-supplied paths into the concrete set of files
# to format, applying include/exclude/force-exclude and gitignore filtering.
# NOTE(review): this listing is elided; the def line and several statements
# (including the recursive-directory branch) are missing.
625 src: Tuple[str, ...],
628 include: Pattern[str],
629 exclude: Optional[Pattern[str]],
630 extend_exclude: Optional[Pattern[str]],
631 force_exclude: Optional[Pattern[str]],
633 stdin_filename: Optional[str],
635 """Compute the set of files to be formatted."""
636 sources: Set[Path] = set()
637 root = ctx.obj["root"]
# Remember whether the user supplied --exclude; only the default exclude
# regex is combined with .gitignore handling below.
639 using_default_exclude = exclude is None
640 exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
641 gitignore: Optional[Dict[Path, PathSpec]] = None
642 root_gitignore = get_gitignore(root)
# "-" with --stdin-filename: pretend stdin is the named file for filtering.
645 if s == "-" and stdin_filename:
646 p = Path(stdin_filename)
652 if is_stdin or p.is_file():
653 normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
654 if normalized_path is None:
# Regexes below match against root-relative, "/"-prefixed POSIX paths.
657 normalized_path = "/" + normalized_path
658 # Hard-exclude any file that matches the `--force-exclude` regex.
660 force_exclude_match = force_exclude.search(normalized_path)
662 force_exclude_match = None
663 if force_exclude_match and force_exclude_match.group(0):
664 report.path_ignored(p, "matches the --force-exclude regular expression")
# Re-tag the stdin pseudo-path so downstream code recognizes it.
668 p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
670 if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
671 verbose=verbose, quiet=quiet
677 p = root / normalize_path_maybe_ignore(p, ctx.obj["root"], report)
678 if using_default_exclude:
# Lazily-built mapping of directory -> parsed .gitignore spec.
680 root: root_gitignore,
700 err(f"invalid path: {s}")
# path_empty: exit early (printing `msg` unless quiet) when no sources were
# collected. NOTE(review): the def line and exit call are elided here.
705 src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
708 Exit if there is no `src` provided for formatting
711 if verbose or not quiet:
# reformat_code: format the -c/--code string in-process and record the
# outcome on `report`. NOTE(review): the def line and some statements
# (e.g. `changed = Changed.NO`, the try) are elided from this listing.
717 content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
720 Reformat and print out `content` without spawning child processes.
721 Similar to `reformat_one`, but for string content.
723 `fast`, `write_back`, and `mode` options are passed to
724 :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
# Fixed pseudo-path used for reporting on string input.
726 path = Path("<string>")
729 if format_stdin_to_stdout(
730 content=content, fast=fast, write_back=write_back, mode=mode
732 changed = Changed.YES
733 report.done(path, changed)
# Any failure is reported (not raised) so the CLI can aggregate results.
734 except Exception as exc:
736 traceback.print_exc()
737 report.failed(path, str(exc))
740 # diff-shades depends on being able to monkeypatch this function to operate. I
741 # know it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
742 @mypyc_attr(patchable=True)
# reformat_one: format a single path (or stdin) in-process, consulting the
# cache, and record the outcome on `report`. NOTE(review): this listing is
# elided; the def line and several branches are missing.
744 src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
746 """Reformat a single file under `src` without spawning child processes.
748 `fast`, `write_back`, and `mode` options are passed to
749 :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
# Paths tagged with STDIN_PLACEHOLDER represent stdin under a display name.
756 elif str(src).startswith(STDIN_PLACEHOLDER):
758 # Use the original name again in case we want to print something
760 src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
# Infer stub/notebook mode from the (displayed) file extension.
765 if src.suffix == ".pyi":
766 mode = replace(mode, is_pyi=True)
767 elif src.suffix == ".ipynb":
768 mode = replace(mode, is_ipynb=True)
769 if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
770 changed = Changed.YES
# On-disk files: consult the mtime/size cache unless we are only diffing.
773 if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
774 cache = read_cache(mode)
775 res_src = src.resolve()
776 res_src_s = str(res_src)
777 if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
778 changed = Changed.CACHED
779 if changed is not Changed.CACHED and format_file_in_place(
780 src, fast=fast, write_back=write_back, mode=mode
782 changed = Changed.YES
# Record freshly-written or verified-clean files back into the cache.
783 if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
784 write_back is WriteBack.CHECK and changed is Changed.NO
786 write_cache(cache, [src], mode)
787 report.done(src, changed)
788 except Exception as exc:
790 traceback.print_exc()
791 report.failed(src, str(exc))
# NOTE(review): this listing is elided; parts of the signature, the try/except
# structure, and the return statements are missing.
794 def format_file_in_place(
798 write_back: WriteBack = WriteBack.NO,
799 lock: Any = None, # multiprocessing.Manager().Lock() is some crazy proxy
801 """Format file under `src` path. Return True if changed.
803 If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
805 `mode` and `fast` options are passed to :func:`format_file_contents`.
807 if src.suffix == ".pyi":
808 mode = replace(mode, is_pyi=True)
809 elif src.suffix == ".ipynb":
810 mode = replace(mode, is_ipynb=True)
# File mtime, used as the "+0000" timestamp on the diff's source side.
812 then = datetime.utcfromtimestamp(src.stat().st_mtime)
814 with open(src, "rb") as buf:
# --skip-source-first-line: hold the first line aside and reattach it later.
815 if mode.skip_source_first_line:
816 header = buf.readline()
817 src_contents, encoding, newline = decode_bytes(buf.read())
819 dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
820 except NothingChanged:
822 except JSONDecodeError:
824 f"File '{src}' cannot be parsed as valid Jupyter notebook."
# Reattach the skipped first line to both sides before writing/diffing.
826 src_contents = header.decode(encoding) + src_contents
827 dst_contents = header.decode(encoding) + dst_contents
829 if write_back == WriteBack.YES:
# Preserve the file's original encoding and newline style when writing.
830 with open(src, "w", encoding=encoding, newline=newline) as f:
831 f.write(dst_contents)
832 elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
833 now = datetime.utcnow()
834 src_name = f"{src}\t{then} +0000"
835 dst_name = f"{src}\t{now} +0000"
837 diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
839 diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
841 if write_back == WriteBack.COLOR_DIFF:
842 diff_contents = color_diff(diff_contents)
# Serialize diff output across worker processes when a lock is provided.
844 with lock or nullcontext():
845 f = io.TextIOWrapper(
851 f = wrap_stream_for_windows(f)
852 f.write(diff_contents)
# NOTE(review): this listing is elided; parts of the signature, try/finally
# structure, and return statements are missing.
858 def format_stdin_to_stdout(
861 content: Optional[str] = None,
862 write_back: WriteBack = WriteBack.NO,
865 """Format file on stdin. Return True if changed.
867 If content is None, it's read from sys.stdin.
869 If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
870 write a diff to stdout. The `mode` argument is passed to
871 :func:`format_file_contents`.
873 then = datetime.utcnow()
# Raw bytes from stdin carry encoding/newline info; -c strings do not.
876 src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
878 src, encoding, newline = content, "utf-8", ""
882 dst = format_file_contents(src, fast=fast, mode=mode)
885 except NothingChanged:
# Wrap stdout so output reuses the input's encoding and newline style.
889 f = io.TextIOWrapper(
890 sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
892 if write_back == WriteBack.YES:
893 # Make sure there's a newline after the content
894 if dst and dst[-1] != "\n":
897 elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
898 now = datetime.utcnow()
899 src_name = f"STDIN\t{then} +0000"
900 dst_name = f"STDOUT\t{now} +0000"
901 d = diff(src, dst, src_name, dst_name)
902 if write_back == WriteBack.COLOR_DIFF:
904 f = wrap_stream_for_windows(f)
def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)
def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.

    Raises NothingChanged when the input is already formatted.
    """
    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
    return dst_contents
# NOTE(review): this listing is elided; the docstring close, the raise
# statements, and the `%%` cell-magic condition's opening lines are missing.
942 def validate_cell(src: str, mode: Mode) -> None:
943 """Check that cell does not already contain TransformerManager transformations,
944 or non-Python cell magics, which might cause tokenizer_rt to break because of
947 If a cell contains ``!ls``, then it'll be transformed to
948 ``get_ipython().system('ls')``. However, if the cell originally contained
949 ``get_ipython().system('ls')``, then it would get transformed in the same way:
951 >>> TransformerManager().transform_cell("get_ipython().system('ls')")
952 "get_ipython().system('ls')\n"
953 >>> TransformerManager().transform_cell("!ls")
954 "get_ipython().system('ls')\n"
956 Due to the impossibility of safely roundtripping in such situations, cells
957 containing transformed magics will be ignored.
# Presumably raises NothingChanged on the elided lines — TODO confirm.
959 if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
# Cell magics other than the known python ones cannot be formatted safely.
963 and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
# NOTE(review): this listing is elided; try/except scaffolding and the final
# return are missing.
968 def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
969 """Format code in given cell of Jupyter notebook.
973 - if cell has trailing semicolon, remove it;
974 - if cell has IPython magics, mask them;
976 - reinstate IPython magics;
977 - reinstate trailing semicolon (if originally present);
978 - strip trailing newlines.
980 Cells with syntax errors will not be processed, as they
981 could potentially be automagics or multi-line magics, which
982 are currently not supported.
# Reject cells that cannot roundtrip safely before touching them.
984 validate_cell(src, mode)
985 src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
# Magics are replaced by placeholder tokens so format_str sees pure Python.
989 masked_src, replacements = mask_cell(src_without_trailing_semicolon)
991 raise NothingChanged from None
992 masked_dst = format_str(masked_src, mode=mode)
994 check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
# Undo the masking, then restore the cell's original surface details.
995 dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
996 dst = put_trailing_semicolon_back(
997 dst_without_trailing_semicolon, has_trailing_semicolon
999 dst = dst.rstrip("\n")
1001 raise NothingChanged from None
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    metadata = nb.get("metadata", {})
    language = metadata.get("language_info", {}).get("name", None)
    # Only an explicit non-"python" language declaration blocks formatting.
    if language is not None and language != "python":
        raise NothingChanged from None
# NOTE(review): this listing is elided; try/except around format_cell, the
# modified-tracking, and the final return are missing.
1017 def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
1018 """Format Jupyter notebook.
1020 Operate cell-by-cell, only on code cells, only for Python notebooks.
1021 If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
1023 if not src_contents:
1024 raise NothingChanged
# Remember whether to re-append a trailing newline after json.dumps.
1026 trailing_newline = src_contents[-1] == "\n"
1028 nb = json.loads(src_contents)
1029 validate_metadata(nb)
1030 for cell in nb["cells"]:
1031 if cell.get("cell_type", None) == "code":
# Cell source is stored as a list of lines; join before formatting.
1033 src = "".join(cell["source"])
1034 dst = format_cell(src, fast=fast, mode=mode)
1035 except NothingChanged:
1038 cell["source"] = dst.splitlines(keepends=True)
# indent=1 and ensure_ascii=False match Jupyter's own serialization.
1041 dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
1042 if trailing_newline:
1043 dst_contents = dst_contents + "\n"
1046 raise NothingChanged
# NOTE(review): this listing is elided; most of the docstring examples and the
# final return are missing.
1049 def format_str(src_contents: str, *, mode: Mode) -> str:
1050 """Reformat a string and return new contents.
1052 `mode` determines formatting options, such as how many characters per line are
1056 >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
1057 def f(arg: str = "") -> None:
1060 A more complex example:
1063 ... black.format_str(
1064 ... "def f(arg:str='')->None: hey",
1065 ... mode=black.Mode(
1066 ... target_versions={black.TargetVersion.PY36},
1068 ... string_normalization=False,
1079 dst_contents = _format_str_once(src_contents, mode=mode)
1080 # Forced second pass to work around optional trailing commas (becoming
1081 # forced trailing commas on pass 2) interacting differently with optional
1082 # parentheses. Admittedly ugly.
1083 if src_contents != dst_contents:
1084 return _format_str_once(dst_contents, mode=mode)
# Single formatting pass: parse with lib2to3, generate logical lines, split
# them to fit the line length, and join the result. NOTE(review): this listing
# is elided; else-branches and set-literal braces are missing.
1088 def _format_str_once(src_contents: str, *, mode: Mode) -> str:
1089 src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
1090 dst_blocks: List[LinesBlock] = []
# Use explicit --target-version(s) if given, otherwise autodetect from the
# syntax features (and __future__ imports) present in the file.
1091 if mode.target_versions:
1092 versions = mode.target_versions
1094 future_imports = get_future_imports(src_node)
1095 versions = detect_target_versions(src_node, future_imports=future_imports)
1097 context_manager_features = {
1099 for feature in {Feature.PARENTHESIZED_CONTEXT_MANAGERS}
1100 if supports_feature(versions, feature)
1102 normalize_fmt_off(src_node)
1103 lines = LineGenerator(mode=mode, features=context_manager_features)
1104 elt = EmptyLineTracker(mode=mode)
# Trailing-comma splitting is only safe on targets that support it.
1105 split_line_features = {
1107 for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
1108 if supports_feature(versions, feature)
1110 block: Optional[LinesBlock] = None
1111 for current_line in lines.visit(src_node):
1112 block = elt.maybe_empty_lines(current_line)
1113 dst_blocks.append(block)
1114 for line in transform_line(
1115 current_line, mode=mode, features=split_line_features
1117 block.content_lines.append(str(line))
# The very last block never emits trailing empty lines.
1119 dst_blocks[-1].after = 0
1121 for block in dst_blocks:
1122 dst_contents.extend(block.all_lines())
1123 if not dst_contents:
1124 # Use decode_bytes to retrieve the correct source newline (CRLF or LF),
1125 # and check if normalized_content has more than one line
1126 normalized_content, _, newline = decode_bytes(src_contents.encode("utf-8"))
1127 if "\n" in normalized_content:
1130 return "".join(dst_contents)
def decode_bytes(src: bytes) -> "Tuple[FileContent, Encoding, NewLine]":
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    # Annotation is a string (PEP 484 forward reference) so this function can
    # be defined before/without the module-level FileContent/Encoding/NewLine
    # aliases being in scope.
    srcbuf = io.BytesIO(src)
    # Honor any PEP 263 coding declaration in the first two lines.
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        # Empty input: nothing to decode.
        return "", encoding, "\n"

    # The first physical line determines the newline convention reported back.
    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    # Rewind: detect_encoding consumed the lines it peeked at.
    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
# NOTE(review): this listing is elided; several `elif (`/`):` scaffolding
# lines, set-literal braces, and the final return are missing.
1150 def get_features_used( # noqa: C901
1151 node: Node, *, future_imports: Optional[Set[str]] = None
1153 """Return a set of (relatively) new Python features used in this file.
1155 Currently looking for:
1157 - self-documenting expressions in f-strings (f"{x=}");
1158 - underscores in numeric literals;
1159 - trailing commas after * or ** in function signatures and calls;
1160 - positional only arguments in function signatures and lambdas;
1161 - assignment expression;
1162 - relaxed decorator syntax;
1163 - usage of __future__ flags (annotations);
1164 - print / exec statements;
1165 - parenthesized context managers;
1168 - variadic generics;
1170 features: Set[Feature] = set()
# Seed with features implied by __future__ imports (e.g. annotations).
1173 FUTURE_FLAG_TO_FEATURE[future_import]
1174 for future_import in future_imports
1175 if future_import in FUTURE_FLAG_TO_FEATURE
# Single pre-order walk over the syntax tree; each branch below recognizes
# one feature by node/token type.
1178 for n in node.pre_order():
1179 if is_string_token(n):
1180 value_head = n.value[:2]
1181 if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
1182 features.add(Feature.F_STRINGS)
# f"{x=}" self-documenting expressions: only scan until first found.
1183 if Feature.DEBUG_F_STRINGS not in features:
1184 for span_beg, span_end in iter_fexpr_spans(n.value):
1185 if n.value[span_beg : span_end - 1].rstrip().endswith("="):
1186 features.add(Feature.DEBUG_F_STRINGS)
1189 elif is_number_token(n):
1191 features.add(Feature.NUMERIC_UNDERSCORES)
# A bare "/" inside a parameter list marks positional-only arguments.
1193 elif n.type == token.SLASH:
1194 if n.parent and n.parent.type in {
1199 features.add(Feature.POS_ONLY_ARGUMENTS)
1201 elif n.type == token.COLONEQUAL:
1202 features.add(Feature.ASSIGNMENT_EXPRESSIONS)
1204 elif n.type == syms.decorator:
1205 if len(n.children) > 1 and not is_simple_decorator_expression(
1208 features.add(Feature.RELAXED_DECORATORS)
# Trailing comma after */** in a signature (DEF) or call (CALL).
1211 n.type in {syms.typedargslist, syms.arglist}
1213 and n.children[-1].type == token.COMMA
1215 if n.type == syms.typedargslist:
1216 feature = Feature.TRAILING_COMMA_IN_DEF
1218 feature = Feature.TRAILING_COMMA_IN_CALL
1220 for ch in n.children:
1221 if ch.type in STARS:
1222 features.add(feature)
1224 if ch.type == syms.argument:
1225 for argch in ch.children:
1226 if argch.type in STARS:
1227 features.add(feature)
# `return *a, b` / `yield *a, b` unpacking (3.8+).
1230 n.type in {syms.return_stmt, syms.yield_expr}
1231 and len(n.children) >= 2
1232 and n.children[1].type == syms.testlist_star_expr
1233 and any(child.type == syms.star_expr for child in n.children[1].children)
1235 features.add(Feature.UNPACKING_ON_FLOW)
# `x: T = *a, b` extended RHS on annotated assignment.
1238 n.type == syms.annassign
1239 and len(n.children) >= 4
1240 and n.children[3].type == syms.testlist_star_expr
1242 features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
# `with (a, b):` — a parenthesized tuple of context managers.
1245 n.type == syms.with_stmt
1246 and len(n.children) > 2
1247 and n.children[1].type == syms.atom
1249 atom_children = n.children[1].children
1251 len(atom_children) == 3
1252 and atom_children[0].type == token.LPAR
1253 and atom_children[1].type == syms.testlist_gexp
1254 and atom_children[2].type == token.RPAR
1256 features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS)
1258 elif n.type == syms.match_stmt:
1259 features.add(Feature.PATTERN_MATCHING)
# `except* ...` groups (3.11+).
1262 n.type == syms.except_clause
1263 and len(n.children) >= 2
1264 and n.children[1].type == token.STAR
1266 features.add(Feature.EXCEPT_STAR)
# `Tuple[*Ts]`-style star expressions in subscripts (3.11+).
1268 elif n.type in {syms.subscriptlist, syms.trailer} and any(
1269 child.type == syms.star_expr for child in n.children
1271 features.add(Feature.VARIADIC_GENERICS)
1274 n.type == syms.tname_star
1275 and len(n.children) == 3
1276 and n.children[2].type == syms.star_expr
1278 features.add(Feature.VARIADIC_GENERICS)
def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used.

    A version is eligible when every feature used in the file is supported
    by that version, per VERSION_TO_FEATURES.
    """
    features = get_features_used(node, future_imports=future_imports)
    return {
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
    }
# NOTE(review): this listing is elided; several control-flow lines (else
# branches, continue/break, the final return) are missing.
1293 def get_future_imports(node: Node) -> Set[str]:
1294 """Return a set of __future__ imports in the file."""
1295 imports: Set[str] = set()
# Inner helper: yield the imported names from a `from __future__ import ...`
# children list, handling plain names, aliased names, and nested name lists.
1297 def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
1298 for child in children:
1299 if isinstance(child, Leaf):
1300 if child.type == token.NAME:
1303 elif child.type == syms.import_as_name:
# For `name as alias`, the original name (index 0) is what matters.
1304 orig_name = child.children[0]
1305 assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
1306 assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
1307 yield orig_name.value
1309 elif child.type == syms.import_as_names:
1310 yield from get_imports_from_children(child.children)
1313 raise AssertionError("Invalid syntax parsing imports")
# __future__ imports are only valid at the very top of the module, so stop
# scanning at the first statement that is neither a docstring nor an import.
1315 for child in node.children:
1316 if child.type != syms.simple_stmt:
1319 first_child = child.children[0]
1320 if isinstance(first_child, Leaf):
1321 # Continue looking if we see a docstring; otherwise stop.
1323 len(child.children) == 2
1324 and first_child.type == token.STRING
1325 and child.children[1].type == token.NEWLINE
1331 elif first_child.type == syms.import_from:
1332 module_name = first_child.children[1]
1333 if not isinstance(module_name, Leaf) or module_name.value != "__future__":
# children[3:] skips the "from", "__future__", "import" tokens.
1336 imports |= set(get_imports_from_children(first_child.children[3:]))
# NOTE(review): this listing is elided; the try statements and parts of the
# raised messages are missing.
1343 def assert_equivalent(src: str, dst: str) -> None:
1344 """Raise AssertionError if `src` and `dst` aren't equivalent."""
# Source failing to parse means --safe cannot be used at all.
1346 src_ast = parse_ast(src)
1347 except Exception as exc:
1348 raise AssertionError(
1349 "cannot use --safe with this file; failed to parse source file AST: "
1351 "This could be caused by running Black with an older Python version "
1352 "that does not support new syntax used in your source file."
# Destination failing to parse means Black itself produced invalid code.
1356 dst_ast = parse_ast(dst)
1357 except Exception as exc:
1358 log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
1359 raise AssertionError(
1360 f"INTERNAL ERROR: Black produced invalid code: {exc}. "
1361 "Please report a bug on https://github.com/psf/black/issues. "
1362 f"This invalid output might be helpful: {log}"
# Compare normalized string renderings of both ASTs for equivalence.
1365 src_ast_str = "\n".join(stringify_ast(src_ast))
1366 dst_ast_str = "\n".join(stringify_ast(dst_ast))
1367 if src_ast_str != dst_ast_str:
1368 log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
1369 raise AssertionError(
1370 "INTERNAL ERROR: Black produced code that is not equivalent to the"
1371 " source. Please report a bug on "
1372 f"https://github.com/psf/black/issues. This diff might be helpful: {log}"
def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    # versions.
    newdst = _format_str_once(dst, mode=mode)
    if dst != newdst:
        # Dump the mode and both diffs to a temp file to aid bug reports.
        log = dump_to_file(
            str(mode),
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        )
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
        )
@contextmanager
def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.
    """
    yield
# NOTE(review): this listing is elided; the try/except ImportError scaffolding
# around both imports is missing.
1404 def patch_click() -> None:
1405 """Make Click not crash on Python 3.6 with LANG=C.
1407 On certain misconfigured environments, Python 3 selects the ASCII encoding as the
1408 default which restricts paths that it can access during the lifetime of the
1409 application. Click refuses to work in this scenario by raising a RuntimeError.
1411 In case of Black the likelihood that non-ASCII characters are going to be used in
1412 file paths is minimal since it's Python source code. Moreover, this crash was
1413 spurious on Python 3.7 thanks to PEP 538 and PEP 540.
# Collect whichever Click modules expose the environment check, then no-op it.
1415 modules: List[Any] = []
1417 from click import core
1421 modules.append(core)
1423 # Removed in Click 8.1.0 and newer; we keep this around for users who have
1424 # older versions installed.
1425 from click import _unicodefun # type: ignore
1429 modules.append(_unicodefun)
1431 for module in modules:
1432 if hasattr(module, "_verify_python3_env"):
1433 module._verify_python3_env = lambda: None
1434 if hasattr(module, "_verify_python_env"):
1435 module._verify_python_env = lambda: None
# NOTE(review): this listing is elided; the remaining body of patched_main
# (freeze_support()/patch_click()/main() calls) and the __main__ body are
# missing.
1438 def patched_main() -> None:
1439 # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
1440 # environments so just assume we always need to call it if frozen.
1441 if getattr(sys, "frozen", False):
1442 from multiprocessing import freeze_support
1450 if __name__ == "__main__":