from json.decoder import JSONDecodeError
from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
from contextlib import contextmanager
from datetime import datetime
from multiprocessing import Manager, freeze_support
from pathlib import Path
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
from dataclasses import replace
from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
from black.const import STDIN_PLACEHOLDER
from black.nodes import STARS, syms, is_simple_decorator_expression
from black.lines import Line, EmptyLineTracker
from black.linegen import transform_line, LineGenerator, LN
from black.comments import normalize_fmt_off
from black.mode import Mode, TargetVersion
from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
from black.concurrency import cancel, shutdown, maybe_install_uvloop
from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
from black.report import Report, Changed, NothingChanged
from black.files import find_project_root, find_pyproject_toml, parse_pyproject_toml
from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
from black.files import wrap_stream_for_windows
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.handle_ipynb_magics import (
    mask_cell,
    unmask_cell,
    remove_trailing_semicolon,
    put_trailing_semicolon_back,
    TRANSFORMED_MAGICS,
    jupyter_dependencies_are_installed,
)
from blib2to3.pytree import Node, Leaf
from blib2to3.pgen2 import token

from _black_version import version as __version__


class WriteBack(Enum):
    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        if check and not diff:
            return cls.CHECK
        return cls.DIFF if diff else cls.YES


# Legacy name, left for integrations.
FileMode = Mode

def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
        value = find_pyproject_toml(ctx.params.get("src", ()))
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()
    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
    default_map: Dict[str, Any] = {}
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
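
# Standalone sketch (illustrative, not part of this module): the callback above
# primes Click's ctx.default_map so that pyproject.toml values act as defaults
# while flags given explicitly on the command line still win. Below is a
# minimal, hypothetical version of the same pattern; inject_defaults,
# example_cli and the hard-coded config dict are invented stand-ins for
# read_pyproject_toml, main and parse_pyproject_toml.
from typing import Optional

import click


def inject_defaults(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
) -> Optional[str]:
    config = {"line_length": 100}  # stand-in for parse_pyproject_toml(value)
    default_map = dict(ctx.default_map or {})
    default_map.update(config)
    ctx.default_map = default_map  # only defaults change; explicit flags still override
    return value


@click.command()
@click.option("--config", is_eager=True, callback=inject_defaults, default=None)
@click.option("--line-length", type=int, default=88)
def example_cli(config: Optional[str], line_length: int) -> None:
    click.echo(f"line length: {line_length}")  # 100 unless --line-length is given
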

def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    return [TargetVersion[val.upper()] for val in v]


def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)

    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern]:
        return re_compile_maybe_verbose(value) if value is not None else None
        raise click.BadParameter("Not a valid regular expression")
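
# Standalone sketch (illustrative, not part of this module): what the "(?x)"
# prefix above does. A pattern containing newlines is compiled in verbose mode,
# so whitespace and "#" comments inside it are ignored rather than matched.
import re

_multiline_pattern = r"""
(\.git|\.eggs)   # one alternative per line, comments allowed
|build|dist
"""
_compiled = re.compile("(?x)" + _multiline_pattern)
assert _compiled.search("/project/.git/config") is not None
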

@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. [default: per-file"
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
        "Experimental option that performs more normalization on string literals."
        " Currently disabled because it leads to some crashes."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file)."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(version=__version__)
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
    target_version: List[TargetVersion],
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: str,
    exclude: Optional[Pattern],
    extend_exclude: Optional[Pattern],
    force_exclude: Optional[Pattern],
    stdin_filename: Optional[str],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    if config and verbose:
        out(f"Using configuration from {config}.", bold=False, fg="blue")

    error_msg = "Oh no! 💥 💔 💥"
    if required_version and required_version != __version__:
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        err("Cannot pass both `pyi` and `ipynb` flags!")
    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
        versions = set(target_version)
        # We'll autodetect later.
        target_versions=versions,
        line_length=line_length,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
            content=code, fast=fast, write_back=write_back, mode=mode, report=report
            sources = get_sources(
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                stdin_filename=stdin_filename,
        except GitWildMatchPatternError:
            "No Python files are present to be formatted. Nothing to do 😴",
        if len(sources) == 1:
                write_back=write_back,
                write_back=write_back,
    if verbose or not quiet:
        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)
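
# Standalone sketch (illustrative, not part of this module): the --check exit
# codes described in the option help above, observed from the outside. Assumes
# a `black` executable is available on PATH.
import pathlib
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as _tmp:
    _unformatted = pathlib.Path(_tmp, "example.py")
    _unformatted.write_text("x=1\n")  # not Black-formatted yet
    _result = subprocess.run(["black", "--check", str(_unformatted)], capture_output=True)
    print(_result.returncode)  # 1 = would reformat; 0 = nothing to do; 123 = internal error
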
    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    root = find_project_root(src)
    sources: Set[Path] = set()
    path_empty(src, "No Path provided. Nothing to do 😴", quiet, verbose, ctx)
        exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
        gitignore = get_gitignore(root)
        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, root, report)
            if normalized_path is None:
            normalized_path = "/" + normalized_path
            # Hard-exclude any files that match the `--force-exclude` regex.
                force_exclude_match = force_exclude.search(normalized_path)
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            err(f"invalid path: {s}")

    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    Exit if there is no `src` provided for formatting
        if verbose or not quiet:

    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    path = Path("<string>")
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(path, str(exc))

    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
        elif str(src).startswith(STDIN_PLACEHOLDER):
            # Use the original name again in case we want to print something
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(src, str(exc))

    sources: Set[Path], fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat multiple files using a ProcessPoolExecutor."""
    loop = asyncio.get_event_loop()
    worker_count = os.cpu_count()
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        worker_count = min(worker_count, 60)
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, OSError):
        # we arrive here if the underlying system does not support multi-processing
        # like in AWS Lambda or Termux, in which case we gracefully fall back to
        # a ThreadPoolExecutor with just a single worker (more workers would not do us
        # any good due to the Global Interpreter Lock)
        executor = ThreadPoolExecutor(max_workers=1)
        loop.run_until_complete(
                write_back=write_back,
        if executor is not None:

async def schedule_formatting(
    write_back: WriteBack,
    loop: asyncio.AbstractEventLoop,
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    sources_to_cache = []
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        lock = manager.Lock()
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
        for src in sorted(sources)
    pending = tasks.keys()
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
            src = tasks.pop(task)
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                    sources_to_cache.append(src)
                report.done(src, changed)
        await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
        write_cache(cache, sources_to_cache, mode)
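
# Standalone sketch (illustrative, not part of this module): the scheduling
# pattern used above, reduced to its core. Blocking jobs go to an executor via
# loop.run_in_executor and are awaited as asyncio futures; blocking_job and
# run_all are invented stand-ins for format_file_in_place and the driver above.
import asyncio
from concurrent.futures import ProcessPoolExecutor
from typing import Iterable, List


def blocking_job(n: int) -> int:
    return n * n


async def run_all(items: Iterable[int]) -> List[int]:
    loop = asyncio.get_running_loop()
    with ProcessPoolExecutor() as executor:
        futures = [loop.run_in_executor(executor, blocking_job, i) for i in items]
        return await asyncio.gather(*futures)


if __name__ == "__main__":
    print(asyncio.run(run_all(range(4))))  # [0, 1, 4, 9]
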

def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)
    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
    except JSONDecodeError:
        raise ValueError(f"File '{src}' cannot be parsed as valid Jupyter notebook.")
    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)
        with lock or nullcontext():
            f = io.TextIOWrapper(
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)

def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    then = datetime.utcnow()
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
        src, encoding, newline = content, "utf-8", ""
        dst = format_file_contents(src, fast=fast, mode=mode)
    except NothingChanged:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                f = wrap_stream_for_windows(f)

def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    assert_equivalent(src_contents, dst_contents)

    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    dst_contents_pass2 = format_str(dst_contents, mode=mode)
    if dst_contents != dst_contents_pass2:
        dst_contents = dst_contents_pass2
        assert_equivalent(src_contents, dst_contents, pass_num=2)
        assert_stable(src_contents, dst_contents, mode=mode)
    # Note: no need to explicitly call `assert_stable` if `dst_contents` was
    # the same as `dst_contents_pass2`.

def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    if not src_contents.strip():
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
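
# Standalone usage sketch (illustrative, not part of this module), assuming a
# Black of roughly this vintage is installed: changed content is returned,
# while already-formatted input raises NothingChanged.
import black

print(black.format_file_contents("x=1\n", fast=True, mode=black.Mode()))  # "x = 1\n"
try:
    black.format_file_contents("x = 1\n", fast=True, mode=black.Mode())
except black.NothingChanged:
    print("already formatted")
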

def validate_cell(src: str) -> None:
    """Check that cell does not already contain TransformerManager transformations.

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

    >>> TransformerManager().transform_cell("get_ipython().system('ls')")
    "get_ipython().system('ls')\n"
    >>> TransformerManager().transform_cell("!ls")
    "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):

def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
    masked_dst = format_str(masked_src, mode=mode)
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    dst = dst.rstrip("\n")
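
# Standalone usage sketch (illustrative, not part of this module), assuming
# Black is installed with its Jupyter extras (tokenize-rt, IPython): the
# trailing semicolon is removed for formatting and then put back, as the
# docstring above describes.
import black

print(black.format_cell("x  =1;", fast=True, mode=black.Mode()))  # "x = 1;"
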

def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":

def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                cell["source"] = dst.splitlines(keepends=True)
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
            dst_contents = dst_contents + "\n"
        raise NothingChanged

def format_str(src_contents: str, *, mode: Mode) -> FileContent:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are

    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:

    A more complex example:

    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       string_normalization=False,

    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    future_imports = get_future_imports(src_node)
    if mode.target_versions:
        versions = mode.target_versions
        versions = detect_target_versions(src_node)
    normalize_fmt_off(src_node)
    lines = LineGenerator(
        remove_u_prefix="unicode_literals" in future_imports
        or supports_feature(versions, Feature.UNICODE_LITERALS),
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    split_line_features = {
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
            dst_contents.append(str(line))
    return "".join(dst_contents)

def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
        return "", encoding, "\n"
    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
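
# Standalone usage sketch (illustrative, not part of this module), assuming
# Black is installed: CRLF input is decoded with universal newlines, while the
# original newline style is reported so it can be restored on write.
import black

print(black.decode_bytes(b"x = 1\r\ny = 2\r\n"))
# expected: ('x = 1\ny = 2\n', 'utf-8', '\r\n')
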

def get_features_used(node: Node) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    features: Set[Feature] = set()
    for n in node.pre_order():
        if n.type == token.STRING:
            value_head = n.value[:2]  # type: ignore
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
        elif n.type == token.NUMBER:
            if "_" in n.value:  # type: ignore
                features.add(Feature.NUMERIC_UNDERSCORES)
        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {syms.typedargslist, syms.arglist}:
                features.add(Feature.POS_ONLY_ARGUMENTS)
        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)
        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)
            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
                feature = Feature.TRAILING_COMMA_IN_CALL
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)
                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)
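
# Standalone usage sketch (illustrative, not part of this module), assuming
# Black is installed: parse a snippet with blib2to3 and ask which of the
# features listed above it uses.
import black

_node = black.lib2to3_parse("def f(a, /, b):\n    return (x := a + 1_000)\n")
print(black.get_features_used(_node))
# expected to include POS_ONLY_ARGUMENTS, ASSIGNMENT_EXPRESSIONS, NUMERIC_UNDERSCORES
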

def detect_target_versions(node: Node) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node)
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]

def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
            imports |= set(get_imports_from_children(first_child.children[3:]))
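
# Standalone usage sketch (illustrative, not part of this module), assuming
# Black is installed: only leading __future__ imports (optionally preceded by a
# docstring) are collected.
import black

_node = black.lib2to3_parse(
    '"""doc."""\nfrom __future__ import annotations, unicode_literals\nx = 1\n'
)
print(black.get_future_imports(_node))  # {'annotations', 'unicode_literals'} (any order)
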

def assert_equivalent(src: str, dst: str, *, pass_num: int = 1) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file. AST"
            f" error message: {exc}"
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code on pass {pass_num}: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            f" source on pass {pass_num}. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"

def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    newdst = format_str(dst, mode=mode)
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"

def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.

def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In the case of Black, the likelihood that non-ASCII characters are going to be used
    in file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
        from click import core
        from click import _unicodefun  # type: ignore
    except ModuleNotFoundError:
    for module in (core, _unicodefun):
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore

def patched_main() -> None:
    maybe_install_uvloop()


if __name__ == "__main__":
    patched_main()