from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime
from json.decoder import JSONDecodeError
from multiprocessing import Manager, freeze_support
from pathlib import Path

from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError

from _black_version import version as __version__
from black.cache import Cache, filter_cached, get_cache_info, read_cache, write_cache
from black.comments import normalize_fmt_off
from black.concurrency import cancel, maybe_install_uvloop, shutdown
from black.const import (
from black.files import (
    find_user_pyproject_toml,
    normalize_path_maybe_ignore,
    wrap_stream_for_windows,
from black.handle_ipynb_magics import (
    jupyter_dependencies_are_installed,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, Line
from black.mode import (
    FUTURE_FLAG_TO_FEATURE,
from black.nodes import (
    is_simple_decorator_expression,
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.report import Changed, NothingChanged, Report
from black.trans import iter_fexpr_spans
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

from concurrent.futures import Executor

COMPILED = Path(__file__).suffix in (".pyd", ".so")

class WriteBack(Enum):
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
        if check and not diff:
        return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES
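
# A minimal usage sketch of the flag mapping above (hedged; only the branches
# visible in this excerpt are shown, the elided ones are omitted). When no diff
# is requested the call falls through to the final return:
#   WriteBack.from_configuration(check=False, diff=True)   # -> WriteBack.DIFF
#   WriteBack.from_configuration(check=False, diff=False)  # -> WriteBack.YES
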

# Legacy name, left for integrations.

DEFAULT_WORKERS = os.cpu_count()

def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    """
    value = find_pyproject_toml(ctx.params.get("src", ()))

    config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"

    # Sanitize the values to be Click friendly. For more information please see:
    # https://github.com/psf/black/issues/1458
    # https://github.com/pallets/click/issues/1567
        k: str(v) if not isinstance(v, (list, dict)) else v
        for k, v in config.items()

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"

    default_map: Dict[str, Any] = {}
    default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
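
# For reference, a hedged sketch of the kind of pyproject.toml table this
# callback feeds into Click's default map (key names follow the option names
# defined below; the values are illustrative only):
#
#   [tool.black]
#   line-length = 100
#   target-version = ["py38", "py39"]
#   skip-string-normalization = true
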

def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    return [TargetVersion[val.upper()] for val in v]
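
# For example (hedged; the strings map onto TargetVersion enum members):
#   target_version_option_callback(ctx, param, ("py37", "py310"))
#   # -> [TargetVersion.PY37, TargetVersion.PY310]
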

def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)


    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
    return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
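
# A quick illustration of the behaviour described above (hedged): a pattern
# without newlines compiles as-is, while a multi-line pattern gets "(?x)"
# prepended so it is compiled in verbose mode and embedded whitespace and
# comments inside it are ignored.
#   re_compile_maybe_verbose(r"\.pyi?$")              # compiled verbatim
#   re_compile_maybe_verbose("(\n  foo\n  | bar\n)")  # compiled with re.X semantics
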

    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. [default: per-file"
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--python-cell-magics",
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
        " Useful for formatting cells with custom python magics."
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    type=click.IntRange(min=1),
    default=DEFAULT_WORKERS,
    help="Number of parallel workers",
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
    f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
    f"Python ({platform.python_implementation()}) {platform.python_version()}"
    exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
def main(  # noqa: C901
    target_version: List[TargetVersion],
    python_cell_magics: Sequence[str],
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    if src and code is not None:
        + "\n\n'SRC' and 'code' cannot be passed simultaneously."
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")

    root, method = find_project_root(src) if code is None else (None, None)
    ctx.obj["root"] = root
        f"Identified `{root}` as project root containing a {method}.",
        (normalize_path_maybe_ignore(Path(source), root), source)
    srcs_string = ", ".join(
        else f'\033[31m"{source} (skipping - invalid)"\033[34m'
        for _norm, source in normalized
    out(f"Sources to be formatted: {srcs_string}", fg="blue")

    config_source = ctx.get_parameter_source("config")
    user_level_config = str(find_user_pyproject_toml())
    if config == user_level_config:
            "Using configuration from user-level config at "
            f"'{user_level_config}'.",
    elif config_source in (
        ParameterSource.DEFAULT,
        ParameterSource.DEFAULT_MAP,
        out("Using configuration from project root.", fg="blue")
        out(f"Using configuration in '{config}'.", fg="blue")

    error_msg = "Oh no! 💥 💔 💥"
        and required_version != __version__
        and required_version != __version__.split(".")[0]
        f"{error_msg} The required version `{required_version}` does not match"
        f" the running version `{__version__}`!"
        err("Cannot pass both `pyi` and `ipynb` flags!")

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
    versions = set(target_version)
        # We'll autodetect later.
        target_versions=versions,
        line_length=line_length,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        python_cell_magics=set(python_cell_magics),

    # Run in quiet mode by default with -c; the extra output isn't useful.
    # You can still pass -v to get verbose output.
    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
        content=code, fast=fast, write_back=write_back, mode=mode, report=report
    sources = get_sources(
        extend_exclude=extend_exclude,
        force_exclude=force_exclude,
        stdin_filename=stdin_filename,
    except GitWildMatchPatternError:
        "No Python files are present to be formatted. Nothing to do 😴",
    if len(sources) == 1:
            write_back=write_back,
            write_back=write_back,
    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        click.echo(str(report), err=True)
    ctx.exit(report.return_code)
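
# A hedged command-line sketch tying the options above together (paths and
# values are illustrative only):
#
#   $ black --check --diff --line-length 100 src/
#   $ black --target-version py38 --skip-string-normalization my_module.py
#
# With --check, the exit status reported through ctx.exit() is 0 when nothing
# would change, 1 when files would be reformatted, and 123 on internal error.
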

    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()

    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
    gitignore = get_gitignore(ctx.obj["root"])

    if s == "-" and stdin_filename:
        p = Path(stdin_filename)
    if is_stdin or p.is_file():
        normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
        if normalized_path is None:
        normalized_path = "/" + normalized_path

        # Hard-exclude any files that match the `--force-exclude` regex.
        force_exclude_match = force_exclude.search(normalized_path)
        force_exclude_match = None
        if force_exclude_match and force_exclude_match.group(0):
            report.path_ignored(p, "matches the --force-exclude regular expression")

        p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")

        if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
            verbose=verbose, quiet=quiet
        err(f"invalid path: {s}")


    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    Exit if there is no `src` provided for formatting
    if verbose or not quiet:

    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    path = Path("<string>")
    if format_stdin_to_stdout(
        content=content, fast=fast, write_back=write_back, mode=mode
        changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        traceback.print_exc()
        report.failed(path, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
        elif str(src).startswith(STDIN_PLACEHOLDER):
            # Use the original name again in case we want to print something
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])

            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
        traceback.print_exc()
        report.failed(src, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    write_back: WriteBack,
    workers: Optional[int],
    """Reformat multiple files using a ProcessPoolExecutor."""
    from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor

    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        assert worker_count is not None
        worker_count = min(worker_count, 60)
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # We arrive here if the underlying system does not support multi-processing,
        # e.g. AWS Lambda or Termux, in which case we gracefully fall back to
        # a ThreadPoolExecutor with just a single worker (more workers would not do us
        # any good due to the Global Interpreter Lock).
        executor = ThreadPoolExecutor(max_workers=1)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    loop.run_until_complete(
        write_back=write_back,
    asyncio.set_event_loop(None)
    if executor is not None:

async def schedule_formatting(
    write_back: WriteBack,
    loop: asyncio.AbstractEventLoop,
    executor: "Executor",
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    """
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)

    sources_to_cache = []
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        lock = manager.Lock()
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
        for src in sorted(sources)
    pending = tasks.keys()
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
    done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        src = tasks.pop(task)
            cancelled.append(task)
        elif task.exception():
            report.failed(src, str(task.exception()))
            changed = Changed.YES if task.result() else Changed.NO
            # If the file was written back or was successfully checked as
            # well-formatted, store this information in the cache.
            if write_back is WriteBack.YES or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                sources_to_cache.append(src)
            report.done(src, changed)
    if sys.version_info >= (3, 7):
        await asyncio.gather(*cancelled, return_exceptions=True)
        await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
    write_cache(cache, sources_to_cache, mode)

def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
    except JSONDecodeError:
            f"File '{src}' cannot be parsed as valid Jupyter notebook."

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)

def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()

    src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    src, encoding, newline = content, "utf-8", ""

    dst = format_file_contents(src, fast=fast, mode=mode)
    except NothingChanged:

    f = io.TextIOWrapper(
        sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
    if write_back == WriteBack.YES:
        # Make sure there's a newline after the content
        if dst and dst[-1] != "\n":
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"STDIN\t{then} +0000"
        dst_name = f"STDOUT\t{now} +0000"
        d = diff(src, dst, src_name, dst_name)
        if write_back == WriteBack.COLOR_DIFF:
        f = wrap_stream_for_windows(f)

def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)

def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    if not src_contents.strip():
        raise NothingChanged

    dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
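
# A doctest-style sketch of the happy path (hedged; default Mode assumed):
#   >>> format_file_contents("x=1\ny =2\n", fast=True, mode=Mode())
#   'x = 1\ny = 2\n'
# If the input is already formatted, NothingChanged is raised instead of
# returning the same string.
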

def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of
    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    """
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
        raise NothingChanged

def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    - if cell has trailing semicolon, remove it;
    - if cell has IPython magics, mask them;
    - reinstate IPython magics;
    - reinstate trailing semicolon (if originally present);
    - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
    masked_src, replacements = mask_cell(src_without_trailing_semicolon)
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
    check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    dst = dst.rstrip("\n")
        raise NothingChanged from None
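
# A hedged doctest-style sketch of the round trip described above: the trailing
# semicolon is removed, the remaining Python is formatted with format_str, and
# the semicolon is put back afterwards.
#   >>> format_cell("x =1;", fast=True, mode=Mode())
#   'x = 1;'
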

def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)

    if language is not None and language != "python":
        raise NothingChanged from None

def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            src = "".join(cell["source"])
            dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
            cell["source"] = dst.splitlines(keepends=True)
    dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
    if trailing_newline:
        dst_contents = dst_contents + "\n"
    raise NothingChanged

def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are

    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:

    A more complex example:

    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       string_normalization=False,
    """
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)

def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    if mode.target_versions:
        versions = mode.target_versions
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node, preview=mode.preview)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    split_line_features = {
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
            dst_contents.append(str(line))
    return "".join(dst_contents)

def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
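
# A hedged doctest-style sketch of the newline handling above: CRLF input is
# decoded with universal newlines, while the original newline style is reported
# separately so it can be written back unchanged.
#   >>> decode_bytes(b"x = 1\r\n")
#   ('x = 1\n', 'utf-8', '\r\n')
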

def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    """
    features: Set[Feature] = set()
        FUTURE_FLAG_TO_FEATURE[future_import]
        for future_import in future_imports
        if future_import in FUTURE_FLAG_TO_FEATURE

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
                if Feature.DEBUG_F_STRINGS not in features:
                    for span_beg, span_end in iter_fexpr_spans(n.value):
                        if n.value[span_beg : span_end - 1].rstrip().endswith("="):
                            features.add(Feature.DEBUG_F_STRINGS)

        elif is_number_token(n):
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)

            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
                feature = Feature.TRAILING_COMMA_IN_CALL
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
            features.add(Feature.UNPACKING_ON_FLOW)

            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
            features.add(Feature.EXCEPT_STAR)

        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
            features.add(Feature.VARIADIC_GENERICS)

            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
            features.add(Feature.VARIADIC_GENERICS)

def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
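
# For instance (hedged): a module whose only "new" feature is the walrus
# operator uses Feature.ASSIGNMENT_EXPRESSIONS, so detect_target_versions()
# returns just the TargetVersion members whose feature sets include it
# (Python 3.8 and newer).
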

def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
            imports |= set(get_imports_from_children(first_child.children[3:]))

def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."

        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"


def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    newdst = _format_str_once(dst, mode=mode)
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"

def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `contextlib.nullcontext`, which is only available in Python 3.7+.
    """

def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    In certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default, which restricts the paths it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In the case of Black, the likelihood that non-ASCII characters are going to be used
    in file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    modules: List[Any] = []
        from click import core
        modules.append(core)
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
        modules.append(_unicodefun)

    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore

def patched_main() -> None:
    maybe_install_uvloop()


if __name__ == "__main__":