All patches and comments are welcome. Please squash your changes to logical
commits before using git-format-patch and git-send-email to
patches@git.madduck.net.
If you would read over the Git project's submission guidelines and adhere to
them, I would be especially grateful.
2 from json.decoder import JSONDecodeError
4 from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
5 from contextlib import contextmanager
6 from datetime import datetime
9 from multiprocessing import Manager, freeze_support
11 from pathlib import Path
12 from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
36 from click.core import ParameterSource
37 from dataclasses import replace
38 from mypy_extensions import mypyc_attr
40 from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
41 from black.const import STDIN_PLACEHOLDER
42 from black.nodes import STARS, syms, is_simple_decorator_expression
43 from black.nodes import is_string_token
44 from black.lines import Line, EmptyLineTracker
45 from black.linegen import transform_line, LineGenerator, LN
46 from black.comments import normalize_fmt_off
47 from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
48 from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
49 from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
50 from black.concurrency import cancel, shutdown, maybe_install_uvloop
51 from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
52 from black.report import Report, Changed, NothingChanged
53 from black.files import (
57 find_user_pyproject_toml,
59 from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
60 from black.files import wrap_stream_for_windows
61 from black.parsing import InvalidInput # noqa F401
62 from black.parsing import lib2to3_parse, parse_ast, stringify_ast
63 from black.handle_ipynb_magics import (
66 remove_trailing_semicolon,
67 put_trailing_semicolon_back,
70 jupyter_dependencies_are_installed,
75 from blib2to3.pytree import Node, Leaf
76 from blib2to3.pgen2 import token
78 from _black_version import version as __version__
80 COMPILED = Path(__file__).suffix in (".pyd", ".so")
88 class WriteBack(Enum):
96 def from_configuration(
97 cls, *, check: bool, diff: bool, color: bool = False
99 if check and not diff:
103 return cls.COLOR_DIFF
105 return cls.DIFF if diff else cls.YES
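# A small, hypothetical sanity check for the classmethod above; it is not part of the
# original file and assumes the usual enum members (NO, YES, DIFF, CHECK, COLOR_DIFF).
def _demo_write_back() -> None:
    assert WriteBack.from_configuration(check=True, diff=False) is WriteBack.CHECK
    wb = WriteBack.from_configuration(check=False, diff=True, color=True)
    assert wb is WriteBack.COLOR_DIFF
    assert WriteBack.from_configuration(check=False, diff=False) is WriteBack.YES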
108 # Legacy name, left for integrations.
111 DEFAULT_WORKERS = os.cpu_count()
114 def read_pyproject_toml(
115 ctx: click.Context, param: click.Parameter, value: Optional[str]
117 """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
119 Returns the path to a successfully found and read configuration file, None
123 value = find_pyproject_toml(ctx.params.get("src", ()))
128 config = parse_pyproject_toml(value)
129 except (OSError, ValueError) as e:
130 raise click.FileError(
131 filename=value, hint=f"Error reading configuration file: {e}"
137 # Sanitize the values to be Click friendly. For more information please see:
138 # https://github.com/psf/black/issues/1458
139 # https://github.com/pallets/click/issues/1567
141 k: str(v) if not isinstance(v, (list, dict)) else v
142 for k, v in config.items()
145 target_version = config.get("target_version")
146 if target_version is not None and not isinstance(target_version, list):
147 raise click.BadOptionUsage(
148 "target-version", "Config key target-version must be a list"
151 default_map: Dict[str, Any] = {}
153 default_map.update(ctx.default_map)
154 default_map.update(config)
156 ctx.default_map = default_map
160 def target_version_option_callback(
161 c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
162 ) -> List[TargetVersion]:
163 """Compute the target versions from a --target-version flag.
165 This is its own function because mypy couldn't infer the type correctly
166 when it was a lambda, causing mypyc trouble.
168 return [TargetVersion[val.upper()] for val in v]
171 def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
172 """Compile a regular expression string in `regex`.
174 If it contains newlines, use verbose mode.
177 regex = "(?x)" + regex
178 compiled: Pattern[str] = re.compile(regex)
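# A brief sketch of the behaviour above (my own example, not from the file): a pattern
# spanning several lines is compiled in verbose mode, a single-line pattern is not.
def _demo_re_compile_maybe_verbose() -> None:
    assert re_compile_maybe_verbose("foo\n|bar").flags & re.VERBOSE
    assert not re_compile_maybe_verbose(r"\.pyi?$").flags & re.VERBOSE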
184 param: click.Parameter,
185 value: Optional[str],
186 ) -> Optional[Pattern[str]]:
188 return re_compile_maybe_verbose(value) if value is not None else None
189 except re.error as e:
190 raise click.BadParameter(f"Not a valid regular expression: {e}") from None
194 context_settings={"help_option_names": ["-h", "--help"]},
195 # While Click does set this field automatically using the docstring, mypyc
196 # (annoyingly) strips 'em so we need to set it here too.
197 help="The uncompromising code formatter.",
199 @click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
204 default=DEFAULT_LINE_LENGTH,
205 help="How many characters per line to allow.",
211 type=click.Choice([v.name.lower() for v in TargetVersion]),
212 callback=target_version_option_callback,
215 "Python versions that should be supported by Black's output. [default: per-file"
223 "Format all input files like typing stubs regardless of file extension (useful"
224 " when piping source on standard input)."
231 "Format all input files like Jupyter Notebooks regardless of file extension "
232 "(useful when piping source on standard input)."
236 "--python-cell-magics",
239 "When processing Jupyter Notebooks, add the given magic to the list"
240 f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
241 " Useful for formatting cells with custom python magics."
247 "--skip-string-normalization",
249 help="Don't normalize string quotes or prefixes.",
253 "--skip-magic-trailing-comma",
255 help="Don't use trailing commas as a reason to split lines.",
258 "--experimental-string-processing",
261 help="(DEPRECATED and now included in --preview) Normalize string literals.",
267 "Enable potentially disruptive style changes that may be added to Black's main"
268 " functionality in the next major release."
275 "Don't write the files back, just return the status. Return code 0 means"
276 " nothing would change. Return code 1 means some files would be reformatted."
277 " Return code 123 means there was an internal error."
283 help="Don't write the files back, just output a diff for each file on stdout.",
286 "--color/--no-color",
288 help="Show colored diff. Only applies when `--diff` is given.",
293 help="If --fast given, skip temporary sanity checks. [default: --safe]",
296 "--required-version",
299 "Require a specific version of Black to be running (useful for unifying results"
300 " across many environments e.g. with a pyproject.toml file). It can be"
301 " either a major version number or an exact version."
307 default=DEFAULT_INCLUDES,
308 callback=validate_regex,
310 "A regular expression that matches files and directories that should be"
311 " included on recursive searches. An empty value means all files are included"
312 " regardless of the name. Use forward slashes for directories on all platforms"
313 " (Windows, too). Exclusions are calculated first, inclusions later."
320 callback=validate_regex,
322 "A regular expression that matches files and directories that should be"
323 " excluded on recursive searches. An empty value means no paths are excluded."
324 " Use forward slashes for directories on all platforms (Windows, too)."
325 " Exclusions are calculated first, inclusions later. [default:"
326 f" {DEFAULT_EXCLUDES}]"
333 callback=validate_regex,
335 "Like --exclude, but adds additional files and directories on top of the"
336 " excluded ones. (Useful if you simply want to add to the default)"
342 callback=validate_regex,
344 "Like --exclude, but files and directories matching this regex will be "
345 "excluded even when they are passed explicitly as arguments."
352 "The name of the file when passing it through stdin. Useful to make "
353 "sure Black will respect --force-exclude option on some "
354 "editors that rely on using stdin."
360 type=click.IntRange(min=1),
361 default=DEFAULT_WORKERS,
363 help="Number of parallel workers",
370 "Don't emit non-error messages to stderr. Errors are still emitted; silence"
371 " those with 2>/dev/null."
379 "Also emit messages to stderr about files that were not changed or were ignored"
380 " due to exclusion patterns."
383 @click.version_option(
386 f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
387 f"Python ({platform.python_implementation()}) {platform.python_version()}"
394 exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
410 callback=read_pyproject_toml,
411 help="Read configuration from FILE path.",
414 def main( # noqa: C901
418 target_version: List[TargetVersion],
425 python_cell_magics: Sequence[str],
426 skip_string_normalization: bool,
427 skip_magic_trailing_comma: bool,
428 experimental_string_processing: bool,
432 required_version: Optional[str],
433 include: Pattern[str],
434 exclude: Optional[Pattern[str]],
435 extend_exclude: Optional[Pattern[str]],
436 force_exclude: Optional[Pattern[str]],
437 stdin_filename: Optional[str],
439 src: Tuple[str, ...],
440 config: Optional[str],
442 """The uncompromising code formatter."""
443 ctx.ensure_object(dict)
445 if src and code is not None:
448 + "\n\n'SRC' and 'code' cannot be passed simultaneously."
451 if not src and code is None:
452 out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
455 root, method = find_project_root(src) if code is None else (None, None)
456 ctx.obj["root"] = root
461 f"Identified `{root}` as project root containing a {method}.",
466 (normalize_path_maybe_ignore(Path(source), root), source)
469 srcs_string = ", ".join(
473 else f'\033[31m"{source} (skipping - invalid)"\033[34m'
474 for _norm, source in normalized
477 out(f"Sources to be formatted: {srcs_string}", fg="blue")
480 config_source = ctx.get_parameter_source("config")
481 user_level_config = str(find_user_pyproject_toml())
482 if config == user_level_config:
484 "Using configuration from user-level config at "
485 f"'{user_level_config}'.",
488 elif config_source in (
489 ParameterSource.DEFAULT,
490 ParameterSource.DEFAULT_MAP,
492 out("Using configuration from project root.", fg="blue")
494 out(f"Using configuration in '{config}'.", fg="blue")
496 error_msg = "Oh no! 💥 💔 💥"
499 and required_version != __version__
500 and required_version != __version__.split(".")[0]
503 f"{error_msg} The required version `{required_version}` does not match"
504 f" the running version `{__version__}`!"
508 err("Cannot pass both `pyi` and `ipynb` flags!")
511 write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
513 versions = set(target_version)
515 # We'll autodetect later.
518 target_versions=versions,
519 line_length=line_length,
522 string_normalization=not skip_string_normalization,
523 magic_trailing_comma=not skip_magic_trailing_comma,
524 experimental_string_processing=experimental_string_processing,
526 python_cell_magics=set(python_cell_magics),
530 # Run in quiet mode by default with -c; the extra output isn't useful.
531 # You can still pass -v to get verbose output.
534 report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
538 content=code, fast=fast, write_back=write_back, mode=mode, report=report
542 sources = get_sources(
549 extend_exclude=extend_exclude,
550 force_exclude=force_exclude,
552 stdin_filename=stdin_filename,
554 except GitWildMatchPatternError:
559 "No Python files are present to be formatted. Nothing to do 😴",
565 if len(sources) == 1:
569 write_back=write_back,
577 write_back=write_back,
583 if verbose or not quiet:
584 if code is None and (verbose or report.change_count or report.failure_count):
586 out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
588 click.echo(str(report), err=True)
589 ctx.exit(report.return_code)
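# A hedged sketch of driving the `main` command above programmatically with Click's
# test runner; this is not part of the original module, and the flags shown are only a
# subset of the real CLI. Piping an unformatted snippet with --check should exit 1.
def _demo_cli_invocation() -> None:
    from click.testing import CliRunner

    result = CliRunner().invoke(main, ["--check", "--diff", "-"], input="x=1\n")
    assert result.exit_code == 1  # the snippet would be reformatted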
595 src: Tuple[str, ...],
598 include: Pattern[str],
599 exclude: Optional[Pattern[str]],
600 extend_exclude: Optional[Pattern[str]],
601 force_exclude: Optional[Pattern[str]],
603 stdin_filename: Optional[str],
605 """Compute the set of files to be formatted."""
606 sources: Set[Path] = set()
609 exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
610 gitignore = get_gitignore(ctx.obj["root"])
615 if s == "-" and stdin_filename:
616 p = Path(stdin_filename)
622 if is_stdin or p.is_file():
623 normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
624 if normalized_path is None:
627 normalized_path = "/" + normalized_path
628 # Hard-exclude any files that match the `--force-exclude` regex.
630 force_exclude_match = force_exclude.search(normalized_path)
632 force_exclude_match = None
633 if force_exclude_match and force_exclude_match.group(0):
634 report.path_ignored(p, "matches the --force-exclude regular expression")
638 p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
640 if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
641 verbose=verbose, quiet=quiet
664 err(f"invalid path: {s}")
669 src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
672 Exit if there is no `src` provided for formatting
675 if verbose or not quiet:
681 content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
684 Reformat and print out `content` without spawning child processes.
685 Similar to `reformat_one`, but for string content.
687 `fast`, `write_back`, and `mode` options are passed to
688 :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
690 path = Path("<string>")
693 if format_stdin_to_stdout(
694 content=content, fast=fast, write_back=write_back, mode=mode
696 changed = Changed.YES
697 report.done(path, changed)
698 except Exception as exc:
700 traceback.print_exc()
701 report.failed(path, str(exc))
704 # diff-shades depends on being able to monkeypatch this function to operate. I know it's
705 # not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
706 @mypyc_attr(patchable=True)
708 src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
710 """Reformat a single file under `src` without spawning child processes.
712 `fast`, `write_back`, and `mode` options are passed to
713 :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
720 elif str(src).startswith(STDIN_PLACEHOLDER):
722 # Use the original name again in case we want to print something
724 src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
729 if src.suffix == ".pyi":
730 mode = replace(mode, is_pyi=True)
731 elif src.suffix == ".ipynb":
732 mode = replace(mode, is_ipynb=True)
733 if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
734 changed = Changed.YES
737 if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
738 cache = read_cache(mode)
739 res_src = src.resolve()
740 res_src_s = str(res_src)
741 if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
742 changed = Changed.CACHED
743 if changed is not Changed.CACHED and format_file_in_place(
744 src, fast=fast, write_back=write_back, mode=mode
746 changed = Changed.YES
747 if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
748 write_back is WriteBack.CHECK and changed is Changed.NO
750 write_cache(cache, [src], mode)
751 report.done(src, changed)
752 except Exception as exc:
754 traceback.print_exc()
755 report.failed(src, str(exc))
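# A hedged usage sketch for reformat_one (added here, not upstream): format a single,
# hypothetical file in place and collect the outcome in a fresh Report.
def _demo_reformat_one(path: Path) -> None:
    report = Report(check=False, diff=False, quiet=True, verbose=False)
    reformat_one(
        src=path, fast=False, write_back=WriteBack.YES, mode=Mode(), report=report
    )
    print(report.return_code)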
758 # diff-shades depends on being able to monkeypatch this function to operate. I know it's
759 # not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
760 @mypyc_attr(patchable=True)
764 write_back: WriteBack,
767 workers: Optional[int],
769 """Reformat multiple files using a ProcessPoolExecutor."""
771 loop = asyncio.get_event_loop()
772 worker_count = workers if workers is not None else DEFAULT_WORKERS
773 if sys.platform == "win32":
774 # Work around https://bugs.python.org/issue26903
775 assert worker_count is not None
776 worker_count = min(worker_count, 60)
778 executor = ProcessPoolExecutor(max_workers=worker_count)
779 except (ImportError, NotImplementedError, OSError):
780 # we arrive here if the underlying system does not support multi-processing
781 # like in AWS Lambda or Termux, in which case we gracefully fall back to
782 # a ThreadPoolExecutor with just a single worker (more workers would not do us
783 # any good due to the Global Interpreter Lock)
784 executor = ThreadPoolExecutor(max_workers=1)
787 loop.run_until_complete(
791 write_back=write_back,
800 if executor is not None:
804 async def schedule_formatting(
807 write_back: WriteBack,
810 loop: asyncio.AbstractEventLoop,
813 """Run formatting of `sources` in parallel using the provided `executor`.
815 (Use ProcessPoolExecutors for actual parallelism.)
817 `write_back`, `fast`, and `mode` options are passed to
818 :func:`format_file_in_place`.
821 if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
822 cache = read_cache(mode)
823 sources, cached = filter_cached(cache, sources)
824 for src in sorted(cached):
825 report.done(src, Changed.CACHED)
830 sources_to_cache = []
832 if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
833 # For diff output, we need locks to ensure we don't interleave output
834 # from different processes.
836 lock = manager.Lock()
838 asyncio.ensure_future(
839 loop.run_in_executor(
840 executor, format_file_in_place, src, fast, mode, write_back, lock
843 for src in sorted(sources)
845 pending = tasks.keys()
847 loop.add_signal_handler(signal.SIGINT, cancel, pending)
848 loop.add_signal_handler(signal.SIGTERM, cancel, pending)
849 except NotImplementedError:
850 # There are no good alternatives for these on Windows.
853 done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
855 src = tasks.pop(task)
857 cancelled.append(task)
858 elif task.exception():
859 report.failed(src, str(task.exception()))
861 changed = Changed.YES if task.result() else Changed.NO
862 # If the file was written back or was successfully checked as
863 # well-formatted, store this information in the cache.
864 if write_back is WriteBack.YES or (
865 write_back is WriteBack.CHECK and changed is Changed.NO
867 sources_to_cache.append(src)
868 report.done(src, changed)
870 if sys.version_info >= (3, 7):
871 await asyncio.gather(*cancelled, return_exceptions=True)
873 await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
875 write_cache(cache, sources_to_cache, mode)
878 def format_file_in_place(
882 write_back: WriteBack = WriteBack.NO,
883 lock: Any = None, # multiprocessing.Manager().Lock() is some crazy proxy
885 """Format file under `src` path. Return True if changed.
887 If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
889 `mode` and `fast` options are passed to :func:`format_file_contents`.
891 if src.suffix == ".pyi":
892 mode = replace(mode, is_pyi=True)
893 elif src.suffix == ".ipynb":
894 mode = replace(mode, is_ipynb=True)
896 then = datetime.utcfromtimestamp(src.stat().st_mtime)
897 with open(src, "rb") as buf:
898 src_contents, encoding, newline = decode_bytes(buf.read())
900 dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
901 except NothingChanged:
903 except JSONDecodeError:
905 f"File '{src}' cannot be parsed as valid Jupyter notebook."
908 if write_back == WriteBack.YES:
909 with open(src, "w", encoding=encoding, newline=newline) as f:
910 f.write(dst_contents)
911 elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
912 now = datetime.utcnow()
913 src_name = f"{src}\t{then} +0000"
914 dst_name = f"{src}\t{now} +0000"
916 diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
918 diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
920 if write_back == WriteBack.COLOR_DIFF:
921 diff_contents = color_diff(diff_contents)
923 with lock or nullcontext():
924 f = io.TextIOWrapper(
930 f = wrap_stream_for_windows(f)
931 f.write(diff_contents)
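# Hypothetical example (my addition): ask for a diff instead of rewriting the file;
# the return value indicates whether the file would change.
def _demo_format_file_in_place(path: Path) -> None:
    changed = format_file_in_place(
        path, fast=False, mode=Mode(), write_back=WriteBack.DIFF
    )
    print("would reformat" if changed else "already formatted")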
937 def format_stdin_to_stdout(
940 content: Optional[str] = None,
941 write_back: WriteBack = WriteBack.NO,
944 """Format file on stdin. Return True if changed.
946 If content is None, it's read from sys.stdin.
948 If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
949 write a diff to stdout. The `mode` argument is passed to
950 :func:`format_file_contents`.
952 then = datetime.utcnow()
955 src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
957 src, encoding, newline = content, "utf-8", ""
961 dst = format_file_contents(src, fast=fast, mode=mode)
964 except NothingChanged:
968 f = io.TextIOWrapper(
969 sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
971 if write_back == WriteBack.YES:
972 # Make sure there's a newline after the content
973 if dst and dst[-1] != "\n":
976 elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
977 now = datetime.utcnow()
978 src_name = f"STDIN\t{then} +0000"
979 dst_name = f"STDOUT\t{now} +0000"
980 d = diff(src, dst, src_name, dst_name)
981 if write_back == WriteBack.COLOR_DIFF:
983 f = wrap_stream_for_windows(f)
988 def check_stability_and_equivalence(
989 src_contents: str, dst_contents: str, *, mode: Mode
991 """Perform stability and equivalence checks.
993 Raise AssertionError if source and destination contents are not
994 equivalent, or if a second pass of the formatter would format the
997 assert_equivalent(src_contents, dst_contents)
998 assert_stable(src_contents, dst_contents, mode=mode)
1001 def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
1002 """Reformat contents of a file and return new contents.
1004 If `fast` is False, additionally confirm that the reformatted code is
1005 valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
1006 `mode` is passed to :func:`format_str`.
1008 if not src_contents.strip():
1009 raise NothingChanged
1012 dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
1014 dst_contents = format_str(src_contents, mode=mode)
1015 if src_contents == dst_contents:
1016 raise NothingChanged
1018 if not fast and not mode.is_ipynb:
1019 # Jupyter notebooks will already have been checked above.
1020 check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
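# A tiny illustration of format_file_contents (not in the original file): changed
# input returns the new contents, already-formatted input raises NothingChanged.
def _demo_format_file_contents() -> None:
    assert format_file_contents("x=1\n", fast=True, mode=Mode()) == "x = 1\n"
    try:
        format_file_contents("x = 1\n", fast=True, mode=Mode())
    except NothingChanged:
        pass  # nothing to do for already formatted source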
1024 def validate_cell(src: str, mode: Mode) -> None:
1025 """Check that cell does not already contain TransformerManager transformations,
1026 or non-Python cell magics, which might cause tokenizer_rt to break because of
1029 If a cell contains ``!ls``, then it'll be transformed to
1030 ``get_ipython().system('ls')``. However, if the cell originally contained
1031 ``get_ipython().system('ls')``, then it would get transformed in the same way:
1033 >>> TransformerManager().transform_cell("get_ipython().system('ls')")
1034 "get_ipython().system('ls')\n"
1035 >>> TransformerManager().transform_cell("!ls")
1036 "get_ipython().system('ls')\n"
1038 Due to the impossibility of safely roundtripping in such situations, cells
1039 containing transformed magics will be ignored.
1041 if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
1042 raise NothingChanged
1045 and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
1047 raise NothingChanged
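# A short, hypothetical illustration: a cell starting with a non-Python cell magic
# such as %%html is rejected with NothingChanged (assuming "html" has not been
# registered via --python-cell-magics).
def _demo_validate_cell() -> None:
    try:
        validate_cell("%%html\n<b>hi</b>", Mode())
    except NothingChanged:
        pass  # non-Python magic, the cell is left alone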
1050 def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
1051 """Format code in given cell of Jupyter notebook.
1055 - if cell has trailing semicolon, remove it;
1056 - if cell has IPython magics, mask them;
1058 - reinstate IPython magics;
1059 - reinstate trailing semicolon (if originally present);
1060 - strip trailing newlines.
1062 Cells with syntax errors will not be processed, as they
1063 could potentially be automagics or multi-line magics, which
1064 are currently not supported.
1066 validate_cell(src, mode)
1067 src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
1071 masked_src, replacements = mask_cell(src_without_trailing_semicolon)
1073 raise NothingChanged from None
1074 masked_dst = format_str(masked_src, mode=mode)
1076 check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
1077 dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
1078 dst = put_trailing_semicolon_back(
1079 dst_without_trailing_semicolon, has_trailing_semicolon
1081 dst = dst.rstrip("\n")
1083 raise NothingChanged from None
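# Sketch of format_cell round-tripping a trailing semicolon (my own example, assuming
# Black's Jupyter extras such as tokenize-rt are installed): the semicolon is stripped
# for formatting and then put back.
def _demo_format_cell() -> None:
    assert format_cell("x=1;", fast=True, mode=Mode(is_ipynb=True)) == "x = 1;"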
1087 def validate_metadata(nb: MutableMapping[str, Any]) -> None:
1088 """If notebook is marked as non-Python, don't format it.
1090 All notebook metadata fields are optional, see
1091 https://nbformat.readthedocs.io/en/latest/format_description.html. So
1092 if a notebook has empty metadata, we will try to parse it anyway.
1094 language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
1095 if language is not None and language != "python":
1096 raise NothingChanged from None
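# Minimal illustration (added here, not upstream): a notebook whose metadata declares
# a non-Python language is skipped, while empty metadata is still formatted.
def _demo_validate_metadata() -> None:
    try:
        validate_metadata({"metadata": {"language_info": {"name": "R"}}})
    except NothingChanged:
        pass  # not a Python notebook
    validate_metadata({})  # empty metadata raises nothing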
1099 def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
1100 """Format Jupyter notebook.
1102 Operate cell-by-cell, only on code cells, only for Python notebooks.
1103 If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
1105 trailing_newline = src_contents[-1] == "\n"
1107 nb = json.loads(src_contents)
1108 validate_metadata(nb)
1109 for cell in nb["cells"]:
1110 if cell.get("cell_type", None) == "code":
1112 src = "".join(cell["source"])
1113 dst = format_cell(src, fast=fast, mode=mode)
1114 except NothingChanged:
1117 cell["source"] = dst.splitlines(keepends=True)
1120 dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
1121 if trailing_newline:
1122 dst_contents = dst_contents + "\n"
1125 raise NothingChanged
1128 def format_str(src_contents: str, *, mode: Mode) -> str:
1129 """Reformat a string and return new contents.
1131 `mode` determines formatting options, such as how many characters per line are
1135 >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
1136 def f(arg: str = "") -> None:
1139 A more complex example:
1142 ... black.format_str(
1143 ... "def f(arg:str='')->None: hey",
1144 ... mode=black.Mode(
1145 ... target_versions={black.TargetVersion.PY36},
1147 ... string_normalization=False,
1158 dst_contents = _format_str_once(src_contents, mode=mode)
1159 # Forced second pass to work around optional trailing commas (becoming
1160 # forced trailing commas on pass 2) interacting differently with optional
1161 # parentheses. Admittedly ugly.
1162 if src_contents != dst_contents:
1163 return _format_str_once(dst_contents, mode=mode)
1167 def _format_str_once(src_contents: str, *, mode: Mode) -> str:
1168 src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
1170 future_imports = get_future_imports(src_node)
1171 if mode.target_versions:
1172 versions = mode.target_versions
1174 versions = detect_target_versions(src_node, future_imports=future_imports)
1176 normalize_fmt_off(src_node, preview=mode.preview)
1177 lines = LineGenerator(mode=mode)
1178 elt = EmptyLineTracker(is_pyi=mode.is_pyi)
1179 empty_line = Line(mode=mode)
1181 split_line_features = {
1183 for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
1184 if supports_feature(versions, feature)
1186 for current_line in lines.visit(src_node):
1187 dst_contents.append(str(empty_line) * after)
1188 before, after = elt.maybe_empty_lines(current_line)
1189 dst_contents.append(str(empty_line) * before)
1190 for line in transform_line(
1191 current_line, mode=mode, features=split_line_features
1193 dst_contents.append(str(line))
1194 return "".join(dst_contents)
1197 def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
1198 """Return a tuple of (decoded_contents, encoding, newline).
1200 `newline` is either CRLF or LF but `decoded_contents` is decoded with
1201 universal newlines (i.e. only contains LF).
1203 srcbuf = io.BytesIO(src)
1204 encoding, lines = tokenize.detect_encoding(srcbuf.readline)
1206 return "", encoding, "\n"
1208 newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
1210 with io.TextIOWrapper(srcbuf, encoding) as tiow:
1211 return tiow.read(), encoding, newline
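# A quick sketch of decode_bytes (my addition): CRLF input is reported as "\r\n",
# while the decoded contents only ever contain "\n".
def _demo_decode_bytes() -> None:
    contents, encoding, newline = decode_bytes(b"x = 1\r\n")
    assert (contents, newline) == ("x = 1\n", "\r\n")
    assert encoding == "utf-8"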
1214 def get_features_used( # noqa: C901
1215 node: Node, *, future_imports: Optional[Set[str]] = None
1217 """Return a set of (relatively) new Python features used in this file.
1219 Currently looking for:
1221 - underscores in numeric literals;
1222 - trailing commas after * or ** in function signatures and calls;
1223 - positional only arguments in function signatures and lambdas;
1224 - assignment expression;
1225 - relaxed decorator syntax;
1226 - usage of __future__ flags (annotations);
1227 - print / exec statements;
1229 features: Set[Feature] = set()
1232 FUTURE_FLAG_TO_FEATURE[future_import]
1233 for future_import in future_imports
1234 if future_import in FUTURE_FLAG_TO_FEATURE
1237 for n in node.pre_order():
1238 if is_string_token(n):
1239 value_head = n.value[:2]
1240 if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
1241 features.add(Feature.F_STRINGS)
1243 elif n.type == token.NUMBER:
1244 assert isinstance(n, Leaf)
1246 features.add(Feature.NUMERIC_UNDERSCORES)
1248 elif n.type == token.SLASH:
1249 if n.parent and n.parent.type in {
1254 features.add(Feature.POS_ONLY_ARGUMENTS)
1256 elif n.type == token.COLONEQUAL:
1257 features.add(Feature.ASSIGNMENT_EXPRESSIONS)
1259 elif n.type == syms.decorator:
1260 if len(n.children) > 1 and not is_simple_decorator_expression(
1263 features.add(Feature.RELAXED_DECORATORS)
1266 n.type in {syms.typedargslist, syms.arglist}
1268 and n.children[-1].type == token.COMMA
1270 if n.type == syms.typedargslist:
1271 feature = Feature.TRAILING_COMMA_IN_DEF
1273 feature = Feature.TRAILING_COMMA_IN_CALL
1275 for ch in n.children:
1276 if ch.type in STARS:
1277 features.add(feature)
1279 if ch.type == syms.argument:
1280 for argch in ch.children:
1281 if argch.type in STARS:
1282 features.add(feature)
1285 n.type in {syms.return_stmt, syms.yield_expr}
1286 and len(n.children) >= 2
1287 and n.children[1].type == syms.testlist_star_expr
1288 and any(child.type == syms.star_expr for child in n.children[1].children)
1290 features.add(Feature.UNPACKING_ON_FLOW)
1293 n.type == syms.annassign
1294 and len(n.children) >= 4
1295 and n.children[3].type == syms.testlist_star_expr
1297 features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
1300 n.type == syms.except_clause
1301 and len(n.children) >= 2
1302 and n.children[1].type == token.STAR
1304 features.add(Feature.EXCEPT_STAR)
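# Rough example of feature detection (not from the original file), reusing the
# lib2to3_parse helper imported above: a walrus operator maps to ASSIGNMENT_EXPRESSIONS.
def _demo_get_features_used() -> None:
    node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
    assert Feature.ASSIGNMENT_EXPRESSIONS in get_features_used(node)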
1309 def detect_target_versions(
1310 node: Node, *, future_imports: Optional[Set[str]] = None
1311 ) -> Set[TargetVersion]:
1312 """Detect the version to target based on the nodes used."""
1313 features = get_features_used(node, future_imports=future_imports)
1315 version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
1319 def get_future_imports(node: Node) -> Set[str]:
1320 """Return a set of __future__ imports in the file."""
1321 imports: Set[str] = set()
1323 def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
1324 for child in children:
1325 if isinstance(child, Leaf):
1326 if child.type == token.NAME:
1329 elif child.type == syms.import_as_name:
1330 orig_name = child.children[0]
1331 assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
1332 assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
1333 yield orig_name.value
1335 elif child.type == syms.import_as_names:
1336 yield from get_imports_from_children(child.children)
1339 raise AssertionError("Invalid syntax parsing imports")
1341 for child in node.children:
1342 if child.type != syms.simple_stmt:
1345 first_child = child.children[0]
1346 if isinstance(first_child, Leaf):
1347 # Continue looking if we see a docstring; otherwise stop.
1349 len(child.children) == 2
1350 and first_child.type == token.STRING
1351 and child.children[1].type == token.NEWLINE
1357 elif first_child.type == syms.import_from:
1358 module_name = first_child.children[1]
1359 if not isinstance(module_name, Leaf) or module_name.value != "__future__":
1362 imports |= set(get_imports_from_children(first_child.children[3:]))
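# Illustrative check for get_future_imports (my own example): only the leading
# __future__ import is collected; a later regular import stops the scan.
def _demo_get_future_imports() -> None:
    node = lib2to3_parse("from __future__ import annotations\nimport os\n")
    assert get_future_imports(node) == {"annotations"}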
1369 def assert_equivalent(src: str, dst: str) -> None:
1370 """Raise AssertionError if `src` and `dst` aren't equivalent."""
1372 src_ast = parse_ast(src)
1373 except Exception as exc:
1374 raise AssertionError(
1375 "cannot use --safe with this file; failed to parse source file AST: "
1377 "This could be caused by running Black with an older Python version "
1378 "that does not support new syntax used in your source file."
1382 dst_ast = parse_ast(dst)
1383 except Exception as exc:
1384 log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
1385 raise AssertionError(
1386 f"INTERNAL ERROR: Black produced invalid code: {exc}. "
1387 "Please report a bug on https://github.com/psf/black/issues. "
1388 f"This invalid output might be helpful: {log}"
1391 src_ast_str = "\n".join(stringify_ast(src_ast))
1392 dst_ast_str = "\n".join(stringify_ast(dst_ast))
1393 if src_ast_str != dst_ast_str:
1394 log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
1395 raise AssertionError(
1396 "INTERNAL ERROR: Black produced code that is not equivalent to the"
1397 " source. Please report a bug on "
1398 f"https://github.com/psf/black/issues. This diff might be helpful: {log}"
1402 def assert_stable(src: str, dst: str, mode: Mode) -> None:
1403 """Raise AssertionError if `dst` reformats differently the second time."""
1404 # We shouldn't call format_str() here, because that formats the string
1405 # twice and may hide a bug where we bounce back and forth between two
1407 newdst = _format_str_once(dst, mode=mode)
1411 diff(src, dst, "source", "first pass"),
1412 diff(dst, newdst, "first pass", "second pass"),
1414 raise AssertionError(
1415 "INTERNAL ERROR: Black produced different code on the second pass of the"
1416 " formatter. Please report a bug on https://github.com/psf/black/issues."
1417 f" This diff might be helpful: {log}"
1422 def nullcontext() -> Iterator[None]:
1423 """Return an empty context manager.
1425 To be used like `contextlib.nullcontext`, which is only available from Python 3.7.
1430 def patch_click() -> None:
1431 """Make Click not crash on Python 3.6 with LANG=C.
1433 On certain misconfigured environments, Python 3 selects the ASCII encoding as the
1434 default which restricts paths that it can access during the lifetime of the
1435 application. Click refuses to work in this scenario by raising a RuntimeError.
1437 In the case of Black, the likelihood that non-ASCII characters are going to be used in
1438 file paths is minimal since it's Python source code. Moreover, this crash was
1439 spurious on Python 3.7 thanks to PEP 538 and PEP 540.
1441 modules: List[Any] = []
1443 from click import core
1447 modules.append(core)
1449 # Removed in Click 8.1.0 and newer; we keep this around for users who have
1450 # older versions installed.
1451 from click import _unicodefun # type: ignore
1455 modules.append(_unicodefun)
1457 for module in modules:
1458 if hasattr(module, "_verify_python3_env"):
1459 module._verify_python3_env = lambda: None # type: ignore
1460 if hasattr(module, "_verify_python_env"):
1461 module._verify_python_env = lambda: None # type: ignore
1464 def patched_main() -> None:
1465 maybe_install_uvloop()
1471 if __name__ == "__main__":