All patches and comments are welcome. Please squash your changes into logical
commits before using git format-patch and git send-email to send them to
patches@git.madduck.net.
If you read over the Git project's submission guidelines and adhere to them,
I'd be especially grateful.
2 from json.decoder import JSONDecodeError
4 from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
5 from contextlib import contextmanager
6 from datetime import datetime
9 from multiprocessing import Manager, freeze_support
11 from pathlib import Path
12 from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
35 from click.core import ParameterSource
36 from dataclasses import replace
37 from mypy_extensions import mypyc_attr
39 from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
40 from black.const import STDIN_PLACEHOLDER
41 from black.nodes import STARS, syms, is_simple_decorator_expression
42 from black.nodes import is_string_token
43 from black.lines import Line, EmptyLineTracker
44 from black.linegen import transform_line, LineGenerator, LN
45 from black.comments import normalize_fmt_off
46 from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
47 from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
48 from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
49 from black.concurrency import cancel, shutdown, maybe_install_uvloop
50 from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
51 from black.report import Report, Changed, NothingChanged
52 from black.files import find_project_root, find_pyproject_toml, parse_pyproject_toml
53 from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
54 from black.files import wrap_stream_for_windows
55 from black.parsing import InvalidInput # noqa F401
56 from black.parsing import lib2to3_parse, parse_ast, stringify_ast
57 from black.handle_ipynb_magics import (
60 remove_trailing_semicolon,
61 put_trailing_semicolon_back,
64 jupyter_dependencies_are_installed,
69 from blib2to3.pytree import Node, Leaf
70 from blib2to3.pgen2 import token
72 from _black_version import version as __version__
# True when this module runs as a mypyc-compiled extension: the module file
# then ends in ".pyd" (Windows) or ".so" (POSIX) instead of ".py".
COMPILED = Path(__file__).suffix in (".pyd", ".so")
class WriteBack(Enum):
    """What to do with the result of formatting each input file."""

    NO = 0  # report only, touch nothing
    YES = 1  # write reformatted code back to the file
    DIFF = 2  # print a unified diff to stdout
    CHECK = 3  # exit status only, no output
    COLOR_DIFF = 4  # like DIFF, but colorized

    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        """Map the --check/--diff/--color CLI flags onto a WriteBack member."""
        if check and not diff:
            return cls.CHECK
        if diff and color:
            return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES
# Legacy name, left for integrations.
FileMode = Mode

# One worker per CPU by default; os.cpu_count() may return None on platforms
# where the count cannot be determined, so consumers must tolerate None.
DEFAULT_WORKERS = os.cpu_count()
def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
) -> Optional[str]:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    if not value:
        value = find_pyproject_toml(ctx.params.get("src", ()))
        if value is None:
            return None

    try:
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        ) from None

    if not config:
        return None

    # Sanitize the values to be Click friendly. For more information please see:
    # https://github.com/psf/black/issues/1458
    # https://github.com/pallets/click/issues/1567
    config = {
        k: str(v) if not isinstance(v, (list, dict)) else v
        for k, v in config.items()
    }

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
        )

    # Layer the file-based configuration under any defaults Click already has.
    default_map: Dict[str, Any] = {}
    if ctx.default_map:
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
    return value
def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    return [TargetVersion[name.upper()] for name in v]
def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    pattern_text = "(?x)" + regex if "\n" in regex else regex
    return re.compile(pattern_text)
def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
    """Click callback: compile `value` as a regex, or raise BadParameter."""
    if value is None:
        return None
    try:
        return re_compile_maybe_verbose(value)
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
# NOTE(review): Fragment of black's `main` click command — the option decorator
# stack plus the command body. This excerpt is missing many interleaved lines
# (decorator openers, flag names, defaults), so the code is preserved verbatim;
# the comments below only mark the visible sections.
# --- click option declarations (each belongs to an elided @click.option(...)) ---
188 context_settings={"help_option_names": ["-h", "--help"]},
189 # While Click does set this field automatically using the docstring, mypyc
190 # (annoyingly) strips 'em so we need to set it here too.
191 help="The uncompromising code formatter.",
193 @click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
198 default=DEFAULT_LINE_LENGTH,
199 help="How many characters per line to allow.",
205 type=click.Choice([v.name.lower() for v in TargetVersion]),
206 callback=target_version_option_callback,
209 "Python versions that should be supported by Black's output. [default: per-file"
217 "Format all input files like typing stubs regardless of file extension (useful"
218 " when piping source on standard input)."
225 "Format all input files like Jupyter Notebooks regardless of file extension "
226 "(useful when piping source on standard input)."
230 "--python-cell-magics",
233 "When processing Jupyter Notebooks, add the given magic to the list"
234 f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
235 " Useful for formatting cells with custom python magics."
241 "--skip-string-normalization",
243 help="Don't normalize string quotes or prefixes.",
247 "--skip-magic-trailing-comma",
249 help="Don't use trailing commas as a reason to split lines.",
252 "--experimental-string-processing",
255 help="(DEPRECATED and now included in --preview) Normalize string literals.",
261 "Enable potentially disruptive style changes that may be added to Black's main"
262 " functionality in the next major release."
269 "Don't write the files back, just return the status. Return code 0 means"
270 " nothing would change. Return code 1 means some files would be reformatted."
271 " Return code 123 means there was an internal error."
277 help="Don't write the files back, just output a diff for each file on stdout.",
280 "--color/--no-color",
282 help="Show colored diff. Only applies when `--diff` is given.",
287 help="If --fast given, skip temporary sanity checks. [default: --safe]",
290 "--required-version",
293 "Require a specific version of Black to be running (useful for unifying results"
294 " across many environments e.g. with a pyproject.toml file)."
300 default=DEFAULT_INCLUDES,
301 callback=validate_regex,
303 "A regular expression that matches files and directories that should be"
304 " included on recursive searches. An empty value means all files are included"
305 " regardless of the name. Use forward slashes for directories on all platforms"
306 " (Windows, too). Exclusions are calculated first, inclusions later."
313 callback=validate_regex,
315 "A regular expression that matches files and directories that should be"
316 " excluded on recursive searches. An empty value means no paths are excluded."
317 " Use forward slashes for directories on all platforms (Windows, too)."
318 " Exclusions are calculated first, inclusions later. [default:"
319 f" {DEFAULT_EXCLUDES}]"
326 callback=validate_regex,
328 "Like --exclude, but adds additional files and directories on top of the"
329 " excluded ones. (Useful if you simply want to add to the default)"
335 callback=validate_regex,
337 "Like --exclude, but files and directories matching this regex will be "
338 "excluded even when they are passed explicitly as arguments."
345 "The name of the file when passing it through stdin. Useful to make "
346 "sure Black will respect --force-exclude option on some "
347 "editors that rely on using stdin."
353 type=click.IntRange(min=1),
354 default=DEFAULT_WORKERS,
356 help="Number of parallel workers",
363 "Don't emit non-error messages to stderr. Errors are still emitted; silence"
364 " those with 2>/dev/null."
372 "Also emit messages to stderr about files that were not changed or were ignored"
373 " due to exclusion patterns."
376 @click.version_option(
378 message=f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})",
384 exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
400 callback=read_pyproject_toml,
401 help="Read configuration from FILE path.",
# --- `main` signature fragment (parameters fed by the options above) ---
408 target_version: List[TargetVersion],
415 python_cell_magics: Sequence[str],
416 skip_string_normalization: bool,
417 skip_magic_trailing_comma: bool,
418 experimental_string_processing: bool,
422 required_version: Optional[str],
423 include: Pattern[str],
424 exclude: Optional[Pattern[str]],
425 extend_exclude: Optional[Pattern[str]],
426 force_exclude: Optional[Pattern[str]],
427 stdin_filename: Optional[str],
429 src: Tuple[str, ...],
430 config: Optional[str],
# --- `main` body fragment: validates SRC/--code exclusivity, records the
# --- project root, builds Mode and Report, collects sources via get_sources,
# --- then formats and exits with the report's return code.
432 """The uncompromising code formatter."""
433 ctx.ensure_object(dict)
435 if src and code is not None:
438 + "\n\n'SRC' and 'code' cannot be passed simultaneously."
441 if not src and code is None:
442 out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
445 root, method = find_project_root(src) if code is None else (None, None)
446 ctx.obj["root"] = root
451 f"Identified `{root}` as project root containing a {method}.",
456 (normalize_path_maybe_ignore(Path(source), root), source)
459 srcs_string = ", ".join(
463 else f'\033[31m"{source} (skipping - invalid)"\033[34m'
464 for _norm, source in normalized
467 out(f"Sources to be formatted: {srcs_string}", fg="blue")
470 config_source = ctx.get_parameter_source("config")
471 if config_source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP):
472 out("Using configuration from project root.", fg="blue")
474 out(f"Using configuration in '{config}'.", fg="blue")
476 error_msg = "Oh no! 💥 💔 💥"
477 if required_version and required_version != __version__:
479 f"{error_msg} The required version `{required_version}` does not match"
480 f" the running version `{__version__}`!"
484 err("Cannot pass both `pyi` and `ipynb` flags!")
487 write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
489 versions = set(target_version)
491 # We'll autodetect later.
494 target_versions=versions,
495 line_length=line_length,
498 string_normalization=not skip_string_normalization,
499 magic_trailing_comma=not skip_magic_trailing_comma,
500 experimental_string_processing=experimental_string_processing,
502 python_cell_magics=set(python_cell_magics),
506 # Run in quiet mode by default with -c; the extra output isn't useful.
507 # You can still pass -v to get verbose output.
510 report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
514 content=code, fast=fast, write_back=write_back, mode=mode, report=report
518 sources = get_sources(
525 extend_exclude=extend_exclude,
526 force_exclude=force_exclude,
528 stdin_filename=stdin_filename,
530 except GitWildMatchPatternError:
535 "No Python files are present to be formatted. Nothing to do 😴",
541 if len(sources) == 1:
545 write_back=write_back,
553 write_back=write_back,
559 if verbose or not quiet:
560 if code is None and (verbose or report.change_count or report.failure_count):
562 out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
564 click.echo(str(report), err=True)
565 ctx.exit(report.return_code)
# NOTE(review): Fragment of `get_sources`, which expands the CLI SRC arguments
# into the set of Path objects to format (handling "-"/--stdin-filename,
# --force-exclude, and .ipynb dependency checks). Many interior lines are
# missing from this excerpt; preserved verbatim with section markers only.
# --- signature fragment ---
571 src: Tuple[str, ...],
574 include: Pattern[str],
575 exclude: Optional[Pattern[str]],
576 extend_exclude: Optional[Pattern[str]],
577 force_exclude: Optional[Pattern[str]],
579 stdin_filename: Optional[str],
# --- body fragment ---
581 """Compute the set of files to be formatted."""
582 sources: Set[Path] = set()
585 exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
586 gitignore = get_gitignore(ctx.obj["root"])
# per-argument handling: "-" with --stdin-filename maps stdin to a fake path
591 if s == "-" and stdin_filename:
592 p = Path(stdin_filename)
598 if is_stdin or p.is_file():
599 normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
600 if normalized_path is None:
603 normalized_path = "/" + normalized_path
604 # Hard-exclude any files that matches the `--force-exclude` regex.
606 force_exclude_match = force_exclude.search(normalized_path)
608 force_exclude_match = None
609 if force_exclude_match and force_exclude_match.group(0):
610 report.path_ignored(p, "matches the --force-exclude regular expression")
614 p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
616 if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
617 verbose=verbose, quiet=quiet
# paths that are neither files nor directories are reported as errors
640 err(f"invalid path: {s}")
def path_empty(
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
) -> None:
    """
    Exit if there is no `src` provided for formatting
    """
    if not src:
        if verbose or not quiet:
            out(msg)
        ctx.exit(0)
def reformat_code(
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
) -> None:
    """
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    # A synthetic path so the Report has something to show for string input.
    path = Path("<string>")
    try:
        changed = Changed.NO
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
        ):
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(path, str(exc))
def reformat_one(
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO

        # Figure out whether the "file" is really stdin, possibly renamed via
        # the STDIN_PLACEHOLDER prefix added by get_sources().
        if str(src) == "-":
            is_stdin = True
        elif str(src).startswith(STDIN_PLACEHOLDER):
            is_stdin = True
            # Use the original name again in case we want to print something
            # to the user.
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
        else:
            is_stdin = False

        if is_stdin:
            # Infer stub/notebook handling from the (possibly fake) suffix.
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
        else:
            cache: Cache = {}
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                # Skip files whose cached mtime/size still matches on disk.
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
            ):
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
            ):
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(src, str(exc))
# diff-shades depends on being to monkeypatch this function to operate. I know it's
# not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_many(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    workers: Optional[int],
) -> None:
    """Reformat multiple files using a ProcessPoolExecutor."""
    executor: Executor
    loop = asyncio.get_event_loop()
    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        assert worker_count is not None
        worker_count = min(worker_count, 60)
    try:
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # we arrive here if the underlying system does not support multi-processing
        # like in AWS Lambda or Termux, in which case we gracefully fallback to
        # a ThreadPoolExecutor with just a single worker (more workers would not do us
        # any good due to the Global Interpreter Lock)
        executor = ThreadPoolExecutor(max_workers=1)

    try:
        loop.run_until_complete(
            schedule_formatting(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                loop=loop,
                executor=executor,
            )
        )
    finally:
        try:
            shutdown(loop)
        finally:
            asyncio.set_event_loop(None)
        if executor is not None:
            executor.shutdown()
async def schedule_formatting(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    loop: asyncio.AbstractEventLoop,
    executor: "Executor",
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    """
    cache: Cache = {}
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # Drop files whose cached fingerprint still matches; report them as cached.
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    if not sources:
        return

    cancelled = []
    sources_to_cache = []
    lock = None
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        manager = Manager()
        lock = manager.Lock()
    tasks = {
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
            )
        ): src
        for src in sorted(sources)
    }
    pending = tasks.keys()
    try:
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        pass
    while pending:
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            src = tasks.pop(task)
            if task.cancelled():
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
            else:
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                ):
                    sources_to_cache.append(src)
                report.done(src, changed)
    if cancelled:
        if sys.version_info >= (3, 7):
            await asyncio.gather(*cancelled, return_exceptions=True)
        else:
            # The `loop` keyword was required before 3.7 and removed in 3.10.
            await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
    if sources_to_cache:
        write_cache(cache, sources_to_cache, mode)
def format_file_in_place(
    src: Path,
    fast: bool,
    mode: Mode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write
    reformatted code back to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
    try:
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
        return False
    except JSONDecodeError:
        raise ValueError(
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
        ) from None

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        # Serialize diff output across worker processes.
        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
            f.detach()

    return True
def format_stdin_to_stdout(
    fast: bool,
    *,
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    mode: Mode,
) -> bool:
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    else:
        src, encoding, newline = content, "utf-8", ""

    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode)
        return True

    except NothingChanged:
        return False

    finally:
        # Output is emitted on all paths (changed, unchanged, or error).
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
                dst += "\n"
            f.write(dst)
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                d = color_diff(d)
                f = wrap_stream_for_windows(f)
            f.write(d)
        f.detach()
def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)
def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    # Whitespace-only input never produces output.
    if not src_contents.strip():
        raise NothingChanged

    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
    return dst_contents
def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of
    indentations.

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    """
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
    if (
        src[:2] == "%%"
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
    ):
        # Unknown cell magic: the cell body may not be Python at all.
        raise NothingChanged
def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    General idea is:

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - format cell;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        src
    )
    try:
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
    except SyntaxError:
        # Possibly an automagic or multi-line magic — leave the cell alone.
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    )
    dst = dst.rstrip("\n")
    if dst == src:
        raise NothingChanged from None
    return dst
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None
def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    trailing_newline = src_contents[-1] == "\n"
    modified = False
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            try:
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                pass
            else:
                cell["source"] = dst.splitlines(keepends=True)
                modified = True
    if modified:
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        return dst_contents
    else:
        raise NothingChanged
def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed.  Example:

    >>> import black
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    >>> print(
    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       line_length=10,
    ...       string_normalization=False,
    ...     ),
    ...   ),
    ... )
    def f(
        arg: str = ''
    ) -> None: hey

    """
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)
    return dst_contents
def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    """Run a single formatting pass over `src_contents` and return the result."""
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_contents = []
    future_imports = get_future_imports(src_node)
    if mode.target_versions:
        versions = mode.target_versions
    else:
        # No explicit target: infer from the syntax features actually used.
        versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    after = 0
    split_line_features = {
        feature
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    }
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            dst_contents.append(str(line))
    return "".join(dst_contents)
def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    srcbuf = io.BytesIO(src)
    # detect_encoding honours PEP 263 coding cookies and BOMs.
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    """
    features: Set[Feature] = set()
    if future_imports:
        features |= {
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
        }

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)

        elif n.type == token.NUMBER:
            assert isinstance(n, Leaf)
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            # A bare "/" in a parameter list marks positional-only arguments.
            if n.parent and n.parent.type in {
                syms.typedargslist,
                syms.arglist,
                syms.varargslist,
            }:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                n.children[1]
            ):
                features.add(Feature.RELAXED_DECORATORS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

        elif (
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
        ):
            features.add(Feature.UNPACKING_ON_FLOW)

        elif (
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
        ):
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

    return features
def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
    return {
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
    }
def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        # Yield plain names and the original names of "x as y" aliases.
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

            else:
                raise AssertionError("Invalid syntax parsing imports")

    # __future__ imports are only legal at the very top of the module, so stop
    # scanning at the first statement that is neither a docstring nor one.
    for child in node.children:
        if child.type != syms.simple_stmt:
            break

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue

            break

        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break

            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break

    return imports
def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            f"cannot use --safe with this file; failed to parse source file AST: "
            f"{exc}\n"
            f"This could be caused by running Black with an older Python version "
            f"that does not support new syntax used in your source file."
        ) from exc

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    # Compare normalized stringified ASTs rather than raw text so that purely
    # cosmetic differences do not count as inequivalence.
    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            f" source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"
        ) from None
def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    # versions.
    newdst = _format_str_once(dst, mode=mode)
    if dst != newdst:
        log = dump_to_file(
            str(mode),
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        )
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
        ) from None
@contextmanager
def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.
    """
    yield
def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    try:
        from click import core
        from click import _unicodefun
    except ModuleNotFoundError:
        # Nothing to patch if click (or its private module) is absent.
        return

    for module in (core, _unicodefun):
        # Neutralize whichever spelling of the environment check exists.
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore
def patched_main() -> None:
    """CLI entry point: install runtime workarounds, then run `main`."""
    maybe_install_uvloop()
    freeze_support()
    patch_click()
    main()
1427 if __name__ == "__main__":