All patches and comments are welcome. Please squash your changes to logical
commits before using git-format-patch and git-send-email to
patches@git.madduck.net.
If you could read over the Git project's submission guidelines and adhere to
them, I'd be especially grateful.
2 from json.decoder import JSONDecodeError
4 from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
5 from contextlib import contextmanager
6 from datetime import datetime
9 from multiprocessing import Manager, freeze_support
11 from pathlib import Path
12 from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
35 from click.core import ParameterSource
36 from dataclasses import replace
37 from mypy_extensions import mypyc_attr
39 from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
40 from black.const import STDIN_PLACEHOLDER
41 from black.nodes import STARS, syms, is_simple_decorator_expression
42 from black.nodes import is_string_token
43 from black.lines import Line, EmptyLineTracker
44 from black.linegen import transform_line, LineGenerator, LN
45 from black.comments import normalize_fmt_off
46 from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
47 from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
48 from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
49 from black.concurrency import cancel, shutdown, maybe_install_uvloop
50 from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
51 from black.report import Report, Changed, NothingChanged
52 from black.files import (
56 find_user_pyproject_toml,
58 from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
59 from black.files import wrap_stream_for_windows
60 from black.parsing import InvalidInput # noqa F401
61 from black.parsing import lib2to3_parse, parse_ast, stringify_ast
62 from black.handle_ipynb_magics import (
65 remove_trailing_semicolon,
66 put_trailing_semicolon_back,
69 jupyter_dependencies_are_installed,
74 from blib2to3.pytree import Node, Leaf
75 from blib2to3.pgen2 import token
77 from _black_version import version as __version__
# True when this module is running as a mypyc-compiled extension (.pyd/.so)
# rather than plain Python source; reported by `black --version`.
COMPILED = Path(__file__).suffix in (".pyd", ".so")
class WriteBack(Enum):
    """How (and whether) reformatted output should be written back/reported."""

    NO = 0
    YES = 1
    DIFF = 2
    CHECK = 3
    COLOR_DIFF = 4

    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        """Map the CLI flags to a WriteBack mode.

        `--check` without `--diff` -> CHECK; `--diff --color` -> COLOR_DIFF;
        `--diff` alone -> DIFF; otherwise YES (write files in place).
        """
        if check and not diff:
            return cls.CHECK

        if diff and color:
            return cls.COLOR_DIFF

        return cls.DIFF if diff else cls.YES
# Legacy name, left for integrations.
FileMode = Mode

# Default for --workers; os.cpu_count() may return None on platforms where the
# CPU count cannot be determined (callers must handle None).
DEFAULT_WORKERS = os.cpu_count()
def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
) -> Optional[str]:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    if not value:
        value = find_pyproject_toml(ctx.params.get("src", ()))
        if value is None:
            return None

    try:
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        ) from None

    if not config:
        return None
    else:
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
        config = {
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()
        }

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
        )

    # Config values become Click defaults, so explicit CLI flags still win.
    default_map: Dict[str, Any] = {}
    if ctx.default_map:
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
    return value
def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    return [TargetVersion[val.upper()] for val in v]
def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    if "\n" in regex:
        # Multi-line patterns are assumed to be written for readability:
        # compile them with re.VERBOSE so whitespace/comments are ignored.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    return compiled
def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
    """Click callback: compile a regex option, reporting bad patterns as usage errors."""
    try:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
# NOTE(review): this span is a mangled fragment of Black's CLI entry point --
# the click option decorator stack plus `main()`. Each line carries a stray
# original-line-number prefix, indentation was stripped, and a large fraction
# of lines were deleted, so a behavior-preserving rewrite cannot be done
# safely from this fragment alone. Restore this section from the upstream
# black/__init__.py rather than editing it in place; the lines below are kept
# verbatim so the remaining content is not lost.
193 context_settings={"help_option_names": ["-h", "--help"]},
194 # While Click does set this field automatically using the docstring, mypyc
195 # (annoyingly) strips 'em so we need to set it here too.
196 help="The uncompromising code formatter.",
198 @click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
203 default=DEFAULT_LINE_LENGTH,
204 help="How many characters per line to allow.",
210 type=click.Choice([v.name.lower() for v in TargetVersion]),
211 callback=target_version_option_callback,
214 "Python versions that should be supported by Black's output. [default: per-file"
222 "Format all input files like typing stubs regardless of file extension (useful"
223 " when piping source on standard input)."
230 "Format all input files like Jupyter Notebooks regardless of file extension "
231 "(useful when piping source on standard input)."
235 "--python-cell-magics",
238 "When processing Jupyter Notebooks, add the given magic to the list"
239 f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
240 " Useful for formatting cells with custom python magics."
246 "--skip-string-normalization",
248 help="Don't normalize string quotes or prefixes.",
252 "--skip-magic-trailing-comma",
254 help="Don't use trailing commas as a reason to split lines.",
257 "--experimental-string-processing",
260 help="(DEPRECATED and now included in --preview) Normalize string literals.",
266 "Enable potentially disruptive style changes that may be added to Black's main"
267 " functionality in the next major release."
274 "Don't write the files back, just return the status. Return code 0 means"
275 " nothing would change. Return code 1 means some files would be reformatted."
276 " Return code 123 means there was an internal error."
282 help="Don't write the files back, just output a diff for each file on stdout.",
285 "--color/--no-color",
287 help="Show colored diff. Only applies when `--diff` is given.",
292 help="If --fast given, skip temporary sanity checks. [default: --safe]",
295 "--required-version",
298 "Require a specific version of Black to be running (useful for unifying results"
299 " across many environments e.g. with a pyproject.toml file). It can be"
300 " either a major version number or an exact version."
306 default=DEFAULT_INCLUDES,
307 callback=validate_regex,
309 "A regular expression that matches files and directories that should be"
310 " included on recursive searches. An empty value means all files are included"
311 " regardless of the name. Use forward slashes for directories on all platforms"
312 " (Windows, too). Exclusions are calculated first, inclusions later."
319 callback=validate_regex,
321 "A regular expression that matches files and directories that should be"
322 " excluded on recursive searches. An empty value means no paths are excluded."
323 " Use forward slashes for directories on all platforms (Windows, too)."
324 " Exclusions are calculated first, inclusions later. [default:"
325 f" {DEFAULT_EXCLUDES}]"
332 callback=validate_regex,
334 "Like --exclude, but adds additional files and directories on top of the"
335 " excluded ones. (Useful if you simply want to add to the default)"
341 callback=validate_regex,
343 "Like --exclude, but files and directories matching this regex will be "
344 "excluded even when they are passed explicitly as arguments."
351 "The name of the file when passing it through stdin. Useful to make "
352 "sure Black will respect --force-exclude option on some "
353 "editors that rely on using stdin."
359 type=click.IntRange(min=1),
360 default=DEFAULT_WORKERS,
362 help="Number of parallel workers",
369 "Don't emit non-error messages to stderr. Errors are still emitted; silence"
370 " those with 2>/dev/null."
378 "Also emit messages to stderr about files that were not changed or were ignored"
379 " due to exclusion patterns."
382 @click.version_option(
384 message=f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})",
390 exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
406 callback=read_pyproject_toml,
407 help="Read configuration from FILE path.",
410 def main( # noqa: C901
414 target_version: List[TargetVersion],
421 python_cell_magics: Sequence[str],
422 skip_string_normalization: bool,
423 skip_magic_trailing_comma: bool,
424 experimental_string_processing: bool,
428 required_version: Optional[str],
429 include: Pattern[str],
430 exclude: Optional[Pattern[str]],
431 extend_exclude: Optional[Pattern[str]],
432 force_exclude: Optional[Pattern[str]],
433 stdin_filename: Optional[str],
435 src: Tuple[str, ...],
436 config: Optional[str],
438 """The uncompromising code formatter."""
439 ctx.ensure_object(dict)
441 if src and code is not None:
444 + "\n\n'SRC' and 'code' cannot be passed simultaneously."
447 if not src and code is None:
448 out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
451 root, method = find_project_root(src) if code is None else (None, None)
452 ctx.obj["root"] = root
457 f"Identified `{root}` as project root containing a {method}.",
462 (normalize_path_maybe_ignore(Path(source), root), source)
465 srcs_string = ", ".join(
469 else f'\033[31m"{source} (skipping - invalid)"\033[34m'
470 for _norm, source in normalized
473 out(f"Sources to be formatted: {srcs_string}", fg="blue")
476 config_source = ctx.get_parameter_source("config")
477 user_level_config = str(find_user_pyproject_toml())
478 if config == user_level_config:
480 "Using configuration from user-level config at "
481 f"'{user_level_config}'.",
484 elif config_source in (
485 ParameterSource.DEFAULT,
486 ParameterSource.DEFAULT_MAP,
488 out("Using configuration from project root.", fg="blue")
490 out(f"Using configuration in '{config}'.", fg="blue")
492 error_msg = "Oh no! 💥 💔 💥"
495 and required_version != __version__
496 and required_version != __version__.split(".")[0]
499 f"{error_msg} The required version `{required_version}` does not match"
500 f" the running version `{__version__}`!"
504 err("Cannot pass both `pyi` and `ipynb` flags!")
507 write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
509 versions = set(target_version)
511 # We'll autodetect later.
514 target_versions=versions,
515 line_length=line_length,
518 string_normalization=not skip_string_normalization,
519 magic_trailing_comma=not skip_magic_trailing_comma,
520 experimental_string_processing=experimental_string_processing,
522 python_cell_magics=set(python_cell_magics),
526 # Run in quiet mode by default with -c; the extra output isn't useful.
527 # You can still pass -v to get verbose output.
530 report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
534 content=code, fast=fast, write_back=write_back, mode=mode, report=report
538 sources = get_sources(
545 extend_exclude=extend_exclude,
546 force_exclude=force_exclude,
548 stdin_filename=stdin_filename,
550 except GitWildMatchPatternError:
555 "No Python files are present to be formatted. Nothing to do 😴",
561 if len(sources) == 1:
565 write_back=write_back,
573 write_back=write_back,
579 if verbose or not quiet:
580 if code is None and (verbose or report.change_count or report.failure_count):
582 out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
584 click.echo(str(report), err=True)
585 ctx.exit(report.return_code)
def get_sources(
    *,
    ctx: click.Context,
    src: Tuple[str, ...],
    quiet: bool,
    verbose: bool,
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    report: "Report",
    stdin_filename: Optional[str],
) -> Set[Path]:
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()

    if exclude is None:
        # Only apply the .gitignore together with the *default* excludes; an
        # explicit --exclude takes full control of what is skipped.
        exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
        gitignore = get_gitignore(ctx.obj["root"])
    else:
        gitignore = None

    for s in src:
        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
            is_stdin = True
        else:
            p = Path(s)
            is_stdin = False

        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:
                continue

            normalized_path = "/" + normalized_path
            # Hard-exclude any files that matches the `--force-exclude` regex.
            if force_exclude:
                force_exclude_match = force_exclude.search(normalized_path)
            else:
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                continue

            if is_stdin:
                # Tag the path so downstream code knows to read from stdin
                # while still reporting the user-visible filename.
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")

            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            ):
                continue

            sources.add(p)
        elif p.is_dir():
            sources.update(
                gen_python_files(
                    p.iterdir(),
                    ctx.obj["root"],
                    include,
                    exclude,
                    extend_exclude,
                    force_exclude,
                    report,
                    gitignore,
                    verbose=verbose,
                    quiet=quiet,
                )
            )
        elif s == "-":
            sources.add(p)
        else:
            err(f"invalid path: {s}")
    return sources
def path_empty(
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
) -> None:
    """
    Exit if there is no `src` provided for formatting
    """
    if not src:
        if verbose or not quiet:
            out(msg)
        # Nothing to format is a success, not an error.
        ctx.exit(0)
def reformat_code(
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
) -> None:
    """
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    path = Path("<string>")
    try:
        changed = Changed.NO
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
        ):
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(path, str(exc))
# diff-shades depends on being to monkeypatch this function to operate. I know it's
# not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_one(
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO

        if str(src) == "-":
            is_stdin = True
        elif str(src).startswith(STDIN_PLACEHOLDER):
            is_stdin = True
            # Use the original name again in case we want to print something
            # to the user
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
        else:
            is_stdin = False

        if is_stdin:
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
        else:
            cache: Cache = {}
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
            ):
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
            ):
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(src, str(exc))
# diff-shades depends on being to monkeypatch this function to operate. I know it's
# not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_many(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    workers: Optional[int],
) -> None:
    """Reformat multiple files using a ProcessPoolExecutor."""
    executor: Executor
    loop = asyncio.get_event_loop()
    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        assert worker_count is not None
        worker_count = min(worker_count, 60)
    try:
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # we arrive here if the underlying system does not support multi-processing
        # like in AWS Lambda or Termux, in which case we gracefully fallback to
        # a ThreadPoolExecutor with just a single worker (more workers would not do us
        # any good due to the Global Interpreter Lock)
        executor = ThreadPoolExecutor(max_workers=1)

    try:
        loop.run_until_complete(
            schedule_formatting(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                loop=loop,
                executor=executor,
            )
        )
    finally:
        # Always tear the loop and executor down, even after errors.
        try:
            shutdown(loop)
        finally:
            asyncio.set_event_loop(None)
        if executor is not None:
            executor.shutdown()
async def schedule_formatting(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    loop: asyncio.AbstractEventLoop,
    executor: Executor,
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    """
    cache: Cache = {}
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    if not sources:
        return

    cancelled = []
    sources_to_cache = []
    lock = None
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        manager = Manager()
        lock = manager.Lock()
    tasks = {
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
            )
        ): src
        for src in sorted(sources)
    }
    pending = tasks.keys()
    try:
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        pass
    while pending:
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            src = tasks.pop(task)
            if task.cancelled():
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
            else:
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                ):
                    sources_to_cache.append(src)
                report.done(src, changed)
    if cancelled:
        if sys.version_info >= (3, 7):
            await asyncio.gather(*cancelled, return_exceptions=True)
        else:
            # The `loop` keyword was required before 3.7 and removed in 3.10.
            await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
    if sources_to_cache:
        write_cache(cache, sources_to_cache, mode)
def format_file_in_place(
    src: Path,
    fast: bool,
    mode: Mode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
    try:
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
        return False
    except JSONDecodeError:
        raise ValueError(
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
        ) from None

    if write_back == WriteBack.YES:
        # Preserve the original encoding and newline style on write-back.
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        # Serialize diff output across worker processes.
        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
            f.detach()

    return True
def format_stdin_to_stdout(
    fast: bool,
    *,
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    mode: Mode,
) -> bool:
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    else:
        src, encoding, newline = content, "utf-8", ""

    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode)
        return True

    except NothingChanged:
        return False

    finally:
        # Output happens in `finally` so stdin is always echoed/diffed even
        # when nothing changed.
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
                dst += "\n"
            f.write(dst)
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                d = color_diff(d)
                f = wrap_stream_for_windows(f)
            f.write(d)
        f.detach()
def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)
def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    if not src_contents.strip():
        raise NothingChanged

    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
    return dst_contents
def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of
    indentations.

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    """
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
    if (
        src[:2] == "%%"
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
    ):
        # Unknown cell magic: not guaranteed to be Python, so skip the cell.
        raise NothingChanged
def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    General idea is:

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - format cell;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        src
    )
    try:
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
    except SyntaxError:
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    )
    dst = dst.rstrip("\n")
    if dst == src:
        raise NothingChanged from None
    return dst
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None
def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    if not src_contents:
        # Guard against IndexError on src_contents[-1] for empty input.
        raise NothingChanged

    trailing_newline = src_contents[-1] == "\n"
    modified = False
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            try:
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                pass
            else:
                cell["source"] = dst.splitlines(keepends=True)
                modified = True
    if modified:
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        return dst_contents
    else:
        raise NothingChanged
def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed.  Example:

    >>> import black
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    >>> print(
    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       line_length=10,
    ...       string_normalization=False,
    ...     ),
    ...   ),
    ... )
    def f(
        arg: str = ''
    ) -> None:
        hey

    """
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses.  Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)
    return dst_contents
def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    """Run a single formatting pass over `src_contents` (no stability retry)."""
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_contents = []
    future_imports = get_future_imports(src_node)
    if mode.target_versions:
        versions = mode.target_versions
    else:
        # No explicit targets given: infer from the syntax actually used.
        versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node, preview=mode.preview)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    after = 0
    split_line_features = {
        feature
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    }
    for current_line in lines.visit(src_node):
        # `after` carries the blank lines owed from the previous iteration.
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            dst_contents.append(str(line))
    return "".join(dst_contents)
def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    # detect_encoding consumed the first line(s); rewind before decoding the
    # whole buffer, otherwise those lines would be dropped from the output.
    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    """
    features: Set[Feature] = set()
    if future_imports:
        features |= {
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
        }

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)

        elif n.type == token.NUMBER:
            assert isinstance(n, Leaf)
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                syms.typedargslist,
                syms.arglist,
                syms.varargslist,
            }:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                n.children[1]
            ):
                features.add(Feature.RELAXED_DECORATORS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

        elif (
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
        ):
            features.add(Feature.UNPACKING_ON_FLOW)

        elif (
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
        ):
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

    return features
def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
    return {
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
    }
def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        # Yield the imported names from the children of an import_from node.
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

            else:
                raise AssertionError("Invalid syntax parsing imports")

    # __future__ imports must be at the top of the file, possibly after a
    # docstring, so stop scanning at the first non-import statement.
    for child in node.children:
        if child.type != syms.simple_stmt:
            break

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue

            break

        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break

            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break

    return imports
def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            f"{exc}\n"
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."
        ) from exc

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        # Formatted output failing to parse is an internal error; dump it for
        # the bug report.
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"
        ) from None
def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    # versions.
    newdst = _format_str_once(dst, mode=mode)
    if dst != newdst:
        log = dump_to_file(
            str(mode),
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        )
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
        ) from None
@contextmanager
def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.
    """
    yield
def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    try:
        from click import core
        from click import _unicodefun
    except ModuleNotFoundError:
        # Newer Click no longer ships _unicodefun; nothing to patch then.
        return

    for module in (core, _unicodefun):
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore
def patched_main() -> None:
    """Entry point used by the packaged console script: set up the runtime
    environment (uvloop, Windows freeze support, Click patches) then run main."""
    maybe_install_uvloop()
    freeze_support()
    patch_click()
    main()
1450 if __name__ == "__main__":