from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime
from json.decoder import JSONDecodeError
from multiprocessing import Manager, freeze_support
from pathlib import Path

from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError

from _black_version import version as __version__
from black.cache import Cache, filter_cached, get_cache_info, read_cache, write_cache
from black.comments import normalize_fmt_off
from black.concurrency import cancel, maybe_install_uvloop, shutdown
from black.const import (
from black.files import (
    find_user_pyproject_toml,
    normalize_path_maybe_ignore,
    wrap_stream_for_windows,
from black.handle_ipynb_magics import (
    jupyter_dependencies_are_installed,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, Line
from black.mode import (
    FUTURE_FLAG_TO_FEATURE,
from black.nodes import (
    is_simple_decorator_expression,
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.report import Changed, NothingChanged, Report
from black.trans import iter_fexpr_spans
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

from concurrent.futures import Executor

COMPILED = Path(__file__).suffix in (".pyd", ".so")
class WriteBack(Enum):
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
        if check and not diff:
        return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES
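
# Illustrative sketch (not part of the original source): how the --check/--diff/
# --color flags are expected to map onto WriteBack members, based on the branches
# visible above; the elided branches are assumptions.
#
#   WriteBack.from_configuration(check=True, diff=False)              -> WriteBack.CHECK
#   WriteBack.from_configuration(check=False, diff=True, color=True)  -> WriteBack.COLOR_DIFF
#   WriteBack.from_configuration(check=False, diff=True)              -> WriteBack.DIFF
#   WriteBack.from_configuration(check=False, diff=False)             -> WriteBack.YES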

# Legacy name, left for integrations.
DEFAULT_WORKERS = os.cpu_count()

def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
        value = find_pyproject_toml(ctx.params.get("src", ()))

        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"

    # Sanitize the values to be Click friendly. For more information please see:
    # https://github.com/psf/black/issues/1458
    # https://github.com/pallets/click/issues/1567
        k: str(v) if not isinstance(v, (list, dict)) else v
        for k, v in config.items()

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"

    default_map: Dict[str, Any] = {}
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
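
# Illustrative sketch (not part of the original source): the kind of [tool.black]
# table this callback is expected to fold into Click's default map. Option names
# with dashes become underscored keys, and non-list/dict values are stringified
# above so Click can coerce them.
#
#   [tool.black]
#   line-length = 100
#   target-version = ["py38", "py39"]
#   skip-string-normalization = true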

def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    return [TargetVersion[val.upper()] for val in v]
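
# Illustrative sketch (not part of the original source): the callback simply maps
# lowercase CLI names onto TargetVersion members, e.g. (ctx/param are unused here):
#
#   target_version_option_callback(ctx, param, ("py37", "py310"))
#   -> [TargetVersion.PY37, TargetVersion.PY310]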

def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)

    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
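
# Illustrative sketch (not part of the original source): re_compile_maybe_verbose
# only switches to verbose ("(?x)") mode when the pattern spans several lines,
# which is how multi-line exclude patterns in pyproject.toml keep their comments
# and whitespace insignificant.
#
#   >>> import re
#   >>> re_compile_maybe_verbose(r"\.pyi?$").flags & re.VERBOSE
#   0
#   >>> bool(re_compile_maybe_verbose("(\n  foo/  # generated\n)").flags & re.VERBOSE)
#   True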

    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. [default: per-file"
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--python-cell-magics",
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
        " Useful for formatting cells with custom python magics."
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    type=click.IntRange(min=1),
    default=DEFAULT_WORKERS,
    help="Number of parallel workers",
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
    f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
    f"Python ({platform.python_implementation()}) {platform.python_version()}"
    exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
def main(  # noqa: C901
    target_version: List[TargetVersion],
    python_cell_magics: Sequence[str],
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    if src and code is not None:
        + "\n\n'SRC' and 'code' cannot be passed simultaneously."
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")

        find_project_root(src, stdin_filename) if code is None else (None, None)
    ctx.obj["root"] = root
        f"Identified `{root}` as project root containing a {method}.",
        else (normalize_path_maybe_ignore(Path(source), root), source)
    srcs_string = ", ".join(
        else f'\033[31m"{source} (skipping - invalid)"\033[34m'
        for _norm, source in normalized
    out(f"Sources to be formatted: {srcs_string}", fg="blue")

    config_source = ctx.get_parameter_source("config")
    user_level_config = str(find_user_pyproject_toml())
    if config == user_level_config:
        "Using configuration from user-level config at "
        f"'{user_level_config}'.",
    elif config_source in (
        ParameterSource.DEFAULT,
        ParameterSource.DEFAULT_MAP,
        out("Using configuration from project root.", fg="blue")
    out(f"Using configuration in '{config}'.", fg="blue")

    error_msg = "Oh no! 💥 💔 💥"
        and required_version != __version__
        and required_version != __version__.split(".")[0]
        f"{error_msg} The required version `{required_version}` does not match"
        f" the running version `{__version__}`!"
    err("Cannot pass both `pyi` and `ipynb` flags!")

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
    versions = set(target_version)
    # We'll autodetect later.
        target_versions=versions,
        line_length=line_length,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        python_cell_magics=set(python_cell_magics),

    # Run in quiet mode by default with -c; the extra output isn't useful.
    # You can still pass -v to get verbose output.
    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
        content=code, fast=fast, write_back=write_back, mode=mode, report=report
        sources = get_sources(
            extend_exclude=extend_exclude,
            force_exclude=force_exclude,
            stdin_filename=stdin_filename,
    except GitWildMatchPatternError:
        "No Python files are present to be formatted. Nothing to do 😴",
    if len(sources) == 1:
        write_back=write_back,
        write_back=write_back,
    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        click.echo(str(report), err=True)
    ctx.exit(report.return_code)
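
# Illustrative sketch (not part of the original source): the exit codes wired up
# above are what scripts typically key off, e.g. in CI:
#
#   black --check --diff src/     # 0 = unchanged, 1 = would reformat, 123 = internal error
#   black --target-version py38 --line-length 100 src/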

    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()
    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
    gitignore = get_gitignore(ctx.obj["root"])

    if s == "-" and stdin_filename:
        p = Path(stdin_filename)
    if is_stdin or p.is_file():
        normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
        if normalized_path is None:
        normalized_path = "/" + normalized_path
        # Hard-exclude any files that match the `--force-exclude` regex.
        force_exclude_match = force_exclude.search(normalized_path)
        force_exclude_match = None
        if force_exclude_match and force_exclude_match.group(0):
            report.path_ignored(p, "matches the --force-exclude regular expression")
        p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
        if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
            verbose=verbose, quiet=quiet
    err(f"invalid path: {s}")

    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    Exit if there is no `src` provided for formatting
    if verbose or not quiet:

    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    path = Path("<string>")
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        traceback.print_exc()
        report.failed(path, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    elif str(src).startswith(STDIN_PLACEHOLDER):
        # Use the original name again in case we want to print something
        src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)
    if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
        changed = Changed.YES
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        res_src = src.resolve()
        res_src_s = str(res_src)
        if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
            changed = Changed.CACHED
    if changed is not Changed.CACHED and format_file_in_place(
        src, fast=fast, write_back=write_back, mode=mode
        changed = Changed.YES
    if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
        write_back is WriteBack.CHECK and changed is Changed.NO
        write_cache(cache, [src], mode)
    report.done(src, changed)
    except Exception as exc:
        traceback.print_exc()
        report.failed(src, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    write_back: WriteBack,
    workers: Optional[int],
    """Reformat multiple files using a ProcessPoolExecutor."""
    from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor

    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        assert worker_count is not None
        worker_count = min(worker_count, 60)
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # We arrive here if the underlying system does not support multiprocessing
        # (as in AWS Lambda or Termux), in which case we gracefully fall back to
        # a ThreadPoolExecutor with just a single worker (more workers would not do
        # us any good due to the Global Interpreter Lock).
        executor = ThreadPoolExecutor(max_workers=1)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
        loop.run_until_complete(
            write_back=write_back,
    asyncio.set_event_loop(None)
    if executor is not None:

async def schedule_formatting(
    write_back: WriteBack,
    loop: asyncio.AbstractEventLoop,
    executor: "Executor",
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    sources_to_cache = []
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        lock = manager.Lock()
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
        for src in sorted(sources)
    pending = tasks.keys()
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
    done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
    src = tasks.pop(task)
        cancelled.append(task)
    elif task.exception():
        report.failed(src, str(task.exception()))
    changed = Changed.YES if task.result() else Changed.NO
    # If the file was written back or was successfully checked as
    # well-formatted, store this information in the cache.
    if write_back is WriteBack.YES or (
        write_back is WriteBack.CHECK and changed is Changed.NO
        sources_to_cache.append(src)
    report.done(src, changed)
    if sys.version_info >= (3, 7):
        await asyncio.gather(*cancelled, return_exceptions=True)
    await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
    write_cache(cache, sources_to_cache, mode)

def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
    except JSONDecodeError:
        f"File '{src}' cannot be parsed as valid Jupyter notebook."

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)

def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    then = datetime.utcnow()

    src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    src, encoding, newline = content, "utf-8", ""

        dst = format_file_contents(src, fast=fast, mode=mode)
    except NothingChanged:

    f = io.TextIOWrapper(
        sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
    if write_back == WriteBack.YES:
        # Make sure there's a newline after the content
        if dst and dst[-1] != "\n":
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"STDIN\t{then} +0000"
        dst_name = f"STDOUT\t{now} +0000"
        d = diff(src, dst, src_name, dst_name)
        if write_back == WriteBack.COLOR_DIFF:
            f = wrap_stream_for_windows(f)

def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)

def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    if not src_contents.strip():
        raise NothingChanged

        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
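
# Illustrative sketch (not part of the original source): typical use of
# format_file_contents on plain Python source; NothingChanged propagates when
# the input is already formatted.
#
#   >>> format_file_contents("x=1\n", fast=True, mode=Mode())
#   'x = 1\n'
#   >>> format_file_contents("x = 1\n", fast=True, mode=Mode())
#   Traceback (most recent call last):
#       ...
#   black.report.NothingChanged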

def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenize_rt to break because of

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
        raise NothingChanged
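
# Illustrative sketch (not part of the original source): a cell whose text already
# looks like a transformed magic is refused, exactly as the docstring explains,
# because the round trip would be ambiguous.
#
#   >>> validate_cell("get_ipython().system('ls')", Mode())
#   Traceback (most recent call last):
#       ...
#   black.report.NothingChanged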

def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    - if cell has trailing semicolon, remove it;
    - if cell has IPython magics, mask them;
    - reinstate IPython magics;
    - reinstate trailing semicolon (if originally present);
    - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    dst = dst.rstrip("\n")
        raise NothingChanged from None
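
# Illustrative sketch (not part of the original source): the expected round trip
# for an ordinary cell, with the trailing semicolon preserved and the final
# newline stripped; the exact value shown is an assumption based on the steps
# listed in the docstring above.
#
#   >>> format_cell("x=1;", fast=True, mode=Mode())
#   'x = 1;'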

def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None

def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            src = "".join(cell["source"])
            dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
            cell["source"] = dst.splitlines(keepends=True)
    dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
    if trailing_newline:
        dst_contents = dst_contents + "\n"
    raise NothingChanged

def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are

    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:

    A more complex example:

    ... black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...         target_versions={black.TargetVersion.PY36},
    ...         string_normalization=False,
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)

def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    if mode.target_versions:
        versions = mode.target_versions
    future_imports = get_future_imports(src_node)
    versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node, preview=mode.preview)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    split_line_features = {
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
            dst_contents.append(str(line))
    return "".join(dst_contents)

def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
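
# Illustrative sketch (not part of the original source): how the helper is
# expected to report Windows line endings while still returning LF-only text.
#
#   >>> decode_bytes(b"x = 1\r\n")
#   ('x = 1\n', 'utf-8', '\r\n')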

def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional-only arguments in function signatures and lambdas;
    - assignment expressions;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    features: Set[Feature] = set()
        FUTURE_FLAG_TO_FEATURE[future_import]
        for future_import in future_imports
        if future_import in FUTURE_FLAG_TO_FEATURE
    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
                if Feature.DEBUG_F_STRINGS not in features:
                    for span_beg, span_end in iter_fexpr_spans(n.value):
                        if n.value[span_beg : span_end - 1].rstrip().endswith("="):
                            features.add(Feature.DEBUG_F_STRINGS)
        elif is_number_token(n):
            features.add(Feature.NUMERIC_UNDERSCORES)
        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                features.add(Feature.POS_ONLY_ARGUMENTS)
        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)
        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)
            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            feature = Feature.TRAILING_COMMA_IN_CALL
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)
                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
            features.add(Feature.UNPACKING_ON_FLOW)
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
            features.add(Feature.EXCEPT_STAR)
        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
            features.add(Feature.VARIADIC_GENERICS)
            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
            features.add(Feature.VARIADIC_GENERICS)

def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
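
# Illustrative sketch (not part of the original source): a file using the walrus
# operator restricts the detected targets to 3.8+, since only those versions'
# feature sets contain ASSIGNMENT_EXPRESSIONS.
#
#   >>> node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
#   >>> TargetVersion.PY37 in detect_target_versions(node)
#   False
#   >>> TargetVersion.PY38 in detect_target_versions(node)
#   True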

def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            len(child.children) == 2
            and first_child.type == token.STRING
            and child.children[1].type == token.NEWLINE
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
            imports |= set(get_imports_from_children(first_child.children[3:]))
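
# Illustrative sketch (not part of the original source): only leading __future__
# imports are collected; anything after the first non-import statement stops the
# scan.
#
#   >>> src = '"""docstring"""\nfrom __future__ import annotations\nx = 1\n'
#   >>> get_future_imports(lib2to3_parse(src))
#   {'annotations'}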

def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."

        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"

def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    newdst = _format_str_once(dst, mode=mode)
        diff(src, dst, "source", "first pass"),
        diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"

def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.

def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    modules: List[Any] = []
        from click import core
        modules.append(core)
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
        modules.append(_unicodefun)

    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore

def patched_main() -> None:
    maybe_install_uvloop()

if __name__ == "__main__":