from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime
from json.decoder import JSONDecodeError
from multiprocessing import Manager, freeze_support
from pathlib import Path

from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError

from _black_version import version as __version__
from black.cache import Cache, filter_cached, get_cache_info, read_cache, write_cache
from black.comments import normalize_fmt_off
from black.concurrency import cancel, maybe_install_uvloop, shutdown
from black.const import (
from black.files import (
    find_user_pyproject_toml,
    normalize_path_maybe_ignore,
    wrap_stream_for_windows,
from black.handle_ipynb_magics import (
    jupyter_dependencies_are_installed,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, Line
from black.mode import (
    FUTURE_FLAG_TO_FEATURE,
from black.nodes import (
    is_simple_decorator_expression,
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.report import Changed, NothingChanged, Report
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

    from concurrent.futures import Executor

COMPILED = Path(__file__).suffix in (".pyd", ".so")

class WriteBack(Enum):
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
        if check and not diff:
            return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES

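# A hedged usage sketch (not part of Black's source): how the --check/--diff/--color
# flags map onto WriteBack values. It assumes from_configuration is a classmethod,
# that its elided first branch returns cls.CHECK, and that the enum members
# NO/YES/DIFF/CHECK/COLOR_DIFF are defined in the elided part of the class, as in
# released versions of Black.
def _writeback_mapping_sketch() -> None:
    assert WriteBack.from_configuration(check=True, diff=False) is WriteBack.CHECK
    assert WriteBack.from_configuration(check=False, diff=True) is WriteBack.DIFF
    assert (
        WriteBack.from_configuration(check=False, diff=True, color=True)
        is WriteBack.COLOR_DIFF
    )
    # Neither --check nor --diff: reformat the files in place.
    assert WriteBack.from_configuration(check=False, diff=False) is WriteBack.YES
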
# Legacy name, left for integrations.

DEFAULT_WORKERS = os.cpu_count()


def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
        value = find_pyproject_toml(ctx.params.get("src", ()))
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"

    default_map: Dict[str, Any] = {}
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map

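# Illustrative sketch (not part of Black's source): the sanitization step above turns
# scalar TOML values into strings so Click can treat them like command-line input,
# while lists (e.g. target-version) and tables pass through unchanged. The
# `raw_config` dict is a hypothetical result of parse_pyproject_toml.
def _config_sanitization_sketch() -> None:
    raw_config = {
        "line_length": 100,
        "skip_string_normalization": True,
        "target_version": ["py38", "py39"],
    }
    sanitized = {
        k: str(v) if not isinstance(v, (list, dict)) else v
        for k, v in raw_config.items()
    }
    assert sanitized == {
        "line_length": "100",
        "skip_string_normalization": "True",
        "target_version": ["py38", "py39"],
    }
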
def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    return [TargetVersion[val.upper()] for val in v]

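# Illustrative sketch (not part of Black's source): the callback upper-cases each
# --target-version value and looks it up in the TargetVersion enum, so "py38" becomes
# TargetVersion.PY38. It assumes TargetVersion defines PY37/PY38 members, as in
# released versions of Black; unknown names never reach this point because the option
# is declared with click.Choice.
def _target_version_callback_sketch() -> None:
    values = ("py37", "py38")
    assert [TargetVersion[v.upper()] for v in values] == [
        TargetVersion.PY37,
        TargetVersion.PY38,
    ]
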
def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)


    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None

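# Illustrative sketch (not part of Black's source), assuming re_compile_maybe_verbose
# returns the compiled pattern as validate_regex above relies on: a single-line
# pattern compiles as-is, while a multi-line pattern gets the "(?x)" verbose-mode
# prefix so whitespace and "#" comments inside it are ignored.
def _regex_compilation_sketch() -> None:
    single = re_compile_maybe_verbose(r"\.pyi?$")
    assert single.search("module.py") is not None

    multi = re_compile_maybe_verbose(
        "\n".join(
            [
                r"(",
                r"    \.py   # plain modules",
                r"    | \.pyi # stub files",
                r")$",
            ]
        )
    )
    assert multi.search("module.pyi") is not None
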
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. [default: per-file"
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--python-cell-magics",
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
        " Useful for formatting cells with custom python magics."
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast is given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default.)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful so that Black"
        " can respect the --force-exclude option in editors that pass code through"
        " stdin."
    type=click.IntRange(min=1),
    default=DEFAULT_WORKERS,
    help="Number of parallel workers",
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",

def main(  # noqa: C901
    target_version: List[TargetVersion],
    python_cell_magics: Sequence[str],
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    if src and code is not None:
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")

    root, method = find_project_root(src) if code is None else (None, None)
    ctx.obj["root"] = root

                f"Identified `{root}` as project root containing a {method}.",
                (normalize_path_maybe_ignore(Path(source), root), source)
            srcs_string = ", ".join(
                    else f'\033[31m"{source} (skipping - invalid)"\033[34m'
                    for _norm, source in normalized
            out(f"Sources to be formatted: {srcs_string}", fg="blue")

            config_source = ctx.get_parameter_source("config")
            user_level_config = str(find_user_pyproject_toml())
            if config == user_level_config:
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'.",
            elif config_source in (
                ParameterSource.DEFAULT,
                ParameterSource.DEFAULT_MAP,
                out("Using configuration from project root.", fg="blue")
                out(f"Using configuration in '{config}'.", fg="blue")

    error_msg = "Oh no! 💥 💔 💥"
        and required_version != __version__
        and required_version != __version__.split(".")[0]
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        err("Cannot pass both `pyi` and `ipynb` flags!")

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
        versions = set(target_version)
        # We'll autodetect later.
        target_versions=versions,
        line_length=line_length,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        python_cell_magics=set(python_cell_magics),
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.

    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
            content=code, fast=fast, write_back=write_back, mode=mode, report=report
            sources = get_sources(
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                stdin_filename=stdin_filename,
        except GitWildMatchPatternError:
            "No Python files are present to be formatted. Nothing to do 😴",
        if len(sources) == 1:
                write_back=write_back,
                write_back=write_back,

    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)

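# Illustrative sketch (not part of Black's source): the --required-version check in
# main() accepts either an exact version string or just the major component. The
# running version used here is hypothetical.
def _required_version_check_sketch() -> None:
    running_version = "22.3.0"  # hypothetical value of __version__

    def mismatch(required: str) -> bool:
        return (
            bool(required)
            and required != running_version
            and required != running_version.split(".")[0]
        )

    assert not mismatch("22.3.0")  # exact match
    assert not mismatch("22")  # major version match
    assert mismatch("21")  # would make main() exit with an error
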
    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()

        exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
        gitignore = get_gitignore(ctx.obj["root"])

        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:
            normalized_path = "/" + normalized_path
            # Hard-exclude any files that match the `--force-exclude` regex.
                force_exclude_match = force_exclude.search(normalized_path)
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            err(f"invalid path: {s}")

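# Illustrative sketch (not part of Black's source): explicitly listed files are
# normalized relative to the project root and prefixed with "/" before being tested
# against --force-exclude, so an anchored pattern such as the hypothetical
# "^/generated/" below matches regardless of how the path was spelled on the
# command line.
def _force_exclude_sketch() -> None:
    force_exclude = re_compile_maybe_verbose(r"^/generated/")
    normalized_path = "/" + "generated/models.py"
    match = force_exclude.search(normalized_path)
    assert match is not None and match.group(0) == "/generated/"
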
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    Exit if there is no `src` provided for formatting.
    if verbose or not quiet:


    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    path = Path("<string>")
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(path, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
        elif str(src).startswith(STDIN_PLACEHOLDER):
            # Use the original name again in case we want to print something
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(src, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    write_back: WriteBack,
    workers: Optional[int],
    """Reformat multiple files using a ProcessPoolExecutor."""
    from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor

    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        assert worker_count is not None
        worker_count = min(worker_count, 60)
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # We arrive here if the underlying system does not support multiprocessing
        # (e.g. AWS Lambda or Termux), in which case we gracefully fall back to a
        # ThreadPoolExecutor with just a single worker (more workers would not do
        # us any good because of the Global Interpreter Lock).
        executor = ThreadPoolExecutor(max_workers=1)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
        loop.run_until_complete(
                write_back=write_back,
            asyncio.set_event_loop(None)
    if executor is not None:

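# Illustrative sketch (not part of Black's source): the same fallback pattern as
# above in miniature. Prefer a ProcessPoolExecutor, but drop to a single-worker
# ThreadPoolExecutor on platforms where multiprocessing is unavailable.
def _executor_fallback_sketch(worker_count: int) -> "Executor":
    from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor

    executor: Executor
    try:
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # e.g. AWS Lambda or Termux, where OS-level process support is missing
        executor = ThreadPoolExecutor(max_workers=1)
    return executor
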
async def schedule_formatting(
    write_back: WriteBack,
    loop: asyncio.AbstractEventLoop,
    executor: "Executor",
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)

    sources_to_cache = []
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        lock = manager.Lock()
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
        for src in sorted(sources)
    pending = tasks.keys()
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
            src = tasks.pop(task)
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                    sources_to_cache.append(src)
                report.done(src, changed)
        if sys.version_info >= (3, 7):
            await asyncio.gather(*cancelled, return_exceptions=True)
            await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
        write_cache(cache, sources_to_cache, mode)

def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
    except JSONDecodeError:
            f"File '{src}' cannot be parsed as valid Jupyter notebook."

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)

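# A hedged usage sketch (not part of Black's source): reformat a file in place, then
# ask for a diff without touching it again. The path is hypothetical; Mode is the
# dataclass imported from black.mode above and Mode() means Black's defaults.
def _format_file_in_place_sketch() -> None:
    target = Path("example_module.py")  # hypothetical file on disk
    changed = format_file_in_place(
        target, fast=False, mode=Mode(), write_back=WriteBack.YES
    )
    if changed:
        # The file is now stable: a second run reports no changes and prints no diff.
        assert not format_file_in_place(
            target, fast=False, mode=Mode(), write_back=WriteBack.DIFF
        )
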
def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    then = datetime.utcnow()

        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
        src, encoding, newline = content, "utf-8", ""

        dst = format_file_contents(src, fast=fast, mode=mode)
    except NothingChanged:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                f = wrap_stream_for_windows(f)

def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)

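# Illustrative sketch (not part of Black's source): the check passes silently when the
# reformatted code is AST-equivalent to the input and stable under a second pass, and
# raises AssertionError otherwise. format_str is defined further down in this module;
# Mode() means Black's default options.
def _safety_check_sketch() -> None:
    src = "x = {'a':1 }\n"
    dst = format_str(src, mode=Mode())
    check_stability_and_equivalence(src, dst, mode=Mode())  # passes, no exception

    try:
        check_stability_and_equivalence(src, "y = 2\n", mode=Mode())
    except AssertionError:
        pass  # not equivalent: the two snippets have different ASTs
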
def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    if not src_contents.strip():
        raise NothingChanged

        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)

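# Illustrative sketch (not part of Black's source): format_file_contents raises
# NothingChanged both for effectively empty input and for input that is already
# formatted, so callers such as format_file_in_place treat the exception as
# "no write needed" rather than as an error.
def _format_file_contents_sketch() -> None:
    formatted = format_file_contents("print('hi')\n", fast=False, mode=Mode())
    assert formatted == 'print("hi")\n'

    try:
        format_file_contents(formatted, fast=False, mode=Mode())
    except NothingChanged:
        pass  # already formatted

    try:
        format_file_contents("   \n", fast=False, mode=Mode())
    except NothingChanged:
        pass  # nothing but whitespace
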
def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
        raise NothingChanged

def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    dst = dst.rstrip("\n")
        raise NothingChanged from None

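# A hedged usage sketch (not part of Black's source): a cell whose only quirk is a
# trailing semicolon round-trips through the steps listed in the docstring above.
# Requires Black's `jupyter` extra (tokenize-rt) to be installed; the expected output
# below is what released versions of Black produce for this input.
def _format_cell_sketch() -> None:
    src = "x = {'a':1};"  # hypothetical notebook cell source
    dst = format_cell(src, fast=True, mode=Mode())
    # Reformatted, trailing semicolon reinstated, trailing newline stripped.
    assert dst == 'x = {"a": 1};'
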
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None


def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                cell["source"] = dst.splitlines(keepends=True)
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        raise NothingChanged

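# A hedged usage sketch (not part of Black's source): a minimal notebook with a single
# code cell. Only "code" cells are touched, the metadata check above is what skips
# non-Python notebooks, and the original trailing newline is preserved. Uses the
# module-level json import and requires Black's `jupyter` extra to be installed.
def _format_ipynb_string_sketch() -> None:
    nb_src = (
        json.dumps(
            {
                "cells": [{"cell_type": "code", "source": ["x = {'a':1}"]}],
                "metadata": {"language_info": {"name": "python"}},
                "nbformat": 4,
                "nbformat_minor": 5,
            }
        )
        + "\n"
    )
    dst = format_ipynb_string(nb_src, fast=True, mode=Mode())
    assert json.loads(dst)["cells"][0]["source"] == ['x = {"a": 1}']
    assert dst.endswith("\n")
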
def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are

    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:

    A more complex example:

    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       string_normalization=False,

    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)

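# Illustrative sketch (not part of Black's source): format_str is meant to be
# idempotent, which is exactly what assert_stable below verifies. The expected text
# is the same as in the docstring example above.
def _format_str_idempotence_sketch() -> None:
    once = format_str("def f(arg:str='')->None:...", mode=Mode())
    assert once == 'def f(arg: str = "") -> None:\n    ...\n'
    assert format_str(once, mode=Mode()) == once
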
def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    if mode.target_versions:
        versions = mode.target_versions
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node, preview=mode.preview)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    split_line_features = {
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
            dst_contents.append(str(line))
    return "".join(dst_contents)

def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline

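# Illustrative sketch (not part of Black's source): decode_bytes reports the original
# newline style separately while always returning LF-only text, which is what lets
# format_file_in_place rewrite a CRLF file without changing its line endings.
def _decode_bytes_sketch() -> None:
    contents, encoding, newline = decode_bytes(b"x = 1\r\ny = 2\r\n")
    assert contents == "x = 1\ny = 2\n"
    assert newline == "\r\n"
    assert encoding == "utf-8"
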
def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expressions;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    features: Set[Feature] = set()
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
        elif is_number_token(n):
                features.add(Feature.NUMERIC_UNDERSCORES)
        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                features.add(Feature.POS_ONLY_ARGUMENTS)
        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)
        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)
            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
                feature = Feature.TRAILING_COMMA_IN_CALL
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)
                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
            features.add(Feature.UNPACKING_ON_FLOW)
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
            features.add(Feature.EXCEPT_STAR)
        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
            features.add(Feature.VARIADIC_GENERICS)
            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
            features.add(Feature.VARIADIC_GENERICS)


def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]

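# Illustrative sketch (not part of Black's source): feature detection is what drives
# target-version auto-detection when --target-version is not given. A file using the
# walrus operator needs ASSIGNMENT_EXPRESSIONS, so only 3.8+ targets remain.
def _feature_detection_sketch() -> None:
    node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
    assert Feature.ASSIGNMENT_EXPRESSIONS in get_features_used(node)

    detected = detect_target_versions(node)
    assert TargetVersion.PY38 in detected
    assert TargetVersion.PY37 not in detected
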
def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
            imports |= set(get_imports_from_children(first_child.children[3:]))

def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."

        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"


def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    newdst = _format_str_once(dst, mode=mode)
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"

def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    A stand-in for `contextlib.nullcontext`, which was only added in Python 3.7.


def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In Black's case, the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it formats Python source code. Moreover, this crash
    no longer occurs on Python 3.7 thanks to PEP 538 and PEP 540.
    modules: List[Any] = []
        from click import core
        modules.append(core)
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
        modules.append(_unicodefun)

    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore


def patched_main() -> None:
    maybe_install_uvloop()


if __name__ == "__main__":