All patches and comments are welcome. Please squash your changes into logical
commits before using git-format-patch and git-send-email to send them to
patches@git.madduck.net.
If you could also read over the Git project's submission guidelines and adhere
to them, I'd be especially grateful.
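
For example (illustrative only; adjust the base branch to whatever you branched
from): git format-patch origin/master turns your commits into patch files, and
git send-email --to=patches@git.madduck.net *.patch mails them.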
from json.decoder import JSONDecodeError
from contextlib import contextmanager
from datetime import datetime
from multiprocessing import Manager, freeze_support
from pathlib import Path
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
from click.core import ParameterSource
from dataclasses import replace
from mypy_extensions import mypyc_attr
from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
from black.const import STDIN_PLACEHOLDER
from black.nodes import STARS, syms, is_simple_decorator_expression
from black.nodes import is_string_token, is_number_token
from black.lines import Line, EmptyLineTracker
from black.linegen import transform_line, LineGenerator, LN
from black.comments import normalize_fmt_off
from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
from black.concurrency import cancel, shutdown, maybe_install_uvloop
from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
from black.report import Report, Changed, NothingChanged
from black.files import (
    find_user_pyproject_toml,
)
from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
from black.files import wrap_stream_for_windows
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.handle_ipynb_magics import (
    remove_trailing_semicolon,
    put_trailing_semicolon_back,
    jupyter_dependencies_are_installed,
)
from blib2to3.pytree import Node, Leaf
from blib2to3.pgen2 import token

from _black_version import version as __version__

from concurrent.futures import Executor

COMPILED = Path(__file__).suffix in (".pyd", ".so")

class WriteBack(Enum):
    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
        if check and not diff:
            return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES


# Legacy name, left for integrations.

DEFAULT_WORKERS = os.cpu_count()

def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
    Returns the path to a successfully found and read configuration file, None
        value = find_pyproject_toml(ctx.params.get("src", ()))
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
    # Sanitize the values to be Click friendly. For more information please see:
    # https://github.com/psf/black/issues/1458
    # https://github.com/pallets/click/issues/1567
        k: str(v) if not isinstance(v, (list, dict)) else v
        for k, v in config.items()
    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
    default_map: Dict[str, Any] = {}
        default_map.update(ctx.default_map)
    default_map.update(config)
    ctx.default_map = default_map

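
# Illustrative sketch (hypothetical helper, not part of Black; the values are
# made up): the sanitization above stringifies scalar config values while
# leaving lists and dicts untouched, so Click can use them as defaults.
def _example_config_sanitization() -> None:
    config = {
        "line_length": 100,
        "skip_string_normalization": True,
        "target_version": ["py38"],
    }
    sanitized = {
        k: str(v) if not isinstance(v, (list, dict)) else v
        for k, v in config.items()
    }
    assert sanitized == {
        "line_length": "100",
        "skip_string_normalization": "True",
        "target_version": ["py38"],
    }
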
def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.
    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    return [TargetVersion[val.upper()] for val in v]

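
# Illustrative sketch (hypothetical helper, not part of Black): the callback
# above simply maps the lower-case CLI choices onto TargetVersion members.
def _example_target_version_mapping() -> None:
    assert [TargetVersion[v.upper()] for v in ("py38", "py310")] == [
        TargetVersion.PY38,
        TargetVersion.PY310,
    ]
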
def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.
    If it contains newlines, use verbose mode.
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None

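
# Illustrative sketch (hypothetical helper, not part of Black): a pattern that
# spans several lines is compiled in verbose mode, so the whitespace added for
# readability is ignored when matching.
def _example_verbose_regex() -> None:
    pattern = re_compile_maybe_verbose("(\n    \\.pyi?$\n  | \\.ipynb$\n)")
    assert pattern.search("notebook.ipynb") is not None
    assert pattern.search("setup.cfg") is None
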
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. [default: per-file"
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--python-cell-magics",
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
        " Useful for formatting cells with custom python magics."
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
    help="(DEPRECATED and now included in --preview) Normalize string literals.",
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file). It can be"
        " either a major version number or an exact version."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    type=click.IntRange(min=1),
    default=DEFAULT_WORKERS,
    help="Number of parallel workers",
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
def main(  # noqa: C901
    target_version: List[TargetVersion],
    python_cell_magics: Sequence[str],
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    src: Tuple[str, ...],
    config: Optional[str],
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)
    if src and code is not None:
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
    root, method = find_project_root(src) if code is None else (None, None)
    ctx.obj["root"] = root
                f"Identified `{root}` as project root containing a {method}.",
                (normalize_path_maybe_ignore(Path(source), root), source)
            srcs_string = ", ".join(
                    else f'\033[31m"{source} (skipping - invalid)"\033[34m'
                    for _norm, source in normalized
            out(f"Sources to be formatted: {srcs_string}", fg="blue")
        config_source = ctx.get_parameter_source("config")
        user_level_config = str(find_user_pyproject_toml())
        if config == user_level_config:
                "Using configuration from user-level config at "
                f"'{user_level_config}'.",
        elif config_source in (
            ParameterSource.DEFAULT,
            ParameterSource.DEFAULT_MAP,
            out("Using configuration from project root.", fg="blue")
            out(f"Using configuration in '{config}'.", fg="blue")
    error_msg = "Oh no! 💥 💔 💥"
        and required_version != __version__
        and required_version != __version__.split(".")[0]
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        err("Cannot pass both `pyi` and `ipynb` flags!")
    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
        versions = set(target_version)
        # We'll autodetect later.
        target_versions=versions,
        line_length=line_length,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
        python_cell_magics=set(python_cell_magics),
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
            content=code, fast=fast, write_back=write_back, mode=mode, report=report
            sources = get_sources(
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                stdin_filename=stdin_filename,
        except GitWildMatchPatternError:
            "No Python files are present to be formatted. Nothing to do 😴",
        if len(sources) == 1:
                write_back=write_back,
                write_back=write_back,
    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)

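
# Typical invocations of the command defined above (illustrative, not an
# exhaustive list):
#
#   black src/                          # reformat in place
#   black --check --diff --color src/   # report what would change, write nothing
#   black --target-version py310 --line-length 100 src/
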
    src: Tuple[str, ...],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()
        exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
        gitignore = get_gitignore(ctx.obj["root"])
        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:
            normalized_path = "/" + normalized_path
            # Hard-exclude any files that match the `--force-exclude` regex.
                force_exclude_match = force_exclude.search(normalized_path)
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            err(f"invalid path: {s}")

    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
    Exit if there is no `src` provided for formatting
        if verbose or not quiet:

    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.
    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    path = Path("<string>")
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(path, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
    """Reformat a single file under `src` without spawning child processes.
    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
        elif str(src).startswith(STDIN_PLACEHOLDER):
            # Use the original name again in case we want to print something
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
            traceback.print_exc()
        report.failed(src, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
    write_back: WriteBack,
    workers: Optional[int],
    """Reformat multiple files using a ProcessPoolExecutor."""
    from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        assert worker_count is not None
        worker_count = min(worker_count, 60)
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # we arrive here if the underlying system does not support multi-processing
        # like in AWS Lambda or Termux, in which case we gracefully fall back to
        # a ThreadPoolExecutor with just a single worker (more workers would not do us
        # any good due to the Global Interpreter Lock)
        executor = ThreadPoolExecutor(max_workers=1)
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
        loop.run_until_complete(
                write_back=write_back,
            asyncio.set_event_loop(None)
    if executor is not None:

async def schedule_formatting(
    write_back: WriteBack,
    loop: asyncio.AbstractEventLoop,
    executor: "Executor",
    """Run formatting of `sources` in parallel using the provided `executor`.
    (Use ProcessPoolExecutors for actual parallelism.)
    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    sources_to_cache = []
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        lock = manager.Lock()
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
        for src in sorted(sources)
    pending = tasks.keys()
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
            src = tasks.pop(task)
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                    sources_to_cache.append(src)
                report.done(src, changed)
        if sys.version_info >= (3, 7):
            await asyncio.gather(*cancelled, return_exceptions=True)
            await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
        write_cache(cache, sources_to_cache, mode)

def format_file_in_place(
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    """Format file under `src` path. Return True if changed.
    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)
    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
    except JSONDecodeError:
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)
        with lock or nullcontext():
            f = io.TextIOWrapper(
                f = wrap_stream_for_windows(f)
            f.write(diff_contents)

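
# Illustrative sketch (hypothetical helper, not part of Black; the path is made
# up): one way to call format_file_in_place() directly from Python.
def _example_format_file_in_place() -> None:
    changed = format_file_in_place(
        Path("example.py"), fast=False, mode=Mode(), write_back=WriteBack.YES
    )
    print("reformatted" if changed else "already well formatted")
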
def format_stdin_to_stdout(
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    """Format file on stdin. Return True if changed.
    If content is None, it's read from sys.stdin.
    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    then = datetime.utcnow()
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
        src, encoding, newline = content, "utf-8", ""
        dst = format_file_contents(src, fast=fast, mode=mode)
    except NothingChanged:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                f = wrap_stream_for_windows(f)

def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
    """Perform stability and equivalence checks.
    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode)

def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.
    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    if not src_contents.strip():
        raise NothingChanged
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged
    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)

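
# Illustrative sketch (hypothetical helper, not part of Black): unformatted
# input comes back reformatted, while already-formatted input raises
# NothingChanged.
def _example_format_file_contents() -> None:
    assert format_file_contents("x=1\n", fast=False, mode=Mode()) == "x = 1\n"
    try:
        format_file_contents("x = 1\n", fast=False, mode=Mode())
    except NothingChanged:
        pass
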
def validate_cell(src: str, mode: Mode) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of
    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:
        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"
    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
        raise NothingChanged

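
# Illustrative sketch (hypothetical helper, not part of Black): a cell that
# already contains a transformed magic is rejected, as described above, because
# it could not be round-tripped safely.
def _example_validate_cell() -> None:
    try:
        validate_cell("get_ipython().system('ls')", Mode())
    except NothingChanged:
        pass
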
def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.
      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.
    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    dst = dst.rstrip("\n")
        raise NothingChanged from None

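
# Illustrative sketch (hypothetical helper, not part of Black; assumes Black's
# jupyter extra is installed): the trailing semicolon a user adds to suppress
# notebook output survives formatting.
def _example_format_cell() -> None:
    assert format_cell("x=1;", fast=True, mode=Mode()) == "x = 1;"
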
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.
    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None

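
# Illustrative sketch (hypothetical helper, not part of Black): a notebook
# declaring another language is skipped, while missing metadata is accepted.
def _example_validate_metadata() -> None:
    validate_metadata({})  # empty metadata: treated as Python, no exception
    try:
        validate_metadata({"metadata": {"language_info": {"name": "R"}}})
    except NothingChanged:
        pass
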
def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.
    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    trailing_newline = src_contents[-1] == "\n"
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                cell["source"] = dst.splitlines(keepends=True)
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        raise NothingChanged

def format_str(src_contents: str, *, mode: Mode) -> str:
    """Reformat a string and return new contents.
    `mode` determines formatting options, such as how many characters per line are
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
    A more complex example:
    ... black.format_str(
    ... "def f(arg:str='')->None: hey",
    ... mode=black.Mode(
    ... target_versions={black.TargetVersion.PY36},
    ... string_normalization=False,
    dst_contents = _format_str_once(src_contents, mode=mode)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        return _format_str_once(dst_contents, mode=mode)

def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    if mode.target_versions:
        versions = mode.target_versions
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)
    normalize_fmt_off(src_node, preview=mode.preview)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    split_line_features = {
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
            dst_contents.append(str(line))
    return "".join(dst_contents)

def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).
    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
        return "", encoding, "\n"
    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline

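
# Illustrative sketch (hypothetical helper, not part of Black): CRLF input is
# decoded with universal newlines, but the original newline style is reported
# so it can be written back unchanged.
def _example_decode_bytes() -> None:
    contents, encoding, newline = decode_bytes(b"x = 1\r\n")
    assert (contents, newline) == ("x = 1\n", "\r\n")
    assert encoding == "utf-8"
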
def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
    """Return a set of (relatively) new Python features used in this file.
    Currently looking for:
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    features: Set[Feature] = set()
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
        elif is_number_token(n):
                features.add(Feature.NUMERIC_UNDERSCORES)
        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                features.add(Feature.POS_ONLY_ARGUMENTS)
        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)
        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                features.add(Feature.RELAXED_DECORATORS)
            n.type in {syms.typedargslist, syms.arglist}
            and n.children[-1].type == token.COMMA
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
                feature = Feature.TRAILING_COMMA_IN_CALL
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)
                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
            features.add(Feature.UNPACKING_ON_FLOW)
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
            n.type == syms.except_clause
            and len(n.children) >= 2
            and n.children[1].type == token.STAR
            features.add(Feature.EXCEPT_STAR)
        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
            features.add(Feature.VARIADIC_GENERICS)
            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
            features.add(Feature.VARIADIC_GENERICS)

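
# Illustrative sketch (hypothetical helper, not part of Black): a walrus
# operator is reported as an assignment expression, which in turn lets
# detect_target_versions() below rule out interpreters older than 3.8.
def _example_get_features_used() -> None:
    node = lib2to3_parse("(y := 1)\n")
    assert Feature.ASSIGNMENT_EXPRESSIONS in get_features_used(node)
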
def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]

def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value
            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
            imports |= set(get_imports_from_children(first_child.children[3:]))

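
# Illustrative sketch (hypothetical helper, not part of Black): only leading
# __future__ imports (optionally preceded by a docstring) are collected.
def _example_get_future_imports() -> None:
    node = lib2to3_parse('"""doc"""\nfrom __future__ import annotations\n')
    assert get_future_imports(node) == {"annotations"}
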
def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            "cannot use --safe with this file; failed to parse source file AST: "
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            " source. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"

def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    newdst = _format_str_once(dst, mode=mode)
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"

@contextmanager
def nullcontext() -> Iterator[None]:
    """Return an empty context manager.
    To be used like `nullcontext` in Python 3.7.
    """
    yield

def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.
    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.
    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    modules: List[Any] = []
        from click import core
        modules.append(core)
        # Removed in Click 8.1.0 and newer; we keep this around for users who have
        # older versions installed.
        from click import _unicodefun  # type: ignore
        modules.append(_unicodefun)
    for module in modules:
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore

def patched_main() -> None:
    maybe_install_uvloop()


if __name__ == "__main__":