import asyncio
from json.decoder import JSONDecodeError
import json
from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
from contextlib import contextmanager
from datetime import datetime
from enum import Enum
import io
from multiprocessing import Manager, freeze_support
import os
from pathlib import Path
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
import re
import signal
import sys
import tokenize
import traceback
from typing import (
    Any, Dict, Generator, Iterator, List, MutableMapping,
    Optional, Pattern, Set, Sized, Tuple, Union,
)

import click
from click.core import ParameterSource
from dataclasses import replace
from mypy_extensions import mypyc_attr

from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
from black.const import STDIN_PLACEHOLDER
from black.nodes import STARS, syms, is_simple_decorator_expression
from black.nodes import is_string_token
from black.lines import Line, EmptyLineTracker
from black.linegen import transform_line, LineGenerator, LN
from black.comments import normalize_fmt_off
from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
from black.concurrency import cancel, shutdown, maybe_install_uvloop
from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
from black.report import Report, Changed, NothingChanged
from black.files import find_project_root, find_pyproject_toml, parse_pyproject_toml
from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
from black.files import wrap_stream_for_windows
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.handle_ipynb_magics import (
    mask_cell,
    unmask_cell,
    remove_trailing_semicolon,
    put_trailing_semicolon_back,
    TRANSFORMED_MAGICS,
    PYTHON_CELL_MAGICS,
    jupyter_dependencies_are_installed,
)

from blib2to3.pytree import Node, Leaf
from blib2to3.pgen2 import token

from _black_version import version as __version__

COMPILED = Path(__file__).suffix in (".pyd", ".so")

# types
FileContent = str
Encoding = str
NewLine = str

class WriteBack(Enum):
    NO = 0
    YES = 1
    DIFF = 2
    CHECK = 3
    COLOR_DIFF = 4

    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        if check and not diff:
            return cls.CHECK
        if diff and color:
            return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES
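
# Illustrative doctest-style sketch (not part of the original module): how the
# CLI flags are expected to map onto WriteBack values, assuming the enum members
# reconstructed above.
#
#   >>> WriteBack.from_configuration(check=True, diff=False) is WriteBack.CHECK
#   True
#   >>> WriteBack.from_configuration(check=False, diff=True, color=True) is WriteBack.COLOR_DIFF
#   True
#   >>> WriteBack.from_configuration(check=False, diff=False) is WriteBack.YES
#   True
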
# Legacy name, left for integrations.
FileMode = Mode

DEFAULT_WORKERS = os.cpu_count()

def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
) -> Optional[str]:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    if not value:
        value = find_pyproject_toml(ctx.params.get("src", ()))
        if value is None:
            return None

    try:
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        ) from None

    if not config:
        return None
    else:
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
        config = {
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()
        }

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
        )

    default_map: Dict[str, Any] = {}
    if ctx.default_map:
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
    return value
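
# Illustrative sketch (not part of the original module): a minimal pyproject.toml
# that this callback would pick up.  Keys under [tool.black] are injected as Click
# defaults, so explicit command-line flags still take precedence over the file.
#
#   [tool.black]
#   line-length = 100
#   target-version = ["py38"]
#   skip-string-normalization = true
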
def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    return [TargetVersion[val.upper()] for val in v]

def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    if "\n" in regex:
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    return compiled


def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
    try:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
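
# Illustrative doctest-style sketch (not part of the original module): a
# multi-line exclude pattern is compiled in verbose mode, so the whitespace and
# newlines inside it are ignored when matching.
#
#   >>> pattern = re_compile_maybe_verbose("(\n  \\.git\n  | \\.venv\n)")
#   >>> bool(pattern.search("/repo/.venv/lib/foo.py"))
#   True
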
@click.command(
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
)
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
@click.option(
    "-l", "--line-length", type=int,
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
)
@click.option(
    "-t", "--target-version", multiple=True,
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
    help="Python versions that should be supported by Black's output. [default: per-file"
    " auto-detection]",
)
@click.option(
    "--pyi", is_flag=True,
    help="Format all input files like typing stubs regardless of file extension (useful"
    " when piping source on standard input).",
)
@click.option(
    "--ipynb", is_flag=True,
    help="Format all input files like Jupyter Notebooks regardless of file extension "
    "(useful when piping source on standard input).",
)
@click.option(
    "-S", "--skip-string-normalization", is_flag=True,
    help="Don't normalize string quotes or prefixes.",
)
@click.option(
    "--skip-magic-trailing-comma", is_flag=True,
    help="Don't use trailing commas as a reason to split lines.",
)
@click.option(
    "--experimental-string-processing", is_flag=True,
    help="Experimental option that performs more normalization on string literals."
    " Currently disabled because it leads to some crashes.",
)
@click.option(
    "--check", is_flag=True,
    help="Don't write the files back, just return the status. Return code 0 means"
    " nothing would change. Return code 1 means some files would be reformatted."
    " Return code 123 means there was an internal error.",
)
@click.option(
    "--diff", is_flag=True,
    help="Don't write the files back, just output a diff for each file on stdout.",
)
@click.option(
    "--color/--no-color", is_flag=True,
    help="Show colored diff. Only applies when `--diff` is given.",
)
@click.option(
    "--fast/--safe", is_flag=True,
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
)
@click.option(
    "--required-version", type=str,
    help="Require a specific version of Black to be running (useful for unifying results"
    " across many environments e.g. with a pyproject.toml file).",
)
@click.option(
    "--include", type=str,
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
    help="A regular expression that matches files and directories that should be"
    " included on recursive searches. An empty value means all files are included"
    " regardless of the name. Use forward slashes for directories on all platforms"
    " (Windows, too). Exclusions are calculated first, inclusions later.",
)
@click.option(
    "--exclude", type=str,
    callback=validate_regex,
    help="A regular expression that matches files and directories that should be"
    " excluded on recursive searches. An empty value means no paths are excluded."
    " Use forward slashes for directories on all platforms (Windows, too)."
    " Exclusions are calculated first, inclusions later. [default:"
    f" {DEFAULT_EXCLUDES}]",
)
@click.option(
    "--extend-exclude", type=str,
    callback=validate_regex,
    help="Like --exclude, but adds additional files and directories on top of the"
    " excluded ones. (Useful if you simply want to add to the default)",
)
@click.option(
    "--force-exclude", type=str,
    callback=validate_regex,
    help="Like --exclude, but files and directories matching this regex will be "
    "excluded even when they are passed explicitly as arguments.",
)
@click.option(
    "--stdin-filename", type=str,
    help="The name of the file when passing it through stdin. Useful to make "
    "sure Black will respect --force-exclude option on some "
    "editors that rely on using stdin.",
)
@click.option(
    "--workers",
    type=click.IntRange(min=1),
    default=DEFAULT_WORKERS,
    help="Number of parallel workers",
)
@click.option(
    "-q", "--quiet", is_flag=True,
    help="Don't emit non-error messages to stderr. Errors are still emitted; silence"
    " those with 2>/dev/null.",
)
@click.option(
    "-v", "--verbose", is_flag=True,
    help="Also emit messages to stderr about files that were not changed or were ignored"
    " due to exclusion patterns.",
)
@click.version_option(
    version=__version__,
    message=f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})",
)
@click.argument("src", nargs=-1, type=click.Path(
    exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
))
@click.option(
    "--config", is_eager=True,
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
)
@click.pass_context
def main(
    ctx: click.Context,
    code: Optional[str],
    line_length: int,
    target_version: List[TargetVersion],
    check: bool, diff: bool, color: bool, fast: bool,
    pyi: bool, ipynb: bool,
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    quiet: bool, verbose: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    workers: Optional[int],
    src: Tuple[str, ...],
    config: Optional[str],
) -> None:
414 """The uncompromising code formatter."""
415 ctx.ensure_object(dict)
416 root, method = find_project_root(src) if code is None else (None, None)
417 ctx.obj["root"] = root
422 f"Identified `{root}` as project root containing a {method}.",
427 (normalize_path_maybe_ignore(Path(source), root), source)
430 srcs_string = ", ".join(
434 else f'\033[31m"{source} (skipping - invalid)"\033[34m'
435 for _norm, source in normalized
438 out(f"Sources to be formatted: {srcs_string}", fg="blue")
441 config_source = ctx.get_parameter_source("config")
442 if config_source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP):
443 out("Using configuration from project root.", fg="blue")
445 out(f"Using configuration in '{config}'.", fg="blue")
447 error_msg = "Oh no! 💥 💔 💥"
448 if required_version and required_version != __version__:
450 f"{error_msg} The required version `{required_version}` does not match"
451 f" the running version `{__version__}`!"
455 err("Cannot pass both `pyi` and `ipynb` flags!")
458 write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
460 versions = set(target_version)
462 # We'll autodetect later.
465 target_versions=versions,
466 line_length=line_length,
469 string_normalization=not skip_string_normalization,
470 magic_trailing_comma=not skip_magic_trailing_comma,
471 experimental_string_processing=experimental_string_processing,
475 # Run in quiet mode by default with -c; the extra output isn't useful.
476 # You can still pass -v to get verbose output.
479 report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
483 content=code, fast=fast, write_back=write_back, mode=mode, report=report
487 sources = get_sources(
494 extend_exclude=extend_exclude,
495 force_exclude=force_exclude,
497 stdin_filename=stdin_filename,
499 except GitWildMatchPatternError:
504 "No Python files are present to be formatted. Nothing to do 😴",
510 if len(sources) == 1:
514 write_back=write_back,
522 write_back=write_back,
528 if verbose or not quiet:
529 out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
531 click.echo(str(report), err=True)
532 ctx.exit(report.return_code)
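
# Illustrative command lines for the options declared above (not part of the
# original module):
#
#   black src/                              # reformat files under src/ in place
#   black --check --diff src/               # exit 1 and print a diff if changes are needed
#   black --target-version py38 --line-length 100 -   # read from stdin, write to stdout
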
def get_sources(
    *,
    ctx: click.Context,
    src: Tuple[str, ...],
    quiet: bool, verbose: bool,
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    report: "Report", stdin_filename: Optional[str],
) -> Set[Path]:
    """Compute the set of files to be formatted."""
    sources: Set[Path] = set()
    path_empty(src, "No Path provided. Nothing to do 😴", quiet, verbose, ctx)

    if exclude is None:
        exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
        gitignore = get_gitignore(ctx.obj["root"])
    else:
        gitignore = None

    for s in src:
        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
            is_stdin = True
        else:
            p = Path(s)
            is_stdin = False

        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
            if normalized_path is None:
                continue
            normalized_path = "/" + normalized_path
            # Hard-exclude any files that matches the `--force-exclude` regex.
            if force_exclude:
                force_exclude_match = force_exclude.search(normalized_path)
            else:
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                continue

            if is_stdin:
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")

            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            ):
                continue
            sources.add(p)
        elif p.is_dir():
            sources.update(gen_python_files(
                p.iterdir(), ctx.obj["root"], include, exclude, extend_exclude,
                force_exclude, report, gitignore, verbose=verbose, quiet=quiet))
        elif s == "-":
            sources.add(p)
        else:
            err(f"invalid path: {s}")
    return sources

def path_empty(
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
) -> None:
    """
    Exit if there is no `src` provided for formatting
    """
    if not src:
        if verbose or not quiet:
            out(msg)
        ctx.exit(0)

def reformat_code(
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
) -> None:
    """
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    path = Path("<string>")
    try:
        changed = Changed.NO
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
        ):
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(path, str(exc))

def reformat_one(
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO

        if str(src) == "-":
            is_stdin = True
        elif str(src).startswith(STDIN_PLACEHOLDER):
            is_stdin = True
            # Use the original name again in case we want to print something
            # to the user
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
        else:
            is_stdin = False

        if is_stdin:
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
        else:
            cache: Cache = {}
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
            ):
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
            ):
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(src, str(exc))

# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_many(
    sources: Set[Path], fast: bool,
    write_back: WriteBack,
    mode: Mode, report: "Report",
    workers: Optional[int],
) -> None:
    """Reformat multiple files using a ProcessPoolExecutor."""
    executor: Executor
    loop = asyncio.get_event_loop()
    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        assert worker_count is not None
        worker_count = min(worker_count, 60)
    try:
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # we arrive here if the underlying system does not support multi-processing
        # like in AWS Lambda or Termux, in which case we gracefully fallback to
        # a ThreadPoolExecutor with just a single worker (more workers would not do us
        # any good due to the Global Interpreter Lock)
        executor = ThreadPoolExecutor(max_workers=1)

    try:
        loop.run_until_complete(
            schedule_formatting(
                sources=sources, fast=fast,
                write_back=write_back,
                mode=mode, report=report, loop=loop, executor=executor,
            )
        )
    finally:
        try:
            shutdown(loop)
        finally:
            asyncio.set_event_loop(None)
        if executor is not None:
            executor.shutdown()

async def schedule_formatting(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    loop: asyncio.AbstractEventLoop,
    executor: Executor,
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    """
    cache: Cache = {}
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    if not sources:
        return

    cancelled = []
    sources_to_cache = []
    lock = None
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        manager = Manager()
        lock = manager.Lock()
    tasks = {
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
            )
        ): src
        for src in sorted(sources)
    }
    pending = tasks.keys()
    try:
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        pass
    while pending:
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            src = tasks.pop(task)
            if task.cancelled():
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
            else:
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                ):
                    sources_to_cache.append(src)
                report.done(src, changed)
    if cancelled:
        if sys.version_info >= (3, 7):
            await asyncio.gather(*cancelled, return_exceptions=True)
        else:
            await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
    if sources_to_cache:
        write_cache(cache, sources_to_cache, mode)

def format_file_in_place(
    src: Path,
    fast: bool,
    mode: Mode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
    try:
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
        return False
    except JSONDecodeError:
        raise ValueError(
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
        ) from None

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer, encoding=encoding, newline=newline,
                write_through=True,
            )
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
            f.detach()

    return True

def format_stdin_to_stdout(
    fast: bool,
    *,
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    mode: Mode,
) -> bool:
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    else:
        src, encoding, newline = content, "utf-8", ""

    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode)
        return True
    except NothingChanged:
        return False
    finally:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
                dst += "\n"
            f.write(dst)
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                d = color_diff(d)
                f = wrap_stream_for_windows(f)
            f.write(d)
        f.detach()

def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)

    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    dst_contents_pass2 = format_str(dst_contents, mode=mode)
    if dst_contents != dst_contents_pass2:
        dst_contents = dst_contents_pass2
        assert_equivalent(src_contents, dst_contents, pass_num=2)
        assert_stable(src_contents, dst_contents, mode=mode)
    # Note: no need to explicitly call `assert_stable` if `dst_contents` was
    # the same as `dst_contents_pass2`.

def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    if not src_contents.strip():
        raise NothingChanged

    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
    return dst_contents
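
# Illustrative doctest-style sketch (not part of the original module): plain
# source text is reformatted and returned, while input that is already formatted
# raises NothingChanged.
#
#   >>> format_file_contents("x=1", fast=True, mode=Mode())
#   'x = 1\n'
#   >>> format_file_contents("x = 1\n", fast=True, mode=Mode())
#   Traceback (most recent call last):
#     ...
#   black.report.NothingChanged
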
def validate_cell(src: str) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenizer_rt to break because of
    indentations.

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    """
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
    if src[:2] == "%%" and src.split()[0][2:] not in PYTHON_CELL_MAGICS:
        raise NothingChanged

def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    General idea is:

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - format cell;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        src
    )
    try:
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
    except SyntaxError:
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    )
    dst = dst.rstrip("\n")
    if dst == src:
        raise NothingChanged from None
    return dst
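
# Illustrative sketch (not part of the original module; assumes the Jupyter
# extras, i.e. IPython and tokenize-rt, are installed): per the steps above, a
# trailing semicolon is expected to survive the round trip while the cell body
# itself is reformatted.
#
#   >>> format_cell("x=1;", fast=True, mode=Mode())
#   'x = 1;'
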
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None

def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    trailing_newline = src_contents[-1] == "\n"
    modified = False
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            try:
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                pass
            else:
                cell["source"] = dst.splitlines(keepends=True)
                modified = True
    if modified:
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        return dst_contents
    else:
        raise NothingChanged

def format_str(src_contents: str, *, mode: Mode) -> FileContent:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed.  Example:

    >>> import black
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    >>> print(
    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       string_normalization=False,
    ...     ),
    ...   ),
    ... )
    def f(arg: str = '') -> None:
        hey

    """
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_contents = []
    future_imports = get_future_imports(src_node)
    if mode.target_versions:
        versions = mode.target_versions
    else:
        versions = detect_target_versions(src_node, future_imports=future_imports)

    normalize_fmt_off(src_node)
    lines = LineGenerator(mode=mode)
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    after = 0
    split_line_features = {
        feature
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    }
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            dst_contents.append(str(line))
    return "".join(dst_contents)
def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
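
# Illustrative doctest-style sketch (not part of the original module): CRLF input
# is decoded with universal newlines, but the original newline style is reported
# so it can be written back unchanged.
#
#   >>> decode_bytes(b"x = 1\r\n")
#   ('x = 1\n', 'utf-8', '\r\n')
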
def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    """
    features: Set[Feature] = set()
    if future_imports:
        features |= {
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
        }

    for n in node.pre_order():
        if is_string_token(n):
            value_head = n.value[:2]
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)

        elif n.type == token.NUMBER:
            assert isinstance(n, Leaf)
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                syms.typedargslist,
                syms.arglist,
                syms.varargslist,
            }:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                n.children[1]
            ):
                features.add(Feature.RELAXED_DECORATORS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

        elif (
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
        ):
            features.add(Feature.UNPACKING_ON_FLOW)

        elif (
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
        ):
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

    return features

def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
    return {
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
    }
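
# Illustrative doctest-style sketch (not part of the original module): feature
# detection drives target-version auto-detection, e.g. a walrus expression rules
# out anything older than Python 3.8.
#
#   >>> node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
#   >>> Feature.ASSIGNMENT_EXPRESSIONS in get_features_used(node)
#   True
#   >>> TargetVersion.PY37 in detect_target_versions(node)
#   False
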
def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

            else:
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
            break

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue

            break
        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break

            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break

    return imports
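
# Illustrative doctest-style sketch (not part of the original module): only
# leading __future__ imports are collected; anything after the first regular
# statement is ignored.
#
#   >>> get_future_imports(lib2to3_parse("from __future__ import annotations\n"))
#   {'annotations'}
#   >>> get_future_imports(lib2to3_parse("x = 1\nfrom __future__ import annotations\n"))
#   set()
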
def assert_equivalent(src: str, dst: str, *, pass_num: int = 1) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            f"cannot use --safe with this file; failed to parse source file: {exc}"
        ) from exc

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code on pass {pass_num}: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            f" source on pass {pass_num}. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"
        ) from None

def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    newdst = format_str(dst, mode=mode)
    if dst != newdst:
        log = dump_to_file(
            str(mode),
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        )
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
        ) from None

@contextmanager
def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `nullcontext` in Python 3.7.
    """
    yield

def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In case of Black the likelihood that non-ASCII characters are going to be used in
    file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    try:
        from click import core
        from click import _unicodefun
    except ModuleNotFoundError:
        return

    for module in (core, _unicodefun):
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore

def patched_main() -> None:
    maybe_install_uvloop()
    freeze_support()
    patch_click()
    main()


if __name__ == "__main__":
    patched_main()