All patches and comments are welcome. Please squash your changes into logical
commits before using git-format-patch and git-send-email to
patches@git.madduck.net.
If you could read over the Git project's submission guidelines and adhere to
them, I'd be especially grateful.
2 from json.decoder import JSONDecodeError
4 from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
5 from contextlib import contextmanager
6 from datetime import datetime
9 from multiprocessing import Manager, freeze_support
11 from pathlib import Path
12 from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
35 from click.core import ParameterSource
36 from dataclasses import replace
37 from mypy_extensions import mypyc_attr
39 from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
40 from black.const import STDIN_PLACEHOLDER
41 from black.nodes import STARS, syms, is_simple_decorator_expression
42 from black.nodes import is_string_token
43 from black.lines import Line, EmptyLineTracker
44 from black.linegen import transform_line, LineGenerator, LN
45 from black.comments import normalize_fmt_off
46 from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
47 from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
48 from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
49 from black.concurrency import cancel, shutdown, maybe_install_uvloop
50 from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
51 from black.report import Report, Changed, NothingChanged
52 from black.files import find_project_root, find_pyproject_toml, parse_pyproject_toml
53 from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
54 from black.files import wrap_stream_for_windows
55 from black.parsing import InvalidInput # noqa F401
56 from black.parsing import lib2to3_parse, parse_ast, stringify_ast
57 from black.handle_ipynb_magics import (
60 remove_trailing_semicolon,
61 put_trailing_semicolon_back,
64 jupyter_dependencies_are_installed,
69 from blib2to3.pytree import Node, Leaf
70 from blib2to3.pgen2 import token
72 from _black_version import version as __version__
74 COMPILED = Path(__file__).suffix in (".pyd", ".so")
82 class WriteBack(Enum):
90 def from_configuration(
91 cls, *, check: bool, diff: bool, color: bool = False
93 if check and not diff:
99 return cls.DIFF if diff else cls.YES
102 # Legacy name, left for integrations.
105 DEFAULT_WORKERS = os.cpu_count()
108 def read_pyproject_toml(
109 ctx: click.Context, param: click.Parameter, value: Optional[str]
111 """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
113 Returns the path to a successfully found and read configuration file, None
117 value = find_pyproject_toml(ctx.params.get("src", ()))
122 config = parse_pyproject_toml(value)
123 except (OSError, ValueError) as e:
124 raise click.FileError(
125 filename=value, hint=f"Error reading configuration file: {e}"
131 # Sanitize the values to be Click friendly. For more information please see:
132 # https://github.com/psf/black/issues/1458
133 # https://github.com/pallets/click/issues/1567
135 k: str(v) if not isinstance(v, (list, dict)) else v
136 for k, v in config.items()
139 target_version = config.get("target_version")
140 if target_version is not None and not isinstance(target_version, list):
141 raise click.BadOptionUsage(
142 "target-version", "Config key target-version must be a list"
145 default_map: Dict[str, Any] = {}
147 default_map.update(ctx.default_map)
148 default_map.update(config)
150 ctx.default_map = default_map
154 def target_version_option_callback(
155 c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
156 ) -> List[TargetVersion]:
157 """Compute the target versions from a --target-version flag.
159 This is its own function because mypy couldn't infer the type correctly
160 when it was a lambda, causing mypyc trouble.
162 return [TargetVersion[val.upper()] for val in v]
165 def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
166 """Compile a regular expression string in `regex`.
168 If it contains newlines, use verbose mode.
171 regex = "(?x)" + regex
172 compiled: Pattern[str] = re.compile(regex)
178 param: click.Parameter,
179 value: Optional[str],
180 ) -> Optional[Pattern[str]]:
182 return re_compile_maybe_verbose(value) if value is not None else None
183 except re.error as e:
184 raise click.BadParameter(f"Not a valid regular expression: {e}") from None
188 context_settings={"help_option_names": ["-h", "--help"]},
189 # While Click does set this field automatically using the docstring, mypyc
190 # (annoyingly) strips 'em so we need to set it here too.
191 help="The uncompromising code formatter.",
193 @click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
198 default=DEFAULT_LINE_LENGTH,
199 help="How many characters per line to allow.",
205 type=click.Choice([v.name.lower() for v in TargetVersion]),
206 callback=target_version_option_callback,
209 "Python versions that should be supported by Black's output. [default: per-file"
217 "Format all input files like typing stubs regardless of file extension (useful"
218 " when piping source on standard input)."
225 "Format all input files like Jupyter Notebooks regardless of file extension "
226 "(useful when piping source on standard input)."
230 "--python-cell-magics",
233 "When processing Jupyter Notebooks, add the given magic to the list"
234 f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
235 " Useful for formatting cells with custom python magics."
241 "--skip-string-normalization",
243 help="Don't normalize string quotes or prefixes.",
247 "--skip-magic-trailing-comma",
249 help="Don't use trailing commas as a reason to split lines.",
252 "--experimental-string-processing",
255 help="(DEPRECATED and now included in --preview) Normalize string literals.",
261 "Enable potentially disruptive style changes that will be added to Black's main"
262 " functionality in the next major release."
269 "Don't write the files back, just return the status. Return code 0 means"
270 " nothing would change. Return code 1 means some files would be reformatted."
271 " Return code 123 means there was an internal error."
277 help="Don't write the files back, just output a diff for each file on stdout.",
280 "--color/--no-color",
282 help="Show colored diff. Only applies when `--diff` is given.",
287 help="If --fast given, skip temporary sanity checks. [default: --safe]",
290 "--required-version",
293 "Require a specific version of Black to be running (useful for unifying results"
294 " across many environments e.g. with a pyproject.toml file)."
300 default=DEFAULT_INCLUDES,
301 callback=validate_regex,
303 "A regular expression that matches files and directories that should be"
304 " included on recursive searches. An empty value means all files are included"
305 " regardless of the name. Use forward slashes for directories on all platforms"
306 " (Windows, too). Exclusions are calculated first, inclusions later."
313 callback=validate_regex,
315 "A regular expression that matches files and directories that should be"
316 " excluded on recursive searches. An empty value means no paths are excluded."
317 " Use forward slashes for directories on all platforms (Windows, too)."
318 " Exclusions are calculated first, inclusions later. [default:"
319 f" {DEFAULT_EXCLUDES}]"
326 callback=validate_regex,
328 "Like --exclude, but adds additional files and directories on top of the"
329 " excluded ones. (Useful if you simply want to add to the default)"
335 callback=validate_regex,
337 "Like --exclude, but files and directories matching this regex will be "
338 "excluded even when they are passed explicitly as arguments."
345 "The name of the file when passing it through stdin. Useful to make "
346 "sure Black will respect --force-exclude option on some "
347 "editors that rely on using stdin."
353 type=click.IntRange(min=1),
354 default=DEFAULT_WORKERS,
356 help="Number of parallel workers",
363 "Don't emit non-error messages to stderr. Errors are still emitted; silence"
364 " those with 2>/dev/null."
372 "Also emit messages to stderr about files that were not changed or were ignored"
373 " due to exclusion patterns."
376 @click.version_option(
378 message=f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})",
384 exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
400 callback=read_pyproject_toml,
401 help="Read configuration from FILE path.",
408 target_version: List[TargetVersion],
415 python_cell_magics: Sequence[str],
416 skip_string_normalization: bool,
417 skip_magic_trailing_comma: bool,
418 experimental_string_processing: bool,
422 required_version: Optional[str],
423 include: Pattern[str],
424 exclude: Optional[Pattern[str]],
425 extend_exclude: Optional[Pattern[str]],
426 force_exclude: Optional[Pattern[str]],
427 stdin_filename: Optional[str],
429 src: Tuple[str, ...],
430 config: Optional[str],
432 """The uncompromising code formatter."""
433 ctx.ensure_object(dict)
434 root, method = find_project_root(src) if code is None else (None, None)
435 ctx.obj["root"] = root
440 f"Identified `{root}` as project root containing a {method}.",
445 (normalize_path_maybe_ignore(Path(source), root), source)
448 srcs_string = ", ".join(
452 else f'\033[31m"{source} (skipping - invalid)"\033[34m'
453 for _norm, source in normalized
456 out(f"Sources to be formatted: {srcs_string}", fg="blue")
459 config_source = ctx.get_parameter_source("config")
460 if config_source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP):
461 out("Using configuration from project root.", fg="blue")
463 out(f"Using configuration in '{config}'.", fg="blue")
465 error_msg = "Oh no! 💥 💔 💥"
466 if required_version and required_version != __version__:
468 f"{error_msg} The required version `{required_version}` does not match"
469 f" the running version `{__version__}`!"
473 err("Cannot pass both `pyi` and `ipynb` flags!")
476 write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
478 versions = set(target_version)
480 # We'll autodetect later.
483 target_versions=versions,
484 line_length=line_length,
487 string_normalization=not skip_string_normalization,
488 magic_trailing_comma=not skip_magic_trailing_comma,
489 experimental_string_processing=experimental_string_processing,
491 python_cell_magics=set(python_cell_magics),
495 # Run in quiet mode by default with -c; the extra output isn't useful.
496 # You can still pass -v to get verbose output.
499 report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
503 content=code, fast=fast, write_back=write_back, mode=mode, report=report
507 sources = get_sources(
514 extend_exclude=extend_exclude,
515 force_exclude=force_exclude,
517 stdin_filename=stdin_filename,
519 except GitWildMatchPatternError:
524 "No Python files are present to be formatted. Nothing to do 😴",
530 if len(sources) == 1:
534 write_back=write_back,
542 write_back=write_back,
548 if verbose or not quiet:
549 if code is None and (verbose or report.change_count or report.failure_count):
551 out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
553 click.echo(str(report), err=True)
554 ctx.exit(report.return_code)
560 src: Tuple[str, ...],
563 include: Pattern[str],
564 exclude: Optional[Pattern[str]],
565 extend_exclude: Optional[Pattern[str]],
566 force_exclude: Optional[Pattern[str]],
568 stdin_filename: Optional[str],
570 """Compute the set of files to be formatted."""
571 sources: Set[Path] = set()
572 path_empty(src, "No Path provided. Nothing to do 😴", quiet, verbose, ctx)
575 exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
576 gitignore = get_gitignore(ctx.obj["root"])
581 if s == "-" and stdin_filename:
582 p = Path(stdin_filename)
588 if is_stdin or p.is_file():
589 normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
590 if normalized_path is None:
593 normalized_path = "/" + normalized_path
594 # Hard-exclude any files that matches the `--force-exclude` regex.
596 force_exclude_match = force_exclude.search(normalized_path)
598 force_exclude_match = None
599 if force_exclude_match and force_exclude_match.group(0):
600 report.path_ignored(p, "matches the --force-exclude regular expression")
604 p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
606 if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
607 verbose=verbose, quiet=quiet
630 err(f"invalid path: {s}")
635 src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
638 Exit if there is no `src` provided for formatting
641 if verbose or not quiet:
647 content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
650 Reformat and print out `content` without spawning child processes.
651 Similar to `reformat_one`, but for string content.
653 `fast`, `write_back`, and `mode` options are passed to
654 :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
656 path = Path("<string>")
659 if format_stdin_to_stdout(
660 content=content, fast=fast, write_back=write_back, mode=mode
662 changed = Changed.YES
663 report.done(path, changed)
664 except Exception as exc:
666 traceback.print_exc()
667 report.failed(path, str(exc))
671 src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
673 """Reformat a single file under `src` without spawning child processes.
675 `fast`, `write_back`, and `mode` options are passed to
676 :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
683 elif str(src).startswith(STDIN_PLACEHOLDER):
685 # Use the original name again in case we want to print something
687 src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
692 if src.suffix == ".pyi":
693 mode = replace(mode, is_pyi=True)
694 elif src.suffix == ".ipynb":
695 mode = replace(mode, is_ipynb=True)
696 if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
697 changed = Changed.YES
700 if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
701 cache = read_cache(mode)
702 res_src = src.resolve()
703 res_src_s = str(res_src)
704 if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
705 changed = Changed.CACHED
706 if changed is not Changed.CACHED and format_file_in_place(
707 src, fast=fast, write_back=write_back, mode=mode
709 changed = Changed.YES
710 if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
711 write_back is WriteBack.CHECK and changed is Changed.NO
713 write_cache(cache, [src], mode)
714 report.done(src, changed)
715 except Exception as exc:
717 traceback.print_exc()
718 report.failed(src, str(exc))
721 # diff-shades depends on being to monkeypatch this function to operate. I know it's
722 # not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
723 @mypyc_attr(patchable=True)
727 write_back: WriteBack,
730 workers: Optional[int],
732 """Reformat multiple files using a ProcessPoolExecutor."""
734 loop = asyncio.get_event_loop()
735 worker_count = workers if workers is not None else DEFAULT_WORKERS
736 if sys.platform == "win32":
737 # Work around https://bugs.python.org/issue26903
738 assert worker_count is not None
739 worker_count = min(worker_count, 60)
741 executor = ProcessPoolExecutor(max_workers=worker_count)
742 except (ImportError, NotImplementedError, OSError):
743 # we arrive here if the underlying system does not support multi-processing
744 # like in AWS Lambda or Termux, in which case we gracefully fallback to
745 # a ThreadPoolExecutor with just a single worker (more workers would not do us
746 # any good due to the Global Interpreter Lock)
747 executor = ThreadPoolExecutor(max_workers=1)
750 loop.run_until_complete(
754 write_back=write_back,
763 if executor is not None:
767 async def schedule_formatting(
770 write_back: WriteBack,
773 loop: asyncio.AbstractEventLoop,
776 """Run formatting of `sources` in parallel using the provided `executor`.
778 (Use ProcessPoolExecutors for actual parallelism.)
780 `write_back`, `fast`, and `mode` options are passed to
781 :func:`format_file_in_place`.
784 if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
785 cache = read_cache(mode)
786 sources, cached = filter_cached(cache, sources)
787 for src in sorted(cached):
788 report.done(src, Changed.CACHED)
793 sources_to_cache = []
795 if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
796 # For diff output, we need locks to ensure we don't interleave output
797 # from different processes.
799 lock = manager.Lock()
801 asyncio.ensure_future(
802 loop.run_in_executor(
803 executor, format_file_in_place, src, fast, mode, write_back, lock
806 for src in sorted(sources)
808 pending = tasks.keys()
810 loop.add_signal_handler(signal.SIGINT, cancel, pending)
811 loop.add_signal_handler(signal.SIGTERM, cancel, pending)
812 except NotImplementedError:
813 # There are no good alternatives for these on Windows.
816 done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
818 src = tasks.pop(task)
820 cancelled.append(task)
821 elif task.exception():
822 report.failed(src, str(task.exception()))
824 changed = Changed.YES if task.result() else Changed.NO
825 # If the file was written back or was successfully checked as
826 # well-formatted, store this information in the cache.
827 if write_back is WriteBack.YES or (
828 write_back is WriteBack.CHECK and changed is Changed.NO
830 sources_to_cache.append(src)
831 report.done(src, changed)
833 if sys.version_info >= (3, 7):
834 await asyncio.gather(*cancelled, return_exceptions=True)
836 await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
838 write_cache(cache, sources_to_cache, mode)
841 def format_file_in_place(
845 write_back: WriteBack = WriteBack.NO,
846 lock: Any = None, # multiprocessing.Manager().Lock() is some crazy proxy
848 """Format file under `src` path. Return True if changed.
850 If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
852 `mode` and `fast` options are passed to :func:`format_file_contents`.
854 if src.suffix == ".pyi":
855 mode = replace(mode, is_pyi=True)
856 elif src.suffix == ".ipynb":
857 mode = replace(mode, is_ipynb=True)
859 then = datetime.utcfromtimestamp(src.stat().st_mtime)
860 with open(src, "rb") as buf:
861 src_contents, encoding, newline = decode_bytes(buf.read())
863 dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
864 except NothingChanged:
866 except JSONDecodeError:
868 f"File '{src}' cannot be parsed as valid Jupyter notebook."
871 if write_back == WriteBack.YES:
872 with open(src, "w", encoding=encoding, newline=newline) as f:
873 f.write(dst_contents)
874 elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
875 now = datetime.utcnow()
876 src_name = f"{src}\t{then} +0000"
877 dst_name = f"{src}\t{now} +0000"
879 diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
881 diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
883 if write_back == WriteBack.COLOR_DIFF:
884 diff_contents = color_diff(diff_contents)
886 with lock or nullcontext():
887 f = io.TextIOWrapper(
893 f = wrap_stream_for_windows(f)
894 f.write(diff_contents)
900 def format_stdin_to_stdout(
903 content: Optional[str] = None,
904 write_back: WriteBack = WriteBack.NO,
907 """Format file on stdin. Return True if changed.
909 If content is None, it's read from sys.stdin.
911 If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
912 write a diff to stdout. The `mode` argument is passed to
913 :func:`format_file_contents`.
915 then = datetime.utcnow()
918 src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
920 src, encoding, newline = content, "utf-8", ""
924 dst = format_file_contents(src, fast=fast, mode=mode)
927 except NothingChanged:
931 f = io.TextIOWrapper(
932 sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
934 if write_back == WriteBack.YES:
935 # Make sure there's a newline after the content
936 if dst and dst[-1] != "\n":
939 elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
940 now = datetime.utcnow()
941 src_name = f"STDIN\t{then} +0000"
942 dst_name = f"STDOUT\t{now} +0000"
943 d = diff(src, dst, src_name, dst_name)
944 if write_back == WriteBack.COLOR_DIFF:
946 f = wrap_stream_for_windows(f)
951 def check_stability_and_equivalence(
952 src_contents: str, dst_contents: str, *, mode: Mode
954 """Perform stability and equivalence checks.
956 Raise AssertionError if source and destination contents are not
957 equivalent, or if a second pass of the formatter would format the
960 assert_equivalent(src_contents, dst_contents)
962 # Forced second pass to work around optional trailing commas (becoming
963 # forced trailing commas on pass 2) interacting differently with optional
964 # parentheses. Admittedly ugly.
965 dst_contents_pass2 = format_str(dst_contents, mode=mode)
966 if dst_contents != dst_contents_pass2:
967 dst_contents = dst_contents_pass2
968 assert_equivalent(src_contents, dst_contents, pass_num=2)
969 assert_stable(src_contents, dst_contents, mode=mode)
970 # Note: no need to explicitly call `assert_stable` if `dst_contents` was
971 # the same as `dst_contents_pass2`.
974 def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
975 """Reformat contents of a file and return new contents.
977 If `fast` is False, additionally confirm that the reformatted code is
978 valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
979 `mode` is passed to :func:`format_str`.
981 if not src_contents.strip():
985 dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
987 dst_contents = format_str(src_contents, mode=mode)
988 if src_contents == dst_contents:
991 if not fast and not mode.is_ipynb:
992 # Jupyter notebooks will already have been checked above.
993 check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
997 def validate_cell(src: str, mode: Mode) -> None:
998 """Check that cell does not already contain TransformerManager transformations,
999 or non-Python cell magics, which might cause tokenizer_rt to break because of
1002 If a cell contains ``!ls``, then it'll be transformed to
1003 ``get_ipython().system('ls')``. However, if the cell originally contained
1004 ``get_ipython().system('ls')``, then it would get transformed in the same way:
1006 >>> TransformerManager().transform_cell("get_ipython().system('ls')")
1007 "get_ipython().system('ls')\n"
1008 >>> TransformerManager().transform_cell("!ls")
1009 "get_ipython().system('ls')\n"
1011 Due to the impossibility of safely roundtripping in such situations, cells
1012 containing transformed magics will be ignored.
1014 if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
1015 raise NothingChanged
1018 and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
1020 raise NothingChanged
1023 def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
1024 """Format code in given cell of Jupyter notebook.
1028 - if cell has trailing semicolon, remove it;
1029 - if cell has IPython magics, mask them;
1031 - reinstate IPython magics;
1032 - reinstate trailing semicolon (if originally present);
1033 - strip trailing newlines.
1035 Cells with syntax errors will not be processed, as they
1036 could potentially be automagics or multi-line magics, which
1037 are currently not supported.
1039 validate_cell(src, mode)
1040 src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
1044 masked_src, replacements = mask_cell(src_without_trailing_semicolon)
1046 raise NothingChanged from None
1047 masked_dst = format_str(masked_src, mode=mode)
1049 check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
1050 dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
1051 dst = put_trailing_semicolon_back(
1052 dst_without_trailing_semicolon, has_trailing_semicolon
1054 dst = dst.rstrip("\n")
1056 raise NothingChanged from None
1060 def validate_metadata(nb: MutableMapping[str, Any]) -> None:
1061 """If notebook is marked as non-Python, don't format it.
1063 All notebook metadata fields are optional, see
1064 https://nbformat.readthedocs.io/en/latest/format_description.html. So
1065 if a notebook has empty metadata, we will try to parse it anyway.
1067 language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
1068 if language is not None and language != "python":
1069 raise NothingChanged from None
1072 def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
1073 """Format Jupyter notebook.
1075 Operate cell-by-cell, only on code cells, only for Python notebooks.
1076 If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
1078 trailing_newline = src_contents[-1] == "\n"
1080 nb = json.loads(src_contents)
1081 validate_metadata(nb)
1082 for cell in nb["cells"]:
1083 if cell.get("cell_type", None) == "code":
1085 src = "".join(cell["source"])
1086 dst = format_cell(src, fast=fast, mode=mode)
1087 except NothingChanged:
1090 cell["source"] = dst.splitlines(keepends=True)
1093 dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
1094 if trailing_newline:
1095 dst_contents = dst_contents + "\n"
1098 raise NothingChanged
1101 def format_str(src_contents: str, *, mode: Mode) -> FileContent:
1102 """Reformat a string and return new contents.
1104 `mode` determines formatting options, such as how many characters per line are
1108 >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
1109 def f(arg: str = "") -> None:
1112 A more complex example:
1115 ... black.format_str(
1116 ... "def f(arg:str='')->None: hey",
1117 ... mode=black.Mode(
1118 ... target_versions={black.TargetVersion.PY36},
1120 ... string_normalization=False,
1131 src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
1133 future_imports = get_future_imports(src_node)
1134 if mode.target_versions:
1135 versions = mode.target_versions
1137 versions = detect_target_versions(src_node, future_imports=future_imports)
1139 normalize_fmt_off(src_node)
1140 lines = LineGenerator(mode=mode)
1141 elt = EmptyLineTracker(is_pyi=mode.is_pyi)
1142 empty_line = Line(mode=mode)
1144 split_line_features = {
1146 for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
1147 if supports_feature(versions, feature)
1149 for current_line in lines.visit(src_node):
1150 dst_contents.append(str(empty_line) * after)
1151 before, after = elt.maybe_empty_lines(current_line)
1152 dst_contents.append(str(empty_line) * before)
1153 for line in transform_line(
1154 current_line, mode=mode, features=split_line_features
1156 dst_contents.append(str(line))
1157 return "".join(dst_contents)
1160 def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
1161 """Return a tuple of (decoded_contents, encoding, newline).
1163 `newline` is either CRLF or LF but `decoded_contents` is decoded with
1164 universal newlines (i.e. only contains LF).
1166 srcbuf = io.BytesIO(src)
1167 encoding, lines = tokenize.detect_encoding(srcbuf.readline)
1169 return "", encoding, "\n"
1171 newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
1173 with io.TextIOWrapper(srcbuf, encoding) as tiow:
1174 return tiow.read(), encoding, newline
1177 def get_features_used( # noqa: C901
1178 node: Node, *, future_imports: Optional[Set[str]] = None
1180 """Return a set of (relatively) new Python features used in this file.
1182 Currently looking for:
1184 - underscores in numeric literals;
1185 - trailing commas after * or ** in function signatures and calls;
1186 - positional only arguments in function signatures and lambdas;
1187 - assignment expression;
1188 - relaxed decorator syntax;
1189 - usage of __future__ flags (annotations);
1190 - print / exec statements;
1192 features: Set[Feature] = set()
1195 FUTURE_FLAG_TO_FEATURE[future_import]
1196 for future_import in future_imports
1197 if future_import in FUTURE_FLAG_TO_FEATURE
1200 for n in node.pre_order():
1201 if is_string_token(n):
1202 value_head = n.value[:2]
1203 if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
1204 features.add(Feature.F_STRINGS)
1206 elif n.type == token.NUMBER:
1207 assert isinstance(n, Leaf)
1209 features.add(Feature.NUMERIC_UNDERSCORES)
1211 elif n.type == token.SLASH:
1212 if n.parent and n.parent.type in {
1217 features.add(Feature.POS_ONLY_ARGUMENTS)
1219 elif n.type == token.COLONEQUAL:
1220 features.add(Feature.ASSIGNMENT_EXPRESSIONS)
1222 elif n.type == syms.decorator:
1223 if len(n.children) > 1 and not is_simple_decorator_expression(
1226 features.add(Feature.RELAXED_DECORATORS)
1229 n.type in {syms.typedargslist, syms.arglist}
1231 and n.children[-1].type == token.COMMA
1233 if n.type == syms.typedargslist:
1234 feature = Feature.TRAILING_COMMA_IN_DEF
1236 feature = Feature.TRAILING_COMMA_IN_CALL
1238 for ch in n.children:
1239 if ch.type in STARS:
1240 features.add(feature)
1242 if ch.type == syms.argument:
1243 for argch in ch.children:
1244 if argch.type in STARS:
1245 features.add(feature)
1248 n.type in {syms.return_stmt, syms.yield_expr}
1249 and len(n.children) >= 2
1250 and n.children[1].type == syms.testlist_star_expr
1251 and any(child.type == syms.star_expr for child in n.children[1].children)
1253 features.add(Feature.UNPACKING_ON_FLOW)
1256 n.type == syms.annassign
1257 and len(n.children) >= 4
1258 and n.children[3].type == syms.testlist_star_expr
1260 features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
1265 def detect_target_versions(
1266 node: Node, *, future_imports: Optional[Set[str]] = None
1267 ) -> Set[TargetVersion]:
1268 """Detect the version to target based on the nodes used."""
1269 features = get_features_used(node, future_imports=future_imports)
1271 version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
1275 def get_future_imports(node: Node) -> Set[str]:
1276 """Return a set of __future__ imports in the file."""
1277 imports: Set[str] = set()
1279 def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
1280 for child in children:
1281 if isinstance(child, Leaf):
1282 if child.type == token.NAME:
1285 elif child.type == syms.import_as_name:
1286 orig_name = child.children[0]
1287 assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
1288 assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
1289 yield orig_name.value
1291 elif child.type == syms.import_as_names:
1292 yield from get_imports_from_children(child.children)
1295 raise AssertionError("Invalid syntax parsing imports")
1297 for child in node.children:
1298 if child.type != syms.simple_stmt:
1301 first_child = child.children[0]
1302 if isinstance(first_child, Leaf):
1303 # Continue looking if we see a docstring; otherwise stop.
1305 len(child.children) == 2
1306 and first_child.type == token.STRING
1307 and child.children[1].type == token.NEWLINE
1313 elif first_child.type == syms.import_from:
1314 module_name = first_child.children[1]
1315 if not isinstance(module_name, Leaf) or module_name.value != "__future__":
1318 imports |= set(get_imports_from_children(first_child.children[3:]))
1325 def assert_equivalent(src: str, dst: str, *, pass_num: int = 1) -> None:
1326 """Raise AssertionError if `src` and `dst` aren't equivalent."""
1328 src_ast = parse_ast(src)
1329 except Exception as exc:
1330 raise AssertionError(
1331 f"cannot use --safe with this file; failed to parse source file AST: "
1333 f"This could be caused by running Black with an older Python version "
1334 f"that does not support new syntax used in your source file."
1338 dst_ast = parse_ast(dst)
1339 except Exception as exc:
1340 log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
1341 raise AssertionError(
1342 f"INTERNAL ERROR: Black produced invalid code on pass {pass_num}: {exc}. "
1343 "Please report a bug on https://github.com/psf/black/issues. "
1344 f"This invalid output might be helpful: {log}"
1347 src_ast_str = "\n".join(stringify_ast(src_ast))
1348 dst_ast_str = "\n".join(stringify_ast(dst_ast))
1349 if src_ast_str != dst_ast_str:
1350 log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
1351 raise AssertionError(
1352 "INTERNAL ERROR: Black produced code that is not equivalent to the"
1353 f" source on pass {pass_num}. Please report a bug on "
1354 f"https://github.com/psf/black/issues. This diff might be helpful: {log}"
1358 def assert_stable(src: str, dst: str, mode: Mode) -> None:
1359 """Raise AssertionError if `dst` reformats differently the second time."""
1360 newdst = format_str(dst, mode=mode)
1364 diff(src, dst, "source", "first pass"),
1365 diff(dst, newdst, "first pass", "second pass"),
1367 raise AssertionError(
1368 "INTERNAL ERROR: Black produced different code on the second pass of the"
1369 " formatter. Please report a bug on https://github.com/psf/black/issues."
1370 f" This diff might be helpful: {log}"
1375 def nullcontext() -> Iterator[None]:
1376 """Return an empty context manager.
1378 To be used like `nullcontext` in Python 3.7.
1383 def patch_click() -> None:
1384 """Make Click not crash on Python 3.6 with LANG=C.
1386 On certain misconfigured environments, Python 3 selects the ASCII encoding as the
1387 default which restricts paths that it can access during the lifetime of the
1388 application. Click refuses to work in this scenario by raising a RuntimeError.
1390 In case of Black the likelihood that non-ASCII characters are going to be used in
1391 file paths is minimal since it's Python source code. Moreover, this crash was
1392 spurious on Python 3.7 thanks to PEP 538 and PEP 540.
1395 from click import core
1396 from click import _unicodefun
1397 except ModuleNotFoundError:
1400 for module in (core, _unicodefun):
1401 if hasattr(module, "_verify_python3_env"):
1402 module._verify_python3_env = lambda: None # type: ignore
1403 if hasattr(module, "_verify_python_env"):
1404 module._verify_python_env = lambda: None # type: ignore
1407 def patched_main() -> None:
1408 maybe_install_uvloop()
1414 if __name__ == "__main__":