import asyncio
import io
import json
import os
import re
import signal
import sys
import tokenize
import traceback
from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
from contextlib import contextmanager
from dataclasses import replace
from datetime import datetime
from enum import Enum
from json.decoder import JSONDecodeError
from multiprocessing import Manager, freeze_support
from pathlib import Path
from typing import (
    Any,
    Dict,
    Generator,
    Iterator,
    List,
    MutableMapping,
    Optional,
    Pattern,
    Set,
    Sized,
    Tuple,
    Union,
)

import click
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
from mypy_extensions import mypyc_attr

from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
from black.const import STDIN_PLACEHOLDER
from black.nodes import STARS, syms, is_simple_decorator_expression
from black.lines import Line, EmptyLineTracker
from black.linegen import transform_line, LineGenerator, LN
from black.comments import normalize_fmt_off
from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
from black.concurrency import cancel, shutdown, maybe_install_uvloop
from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
from black.report import Report, Changed, NothingChanged
from black.files import find_project_root, find_pyproject_toml, parse_pyproject_toml
from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
from black.files import wrap_stream_for_windows
from black.parsing import InvalidInput  # noqa F401
from black.parsing import lib2to3_parse, parse_ast, stringify_ast
from black.handle_ipynb_magics import (
    mask_cell,
    unmask_cell,
    remove_trailing_semicolon,
    put_trailing_semicolon_back,
    jupyter_dependencies_are_installed,
    TRANSFORMED_MAGICS,
    PYTHON_CELL_MAGICS,
)

from blib2to3.pytree import Node, Leaf
from blib2to3.pgen2 import token

from _black_version import version as __version__


COMPILED = Path(__file__).suffix in (".pyd", ".so")

# types
FileContent = str
Encoding = str
NewLine = str


class WriteBack(Enum):
    NO = 0
    YES = 1
    DIFF = 2
    CHECK = 3
    COLOR_DIFF = 4

    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        if check and not diff:
            return cls.CHECK
        if diff and color:
            return cls.COLOR_DIFF
        return cls.DIFF if diff else cls.YES


# Legacy name, left for integrations.
FileMode = Mode
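
# Illustrative mapping from CLI flags to write-back behaviour (a sketch mirroring
# the from_configuration() logic above; values are examples, not a doctest):
#
#     WriteBack.from_configuration(check=True, diff=False)              -> WriteBack.CHECK
#     WriteBack.from_configuration(check=False, diff=True, color=True)  -> WriteBack.COLOR_DIFF
#     WriteBack.from_configuration(check=False, diff=True)              -> WriteBack.DIFF
#     WriteBack.from_configuration(check=False, diff=False)             -> WriteBack.YES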


DEFAULT_WORKERS = os.cpu_count()


def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: Optional[str]
) -> Optional[str]:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    if not value:
        value = find_pyproject_toml(ctx.params.get("src", ()))
        if value is None:
            return None

    try:
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        ) from None

    if not config:
        return None
    else:
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
        config = {
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()
        }

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
        )

    default_map: Dict[str, Any] = {}
    if ctx.default_map:
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
    return value
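
# A sketch of the kind of configuration this callback injects, assuming a
# pyproject.toml next to the sources (illustrative values; key names mirror the
# CLI options declared below):
#
#     [tool.black]
#     line-length = 88
#     target-version = ["py37", "py38"]
#     include = '\.pyi?$'
#     extend-exclude = '/(build|dist)/'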


def target_version_option_callback(
    c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
) -> List[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    return [TargetVersion[val.upper()] for val in v]


def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    if "\n" in regex:
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    return compiled


def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: Optional[str],
) -> Optional[Pattern[str]]:
    try:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
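
# Doctest-style illustration of the two helpers above (a sketch, not executed here):
#
#     >>> re_compile_maybe_verbose(r"\.pyi?$").search("pkg/module.py") is not None
#     True
#     >>> bool(re_compile_maybe_verbose("foo\n| bar").flags & re.VERBOSE)
#     True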


@click.command(
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
)
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
        "Python versions that should be supported by Black's output. [default: per-file"
        "Format all input files like typing stubs regardless of file extension (useful"
        " when piping source on standard input)."
        "Format all input files like Jupyter Notebooks regardless of file extension "
        "(useful when piping source on standard input)."
    "--skip-string-normalization",
    help="Don't normalize string quotes or prefixes.",
    "--skip-magic-trailing-comma",
    help="Don't use trailing commas as a reason to split lines.",
    "--experimental-string-processing",
        "Experimental option that performs more normalization on string literals."
        " Currently disabled because it leads to some crashes."
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    help="Don't write the files back, just output a diff for each file on stdout.",
    "--color/--no-color",
    help="Show colored diff. Only applies when `--diff` is given.",
    help="If --fast given, skip temporary sanity checks. [default: --safe]",
    "--required-version",
        "Require a specific version of Black to be running (useful for unifying results"
        " across many environments e.g. with a pyproject.toml file)."
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Exclusions are calculated first, inclusions later."
    callback=validate_regex,
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " Exclusions are calculated first, inclusions later. [default:"
        f" {DEFAULT_EXCLUDES}]"
    callback=validate_regex,
        "Like --exclude, but adds additional files and directories on top of the"
        " excluded ones. (Useful if you simply want to add to the default)"
    callback=validate_regex,
        "Like --exclude, but files and directories matching this regex will be "
        "excluded even when they are passed explicitly as arguments."
        "The name of the file when passing it through stdin. Useful to make "
        "sure Black will respect --force-exclude option on some "
        "editors that rely on using stdin."
    type=click.IntRange(min=1),
    default=DEFAULT_WORKERS,
    help="Number of parallel workers",
        "Don't emit non-error messages to stderr. Errors are still emitted; silence"
        " those with 2>/dev/null."
        "Also emit messages to stderr about files that were not changed or were ignored"
        " due to exclusion patterns."
@click.version_option(
    message=f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})",
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    callback=read_pyproject_toml,
    help="Read configuration from FILE path.",
)
@click.pass_context
def main(
    ctx: click.Context,
    code: Optional[str],
    line_length: int,
    target_version: List[TargetVersion],
    check: bool,
    diff: bool,
    color: bool,
    fast: bool,
    pyi: bool,
    ipynb: bool,
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    experimental_string_processing: bool,
    quiet: bool,
    verbose: bool,
    required_version: Optional[str],
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    stdin_filename: Optional[str],
    workers: Optional[int],
    src: Tuple[str, ...],
    config: Optional[str],
) -> None:
    """The uncompromising code formatter."""
    if config and verbose:
        out(f"Using configuration from {config}.", bold=False, fg="blue")

    error_msg = "Oh no! 💥 💔 💥"
    if required_version and required_version != __version__:
        err(
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        )
        ctx.exit(1)
    if ipynb and pyi:
        err("Cannot pass both `pyi` and `ipynb` flags!")
        ctx.exit(1)

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
    if target_version:
        versions = set(target_version)
    else:
        # We'll autodetect later.
        versions = set()
    mode = Mode(
        target_versions=versions,
        line_length=line_length,
        is_pyi=pyi,
        is_ipynb=ipynb,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        experimental_string_processing=experimental_string_processing,
    )

    if code is not None:
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
        quiet = True

    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)

    if code is not None:
        reformat_code(
            content=code, fast=fast, write_back=write_back, mode=mode, report=report
        )
    else:
        try:
            sources = get_sources(
                ctx=ctx,
                src=src,
                quiet=quiet,
                verbose=verbose,
                include=include,
                exclude=exclude,
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                report=report,
                stdin_filename=stdin_filename,
            )
        except GitWildMatchPatternError:
            ctx.exit(1)

        path_empty(
            sources,
            "No Python files are present to be formatted. Nothing to do 😴",
            quiet,
            verbose,
            ctx,
        )

        if len(sources) == 1:
            reformat_one(
                src=sources.pop(),
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
            )
        else:
            reformat_many(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                workers=workers,
            )

    if verbose or not quiet:
        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        if code is None:
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)
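
# Typical invocations of this entry point from a shell (illustrative):
#
#     $ black src/                  # reformat files in place
#     $ black --check --diff src/   # report what would change without writing
#     $ echo "x=1" | black -        # read from stdin, write the result to stdout
#     $ black -c "x=1"              # format a code string passed on the command line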


def get_sources(
    *,
    ctx: click.Context,
    src: Tuple[str, ...],
    quiet: bool,
    verbose: bool,
    include: Pattern[str],
    exclude: Optional[Pattern[str]],
    extend_exclude: Optional[Pattern[str]],
    force_exclude: Optional[Pattern[str]],
    report: "Report",
    stdin_filename: Optional[str],
) -> Set[Path]:
    """Compute the set of files to be formatted."""
    root = find_project_root(src)
    sources: Set[Path] = set()
    path_empty(src, "No Path provided. Nothing to do 😴", quiet, verbose, ctx)

    if exclude is None:
        exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
        gitignore = get_gitignore(root)
    else:
        gitignore = None

    for s in src:
        if s == "-" and stdin_filename:
            p = Path(stdin_filename)
            is_stdin = True
        else:
            p = Path(s)
            is_stdin = False

        if is_stdin or p.is_file():
            normalized_path = normalize_path_maybe_ignore(p, root, report)
            if normalized_path is None:
                continue

            normalized_path = "/" + normalized_path
            # Hard-exclude any files that match the `--force-exclude` regex.
            if force_exclude:
                force_exclude_match = force_exclude.search(normalized_path)
            else:
                force_exclude_match = None
            if force_exclude_match and force_exclude_match.group(0):
                report.path_ignored(p, "matches the --force-exclude regular expression")
                continue

            if is_stdin:
                p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")

            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                verbose=verbose, quiet=quiet
            ):
                continue

            sources.add(p)
        elif p.is_dir():
            sources.update(
                gen_python_files(
                    p.iterdir(),
                    root,
                    include,
                    exclude,
                    extend_exclude,
                    force_exclude,
                    report,
                    gitignore,
                )
            )
        elif s == "-":
            sources.add(p)
        else:
            err(f"invalid path: {s}")
    return sources


def path_empty(
    src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
) -> None:
    """
    Exit if there is no `src` provided for formatting
    """
    if not src:
        if verbose or not quiet:
            out(msg)
        ctx.exit(0)


def reformat_code(
    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
) -> None:
    """
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    path = Path("<string>")
    try:
        changed = Changed.NO
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode
        ):
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(path, str(exc))


def reformat_one(
    src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO

        if str(src) == "-":
            is_stdin = True
        elif str(src).startswith(STDIN_PLACEHOLDER):
            is_stdin = True
            # Use the original name again in case we want to print something
            # to the user
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
        else:
            is_stdin = False

        if is_stdin:
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                changed = Changed.YES
        else:
            cache: Cache = {}
            if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
                cache = read_cache(mode)
                res_src = src.resolve()
                res_src_s = str(res_src)
                if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode
            ):
                changed = Changed.YES
            if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
                write_back is WriteBack.CHECK and changed is Changed.NO
            ):
                write_cache(cache, [src], mode)
        report.done(src, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(src, str(exc))


# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_many(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    workers: Optional[int],
) -> None:
    """Reformat multiple files using a ProcessPoolExecutor."""
    executor: Executor
    loop = asyncio.get_event_loop()
    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        assert worker_count is not None
        worker_count = min(worker_count, 60)
    try:
        executor = ProcessPoolExecutor(max_workers=worker_count)
    except (ImportError, NotImplementedError, OSError):
        # We arrive here if the underlying system does not support multiprocessing
        # (e.g. AWS Lambda or Termux), in which case we gracefully fall back to
        # a ThreadPoolExecutor with just a single worker (more workers would not do us
        # any good due to the Global Interpreter Lock).
        executor = ThreadPoolExecutor(max_workers=1)

    try:
        loop.run_until_complete(
            schedule_formatting(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                loop=loop,
                executor=executor,
            )
        )
    finally:
        shutdown(loop)
        if executor is not None:
            executor.shutdown()


async def schedule_formatting(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    loop: asyncio.AbstractEventLoop,
    executor: Executor,
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.
    """
    cache: Cache = {}
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        cache = read_cache(mode)
        sources, cached = filter_cached(cache, sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    if not sources:
        return

    cancelled = []
    sources_to_cache = []
    lock = None
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        manager = Manager()
        lock = manager.Lock()
    tasks = {
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
            )
        ): src
        for src in sorted(sources)
    }
    pending = tasks.keys()
    try:
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        pass
    while pending:
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            src = tasks.pop(task)
            if task.cancelled():
                cancelled.append(task)
            elif task.exception():
                report.failed(src, str(task.exception()))
            else:
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                ):
                    sources_to_cache.append(src)
                report.done(src, changed)
    if cancelled:
        if sys.version_info >= (3, 7):
            await asyncio.gather(*cancelled, return_exceptions=True)
        else:
            await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
    if sources_to_cache:
        write_cache(cache, sources_to_cache, mode)


def format_file_in_place(
    src: Path,
    fast: bool,
    mode: Mode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.utcfromtimestamp(src.stat().st_mtime)
    with open(src, "rb") as buf:
        src_contents, encoding, newline = decode_bytes(buf.read())
    try:
        dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
    except NothingChanged:
        return False
    except JSONDecodeError:
        raise ValueError(
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
        ) from None

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.utcnow()
        src_name = f"{src}\t{then} +0000"
        dst_name = f"{src}\t{now} +0000"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
            f.detach()

    return True
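
# Illustrative call (a sketch; assumes an "example.py" file exists on disk):
#
#     changed = format_file_in_place(
#         Path("example.py"), fast=False, mode=Mode(), write_back=WriteBack.YES
#     )
#     # True if the file needed reformatting and was rewritten, False otherwise.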


def format_stdin_to_stdout(
    fast: bool,
    *,
    content: Optional[str] = None,
    write_back: WriteBack = WriteBack.NO,
    mode: Mode,
) -> bool:
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.utcnow()

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
    else:
        src, encoding, newline = content, "utf-8", ""

    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode)
        return True

    except NothingChanged:
        return False

    finally:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n":
                dst += "\n"
            f.write(dst)
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.utcnow()
            src_name = f"STDIN\t{then} +0000"
            dst_name = f"STDOUT\t{now} +0000"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                d = color_diff(d)
                f = wrap_stream_for_windows(f)
            f.write(d)
        f.detach()


def check_stability_and_equivalence(
    src_contents: str, dst_contents: str, *, mode: Mode
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)

    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    dst_contents_pass2 = format_str(dst_contents, mode=mode)
    if dst_contents != dst_contents_pass2:
        dst_contents = dst_contents_pass2
        assert_equivalent(src_contents, dst_contents, pass_num=2)
        assert_stable(src_contents, dst_contents, mode=mode)
    # Note: no need to explicitly call `assert_stable` if `dst_contents` was
    # the same as `dst_contents_pass2`.
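
# A sketch of how the check behaves (illustrative, not a doctest): an
# equivalent-but-reformatted result passes silently, while output that changes
# the program's meaning raises AssertionError via assert_equivalent().
#
#     check_stability_and_equivalence("x=1\n", "x = 1\n", mode=Mode())  # returns None
#     check_stability_and_equivalence("x=1\n", "x = 2\n", mode=Mode())  # AssertionError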


def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    if not src_contents.strip():
        raise NothingChanged

    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
    return dst_contents
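
# Illustrative behaviour (a sketch, not a doctest):
#
#     format_file_contents("x=1", fast=True, mode=Mode())      # returns "x = 1\n"
#     format_file_contents("x = 1\n", fast=True, mode=Mode())  # raises NothingChanged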


def validate_cell(src: str) -> None:
    """Check that cell does not already contain TransformerManager transformations,
    or non-Python cell magics, which might cause tokenize_rt to break because of
    indentations.

    If a cell contains ``!ls``, then it'll be transformed to
    ``get_ipython().system('ls')``. However, if the cell originally contained
    ``get_ipython().system('ls')``, then it would get transformed in the same way:

        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
        "get_ipython().system('ls')\n"
        >>> TransformerManager().transform_cell("!ls")
        "get_ipython().system('ls')\n"

    Due to the impossibility of safely roundtripping in such situations, cells
    containing transformed magics will be ignored.
    """
    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
        raise NothingChanged
    if src[:2] == "%%" and src.split()[0][2:] not in PYTHON_CELL_MAGICS:
        raise NothingChanged


def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    General idea is:

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - format cell;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        src
    )
    try:
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
    except SyntaxError:
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    )
    dst = dst.rstrip("\n")
    if dst == src:
        raise NothingChanged from None
    return dst
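
# Illustrative behaviour on notebook cell sources (a sketch, not a doctest; assumes
# the Jupyter extras are installed so the magic-handling helpers imported above work):
#
#     format_cell("x=1", fast=True, mode=Mode())    # returns "x = 1"
#     format_cell("x = 1", fast=True, mode=Mode())  # raises NothingChanged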


def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None


def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    trailing_newline = src_contents[-1] == "\n"
    modified = False
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            try:
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                pass
            else:
                cell["source"] = dst.splitlines(keepends=True)
                modified = True
    if modified:
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        return dst_contents
    else:
        raise NothingChanged
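
# Minimal sketch of formatting a notebook string (illustrative; assumes a valid
# nbformat-style document with a single code cell):
#
#     import json
#     nb = json.dumps(
#         {
#             "cells": [{"cell_type": "code", "source": ["x=1"]}],
#             "metadata": {},
#             "nbformat": 4,
#             "nbformat_minor": 5,
#         }
#     )
#     "x = 1" in format_ipynb_string(nb, fast=True, mode=Mode(is_ipynb=True))  # True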


def format_str(src_contents: str, *, mode: Mode) -> FileContent:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed. Example:

    >>> import black
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    >>> print(
    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       line_length=10,
    ...       string_normalization=False,
    ...     ),
    ...   ),
    ... )
    def f(
        arg: str = '',
    ) -> None:
        hey

    """
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_contents = []
    future_imports = get_future_imports(src_node)
    if mode.target_versions:
        versions = mode.target_versions
    else:
        versions = detect_target_versions(src_node, future_imports=future_imports)

    # TODO: fully drop support and this code hopefully in January 2022 :D
    if TargetVersion.PY27 in mode.target_versions or versions == {TargetVersion.PY27}:
        msg = (
            "DEPRECATION: Python 2 support will be removed in the first stable release "
            "expected in January 2022."
        )
        err(msg, fg="yellow", bold=True)

    normalize_fmt_off(src_node)
    lines = LineGenerator(
        mode=mode,
        remove_u_prefix="unicode_literals" in future_imports
        or supports_feature(versions, Feature.UNICODE_LITERALS),
    )
    elt = EmptyLineTracker(is_pyi=mode.is_pyi)
    empty_line = Line(mode=mode)
    after = 0
    split_line_features = {
        feature
        for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
        if supports_feature(versions, feature)
    }
    for current_line in lines.visit(src_node):
        dst_contents.append(str(empty_line) * after)
        before, after = elt.maybe_empty_lines(current_line)
        dst_contents.append(str(empty_line) * before)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            dst_contents.append(str(line))
    return "".join(dst_contents)


def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        return "", encoding, "\n"

    newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
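
# Doctest-style illustration of decode_bytes() (a sketch, not executed here):
#
#     >>> decode_bytes(b"x = 1\r\n")
#     ('x = 1\n', 'utf-8', '\r\n')
#     >>> decode_bytes(b"x = 1\n")
#     ('x = 1\n', 'utf-8', '\n')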


def get_features_used(  # noqa: C901
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    """
    features: Set[Feature] = set()
    if future_imports:
        features |= {
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
        }

    for n in node.pre_order():
        if n.type == token.STRING:
            value_head = n.value[:2]  # type: ignore
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)

        elif n.type == token.NUMBER:
            assert isinstance(n, Leaf)
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)
            elif n.value.endswith(("L", "l")):
                # Python 2: 10L
                features.add(Feature.LONG_INT_LITERAL)
            elif len(n.value) >= 2 and n.value[0] == "0" and n.value[1].isdigit():
                # Python 2: 0123; 00123; ...
                if not all(char == "0" for char in n.value):
                    # although we don't want to match 0000 or similar
                    features.add(Feature.OCTAL_INT_LITERAL)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                syms.typedargslist,
                syms.arglist,
                syms.varargslist,
            }:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                n.children[1]
            ):
                features.add(Feature.RELAXED_DECORATORS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

        elif (
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
        ):
            features.add(Feature.UNPACKING_ON_FLOW)

        elif (
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
        ):
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

        # Python 2 only features (for its deprecation) except for integers, see above
        elif n.type == syms.print_stmt:
            features.add(Feature.PRINT_STMT)
        elif n.type == syms.exec_stmt:
            features.add(Feature.EXEC_STMT)
        elif n.type == syms.tfpdef:
            # def set_position((x, y), value):
            #     ...
            features.add(Feature.AUTOMATIC_PARAMETER_UNPACKING)
        elif n.type == syms.except_clause:
            # except Exception, err:
            if len(n.children) >= 4:
                if n.children[-2].type == token.COMMA:
                    features.add(Feature.COMMA_STYLE_EXCEPT)
        elif n.type == syms.raise_stmt:
            # raise Exception, "msg"
            if len(n.children) >= 4:
                if n.children[-2].type == token.COMMA:
                    features.add(Feature.COMMA_STYLE_RAISE)
        elif n.type == token.BACKQUOTE:
            # `i'm surprised this ever existed`
            features.add(Feature.BACKQUOTE_REPR)

    return features


def detect_target_versions(
    node: Node, *, future_imports: Optional[Set[str]] = None
) -> Set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
    return {
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
    }
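
# A sketch of feature and target-version detection (illustrative, not a doctest;
# `lib2to3_parse` comes from black.parsing as imported above):
#
#     node = lib2to3_parse("if (y := 1):\n    pass\n")
#     Feature.ASSIGNMENT_EXPRESSIONS in get_features_used(node)  # True
#     TargetVersion.PY38 in detect_target_versions(node)         # True
#     TargetVersion.PY37 in detect_target_versions(node)         # False, no walrus in 3.7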


def get_future_imports(node: Node) -> Set[str]:
    """Return a set of __future__ imports in the file."""
    imports: Set[str] = set()

    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

            else:
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
            break

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue

            break

        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break

            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break

    return imports
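
# Doctest-style illustration of get_future_imports() (a sketch, not executed here):
#
#     >>> sorted(get_future_imports(lib2to3_parse("from __future__ import annotations\n")))
#     ['annotations']
#     >>> sorted(get_future_imports(lib2to3_parse("x = 1\n")))
#     []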


def assert_equivalent(src: str, dst: str, *, pass_num: int = 1) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        raise AssertionError(
            f"cannot use --safe with this file; failed to parse source file: {exc}"
        ) from exc

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise AssertionError(
            f"INTERNAL ERROR: Black produced invalid code on pass {pass_num}: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise AssertionError(
            "INTERNAL ERROR: Black produced code that is not equivalent to the"
            f" source on pass {pass_num}. Please report a bug on "
            f"https://github.com/psf/black/issues. This diff might be helpful: {log}"
        ) from None


def assert_stable(src: str, dst: str, mode: Mode) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    newdst = format_str(dst, mode=mode)
    if dst != newdst:
        log = dump_to_file(
            str(mode),
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        )
        raise AssertionError(
            "INTERNAL ERROR: Black produced different code on the second pass of the"
            " formatter. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
        ) from None


@contextmanager
def nullcontext() -> Iterator[None]:
    """Return an empty context manager.

    To be used like `contextlib.nullcontext` in Python 3.7.
    """
    yield


def patch_click() -> None:
    """Make Click not crash on Python 3.6 with LANG=C.

    On certain misconfigured environments, Python 3 selects the ASCII encoding as the
    default which restricts paths that it can access during the lifetime of the
    application. Click refuses to work in this scenario by raising a RuntimeError.

    In the case of Black, the likelihood that non-ASCII characters are going to be used
    in file paths is minimal since it's Python source code. Moreover, this crash was
    spurious on Python 3.7 thanks to PEP 538 and PEP 540.
    """
    try:
        from click import core
        from click import _unicodefun
    except ModuleNotFoundError:
        return

    for module in (core, _unicodefun):
        if hasattr(module, "_verify_python3_env"):
            module._verify_python3_env = lambda: None  # type: ignore
        if hasattr(module, "_verify_python_env"):
            module._verify_python_env = lambda: None  # type: ignore


def patched_main() -> None:
    maybe_install_uvloop()
    freeze_support()
    patch_click()
    main()


if __name__ == "__main__":
    patched_main()