src/black/__init__.py, as of commit "Fix two docstring crashes (#3451)"
1 import io
2 import json
3 import platform
4 import re
5 import sys
6 import tokenize
7 import traceback
8 from contextlib import contextmanager
9 from dataclasses import replace
10 from datetime import datetime
11 from enum import Enum
12 from json.decoder import JSONDecodeError
13 from pathlib import Path
14 from typing import (
15     Any,
16     Dict,
17     Generator,
18     Iterator,
19     List,
20     MutableMapping,
21     Optional,
22     Pattern,
23     Sequence,
24     Set,
25     Sized,
26     Tuple,
27     Union,
28 )
29
30 import click
31 from click.core import ParameterSource
32 from mypy_extensions import mypyc_attr
33 from pathspec import PathSpec
34 from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
35
36 from _black_version import version as __version__
37 from black.cache import Cache, get_cache_info, read_cache, write_cache
38 from black.comments import normalize_fmt_off
39 from black.const import (
40     DEFAULT_EXCLUDES,
41     DEFAULT_INCLUDES,
42     DEFAULT_LINE_LENGTH,
43     STDIN_PLACEHOLDER,
44 )
45 from black.files import (
46     find_project_root,
47     find_pyproject_toml,
48     find_user_pyproject_toml,
49     gen_python_files,
50     get_gitignore,
51     normalize_path_maybe_ignore,
52     parse_pyproject_toml,
53     wrap_stream_for_windows,
54 )
55 from black.handle_ipynb_magics import (
56     PYTHON_CELL_MAGICS,
57     TRANSFORMED_MAGICS,
58     jupyter_dependencies_are_installed,
59     mask_cell,
60     put_trailing_semicolon_back,
61     remove_trailing_semicolon,
62     unmask_cell,
63 )
64 from black.linegen import LN, LineGenerator, transform_line
65 from black.lines import EmptyLineTracker, LinesBlock
66 from black.mode import (
67     FUTURE_FLAG_TO_FEATURE,
68     VERSION_TO_FEATURES,
69     Feature,
70     Mode,
71     TargetVersion,
72     supports_feature,
73 )
74 from black.nodes import (
75     STARS,
76     is_number_token,
77     is_simple_decorator_expression,
78     is_string_token,
79     syms,
80 )
81 from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
82 from black.parsing import InvalidInput  # noqa F401
83 from black.parsing import lib2to3_parse, parse_ast, stringify_ast
84 from black.report import Changed, NothingChanged, Report
85 from black.trans import iter_fexpr_spans
86 from blib2to3.pgen2 import token
87 from blib2to3.pytree import Leaf, Node
88
89 COMPILED = Path(__file__).suffix in (".pyd", ".so")
90
91 # types
92 FileContent = str
93 Encoding = str
94 NewLine = str
95
96
97 class WriteBack(Enum):
98     NO = 0
99     YES = 1
100     DIFF = 2
101     CHECK = 3
102     COLOR_DIFF = 4
103
104     @classmethod
105     def from_configuration(
106         cls, *, check: bool, diff: bool, color: bool = False
107     ) -> "WriteBack":
108         if check and not diff:
109             return cls.CHECK
110
111         if diff and color:
112             return cls.COLOR_DIFF
113
114         return cls.DIFF if diff else cls.YES
115
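# A quick sketch of how the check/diff/color flags map to WriteBack members
# (the calls below are illustrative, not executed here):
#
#     WriteBack.from_configuration(check=True, diff=False)             -> WriteBack.CHECK
#     WriteBack.from_configuration(check=True, diff=True)              -> WriteBack.DIFF
#     WriteBack.from_configuration(check=False, diff=True, color=True) -> WriteBack.COLOR_DIFF
#     WriteBack.from_configuration(check=False, diff=False)            -> WriteBack.YES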
116
117 # Legacy name, left for integrations.
118 FileMode = Mode
119
120
121 def read_pyproject_toml(
122     ctx: click.Context, param: click.Parameter, value: Optional[str]
123 ) -> Optional[str]:
124     """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
125
126     Returns the path to a successfully found and read configuration file, None
127     otherwise.
128     """
129     if not value:
130         value = find_pyproject_toml(ctx.params.get("src", ()))
131         if value is None:
132             return None
133
134     try:
135         config = parse_pyproject_toml(value)
136     except (OSError, ValueError) as e:
137         raise click.FileError(
138             filename=value, hint=f"Error reading configuration file: {e}"
139         ) from None
140
141     if not config:
142         return None
143     else:
144         # Sanitize the values to be Click friendly. For more information please see:
145         # https://github.com/psf/black/issues/1458
146         # https://github.com/pallets/click/issues/1567
147         config = {
148             k: str(v) if not isinstance(v, (list, dict)) else v
149             for k, v in config.items()
150         }
151
152     target_version = config.get("target_version")
153     if target_version is not None and not isinstance(target_version, list):
154         raise click.BadOptionUsage(
155             "target-version", "Config key target-version must be a list"
156         )
157
158     default_map: Dict[str, Any] = {}
159     if ctx.default_map:
160         default_map.update(ctx.default_map)
161     default_map.update(config)
162
163     ctx.default_map = default_map
164     return value
165
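# A sketch of the sanitization above, assuming a hypothetical pyproject.toml with
#
#     [tool.black]
#     line-length = 100
#     target-version = ["py38", "py39"]
#
# parse_pyproject_toml() yields {"line_length": 100, "target_version": ["py38", "py39"]},
# which is then coerced to {"line_length": "100", "target_version": ["py38", "py39"]}:
# scalar values become strings for Click, while lists and dicts are left untouched.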
166
167 def target_version_option_callback(
168     c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
169 ) -> List[TargetVersion]:
170     """Compute the target versions from a --target-version flag.
171
172     This is its own function because mypy couldn't infer the type correctly
173     when it was a lambda, causing mypyc trouble.
174     """
175     return [TargetVersion[val.upper()] for val in v]
176
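# For example (illustrative values): a command line of `-t py37 -t py38` arrives
# here as v == ("py37", "py38") and is turned into
# [TargetVersion.PY37, TargetVersion.PY38].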
177
178 def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
179     """Compile a regular expression string in `regex`.
180
181     If it contains newlines, use verbose mode.
182     """
183     if "\n" in regex:
184         regex = "(?x)" + regex
185     compiled: Pattern[str] = re.compile(regex)
186     return compiled
187
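# Roughly (illustrative patterns): a single-line value such as r"\.pyi?$" is
# compiled as-is, while a multi-line value like "foo/\n|bar/\n" is compiled as
# "(?x)foo/\n|bar/\n", i.e. in verbose mode, where unescaped whitespace and
# comments inside the pattern are ignored.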
188
189 def validate_regex(
190     ctx: click.Context,
191     param: click.Parameter,
192     value: Optional[str],
193 ) -> Optional[Pattern[str]]:
194     try:
195         return re_compile_maybe_verbose(value) if value is not None else None
196     except re.error as e:
197         raise click.BadParameter(f"Not a valid regular expression: {e}") from None
198
199
200 @click.command(
201     context_settings={"help_option_names": ["-h", "--help"]},
202     # While Click does set this field automatically using the docstring, mypyc
203     # (annoyingly) strips 'em so we need to set it here too.
204     help="The uncompromising code formatter.",
205 )
206 @click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
207 @click.option(
208     "-l",
209     "--line-length",
210     type=int,
211     default=DEFAULT_LINE_LENGTH,
212     help="How many characters per line to allow.",
213     show_default=True,
214 )
215 @click.option(
216     "-t",
217     "--target-version",
218     type=click.Choice([v.name.lower() for v in TargetVersion]),
219     callback=target_version_option_callback,
220     multiple=True,
221     help=(
222         "Python versions that should be supported by Black's output. [default: per-file"
223         " auto-detection]"
224     ),
225 )
226 @click.option(
227     "--pyi",
228     is_flag=True,
229     help=(
230         "Format all input files like typing stubs regardless of file extension (useful"
231         " when piping source on standard input)."
232     ),
233 )
234 @click.option(
235     "--ipynb",
236     is_flag=True,
237     help=(
238         "Format all input files like Jupyter Notebooks regardless of file extension "
239         "(useful when piping source on standard input)."
240     ),
241 )
242 @click.option(
243     "--python-cell-magics",
244     multiple=True,
245     help=(
246         "When processing Jupyter Notebooks, add the given magic to the list"
247         f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
248         " Useful for formatting cells with custom python magics."
249     ),
250     default=[],
251 )
252 @click.option(
253     "-x",
254     "--skip-source-first-line",
255     is_flag=True,
256     help="Skip the first line of the source code.",
257 )
258 @click.option(
259     "-S",
260     "--skip-string-normalization",
261     is_flag=True,
262     help="Don't normalize string quotes or prefixes.",
263 )
264 @click.option(
265     "-C",
266     "--skip-magic-trailing-comma",
267     is_flag=True,
268     help="Don't use trailing commas as a reason to split lines.",
269 )
270 @click.option(
271     "--experimental-string-processing",
272     is_flag=True,
273     hidden=True,
274     help="(DEPRECATED and now included in --preview) Normalize string literals.",
275 )
276 @click.option(
277     "--preview",
278     is_flag=True,
279     help=(
280         "Enable potentially disruptive style changes that may be added to Black's main"
281         " functionality in the next major release."
282     ),
283 )
284 @click.option(
285     "--check",
286     is_flag=True,
287     help=(
288         "Don't write the files back, just return the status. Return code 0 means"
289         " nothing would change. Return code 1 means some files would be reformatted."
290         " Return code 123 means there was an internal error."
291     ),
292 )
293 @click.option(
294     "--diff",
295     is_flag=True,
296     help="Don't write the files back, just output a diff for each file on stdout.",
297 )
298 @click.option(
299     "--color/--no-color",
300     is_flag=True,
301     help="Show colored diff. Only applies when `--diff` is given.",
302 )
303 @click.option(
304     "--fast/--safe",
305     is_flag=True,
306     help="If --fast given, skip temporary sanity checks. [default: --safe]",
307 )
308 @click.option(
309     "--required-version",
310     type=str,
311     help=(
312         "Require a specific version of Black to be running (useful for unifying results"
313         " across many environments e.g. with a pyproject.toml file). It can be"
314         " either a major version number or an exact version."
315     ),
316 )
317 @click.option(
318     "--include",
319     type=str,
320     default=DEFAULT_INCLUDES,
321     callback=validate_regex,
322     help=(
323         "A regular expression that matches files and directories that should be"
324         " included on recursive searches. An empty value means all files are included"
325         " regardless of the name. Use forward slashes for directories on all platforms"
326         " (Windows, too). Exclusions are calculated first, inclusions later."
327     ),
328     show_default=True,
329 )
330 @click.option(
331     "--exclude",
332     type=str,
333     callback=validate_regex,
334     help=(
335         "A regular expression that matches files and directories that should be"
336         " excluded on recursive searches. An empty value means no paths are excluded."
337         " Use forward slashes for directories on all platforms (Windows, too)."
338         " Exclusions are calculated first, inclusions later. [default:"
339         f" {DEFAULT_EXCLUDES}]"
340     ),
341     show_default=False,
342 )
343 @click.option(
344     "--extend-exclude",
345     type=str,
346     callback=validate_regex,
347     help=(
348         "Like --exclude, but adds additional files and directories on top of the"
349         " excluded ones. (Useful if you simply want to add to the default)"
350     ),
351 )
352 @click.option(
353     "--force-exclude",
354     type=str,
355     callback=validate_regex,
356     help=(
357         "Like --exclude, but files and directories matching this regex will be "
358         "excluded even when they are passed explicitly as arguments."
359     ),
360 )
361 @click.option(
362     "--stdin-filename",
363     type=str,
364     help=(
365         "The name of the file when passing it through stdin. Useful to make "
366         "sure Black will respect the --force-exclude option for "
367         "editors that rely on using stdin."
368     ),
369 )
370 @click.option(
371     "-W",
372     "--workers",
373     type=click.IntRange(min=1),
374     default=None,
375     help="Number of parallel workers [default: number of CPUs in the system]",
376 )
377 @click.option(
378     "-q",
379     "--quiet",
380     is_flag=True,
381     help=(
382         "Don't emit non-error messages to stderr. Errors are still emitted; silence"
383         " those with 2>/dev/null."
384     ),
385 )
386 @click.option(
387     "-v",
388     "--verbose",
389     is_flag=True,
390     help=(
391         "Also emit messages to stderr about files that were not changed or were ignored"
392         " due to exclusion patterns."
393     ),
394 )
395 @click.version_option(
396     version=__version__,
397     message=(
398         f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
399         f"Python ({platform.python_implementation()}) {platform.python_version()}"
400     ),
401 )
402 @click.argument(
403     "src",
404     nargs=-1,
405     type=click.Path(
406         exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
407     ),
408     is_eager=True,
409     metavar="SRC ...",
410 )
411 @click.option(
412     "--config",
413     type=click.Path(
414         exists=True,
415         file_okay=True,
416         dir_okay=False,
417         readable=True,
418         allow_dash=False,
419         path_type=str,
420     ),
421     is_eager=True,
422     callback=read_pyproject_toml,
423     help="Read configuration from FILE path.",
424 )
425 @click.pass_context
426 def main(  # noqa: C901
427     ctx: click.Context,
428     code: Optional[str],
429     line_length: int,
430     target_version: List[TargetVersion],
431     check: bool,
432     diff: bool,
433     color: bool,
434     fast: bool,
435     pyi: bool,
436     ipynb: bool,
437     python_cell_magics: Sequence[str],
438     skip_source_first_line: bool,
439     skip_string_normalization: bool,
440     skip_magic_trailing_comma: bool,
441     experimental_string_processing: bool,
442     preview: bool,
443     quiet: bool,
444     verbose: bool,
445     required_version: Optional[str],
446     include: Pattern[str],
447     exclude: Optional[Pattern[str]],
448     extend_exclude: Optional[Pattern[str]],
449     force_exclude: Optional[Pattern[str]],
450     stdin_filename: Optional[str],
451     workers: Optional[int],
452     src: Tuple[str, ...],
453     config: Optional[str],
454 ) -> None:
455     """The uncompromising code formatter."""
456     ctx.ensure_object(dict)
457
458     if src and code is not None:
459         out(
460             main.get_usage(ctx)
461             + "\n\n'SRC' and 'code' cannot be passed simultaneously."
462         )
463         ctx.exit(1)
464     if not src and code is None:
465         out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
466         ctx.exit(1)
467
468     root, method = (
469         find_project_root(src, stdin_filename) if code is None else (None, None)
470     )
471     ctx.obj["root"] = root
472
473     if verbose:
474         if root:
475             out(
476                 f"Identified `{root}` as project root containing a {method}.",
477                 fg="blue",
478             )
479
480             normalized = [
481                 (
482                     (source, source)
483                     if source == "-"
484                     else (normalize_path_maybe_ignore(Path(source), root), source)
485                 )
486                 for source in src
487             ]
488             srcs_string = ", ".join(
489                 [
490                     (
491                         f'"{_norm}"'
492                         if _norm
493                         else f'\033[31m"{source} (skipping - invalid)"\033[34m'
494                     )
495                     for _norm, source in normalized
496                 ]
497             )
498             out(f"Sources to be formatted: {srcs_string}", fg="blue")
499
500         if config:
501             config_source = ctx.get_parameter_source("config")
502             user_level_config = str(find_user_pyproject_toml())
503             if config == user_level_config:
504                 out(
505                     (
506                         "Using configuration from user-level config at "
507                         f"'{user_level_config}'."
508                     ),
509                     fg="blue",
510                 )
511             elif config_source in (
512                 ParameterSource.DEFAULT,
513                 ParameterSource.DEFAULT_MAP,
514             ):
515                 out("Using configuration from project root.", fg="blue")
516             else:
517                 out(f"Using configuration in '{config}'.", fg="blue")
518             if ctx.default_map:
519                 for param, value in ctx.default_map.items():
520                     out(f"{param}: {value}")
521
522     error_msg = "Oh no! 💥 💔 💥"
523     if (
524         required_version
525         and required_version != __version__
526         and required_version != __version__.split(".")[0]
527     ):
528         err(
529             f"{error_msg} The required version `{required_version}` does not match"
530             f" the running version `{__version__}`!"
531         )
532         ctx.exit(1)
533     if ipynb and pyi:
534         err("Cannot pass both `pyi` and `ipynb` flags!")
535         ctx.exit(1)
536
537     write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
538     if target_version:
539         versions = set(target_version)
540     else:
541         # We'll autodetect later.
542         versions = set()
543     mode = Mode(
544         target_versions=versions,
545         line_length=line_length,
546         is_pyi=pyi,
547         is_ipynb=ipynb,
548         skip_source_first_line=skip_source_first_line,
549         string_normalization=not skip_string_normalization,
550         magic_trailing_comma=not skip_magic_trailing_comma,
551         experimental_string_processing=experimental_string_processing,
552         preview=preview,
553         python_cell_magics=set(python_cell_magics),
554     )
555
556     if code is not None:
557         # Run in quiet mode by default with -c; the extra output isn't useful.
558         # You can still pass -v to get verbose output.
559         quiet = True
560
561     report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
562
563     if code is not None:
564         reformat_code(
565             content=code, fast=fast, write_back=write_back, mode=mode, report=report
566         )
567     else:
568         try:
569             sources = get_sources(
570                 ctx=ctx,
571                 src=src,
572                 quiet=quiet,
573                 verbose=verbose,
574                 include=include,
575                 exclude=exclude,
576                 extend_exclude=extend_exclude,
577                 force_exclude=force_exclude,
578                 report=report,
579                 stdin_filename=stdin_filename,
580             )
581         except GitWildMatchPatternError:
582             ctx.exit(1)
583
584         path_empty(
585             sources,
586             "No Python files are present to be formatted. Nothing to do 😴",
587             quiet,
588             verbose,
589             ctx,
590         )
591
592         if len(sources) == 1:
593             reformat_one(
594                 src=sources.pop(),
595                 fast=fast,
596                 write_back=write_back,
597                 mode=mode,
598                 report=report,
599             )
600         else:
601             from black.concurrency import reformat_many
602
603             reformat_many(
604                 sources=sources,
605                 fast=fast,
606                 write_back=write_back,
607                 mode=mode,
608                 report=report,
609                 workers=workers,
610             )
611
612     if verbose or not quiet:
613         if code is None and (verbose or report.change_count or report.failure_count):
614             out()
615         out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
616         if code is None:
617             click.echo(str(report), err=True)
618     ctx.exit(report.return_code)
619
620
621 def get_sources(
622     *,
623     ctx: click.Context,
624     src: Tuple[str, ...],
625     quiet: bool,
626     verbose: bool,
627     include: Pattern[str],
628     exclude: Optional[Pattern[str]],
629     extend_exclude: Optional[Pattern[str]],
630     force_exclude: Optional[Pattern[str]],
631     report: "Report",
632     stdin_filename: Optional[str],
633 ) -> Set[Path]:
634     """Compute the set of files to be formatted."""
635     sources: Set[Path] = set()
636     root = ctx.obj["root"]
637
638     using_default_exclude = exclude is None
639     exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
640     gitignore: Optional[Dict[Path, PathSpec]] = None
641     root_gitignore = get_gitignore(root)
642
643     for s in src:
644         if s == "-" and stdin_filename:
645             p = Path(stdin_filename)
646             is_stdin = True
647         else:
648             p = Path(s)
649             is_stdin = False
650
651         if is_stdin or p.is_file():
652             normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
653             if normalized_path is None:
654                 continue
655
656             normalized_path = "/" + normalized_path
657             # Hard-exclude any files that match the `--force-exclude` regex.
658             if force_exclude:
659                 force_exclude_match = force_exclude.search(normalized_path)
660             else:
661                 force_exclude_match = None
662             if force_exclude_match and force_exclude_match.group(0):
663                 report.path_ignored(p, "matches the --force-exclude regular expression")
664                 continue
665
666             if is_stdin:
667                 p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
668
669             if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
670                 verbose=verbose, quiet=quiet
671             ):
672                 continue
673
674             sources.add(p)
675         elif p.is_dir():
676             if using_default_exclude:
677                 gitignore = {
678                     root: root_gitignore,
679                     root / p: get_gitignore(p),
680                 }
681             sources.update(
682                 gen_python_files(
683                     p.iterdir(),
684                     ctx.obj["root"],
685                     include,
686                     exclude,
687                     extend_exclude,
688                     force_exclude,
689                     report,
690                     gitignore,
691                     verbose=verbose,
692                     quiet=quiet,
693                 )
694             )
695         elif s == "-":
696             sources.add(p)
697         else:
698             err(f"invalid path: {s}")
699     return sources
700
701
702 def path_empty(
703     src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
704 ) -> None:
705     """
706     Exit if there is no `src` provided for formatting.
707     """
708     if not src:
709         if verbose or not quiet:
710             out(msg)
711         ctx.exit(0)
712
713
714 def reformat_code(
715     content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
716 ) -> None:
717     """
718     Reformat and print out `content` without spawning child processes.
719     Similar to `reformat_one`, but for string content.
720
721     `fast`, `write_back`, and `mode` options are passed to
722     :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
723     """
724     path = Path("<string>")
725     try:
726         changed = Changed.NO
727         if format_stdin_to_stdout(
728             content=content, fast=fast, write_back=write_back, mode=mode
729         ):
730             changed = Changed.YES
731         report.done(path, changed)
732     except Exception as exc:
733         if report.verbose:
734             traceback.print_exc()
735         report.failed(path, str(exc))
736
737
738 # diff-shades depends on being able to monkeypatch this function to operate. I know
739 # it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
740 @mypyc_attr(patchable=True)
741 def reformat_one(
742     src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
743 ) -> None:
744     """Reformat a single file under `src` without spawning child processes.
745
746     `fast`, `write_back`, and `mode` options are passed to
747     :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
748     """
749     try:
750         changed = Changed.NO
751
752         if str(src) == "-":
753             is_stdin = True
754         elif str(src).startswith(STDIN_PLACEHOLDER):
755             is_stdin = True
756             # Use the original name again in case we want to print something
757             # to the user
758             src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
759         else:
760             is_stdin = False
761
762         if is_stdin:
763             if src.suffix == ".pyi":
764                 mode = replace(mode, is_pyi=True)
765             elif src.suffix == ".ipynb":
766                 mode = replace(mode, is_ipynb=True)
767             if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
768                 changed = Changed.YES
769         else:
770             cache: Cache = {}
771             if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
772                 cache = read_cache(mode)
773                 res_src = src.resolve()
774                 res_src_s = str(res_src)
775                 if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
776                     changed = Changed.CACHED
777             if changed is not Changed.CACHED and format_file_in_place(
778                 src, fast=fast, write_back=write_back, mode=mode
779             ):
780                 changed = Changed.YES
781             if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
782                 write_back is WriteBack.CHECK and changed is Changed.NO
783             ):
784                 write_cache(cache, [src], mode)
785         report.done(src, changed)
786     except Exception as exc:
787         if report.verbose:
788             traceback.print_exc()
789         report.failed(src, str(exc))
790
791
792 def format_file_in_place(
793     src: Path,
794     fast: bool,
795     mode: Mode,
796     write_back: WriteBack = WriteBack.NO,
797     lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
798 ) -> bool:
799     """Format file under `src` path. Return True if changed.
800
801     If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
802     code to the file.
803     `mode` and `fast` options are passed to :func:`format_file_contents`.
804     """
805     if src.suffix == ".pyi":
806         mode = replace(mode, is_pyi=True)
807     elif src.suffix == ".ipynb":
808         mode = replace(mode, is_ipynb=True)
809
810     then = datetime.utcfromtimestamp(src.stat().st_mtime)
811     header = b""
812     with open(src, "rb") as buf:
813         if mode.skip_source_first_line:
814             header = buf.readline()
815         src_contents, encoding, newline = decode_bytes(buf.read())
816     try:
817         dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
818     except NothingChanged:
819         return False
820     except JSONDecodeError:
821         raise ValueError(
822             f"File '{src}' cannot be parsed as valid Jupyter notebook."
823         ) from None
824     src_contents = header.decode(encoding) + src_contents
825     dst_contents = header.decode(encoding) + dst_contents
826
827     if write_back == WriteBack.YES:
828         with open(src, "w", encoding=encoding, newline=newline) as f:
829             f.write(dst_contents)
830     elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
831         now = datetime.utcnow()
832         src_name = f"{src}\t{then} +0000"
833         dst_name = f"{src}\t{now} +0000"
834         if mode.is_ipynb:
835             diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
836         else:
837             diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
838
839         if write_back == WriteBack.COLOR_DIFF:
840             diff_contents = color_diff(diff_contents)
841
842         with lock or nullcontext():
843             f = io.TextIOWrapper(
844                 sys.stdout.buffer,
845                 encoding=encoding,
846                 newline=newline,
847                 write_through=True,
848             )
849             f = wrap_stream_for_windows(f)
850             f.write(diff_contents)
851             f.detach()
852
853     return True
854
855
856 def format_stdin_to_stdout(
857     fast: bool,
858     *,
859     content: Optional[str] = None,
860     write_back: WriteBack = WriteBack.NO,
861     mode: Mode,
862 ) -> bool:
863     """Format file on stdin. Return True if changed.
864
865     If content is None, it's read from sys.stdin.
866
867     If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
868     write a diff to stdout. The `mode` argument is passed to
869     :func:`format_file_contents`.
870     """
871     then = datetime.utcnow()
872
873     if content is None:
874         src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
875     else:
876         src, encoding, newline = content, "utf-8", ""
877
878     dst = src
879     try:
880         dst = format_file_contents(src, fast=fast, mode=mode)
881         return True
882
883     except NothingChanged:
884         return False
885
886     finally:
887         f = io.TextIOWrapper(
888             sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
889         )
890         if write_back == WriteBack.YES:
891             # Make sure there's a newline after the content
892             if dst and dst[-1] != "\n":
893                 dst += "\n"
894             f.write(dst)
895         elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
896             now = datetime.utcnow()
897             src_name = f"STDIN\t{then} +0000"
898             dst_name = f"STDOUT\t{now} +0000"
899             d = diff(src, dst, src_name, dst_name)
900             if write_back == WriteBack.COLOR_DIFF:
901                 d = color_diff(d)
902                 f = wrap_stream_for_windows(f)
903             f.write(d)
904         f.detach()
905
906
907 def check_stability_and_equivalence(
908     src_contents: str, dst_contents: str, *, mode: Mode
909 ) -> None:
910     """Perform stability and equivalence checks.
911
912     Raise AssertionError if source and destination contents are not
913     equivalent, or if a second pass of the formatter would format the
914     content differently.
915     """
916     assert_equivalent(src_contents, dst_contents)
917     assert_stable(src_contents, dst_contents, mode=mode)
918
919
920 def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
921     """Reformat contents of a file and return new contents.
922
923     If `fast` is False, additionally confirm that the reformatted code is
924     valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
925     `mode` is passed to :func:`format_str`.
926     """
927     if not mode.preview and not src_contents.strip():
928         raise NothingChanged
929
930     if mode.is_ipynb:
931         dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
932     else:
933         dst_contents = format_str(src_contents, mode=mode)
934     if src_contents == dst_contents:
935         raise NothingChanged
936
937     if not fast and not mode.is_ipynb:
938         # Jupyter notebooks will already have been checked above.
939         check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
940     return dst_contents
941
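# Illustrative behaviour (hypothetical inputs, default Mode()):
#
#     format_file_contents("x=1\n", fast=False, mode=Mode())    -> 'x = 1\n'
#     format_file_contents("x = 1\n", fast=False, mode=Mode())  -> raises NothingChanged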
942
943 def validate_cell(src: str, mode: Mode) -> None:
944     """Check that cell does not already contain TransformerManager transformations,
945     or non-Python cell magics, which might cause tokenizer_rt to break because of
946     indentations.
947
948     If a cell contains ``!ls``, then it'll be transformed to
949     ``get_ipython().system('ls')``. However, if the cell originally contained
950     ``get_ipython().system('ls')``, then it would get transformed in the same way:
951
952         >>> TransformerManager().transform_cell("get_ipython().system('ls')")
953         "get_ipython().system('ls')\n"
954         >>> TransformerManager().transform_cell("!ls")
955         "get_ipython().system('ls')\n"
956
957     Due to the impossibility of safely roundtripping in such situations, cells
958     containing transformed magics will be ignored.
959     """
960     if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
961         raise NothingChanged
962     if (
963         src[:2] == "%%"
964         and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
965     ):
966         raise NothingChanged
967
968
969 def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
970     """Format code in the given cell of a Jupyter notebook.
971
972     General idea is:
973
974       - if cell has trailing semicolon, remove it;
975       - if cell has IPython magics, mask them;
976       - format cell;
977       - reinstate IPython magics;
978       - reinstate trailing semicolon (if originally present);
979       - strip trailing newlines.
980
981     Cells with syntax errors will not be processed, as they
982     could potentially be automagics or multi-line magics, which
983     are currently not supported.
984     """
985     validate_cell(src, mode)
986     src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
987         src
988     )
989     try:
990         masked_src, replacements = mask_cell(src_without_trailing_semicolon)
991     except SyntaxError:
992         raise NothingChanged from None
993     masked_dst = format_str(masked_src, mode=mode)
994     if not fast:
995         check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
996     dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
997     dst = put_trailing_semicolon_back(
998         dst_without_trailing_semicolon, has_trailing_semicolon
999     )
1000     dst = dst.rstrip("\n")
1001     if dst == src:
1002         raise NothingChanged from None
1003     return dst
1004
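# A rough walk-through of the steps above for a hypothetical cell "%time x =1;":
#
#     remove_trailing_semicolon   -> "%time x =1", has_trailing_semicolon=True
#     mask_cell                   -> the "%time " magic is replaced by a placeholder token
#     format_str                  -> the masked source is formatted ("x = 1" style)
#     unmask_cell                 -> the "%time " magic is restored
#     put_trailing_semicolon_back -> the trailing ";" is reinstated, then newlines stripped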
1005
1006 def validate_metadata(nb: MutableMapping[str, Any]) -> None:
1007     """If notebook is marked as non-Python, don't format it.
1008
1009     All notebook metadata fields are optional, see
1010     https://nbformat.readthedocs.io/en/latest/format_description.html. So
1011     if a notebook has empty metadata, we will try to parse it anyway.
1012     """
1013     language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
1014     if language is not None and language != "python":
1015         raise NothingChanged from None
1016
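# E.g. (hypothetical metadata): a notebook carrying
# {"metadata": {"language_info": {"name": "R"}}} raises NothingChanged here,
# while a notebook with empty or missing metadata is allowed through.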
1017
1018 def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
1019     """Format Jupyter notebook.
1020
1021     Operate cell-by-cell, only on code cells, only for Python notebooks.
1022     If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
1023     """
1024     if mode.preview and not src_contents:
1025         raise NothingChanged
1026
1027     trailing_newline = src_contents[-1] == "\n"
1028     modified = False
1029     nb = json.loads(src_contents)
1030     validate_metadata(nb)
1031     for cell in nb["cells"]:
1032         if cell.get("cell_type", None) == "code":
1033             try:
1034                 src = "".join(cell["source"])
1035                 dst = format_cell(src, fast=fast, mode=mode)
1036             except NothingChanged:
1037                 pass
1038             else:
1039                 cell["source"] = dst.splitlines(keepends=True)
1040                 modified = True
1041     if modified:
1042         dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
1043         if trailing_newline:
1044             dst_contents = dst_contents + "\n"
1045         return dst_contents
1046     else:
1047         raise NothingChanged
1048
1049
1050 def format_str(src_contents: str, *, mode: Mode) -> str:
1051     """Reformat a string and return new contents.
1052
1053     `mode` determines formatting options, such as how many characters per line are
1054     allowed.  Example:
1055
1056     >>> import black
1057     >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
1058     def f(arg: str = "") -> None:
1059         ...
1060
1061     A more complex example:
1062
1063     >>> print(
1064     ...   black.format_str(
1065     ...     "def f(arg:str='')->None: hey",
1066     ...     mode=black.Mode(
1067     ...       target_versions={black.TargetVersion.PY36},
1068     ...       line_length=10,
1069     ...       string_normalization=False,
1070     ...       is_pyi=False,
1071     ...     ),
1072     ...   ),
1073     ... )
1074     def f(
1075         arg: str = '',
1076     ) -> None:
1077         hey
1078
1079     """
1080     dst_contents = _format_str_once(src_contents, mode=mode)
1081     # Forced second pass to work around optional trailing commas (becoming
1082     # forced trailing commas on pass 2) interacting differently with optional
1083     # parentheses.  Admittedly ugly.
1084     if src_contents != dst_contents:
1085         return _format_str_once(dst_contents, mode=mode)
1086     return dst_contents
1087
1088
1089 def _format_str_once(src_contents: str, *, mode: Mode) -> str:
1090     src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
1091     dst_blocks: List[LinesBlock] = []
1092     if mode.target_versions:
1093         versions = mode.target_versions
1094     else:
1095         future_imports = get_future_imports(src_node)
1096         versions = detect_target_versions(src_node, future_imports=future_imports)
1097
1098     normalize_fmt_off(src_node, preview=mode.preview)
1099     lines = LineGenerator(mode=mode)
1100     elt = EmptyLineTracker(mode=mode)
1101     split_line_features = {
1102         feature
1103         for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
1104         if supports_feature(versions, feature)
1105     }
1106     block: Optional[LinesBlock] = None
1107     for current_line in lines.visit(src_node):
1108         block = elt.maybe_empty_lines(current_line)
1109         dst_blocks.append(block)
1110         for line in transform_line(
1111             current_line, mode=mode, features=split_line_features
1112         ):
1113             block.content_lines.append(str(line))
1114     if dst_blocks:
1115         dst_blocks[-1].after = 0
1116     dst_contents = []
1117     for block in dst_blocks:
1118         dst_contents.extend(block.all_lines())
1119     if mode.preview and not dst_contents:
1120         # Use decode_bytes to retrieve the correct source newline (CRLF or LF),
1121         # and check if normalized_content has more than one line
1122         normalized_content, _, newline = decode_bytes(src_contents.encode("utf-8"))
1123         if "\n" in normalized_content:
1124             return newline
1125         return ""
1126     return "".join(dst_contents)
1127
1128
1129 def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
1130     """Return a tuple of (decoded_contents, encoding, newline).
1131
1132     `newline` is either CRLF or LF but `decoded_contents` is decoded with
1133     universal newlines (i.e. only contains LF).
1134     """
1135     srcbuf = io.BytesIO(src)
1136     encoding, lines = tokenize.detect_encoding(srcbuf.readline)
1137     if not lines:
1138         return "", encoding, "\n"
1139
1140     newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
1141     srcbuf.seek(0)
1142     with io.TextIOWrapper(srcbuf, encoding) as tiow:
1143         return tiow.read(), encoding, newline
1144
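# A small sketch of the contract (hypothetical input):
#
#     decode_bytes(b"x = 1\r\ny = 2\r\n") -> ("x = 1\ny = 2\n", "utf-8", "\r\n")
#
# i.e. the decoded text always uses "\n", while `newline` remembers the original
# line ending so writers can reproduce it.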
1145
1146 def get_features_used(  # noqa: C901
1147     node: Node, *, future_imports: Optional[Set[str]] = None
1148 ) -> Set[Feature]:
1149     """Return a set of (relatively) new Python features used in this file.
1150
1151     Currently looking for:
1152     - f-strings;
1153     - self-documenting expressions in f-strings (f"{x=}");
1154     - underscores in numeric literals;
1155     - trailing commas after * or ** in function signatures and calls;
1156     - positional only arguments in function signatures and lambdas;
1157     - assignment expression;
1158     - relaxed decorator syntax;
1159     - usage of __future__ flags (annotations);
1160     - print / exec statements;
1161     """
1162     features: Set[Feature] = set()
1163     if future_imports:
1164         features |= {
1165             FUTURE_FLAG_TO_FEATURE[future_import]
1166             for future_import in future_imports
1167             if future_import in FUTURE_FLAG_TO_FEATURE
1168         }
1169
1170     for n in node.pre_order():
1171         if is_string_token(n):
1172             value_head = n.value[:2]
1173             if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
1174                 features.add(Feature.F_STRINGS)
1175                 if Feature.DEBUG_F_STRINGS not in features:
1176                     for span_beg, span_end in iter_fexpr_spans(n.value):
1177                         if n.value[span_beg : span_end - 1].rstrip().endswith("="):
1178                             features.add(Feature.DEBUG_F_STRINGS)
1179                             break
1180
1181         elif is_number_token(n):
1182             if "_" in n.value:
1183                 features.add(Feature.NUMERIC_UNDERSCORES)
1184
1185         elif n.type == token.SLASH:
1186             if n.parent and n.parent.type in {
1187                 syms.typedargslist,
1188                 syms.arglist,
1189                 syms.varargslist,
1190             }:
1191                 features.add(Feature.POS_ONLY_ARGUMENTS)
1192
1193         elif n.type == token.COLONEQUAL:
1194             features.add(Feature.ASSIGNMENT_EXPRESSIONS)
1195
1196         elif n.type == syms.decorator:
1197             if len(n.children) > 1 and not is_simple_decorator_expression(
1198                 n.children[1]
1199             ):
1200                 features.add(Feature.RELAXED_DECORATORS)
1201
1202         elif (
1203             n.type in {syms.typedargslist, syms.arglist}
1204             and n.children
1205             and n.children[-1].type == token.COMMA
1206         ):
1207             if n.type == syms.typedargslist:
1208                 feature = Feature.TRAILING_COMMA_IN_DEF
1209             else:
1210                 feature = Feature.TRAILING_COMMA_IN_CALL
1211
1212             for ch in n.children:
1213                 if ch.type in STARS:
1214                     features.add(feature)
1215
1216                 if ch.type == syms.argument:
1217                     for argch in ch.children:
1218                         if argch.type in STARS:
1219                             features.add(feature)
1220
1221         elif (
1222             n.type in {syms.return_stmt, syms.yield_expr}
1223             and len(n.children) >= 2
1224             and n.children[1].type == syms.testlist_star_expr
1225             and any(child.type == syms.star_expr for child in n.children[1].children)
1226         ):
1227             features.add(Feature.UNPACKING_ON_FLOW)
1228
1229         elif (
1230             n.type == syms.annassign
1231             and len(n.children) >= 4
1232             and n.children[3].type == syms.testlist_star_expr
1233         ):
1234             features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
1235
1236         elif (
1237             n.type == syms.except_clause
1238             and len(n.children) >= 2
1239             and n.children[1].type == token.STAR
1240         ):
1241             features.add(Feature.EXCEPT_STAR)
1242
1243         elif n.type in {syms.subscriptlist, syms.trailer} and any(
1244             child.type == syms.star_expr for child in n.children
1245         ):
1246             features.add(Feature.VARIADIC_GENERICS)
1247
1248         elif (
1249             n.type == syms.tname_star
1250             and len(n.children) == 3
1251             and n.children[2].type == syms.star_expr
1252         ):
1253             features.add(Feature.VARIADIC_GENERICS)
1254
1255     return features
1256
1257
1258 def detect_target_versions(
1259     node: Node, *, future_imports: Optional[Set[str]] = None
1260 ) -> Set[TargetVersion]:
1261     """Detect the version to target based on the nodes used."""
1262     features = get_features_used(node, future_imports=future_imports)
1263     return {
1264         version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
1265     }
1266
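# For instance (a sketch): if get_features_used() reports only
# Feature.ASSIGNMENT_EXPRESSIONS (the walrus operator), this returns every
# TargetVersion whose VERSION_TO_FEATURES entry contains that feature, i.e. the
# 3.8-and-newer targets; a file using no "new" features maps to all known targets.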
1267
1268 def get_future_imports(node: Node) -> Set[str]:
1269     """Return a set of __future__ imports in the file."""
1270     imports: Set[str] = set()
1271
1272     def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
1273         for child in children:
1274             if isinstance(child, Leaf):
1275                 if child.type == token.NAME:
1276                     yield child.value
1277
1278             elif child.type == syms.import_as_name:
1279                 orig_name = child.children[0]
1280                 assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
1281                 assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
1282                 yield orig_name.value
1283
1284             elif child.type == syms.import_as_names:
1285                 yield from get_imports_from_children(child.children)
1286
1287             else:
1288                 raise AssertionError("Invalid syntax parsing imports")
1289
1290     for child in node.children:
1291         if child.type != syms.simple_stmt:
1292             break
1293
1294         first_child = child.children[0]
1295         if isinstance(first_child, Leaf):
1296             # Continue looking if we see a docstring; otherwise stop.
1297             if (
1298                 len(child.children) == 2
1299                 and first_child.type == token.STRING
1300                 and child.children[1].type == token.NEWLINE
1301             ):
1302                 continue
1303
1304             break
1305
1306         elif first_child.type == syms.import_from:
1307             module_name = first_child.children[1]
1308             if not isinstance(module_name, Leaf) or module_name.value != "__future__":
1309                 break
1310
1311             imports |= set(get_imports_from_children(first_child.children[3:]))
1312         else:
1313             break
1314
1315     return imports
1316
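# For a hypothetical module beginning with
#
#     """Docstring."""
#     from __future__ import annotations
#     import os
#
# this returns {"annotations"}: the scan walks leading simple statements, skips a
# docstring, collects __future__ imports, and stops at the first other statement.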
1317
1318 def assert_equivalent(src: str, dst: str) -> None:
1319     """Raise AssertionError if `src` and `dst` aren't equivalent."""
1320     try:
1321         src_ast = parse_ast(src)
1322     except Exception as exc:
1323         raise AssertionError(
1324             "cannot use --safe with this file; failed to parse source file AST: "
1325             f"{exc}\n"
1326             "This could be caused by running Black with an older Python version "
1327             "that does not support new syntax used in your source file."
1328         ) from exc
1329
1330     try:
1331         dst_ast = parse_ast(dst)
1332     except Exception as exc:
1333         log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
1334         raise AssertionError(
1335             f"INTERNAL ERROR: Black produced invalid code: {exc}. "
1336             "Please report a bug on https://github.com/psf/black/issues.  "
1337             f"This invalid output might be helpful: {log}"
1338         ) from None
1339
1340     src_ast_str = "\n".join(stringify_ast(src_ast))
1341     dst_ast_str = "\n".join(stringify_ast(dst_ast))
1342     if src_ast_str != dst_ast_str:
1343         log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
1344         raise AssertionError(
1345             "INTERNAL ERROR: Black produced code that is not equivalent to the"
1346             " source.  Please report a bug on "
1347             f"https://github.com/psf/black/issues.  This diff might be helpful: {log}"
1348         ) from None
1349
1350
1351 def assert_stable(src: str, dst: str, mode: Mode) -> None:
1352     """Raise AssertionError if `dst` reformats differently the second time."""
1353     # We shouldn't call format_str() here, because that formats the string
1354     # twice and may hide a bug where we bounce back and forth between two
1355     # versions.
1356     newdst = _format_str_once(dst, mode=mode)
1357     if dst != newdst:
1358         log = dump_to_file(
1359             str(mode),
1360             diff(src, dst, "source", "first pass"),
1361             diff(dst, newdst, "first pass", "second pass"),
1362         )
1363         raise AssertionError(
1364             "INTERNAL ERROR: Black produced different code on the second pass of the"
1365             " formatter.  Please report a bug on https://github.com/psf/black/issues."
1366             f"  This diff might be helpful: {log}"
1367         ) from None
1368
1369
1370 @contextmanager
1371 def nullcontext() -> Iterator[None]:
1372     """Return an empty context manager.
1373
1374     Behaves like `contextlib.nullcontext` from Python 3.7+.
1375     """
1376     yield
1377
1378
1379 def patch_click() -> None:
1380     """Make Click not crash on Python 3.6 with LANG=C.
1381
1382     On certain misconfigured environments, Python 3 selects the ASCII encoding as the
1383     default which restricts paths that it can access during the lifetime of the
1384     application.  Click refuses to work in this scenario by raising a RuntimeError.
1385
1386     In the case of Black, the likelihood that non-ASCII characters are going to be used in
1387     file paths is minimal since it's Python source code.  Moreover, this crash was
1388     spurious on Python 3.7 thanks to PEP 538 and PEP 540.
1389     """
1390     modules: List[Any] = []
1391     try:
1392         from click import core
1393     except ImportError:
1394         pass
1395     else:
1396         modules.append(core)
1397     try:
1398         # Removed in Click 8.1.0 and newer; we keep this around for users who have
1399         # older versions installed.
1400         from click import _unicodefun  # type: ignore
1401     except ImportError:
1402         pass
1403     else:
1404         modules.append(_unicodefun)
1405
1406     for module in modules:
1407         if hasattr(module, "_verify_python3_env"):
1408             module._verify_python3_env = lambda: None
1409         if hasattr(module, "_verify_python_env"):
1410             module._verify_python_env = lambda: None
1411
1412
1413 def patched_main() -> None:
1414     # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
1415     # environments so just assume we always need to call it if frozen.
1416     if getattr(sys, "frozen", False):
1417         from multiprocessing import freeze_support
1418
1419         freeze_support()
1420
1421     patch_click()
1422     main()
1423
1424
1425 if __name__ == "__main__":
1426     patched_main()