[etc/vim.git] / src / black / __init__.py
1 import asyncio
2 from json.decoder import JSONDecodeError
3 import json
4 from contextlib import contextmanager
5 from datetime import datetime
6 from enum import Enum
7 import io
8 from multiprocessing import Manager, freeze_support
9 import os
10 from pathlib import Path
11 from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
12 import platform
13 import re
14 import signal
15 import sys
16 import tokenize
17 import traceback
18 from typing import (
19     TYPE_CHECKING,
20     Any,
21     Dict,
22     Generator,
23     Iterator,
24     List,
25     MutableMapping,
26     Optional,
27     Pattern,
28     Sequence,
29     Set,
30     Sized,
31     Tuple,
32     Union,
33 )
34
35 import click
36 from click.core import ParameterSource
37 from dataclasses import replace
38 from mypy_extensions import mypyc_attr
39
40 from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
41 from black.const import STDIN_PLACEHOLDER
42 from black.nodes import STARS, syms, is_simple_decorator_expression
43 from black.nodes import is_string_token, is_number_token
44 from black.lines import Line, EmptyLineTracker
45 from black.linegen import transform_line, LineGenerator, LN
46 from black.comments import normalize_fmt_off
47 from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
48 from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
49 from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
50 from black.concurrency import cancel, shutdown, maybe_install_uvloop
51 from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
52 from black.report import Report, Changed, NothingChanged
53 from black.files import (
54     find_project_root,
55     find_pyproject_toml,
56     parse_pyproject_toml,
57     find_user_pyproject_toml,
58 )
59 from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
60 from black.files import wrap_stream_for_windows
61 from black.parsing import InvalidInput  # noqa: F401
62 from black.parsing import lib2to3_parse, parse_ast, stringify_ast
63 from black.handle_ipynb_magics import (
64     mask_cell,
65     unmask_cell,
66     remove_trailing_semicolon,
67     put_trailing_semicolon_back,
68     TRANSFORMED_MAGICS,
69     PYTHON_CELL_MAGICS,
70     jupyter_dependencies_are_installed,
71 )
72
73
74 # lib2to3 fork
75 from blib2to3.pytree import Node, Leaf
76 from blib2to3.pgen2 import token
77
78 from _black_version import version as __version__
79
80 if TYPE_CHECKING:
81     from concurrent.futures import Executor
82
83 COMPILED = Path(__file__).suffix in (".pyd", ".so")
84
85 # types
86 FileContent = str
87 Encoding = str
88 NewLine = str
89
90
91 class WriteBack(Enum):
92     NO = 0
93     YES = 1
94     DIFF = 2
95     CHECK = 3
96     COLOR_DIFF = 4
97
98     @classmethod
99     def from_configuration(
100         cls, *, check: bool, diff: bool, color: bool = False
101     ) -> "WriteBack":
102         if check and not diff:
103             return cls.CHECK
104
105         if diff and color:
106             return cls.COLOR_DIFF
107
108         return cls.DIFF if diff else cls.YES
109
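# Illustrative mapping of the CLI flags onto `WriteBack` members (a sketch of
# the precedence implemented above, not an exhaustive truth table):
#
#     >>> WriteBack.from_configuration(check=True, diff=False)
#     <WriteBack.CHECK: 3>
#     >>> WriteBack.from_configuration(check=True, diff=True, color=True)
#     <WriteBack.COLOR_DIFF: 4>
#     >>> WriteBack.from_configuration(check=False, diff=False)
#     <WriteBack.YES: 1>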
110
111 # Legacy name, left for integrations.
112 FileMode = Mode
113
114 DEFAULT_WORKERS = os.cpu_count()
115
116
117 def read_pyproject_toml(
118     ctx: click.Context, param: click.Parameter, value: Optional[str]
119 ) -> Optional[str]:
120     """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
121
122     Returns the path to a successfully found and read configuration file, None
123     otherwise.
124     """
125     if not value:
126         value = find_pyproject_toml(ctx.params.get("src", ()))
127         if value is None:
128             return None
129
130     try:
131         config = parse_pyproject_toml(value)
132     except (OSError, ValueError) as e:
133         raise click.FileError(
134             filename=value, hint=f"Error reading configuration file: {e}"
135         ) from None
136
137     if not config:
138         return None
139     else:
140         # Sanitize the values to be Click friendly. For more information please see:
141         # https://github.com/psf/black/issues/1458
142         # https://github.com/pallets/click/issues/1567
143         config = {
144             k: str(v) if not isinstance(v, (list, dict)) else v
145             for k, v in config.items()
146         }
147
148     target_version = config.get("target_version")
149     if target_version is not None and not isinstance(target_version, list):
150         raise click.BadOptionUsage(
151             "target-version", "Config key target-version must be a list"
152         )
153
154     default_map: Dict[str, Any] = {}
155     if ctx.default_map:
156         default_map.update(ctx.default_map)
157     default_map.update(config)
158
159     ctx.default_map = default_map
160     return value
161
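# For illustration (hypothetical project configuration), a pyproject.toml with
#
#     [tool.black]
#     line-length = 88
#     target-version = ["py37", "py38"]
#
# ends up in `ctx.default_map` roughly as
# {"line_length": "88", "target_version": ["py37", "py38"]}: scalar values are
# stringified to keep Click happy, while lists and dicts pass through as-is.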
162
163 def target_version_option_callback(
164     c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...]
165 ) -> List[TargetVersion]:
166     """Compute the target versions from a --target-version flag.
167
168     This is its own function because mypy couldn't infer the type correctly
169     when it was a lambda, causing mypyc trouble.
170     """
171     return [TargetVersion[val.upper()] for val in v]
172
173
174 def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
175     """Compile a regular expression string in `regex`.
176
177     If it contains newlines, use verbose mode.
178     """
179     if "\n" in regex:
180         regex = "(?x)" + regex
181     compiled: Pattern[str] = re.compile(regex)
182     return compiled
183
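# Illustration: a regex containing a newline (e.g. one written as a TOML
# multi-line string) is compiled in verbose mode, a single-line one is not.
#
#     >>> bool(re_compile_maybe_verbose("foo  # dirs\n|bar").flags & re.VERBOSE)
#     True
#     >>> bool(re_compile_maybe_verbose(r"\.pyi?$").flags & re.VERBOSE)
#     False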
184
185 def validate_regex(
186     ctx: click.Context,
187     param: click.Parameter,
188     value: Optional[str],
189 ) -> Optional[Pattern[str]]:
190     try:
191         return re_compile_maybe_verbose(value) if value is not None else None
192     except re.error as e:
193         raise click.BadParameter(f"Not a valid regular expression: {e}") from None
194
195
196 @click.command(
197     context_settings={"help_option_names": ["-h", "--help"]},
198     # While Click does set this field automatically using the docstring, mypyc
199     # (annoyingly) strips 'em so we need to set it here too.
200     help="The uncompromising code formatter.",
201 )
202 @click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
203 @click.option(
204     "-l",
205     "--line-length",
206     type=int,
207     default=DEFAULT_LINE_LENGTH,
208     help="How many characters per line to allow.",
209     show_default=True,
210 )
211 @click.option(
212     "-t",
213     "--target-version",
214     type=click.Choice([v.name.lower() for v in TargetVersion]),
215     callback=target_version_option_callback,
216     multiple=True,
217     help=(
218         "Python versions that should be supported by Black's output. [default: per-file"
219         " auto-detection]"
220     ),
221 )
222 @click.option(
223     "--pyi",
224     is_flag=True,
225     help=(
226         "Format all input files like typing stubs regardless of file extension (useful"
227         " when piping source on standard input)."
228     ),
229 )
230 @click.option(
231     "--ipynb",
232     is_flag=True,
233     help=(
234         "Format all input files like Jupyter Notebooks regardless of file extension "
235         "(useful when piping source on standard input)."
236     ),
237 )
238 @click.option(
239     "--python-cell-magics",
240     multiple=True,
241     help=(
242         "When processing Jupyter Notebooks, add the given magic to the list"
243         f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
244         " Useful for formatting cells with custom python magics."
245     ),
246     default=[],
247 )
248 @click.option(
249     "-S",
250     "--skip-string-normalization",
251     is_flag=True,
252     help="Don't normalize string quotes or prefixes.",
253 )
254 @click.option(
255     "-C",
256     "--skip-magic-trailing-comma",
257     is_flag=True,
258     help="Don't use trailing commas as a reason to split lines.",
259 )
260 @click.option(
261     "--experimental-string-processing",
262     is_flag=True,
263     hidden=True,
264     help="(DEPRECATED and now included in --preview) Normalize string literals.",
265 )
266 @click.option(
267     "--preview",
268     is_flag=True,
269     help=(
270         "Enable potentially disruptive style changes that may be added to Black's main"
271         " functionality in the next major release."
272     ),
273 )
274 @click.option(
275     "--check",
276     is_flag=True,
277     help=(
278         "Don't write the files back, just return the status. Return code 0 means"
279         " nothing would change. Return code 1 means some files would be reformatted."
280         " Return code 123 means there was an internal error."
281     ),
282 )
283 @click.option(
284     "--diff",
285     is_flag=True,
286     help="Don't write the files back, just output a diff for each file on stdout.",
287 )
288 @click.option(
289     "--color/--no-color",
290     is_flag=True,
291     help="Show colored diff. Only applies when `--diff` is given.",
292 )
293 @click.option(
294     "--fast/--safe",
295     is_flag=True,
296     help="If --fast given, skip temporary sanity checks. [default: --safe]",
297 )
298 @click.option(
299     "--required-version",
300     type=str,
301     help=(
302         "Require a specific version of Black to be running (useful for unifying results"
303         " across many environments e.g. with a pyproject.toml file). It can be"
304         " either a major version number or an exact version."
305     ),
306 )
307 @click.option(
308     "--include",
309     type=str,
310     default=DEFAULT_INCLUDES,
311     callback=validate_regex,
312     help=(
313         "A regular expression that matches files and directories that should be"
314         " included on recursive searches. An empty value means all files are included"
315         " regardless of the name. Use forward slashes for directories on all platforms"
316         " (Windows, too). Exclusions are calculated first, inclusions later."
317     ),
318     show_default=True,
319 )
320 @click.option(
321     "--exclude",
322     type=str,
323     callback=validate_regex,
324     help=(
325         "A regular expression that matches files and directories that should be"
326         " excluded on recursive searches. An empty value means no paths are excluded."
327         " Use forward slashes for directories on all platforms (Windows, too)."
328         " Exclusions are calculated first, inclusions later. [default:"
329         f" {DEFAULT_EXCLUDES}]"
330     ),
331     show_default=False,
332 )
333 @click.option(
334     "--extend-exclude",
335     type=str,
336     callback=validate_regex,
337     help=(
338         "Like --exclude, but adds additional files and directories on top of the"
339         " excluded ones. (Useful if you simply want to add to the default)"
340     ),
341 )
342 @click.option(
343     "--force-exclude",
344     type=str,
345     callback=validate_regex,
346     help=(
347         "Like --exclude, but files and directories matching this regex will be "
348         "excluded even when they are passed explicitly as arguments."
349     ),
350 )
351 @click.option(
352     "--stdin-filename",
353     type=str,
354     help=(
355         "The name of the file when passing it through stdin. Useful to make "
356         "sure Black will respect the --force-exclude option on some "
357         "editors that rely on using stdin."
358     ),
359 )
360 @click.option(
361     "-W",
362     "--workers",
363     type=click.IntRange(min=1),
364     default=DEFAULT_WORKERS,
365     show_default=True,
366     help="Number of parallel workers",
367 )
368 @click.option(
369     "-q",
370     "--quiet",
371     is_flag=True,
372     help=(
373         "Don't emit non-error messages to stderr. Errors are still emitted; silence"
374         " those with 2>/dev/null."
375     ),
376 )
377 @click.option(
378     "-v",
379     "--verbose",
380     is_flag=True,
381     help=(
382         "Also emit messages to stderr about files that were not changed or were ignored"
383         " due to exclusion patterns."
384     ),
385 )
386 @click.version_option(
387     version=__version__,
388     message=(
389         f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
390         f"Python ({platform.python_implementation()}) {platform.python_version()}"
391     ),
392 )
393 @click.argument(
394     "src",
395     nargs=-1,
396     type=click.Path(
397         exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
398     ),
399     is_eager=True,
400     metavar="SRC ...",
401 )
402 @click.option(
403     "--config",
404     type=click.Path(
405         exists=True,
406         file_okay=True,
407         dir_okay=False,
408         readable=True,
409         allow_dash=False,
410         path_type=str,
411     ),
412     is_eager=True,
413     callback=read_pyproject_toml,
414     help="Read configuration from FILE path.",
415 )
416 @click.pass_context
417 def main(  # noqa: C901
418     ctx: click.Context,
419     code: Optional[str],
420     line_length: int,
421     target_version: List[TargetVersion],
422     check: bool,
423     diff: bool,
424     color: bool,
425     fast: bool,
426     pyi: bool,
427     ipynb: bool,
428     python_cell_magics: Sequence[str],
429     skip_string_normalization: bool,
430     skip_magic_trailing_comma: bool,
431     experimental_string_processing: bool,
432     preview: bool,
433     quiet: bool,
434     verbose: bool,
435     required_version: Optional[str],
436     include: Pattern[str],
437     exclude: Optional[Pattern[str]],
438     extend_exclude: Optional[Pattern[str]],
439     force_exclude: Optional[Pattern[str]],
440     stdin_filename: Optional[str],
441     workers: int,
442     src: Tuple[str, ...],
443     config: Optional[str],
444 ) -> None:
445     """The uncompromising code formatter."""
446     ctx.ensure_object(dict)
447
448     if src and code is not None:
449         out(
450             main.get_usage(ctx)
451             + "\n\n'SRC' and 'code' cannot be passed simultaneously."
452         )
453         ctx.exit(1)
454     if not src and code is None:
455         out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
456         ctx.exit(1)
457
458     root, method = find_project_root(src) if code is None else (None, None)
459     ctx.obj["root"] = root
460
461     if verbose:
462         if root:
463             out(
464                 f"Identified `{root}` as project root containing a {method}.",
465                 fg="blue",
466             )
467
468             normalized = [
469                 (normalize_path_maybe_ignore(Path(source), root), source)
470                 for source in src
471             ]
472             srcs_string = ", ".join(
473                 [
474                     f'"{_norm}"'
475                     if _norm
476                     else f'\033[31m"{source} (skipping - invalid)"\033[34m'
477                     for _norm, source in normalized
478                 ]
479             )
480             out(f"Sources to be formatted: {srcs_string}", fg="blue")
481
482         if config:
483             config_source = ctx.get_parameter_source("config")
484             user_level_config = str(find_user_pyproject_toml())
485             if config == user_level_config:
486                 out(
487                     "Using configuration from user-level config at "
488                     f"'{user_level_config}'.",
489                     fg="blue",
490                 )
491             elif config_source in (
492                 ParameterSource.DEFAULT,
493                 ParameterSource.DEFAULT_MAP,
494             ):
495                 out("Using configuration from project root.", fg="blue")
496             else:
497                 out(f"Using configuration in '{config}'.", fg="blue")
498
499     error_msg = "Oh no! 💥 💔 💥"
500     if (
501         required_version
502         and required_version != __version__
503         and required_version != __version__.split(".")[0]
504     ):
505         err(
506             f"{error_msg} The required version `{required_version}` does not match"
507             f" the running version `{__version__}`!"
508         )
509         ctx.exit(1)
510     if ipynb and pyi:
511         err("Cannot pass both `pyi` and `ipynb` flags!")
512         ctx.exit(1)
513
514     write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
515     if target_version:
516         versions = set(target_version)
517     else:
518         # We'll autodetect later.
519         versions = set()
520     mode = Mode(
521         target_versions=versions,
522         line_length=line_length,
523         is_pyi=pyi,
524         is_ipynb=ipynb,
525         string_normalization=not skip_string_normalization,
526         magic_trailing_comma=not skip_magic_trailing_comma,
527         experimental_string_processing=experimental_string_processing,
528         preview=preview,
529         python_cell_magics=set(python_cell_magics),
530     )
531
532     if code is not None:
533         # Run in quiet mode by default with -c; the extra output isn't useful.
534         # You can still pass -v to get verbose output.
535         quiet = True
536
537     report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
538
539     if code is not None:
540         reformat_code(
541             content=code, fast=fast, write_back=write_back, mode=mode, report=report
542         )
543     else:
544         try:
545             sources = get_sources(
546                 ctx=ctx,
547                 src=src,
548                 quiet=quiet,
549                 verbose=verbose,
550                 include=include,
551                 exclude=exclude,
552                 extend_exclude=extend_exclude,
553                 force_exclude=force_exclude,
554                 report=report,
555                 stdin_filename=stdin_filename,
556             )
557         except GitWildMatchPatternError:
558             ctx.exit(1)
559
560         path_empty(
561             sources,
562             "No Python files are present to be formatted. Nothing to do 😴",
563             quiet,
564             verbose,
565             ctx,
566         )
567
568         if len(sources) == 1:
569             reformat_one(
570                 src=sources.pop(),
571                 fast=fast,
572                 write_back=write_back,
573                 mode=mode,
574                 report=report,
575             )
576         else:
577             reformat_many(
578                 sources=sources,
579                 fast=fast,
580                 write_back=write_back,
581                 mode=mode,
582                 report=report,
583                 workers=workers,
584             )
585
586     if verbose or not quiet:
587         if code is None and (verbose or report.change_count or report.failure_count):
588             out()
589         out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
590         if code is None:
591             click.echo(str(report), err=True)
592     ctx.exit(report.return_code)
593
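# A few invocations that exercise the paths above (illustrative, not an
# exhaustive CLI reference):
#
#     black src/                   # reformat in place         -> WriteBack.YES
#     black --check src/           # report only, exit 0/1/123 -> WriteBack.CHECK
#     black --diff --color src/    # print a colored diff      -> WriteBack.COLOR_DIFF
#     black -c "x=1"               # format a string passed via --code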
594
595 def get_sources(
596     *,
597     ctx: click.Context,
598     src: Tuple[str, ...],
599     quiet: bool,
600     verbose: bool,
601     include: Pattern[str],
602     exclude: Optional[Pattern[str]],
603     extend_exclude: Optional[Pattern[str]],
604     force_exclude: Optional[Pattern[str]],
605     report: "Report",
606     stdin_filename: Optional[str],
607 ) -> Set[Path]:
608     """Compute the set of files to be formatted."""
609     sources: Set[Path] = set()
610
611     if exclude is None:
612         exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
613         gitignore = get_gitignore(ctx.obj["root"])
614     else:
615         gitignore = None
616
617     for s in src:
618         if s == "-" and stdin_filename:
619             p = Path(stdin_filename)
620             is_stdin = True
621         else:
622             p = Path(s)
623             is_stdin = False
624
625         if is_stdin or p.is_file():
626             normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
627             if normalized_path is None:
628                 continue
629
630             normalized_path = "/" + normalized_path
631             # Hard-exclude any files that match the `--force-exclude` regex.
632             if force_exclude:
633                 force_exclude_match = force_exclude.search(normalized_path)
634             else:
635                 force_exclude_match = None
636             if force_exclude_match and force_exclude_match.group(0):
637                 report.path_ignored(p, "matches the --force-exclude regular expression")
638                 continue
639
640             if is_stdin:
641                 p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
642
643             if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
644                 verbose=verbose, quiet=quiet
645             ):
646                 continue
647
648             sources.add(p)
649         elif p.is_dir():
650             sources.update(
651                 gen_python_files(
652                     p.iterdir(),
653                     ctx.obj["root"],
654                     include,
655                     exclude,
656                     extend_exclude,
657                     force_exclude,
658                     report,
659                     gitignore,
660                     verbose=verbose,
661                     quiet=quiet,
662                 )
663             )
664         elif s == "-":
665             sources.add(p)
666         else:
667             err(f"invalid path: {s}")
668     return sources
669
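# Illustration of the stdin handling above: `black - --stdin-filename foo.py`
# arrives here with `s == "-"` and `stdin_filename == "foo.py"`, so the
# --force-exclude rule is checked against "foo.py", and the path is then tagged
# with STDIN_PLACEHOLDER so that reformat_one() knows to read from stdin while
# still reporting the original name.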
670
671 def path_empty(
672     src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context
673 ) -> None:
674     """
675     Exit if there is no `src` provided for formatting.
676     """
677     if not src:
678         if verbose or not quiet:
679             out(msg)
680         ctx.exit(0)
681
682
683 def reformat_code(
684     content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
685 ) -> None:
686     """
687     Reformat and print out `content` without spawning child processes.
688     Similar to `reformat_one`, but for string content.
689
690     `fast`, `write_back`, and `mode` options are passed to
691     :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
692     """
693     path = Path("<string>")
694     try:
695         changed = Changed.NO
696         if format_stdin_to_stdout(
697             content=content, fast=fast, write_back=write_back, mode=mode
698         ):
699             changed = Changed.YES
700         report.done(path, changed)
701     except Exception as exc:
702         if report.verbose:
703             traceback.print_exc()
704         report.failed(path, str(exc))
705
706
707 # diff-shades depends on being able to monkeypatch this function to operate. I know it's
708 # not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
709 @mypyc_attr(patchable=True)
710 def reformat_one(
711     src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
712 ) -> None:
713     """Reformat a single file under `src` without spawning child processes.
714
715     `fast`, `write_back`, and `mode` options are passed to
716     :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
717     """
718     try:
719         changed = Changed.NO
720
721         if str(src) == "-":
722             is_stdin = True
723         elif str(src).startswith(STDIN_PLACEHOLDER):
724             is_stdin = True
725             # Use the original name again in case we want to print something
726             # to the user
727             src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
728         else:
729             is_stdin = False
730
731         if is_stdin:
732             if src.suffix == ".pyi":
733                 mode = replace(mode, is_pyi=True)
734             elif src.suffix == ".ipynb":
735                 mode = replace(mode, is_ipynb=True)
736             if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
737                 changed = Changed.YES
738         else:
739             cache: Cache = {}
740             if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
741                 cache = read_cache(mode)
742                 res_src = src.resolve()
743                 res_src_s = str(res_src)
744                 if res_src_s in cache and cache[res_src_s] == get_cache_info(res_src):
745                     changed = Changed.CACHED
746             if changed is not Changed.CACHED and format_file_in_place(
747                 src, fast=fast, write_back=write_back, mode=mode
748             ):
749                 changed = Changed.YES
750             if (write_back is WriteBack.YES and changed is not Changed.CACHED) or (
751                 write_back is WriteBack.CHECK and changed is Changed.NO
752             ):
753                 write_cache(cache, [src], mode)
754         report.done(src, changed)
755     except Exception as exc:
756         if report.verbose:
757             traceback.print_exc()
758         report.failed(src, str(exc))
759
760
761 # diff-shades depends on being able to monkeypatch this function to operate. I know it's
762 # not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
763 @mypyc_attr(patchable=True)
764 def reformat_many(
765     sources: Set[Path],
766     fast: bool,
767     write_back: WriteBack,
768     mode: Mode,
769     report: "Report",
770     workers: Optional[int],
771 ) -> None:
772     """Reformat multiple files using a ProcessPoolExecutor."""
773     from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
774
775     executor: Executor
776     worker_count = workers if workers is not None else DEFAULT_WORKERS
777     if sys.platform == "win32":
778         # Work around https://bugs.python.org/issue26903
779         assert worker_count is not None
780         worker_count = min(worker_count, 60)
781     try:
782         executor = ProcessPoolExecutor(max_workers=worker_count)
783     except (ImportError, NotImplementedError, OSError):
784         # We arrive here if the underlying system does not support multiprocessing
785         # (e.g. AWS Lambda or Termux), in which case we gracefully fall back to a
786         # ThreadPoolExecutor with just a single worker (more workers would not do
787         # us any good due to the Global Interpreter Lock).
788         executor = ThreadPoolExecutor(max_workers=1)
789
790     loop = asyncio.new_event_loop()
791     asyncio.set_event_loop(loop)
792     try:
793         loop.run_until_complete(
794             schedule_formatting(
795                 sources=sources,
796                 fast=fast,
797                 write_back=write_back,
798                 mode=mode,
799                 report=report,
800                 loop=loop,
801                 executor=executor,
802             )
803         )
804     finally:
805         try:
806             shutdown(loop)
807         finally:
808             asyncio.set_event_loop(None)
809         if executor is not None:
810             executor.shutdown()
811
812
813 async def schedule_formatting(
814     sources: Set[Path],
815     fast: bool,
816     write_back: WriteBack,
817     mode: Mode,
818     report: "Report",
819     loop: asyncio.AbstractEventLoop,
820     executor: "Executor",
821 ) -> None:
822     """Run formatting of `sources` in parallel using the provided `executor`.
823
824     (Use ProcessPoolExecutors for actual parallelism.)
825
826     `write_back`, `fast`, and `mode` options are passed to
827     :func:`format_file_in_place`.
828     """
829     cache: Cache = {}
830     if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
831         cache = read_cache(mode)
832         sources, cached = filter_cached(cache, sources)
833         for src in sorted(cached):
834             report.done(src, Changed.CACHED)
835     if not sources:
836         return
837
838     cancelled = []
839     sources_to_cache = []
840     lock = None
841     if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
842         # For diff output, we need locks to ensure we don't interleave output
843         # from different processes.
844         manager = Manager()
845         lock = manager.Lock()
846     tasks = {
847         asyncio.ensure_future(
848             loop.run_in_executor(
849                 executor, format_file_in_place, src, fast, mode, write_back, lock
850             )
851         ): src
852         for src in sorted(sources)
853     }
854     pending = tasks.keys()
855     try:
856         loop.add_signal_handler(signal.SIGINT, cancel, pending)
857         loop.add_signal_handler(signal.SIGTERM, cancel, pending)
858     except NotImplementedError:
859         # There are no good alternatives for these on Windows.
860         pass
861     while pending:
862         done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
863         for task in done:
864             src = tasks.pop(task)
865             if task.cancelled():
866                 cancelled.append(task)
867             elif task.exception():
868                 report.failed(src, str(task.exception()))
869             else:
870                 changed = Changed.YES if task.result() else Changed.NO
871                 # If the file was written back or was successfully checked as
872                 # well-formatted, store this information in the cache.
873                 if write_back is WriteBack.YES or (
874                     write_back is WriteBack.CHECK and changed is Changed.NO
875                 ):
876                     sources_to_cache.append(src)
877                 report.done(src, changed)
878     if cancelled:
879         if sys.version_info >= (3, 7):
880             await asyncio.gather(*cancelled, return_exceptions=True)
881         else:
882             await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
883     if sources_to_cache:
884         write_cache(cache, sources_to_cache, mode)
885
886
887 def format_file_in_place(
888     src: Path,
889     fast: bool,
890     mode: Mode,
891     write_back: WriteBack = WriteBack.NO,
892     lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
893 ) -> bool:
894     """Format file under `src` path. Return True if changed.
895
896     If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
897     code to the file.
898     `mode` and `fast` options are passed to :func:`format_file_contents`.
899     """
900     if src.suffix == ".pyi":
901         mode = replace(mode, is_pyi=True)
902     elif src.suffix == ".ipynb":
903         mode = replace(mode, is_ipynb=True)
904
905     then = datetime.utcfromtimestamp(src.stat().st_mtime)
906     with open(src, "rb") as buf:
907         src_contents, encoding, newline = decode_bytes(buf.read())
908     try:
909         dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
910     except NothingChanged:
911         return False
912     except JSONDecodeError:
913         raise ValueError(
914             f"File '{src}' cannot be parsed as a valid Jupyter notebook."
915         ) from None
916
917     if write_back == WriteBack.YES:
918         with open(src, "w", encoding=encoding, newline=newline) as f:
919             f.write(dst_contents)
920     elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
921         now = datetime.utcnow()
922         src_name = f"{src}\t{then} +0000"
923         dst_name = f"{src}\t{now} +0000"
924         if mode.is_ipynb:
925             diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
926         else:
927             diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
928
929         if write_back == WriteBack.COLOR_DIFF:
930             diff_contents = color_diff(diff_contents)
931
932         with lock or nullcontext():
933             f = io.TextIOWrapper(
934                 sys.stdout.buffer,
935                 encoding=encoding,
936                 newline=newline,
937                 write_through=True,
938             )
939             f = wrap_stream_for_windows(f)
940             f.write(diff_contents)
941             f.detach()
942
943     return True
944
945
946 def format_stdin_to_stdout(
947     fast: bool,
948     *,
949     content: Optional[str] = None,
950     write_back: WriteBack = WriteBack.NO,
951     mode: Mode,
952 ) -> bool:
953     """Format file on stdin. Return True if changed.
954
955     If content is None, it's read from sys.stdin.
956
957     If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
958     write a diff to stdout. The `mode` argument is passed to
959     :func:`format_file_contents`.
960     """
961     then = datetime.utcnow()
962
963     if content is None:
964         src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
965     else:
966         src, encoding, newline = content, "utf-8", ""
967
968     dst = src
969     try:
970         dst = format_file_contents(src, fast=fast, mode=mode)
971         return True
972
973     except NothingChanged:
974         return False
975
976     finally:
977         f = io.TextIOWrapper(
978             sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
979         )
980         if write_back == WriteBack.YES:
981             # Make sure there's a newline after the content
982             if dst and dst[-1] != "\n":
983                 dst += "\n"
984             f.write(dst)
985         elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
986             now = datetime.utcnow()
987             src_name = f"STDIN\t{then} +0000"
988             dst_name = f"STDOUT\t{now} +0000"
989             d = diff(src, dst, src_name, dst_name)
990             if write_back == WriteBack.COLOR_DIFF:
991                 d = color_diff(d)
992                 f = wrap_stream_for_windows(f)
993             f.write(d)
994         f.detach()
995
996
997 def check_stability_and_equivalence(
998     src_contents: str, dst_contents: str, *, mode: Mode
999 ) -> None:
1000     """Perform stability and equivalence checks.
1001
1002     Raise AssertionError if source and destination contents are not
1003     equivalent, or if a second pass of the formatter would format the
1004     content differently.
1005     """
1006     assert_equivalent(src_contents, dst_contents)
1007     assert_stable(src_contents, dst_contents, mode=mode)
1008
1009
1010 def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
1011     """Reformat contents of a file and return new contents.
1012
1013     If `fast` is False, additionally confirm that the reformatted code is
1014     valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
1015     `mode` is passed to :func:`format_str`.
1016     """
1017     if not src_contents.strip():
1018         raise NothingChanged
1019
1020     if mode.is_ipynb:
1021         dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
1022     else:
1023         dst_contents = format_str(src_contents, mode=mode)
1024     if src_contents == dst_contents:
1025         raise NothingChanged
1026
1027     if not fast and not mode.is_ipynb:
1028         # Jupyter notebooks will already have been checked above.
1029         check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
1030     return dst_contents
1031
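# Sketch of the contract above, assuming the default Mode: formatting returns
# the new contents, while already-formatted (or empty) input raises
# NothingChanged rather than returning the text unchanged.
#
#     >>> format_file_contents("x=1\n", fast=True, mode=Mode())
#     'x = 1\n'
#     >>> format_file_contents("x = 1\n", fast=True, mode=Mode())  # raises NothingChanged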
1032
1033 def validate_cell(src: str, mode: Mode) -> None:
1034     """Check that cell does not already contain TransformerManager transformations,
1035     or non-Python cell magics, which might cause tokenize_rt to break because of
1036     indentation.
1037
1038     If a cell contains ``!ls``, then it'll be transformed to
1039     ``get_ipython().system('ls')``. However, if the cell originally contained
1040     ``get_ipython().system('ls')``, then it would get transformed in the same way:
1041
1042         >>> TransformerManager().transform_cell("get_ipython().system('ls')")
1043         "get_ipython().system('ls')\n"
1044         >>> TransformerManager().transform_cell("!ls")
1045         "get_ipython().system('ls')\n"
1046
1047     Due to the impossibility of safely roundtripping in such situations, cells
1048     containing transformed magics will be ignored.
1049     """
1050     if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
1051         raise NothingChanged
1052     if (
1053         src[:2] == "%%"
1054         and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
1055     ):
1056         raise NothingChanged
1057
1058
1059 def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
1060     """Format code in given cell of Jupyter notebook.
1061
1062     General idea is:
1063
1064       - if cell has trailing semicolon, remove it;
1065       - if cell has IPython magics, mask them;
1066       - format cell;
1067       - reinstate IPython magics;
1068       - reinstate trailing semicolon (if originally present);
1069       - strip trailing newlines.
1070
1071     Cells with syntax errors will not be processed, as they
1072     could potentially be automagics or multi-line magics, which
1073     are currently not supported.
1074     """
1075     validate_cell(src, mode)
1076     src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
1077         src
1078     )
1079     try:
1080         masked_src, replacements = mask_cell(src_without_trailing_semicolon)
1081     except SyntaxError:
1082         raise NothingChanged from None
1083     masked_dst = format_str(masked_src, mode=mode)
1084     if not fast:
1085         check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
1086     dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
1087     dst = put_trailing_semicolon_back(
1088         dst_without_trailing_semicolon, has_trailing_semicolon
1089     )
1090     dst = dst.rstrip("\n")
1091     if dst == src:
1092         raise NothingChanged from None
1093     return dst
1094
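# Illustration of the round trip above (assuming the optional Jupyter
# dependencies are installed): a cell such as
#
#     %%time
#     x=1;
#
# has its magic masked, is formatted as ordinary Python, and comes back as
#
#     %%time
#     x = 1;
#
# with the magic restored, the trailing semicolon reinstated, and trailing
# newlines stripped.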
1095
1096 def validate_metadata(nb: MutableMapping[str, Any]) -> None:
1097     """If notebook is marked as non-Python, don't format it.
1098
1099     All notebook metadata fields are optional, see
1100     https://nbformat.readthedocs.io/en/latest/format_description.html. So
1101     if a notebook has empty metadata, we will try to parse it anyway.
1102     """
1103     language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
1104     if language is not None and language != "python":
1105         raise NothingChanged from None
1106
1107
1108 def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
1109     """Format Jupyter notebook.
1110
1111     Operate cell-by-cell, only on code cells, only for Python notebooks.
1112     If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
1113     """
1114     trailing_newline = src_contents[-1] == "\n"
1115     modified = False
1116     nb = json.loads(src_contents)
1117     validate_metadata(nb)
1118     for cell in nb["cells"]:
1119         if cell.get("cell_type", None) == "code":
1120             try:
1121                 src = "".join(cell["source"])
1122                 dst = format_cell(src, fast=fast, mode=mode)
1123             except NothingChanged:
1124                 pass
1125             else:
1126                 cell["source"] = dst.splitlines(keepends=True)
1127                 modified = True
1128     if modified:
1129         dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
1130         if trailing_newline:
1131             dst_contents = dst_contents + "\n"
1132         return dst_contents
1133     else:
1134         raise NothingChanged
1135
1136
1137 def format_str(src_contents: str, *, mode: Mode) -> str:
1138     """Reformat a string and return new contents.
1139
1140     `mode` determines formatting options, such as how many characters per line are
1141     allowed.  Example:
1142
1143     >>> import black
1144     >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
1145     def f(arg: str = "") -> None:
1146         ...
1147
1148     A more complex example:
1149
1150     >>> print(
1151     ...   black.format_str(
1152     ...     "def f(arg:str='')->None: hey",
1153     ...     mode=black.Mode(
1154     ...       target_versions={black.TargetVersion.PY36},
1155     ...       line_length=10,
1156     ...       string_normalization=False,
1157     ...       is_pyi=False,
1158     ...     ),
1159     ...   ),
1160     ... )
1161     def f(
1162         arg: str = '',
1163     ) -> None:
1164         hey
1165
1166     """
1167     dst_contents = _format_str_once(src_contents, mode=mode)
1168     # Forced second pass to work around optional trailing commas (becoming
1169     # forced trailing commas on pass 2) interacting differently with optional
1170     # parentheses.  Admittedly ugly.
1171     if src_contents != dst_contents:
1172         return _format_str_once(dst_contents, mode=mode)
1173     return dst_contents
1174
1175
1176 def _format_str_once(src_contents: str, *, mode: Mode) -> str:
1177     src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
1178     dst_contents = []
1179     if mode.target_versions:
1180         versions = mode.target_versions
1181     else:
1182         future_imports = get_future_imports(src_node)
1183         versions = detect_target_versions(src_node, future_imports=future_imports)
1184
1185     normalize_fmt_off(src_node, preview=mode.preview)
1186     lines = LineGenerator(mode=mode)
1187     elt = EmptyLineTracker(is_pyi=mode.is_pyi)
1188     empty_line = Line(mode=mode)
1189     after = 0
1190     split_line_features = {
1191         feature
1192         for feature in {Feature.TRAILING_COMMA_IN_CALL, Feature.TRAILING_COMMA_IN_DEF}
1193         if supports_feature(versions, feature)
1194     }
1195     for current_line in lines.visit(src_node):
1196         dst_contents.append(str(empty_line) * after)
1197         before, after = elt.maybe_empty_lines(current_line)
1198         dst_contents.append(str(empty_line) * before)
1199         for line in transform_line(
1200             current_line, mode=mode, features=split_line_features
1201         ):
1202             dst_contents.append(str(line))
1203     return "".join(dst_contents)
1204
1205
1206 def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
1207     """Return a tuple of (decoded_contents, encoding, newline).
1208
1209     `newline` is either CRLF or LF but `decoded_contents` is decoded with
1210     universal newlines (i.e. only contains LF).
1211     """
1212     srcbuf = io.BytesIO(src)
1213     encoding, lines = tokenize.detect_encoding(srcbuf.readline)
1214     if not lines:
1215         return "", encoding, "\n"
1216
1217     newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n"
1218     srcbuf.seek(0)
1219     with io.TextIOWrapper(srcbuf, encoding) as tiow:
1220         return tiow.read(), encoding, newline
1221
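# Illustration: the newline returned reflects the first line of the raw bytes,
# while the decoded contents are normalized to LF.
#
#     >>> decode_bytes(b"x = 1\r\nprint(x)\r\n")
#     ('x = 1\nprint(x)\n', 'utf-8', '\r\n')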
1222
1223 def get_features_used(  # noqa: C901
1224     node: Node, *, future_imports: Optional[Set[str]] = None
1225 ) -> Set[Feature]:
1226     """Return a set of (relatively) new Python features used in this file.
1227
1228     Currently looking for:
1229     - f-strings;
1230     - underscores in numeric literals;
1231     - trailing commas after * or ** in function signatures and calls;
1232     - positional only arguments in function signatures and lambdas;
1233     - assignment expression;
1234     - relaxed decorator syntax;
1235     - usage of __future__ flags (annotations);
1236     - print / exec statements;
1237     """
1238     features: Set[Feature] = set()
1239     if future_imports:
1240         features |= {
1241             FUTURE_FLAG_TO_FEATURE[future_import]
1242             for future_import in future_imports
1243             if future_import in FUTURE_FLAG_TO_FEATURE
1244         }
1245
1246     for n in node.pre_order():
1247         if is_string_token(n):
1248             value_head = n.value[:2]
1249             if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
1250                 features.add(Feature.F_STRINGS)
1251
1252         elif is_number_token(n):
1253             if "_" in n.value:
1254                 features.add(Feature.NUMERIC_UNDERSCORES)
1255
1256         elif n.type == token.SLASH:
1257             if n.parent and n.parent.type in {
1258                 syms.typedargslist,
1259                 syms.arglist,
1260                 syms.varargslist,
1261             }:
1262                 features.add(Feature.POS_ONLY_ARGUMENTS)
1263
1264         elif n.type == token.COLONEQUAL:
1265             features.add(Feature.ASSIGNMENT_EXPRESSIONS)
1266
1267         elif n.type == syms.decorator:
1268             if len(n.children) > 1 and not is_simple_decorator_expression(
1269                 n.children[1]
1270             ):
1271                 features.add(Feature.RELAXED_DECORATORS)
1272
1273         elif (
1274             n.type in {syms.typedargslist, syms.arglist}
1275             and n.children
1276             and n.children[-1].type == token.COMMA
1277         ):
1278             if n.type == syms.typedargslist:
1279                 feature = Feature.TRAILING_COMMA_IN_DEF
1280             else:
1281                 feature = Feature.TRAILING_COMMA_IN_CALL
1282
1283             for ch in n.children:
1284                 if ch.type in STARS:
1285                     features.add(feature)
1286
1287                 if ch.type == syms.argument:
1288                     for argch in ch.children:
1289                         if argch.type in STARS:
1290                             features.add(feature)
1291
1292         elif (
1293             n.type in {syms.return_stmt, syms.yield_expr}
1294             and len(n.children) >= 2
1295             and n.children[1].type == syms.testlist_star_expr
1296             and any(child.type == syms.star_expr for child in n.children[1].children)
1297         ):
1298             features.add(Feature.UNPACKING_ON_FLOW)
1299
1300         elif (
1301             n.type == syms.annassign
1302             and len(n.children) >= 4
1303             and n.children[3].type == syms.testlist_star_expr
1304         ):
1305             features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
1306
1307         elif (
1308             n.type == syms.except_clause
1309             and len(n.children) >= 2
1310             and n.children[1].type == token.STAR
1311         ):
1312             features.add(Feature.EXCEPT_STAR)
1313
1314         elif n.type in {syms.subscriptlist, syms.trailer} and any(
1315             child.type == syms.star_expr for child in n.children
1316         ):
1317             features.add(Feature.VARIADIC_GENERICS)
1318
1319         elif (
1320             n.type == syms.tname_star
1321             and len(n.children) == 3
1322             and n.children[2].type == syms.star_expr
1323         ):
1324             features.add(Feature.VARIADIC_GENERICS)
1325
1326     return features
1327
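# Illustration (the result depends only on the syntax present in the file):
#
#     >>> node = lib2to3_parse("def f(a, /, b):\n    return f'{a}{b}'\n")
#     >>> sorted(feature.name for feature in get_features_used(node))
#     ['F_STRINGS', 'POS_ONLY_ARGUMENTS']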
1328
1329 def detect_target_versions(
1330     node: Node, *, future_imports: Optional[Set[str]] = None
1331 ) -> Set[TargetVersion]:
1332     """Detect the version to target based on the nodes used."""
1333     features = get_features_used(node, future_imports=future_imports)
1334     return {
1335         version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
1336     }
1337
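# Illustration: a file using an assignment expression (walrus operator) can
# only target Python 3.8 and newer, so older versions drop out of the result.
#
#     >>> node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
#     >>> {v.name for v in detect_target_versions(node)} >= {"PY38", "PY39"}
#     True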
1338
1339 def get_future_imports(node: Node) -> Set[str]:
1340     """Return a set of __future__ imports in the file."""
1341     imports: Set[str] = set()
1342
1343     def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
1344         for child in children:
1345             if isinstance(child, Leaf):
1346                 if child.type == token.NAME:
1347                     yield child.value
1348
1349             elif child.type == syms.import_as_name:
1350                 orig_name = child.children[0]
1351                 assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
1352                 assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
1353                 yield orig_name.value
1354
1355             elif child.type == syms.import_as_names:
1356                 yield from get_imports_from_children(child.children)
1357
1358             else:
1359                 raise AssertionError("Invalid syntax parsing imports")
1360
1361     for child in node.children:
1362         if child.type != syms.simple_stmt:
1363             break
1364
1365         first_child = child.children[0]
1366         if isinstance(first_child, Leaf):
1367             # Continue looking if we see a docstring; otherwise stop.
1368             if (
1369                 len(child.children) == 2
1370                 and first_child.type == token.STRING
1371                 and child.children[1].type == token.NEWLINE
1372             ):
1373                 continue
1374
1375             break
1376
1377         elif first_child.type == syms.import_from:
1378             module_name = first_child.children[1]
1379             if not isinstance(module_name, Leaf) or module_name.value != "__future__":
1380                 break
1381
1382             imports |= set(get_imports_from_children(first_child.children[3:]))
1383         else:
1384             break
1385
1386     return imports
1387
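# Illustration: only the leading block of `from __future__ import ...`
# statements (optionally preceded by a docstring) is inspected; anything after
# the first non-import statement is ignored.
#
#     >>> get_future_imports(lib2to3_parse('"""doc"""\nfrom __future__ import annotations\n'))
#     {'annotations'}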
1388
1389 def assert_equivalent(src: str, dst: str) -> None:
1390     """Raise AssertionError if `src` and `dst` aren't equivalent."""
1391     try:
1392         src_ast = parse_ast(src)
1393     except Exception as exc:
1394         raise AssertionError(
1395             "cannot use --safe with this file; failed to parse source file AST: "
1396             f"{exc}\n"
1397             "This could be caused by running Black with an older Python version "
1398             "that does not support new syntax used in your source file."
1399         ) from exc
1400
1401     try:
1402         dst_ast = parse_ast(dst)
1403     except Exception as exc:
1404         log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
1405         raise AssertionError(
1406             f"INTERNAL ERROR: Black produced invalid code: {exc}. "
1407             "Please report a bug on https://github.com/psf/black/issues.  "
1408             f"This invalid output might be helpful: {log}"
1409         ) from None
1410
1411     src_ast_str = "\n".join(stringify_ast(src_ast))
1412     dst_ast_str = "\n".join(stringify_ast(dst_ast))
1413     if src_ast_str != dst_ast_str:
1414         log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
1415         raise AssertionError(
1416             "INTERNAL ERROR: Black produced code that is not equivalent to the"
1417             " source.  Please report a bug on "
1418             f"https://github.com/psf/black/issues.  This diff might be helpful: {log}"
1419         ) from None
1420
1421
1422 def assert_stable(src: str, dst: str, mode: Mode) -> None:
1423     """Raise AssertionError if `dst` reformats differently the second time."""
1424     # We shouldn't call format_str() here, because that formats the string
1425     # twice and may hide a bug where we bounce back and forth between two
1426     # versions.
1427     newdst = _format_str_once(dst, mode=mode)
1428     if dst != newdst:
1429         log = dump_to_file(
1430             str(mode),
1431             diff(src, dst, "source", "first pass"),
1432             diff(dst, newdst, "first pass", "second pass"),
1433         )
1434         raise AssertionError(
1435             "INTERNAL ERROR: Black produced different code on the second pass of the"
1436             " formatter.  Please report a bug on https://github.com/psf/black/issues."
1437             f"  This diff might be helpful: {log}"
1438         ) from None
1439
1440
1441 @contextmanager
1442 def nullcontext() -> Iterator[None]:
1443     """Return an empty context manager.
1444
1445     A stand-in for `contextlib.nullcontext`, which requires Python 3.7+.
1446     """
1447     yield
1448
1449
1450 def patch_click() -> None:
1451     """Make Click not crash on Python 3.6 with LANG=C.
1452
1453     On certain misconfigured environments, Python 3 selects the ASCII encoding as the
1454     default which restricts paths that it can access during the lifetime of the
1455     application.  Click refuses to work in this scenario by raising a RuntimeError.
1456
1457     In the case of Black, the likelihood that non-ASCII characters are going to
1458     be used in file paths is minimal since it's Python source code.  Moreover,
1459     this crash no longer occurs on Python 3.7+ thanks to PEP 538 and PEP 540.
1460     """
1461     modules: List[Any] = []
1462     try:
1463         from click import core
1464     except ImportError:
1465         pass
1466     else:
1467         modules.append(core)
1468     try:
1469         # Removed in Click 8.1.0 and newer; we keep this around for users who have
1470         # older versions installed.
1471         from click import _unicodefun  # type: ignore
1472     except ImportError:
1473         pass
1474     else:
1475         modules.append(_unicodefun)
1476
1477     for module in modules:
1478         if hasattr(module, "_verify_python3_env"):
1479             module._verify_python3_env = lambda: None  # type: ignore
1480         if hasattr(module, "_verify_python_env"):
1481             module._verify_python_env = lambda: None  # type: ignore
1482
1483
1484 def patched_main() -> None:
1485     maybe_install_uvloop()
1486     freeze_support()
1487     patch_click()
1488     main()
1489
1490
1491 if __name__ == "__main__":
1492     patched_main()