]> git.madduck.net Git - etc/vim.git/blob - src/black/concurrency.py

madduck's git repository

Every one of the projects in this repository is available at the canonical URL git://git.madduck.net/madduck/pub/<projectpath> — see each project's metadata for the exact URL.

All patches and comments are welcome. Please squash your changes into logical commits before using git-format-patch and git-send-email to send them to patches@git.madduck.net. I would be especially grateful if you read the Git project's submission guidelines and adhered to them.

SSH access, as well as push access, can be individually arranged.

If you use my repositories frequently, consider adding the following snippet to ~/.gitconfig and using the third clone URL listed for each project:

[url "git://git.madduck.net/madduck/"]
  insteadOf = madduck:

Report all stacktraces in verbose mode (#3938)
[etc/vim.git] / src / black / concurrency.py
1 """
2 Formatting many files at once via multiprocessing. Contains entrypoint and utilities.
3
4 NOTE: this module is only imported if we need to format several files at once.
5 """
6
7 import asyncio
8 import logging
9 import os
10 import signal
11 import sys
12 import traceback
13 from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor
14 from multiprocessing import Manager
15 from pathlib import Path
16 from typing import Any, Iterable, Optional, Set
17
18 from mypy_extensions import mypyc_attr
19
20 from black import WriteBack, format_file_in_place
21 from black.cache import Cache
22 from black.mode import Mode
23 from black.output import err
24 from black.report import Changed, Report
25
26
def maybe_install_uvloop() -> None:
    """Activate uvloop as the asyncio event loop implementation when available.

    Invoked only from command-line entry points so that importing Black as a
    library never alters the host application's event loop policy. A missing
    uvloop is not an error; we simply fall back to the stdlib loop.
    """
    try:
        import uvloop

        uvloop.install()
    except ImportError:
        # uvloop is an optional speedup; silently continue without it.
        return
39
40
def cancel(tasks: Iterable["asyncio.Task[Any]"]) -> None:
    """Signal handler for asyncio: report the abort on stderr, then cancel
    every task in `tasks`."""
    err("Aborted!")
    for pending_task in tasks:
        pending_task.cancel()
46
47
def shutdown(loop: asyncio.AbstractEventLoop) -> None:
    """Cancel all pending tasks on `loop`, wait for them, and close the loop."""
    try:
        # Adapted from asyncio/runners.py in Python 3.7b2.
        pending = [t for t in asyncio.all_tasks(loop) if not t.done()]
        if pending:
            for t in pending:
                t.cancel()
            # Let the cancellations propagate; swallow any resulting
            # exceptions so teardown always completes.
            loop.run_until_complete(
                asyncio.gather(*pending, return_exceptions=True)
            )
    finally:
        # `concurrent.futures.Future` objects cannot be cancelled once they
        # are already running. There might be some when the `shutdown()`
        # happened. Silence their logger's spew about the event loop being
        # closed.
        logging.getLogger("concurrent.futures").setLevel(logging.CRITICAL)
        loop.close()
66
67
# diff-shades depends on being to monkeypatch this function to operate. I know it's
# not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_many(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: Report,
    workers: Optional[int],
) -> None:
    """Reformat multiple files in parallel using a ProcessPoolExecutor.

    Falls back to a single-worker ThreadPoolExecutor on systems without
    working multiprocessing (e.g. AWS Lambda, Termux).

    :param sources: the set of files to reformat.
    :param fast: passed through to the formatting workers.
    :param write_back: how/whether results are written back.
    :param mode: formatting options.
    :param report: collects per-file outcomes.
    :param workers: number of parallel workers; ``None`` means use the
        ``BLACK_NUM_WORKERS`` environment variable, falling back to the
        CPU count (and at least 1).
    """
    maybe_install_uvloop()

    executor: Executor
    if workers is None:
        workers = int(os.environ.get("BLACK_NUM_WORKERS", 0))
        workers = workers or os.cpu_count() or 1
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        workers = min(workers, 60)
    try:
        executor = ProcessPoolExecutor(max_workers=workers)
    except (ImportError, NotImplementedError, OSError):
        # we arrive here if the underlying system does not support multi-processing
        # like in AWS Lambda or Termux, in which case we gracefully fallback to
        # a ThreadPoolExecutor with just a single worker (more workers would not do us
        # any good due to the Global Interpreter Lock)
        executor = ThreadPoolExecutor(max_workers=1)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(
            schedule_formatting(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                loop=loop,
                executor=executor,
            )
        )
    finally:
        try:
            shutdown(loop)
        finally:
            asyncio.set_event_loop(None)
        # `executor` is always bound by this point (either branch of the
        # try/except above assigned it), so no None-guard is needed.
        executor.shutdown()
119
120
async def schedule_formatting(
    sources: Set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    loop: asyncio.AbstractEventLoop,
    executor: "Executor",
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.

    Results are recorded on `report` as each file finishes; files already
    known-good in the cache are reported as ``Changed.CACHED`` without being
    scheduled at all (except in diff modes, which bypass the cache).
    """
    cache = Cache.read(mode)
    if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # Non-diff modes can trust the cache: report cached files as done
        # and only schedule the remainder.
        sources, cached = cache.filtered_cached(sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    if not sources:
        # Everything was cached (or nothing was passed in) — nothing to do.
        return

    cancelled = []
    sources_to_cache = []
    lock = None
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        manager = Manager()
        lock = manager.Lock()
    # Map each scheduled future back to its source path so outcomes can be
    # attributed to files as they complete (in completion order).
    tasks = {
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
            )
        ): src
        for src in sorted(sources)
    }
    # NOTE: `pending` is a *live* view of `tasks`' keys — popping a finished
    # task below also shrinks `pending`, which drives the while-loop and the
    # signal handlers.
    pending = tasks.keys()
    try:
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        pass
    while pending:
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            src = tasks.pop(task)
            if task.cancelled():
                # Collect cancelled tasks so their cancellation can be
                # awaited (and silenced) after the loop.
                cancelled.append(task)
            elif exc := task.exception():
                if report.verbose:
                    traceback.print_exception(type(exc), exc, exc.__traceback__)
                report.failed(src, str(exc))
            else:
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                ):
                    sources_to_cache.append(src)
                report.done(src, changed)
    if cancelled:
        await asyncio.gather(*cancelled, return_exceptions=True)
    if sources_to_cache:
        cache.write(sources_to_cache)