import asyncio
from json.decoder import JSONDecodeError
import json
-from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
from contextlib import contextmanager
from datetime import datetime
from enum import Enum
import tokenize
import traceback
from typing import (
+    TYPE_CHECKING,
    Any,
    Dict,
    Generator,
from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
from black.const import STDIN_PLACEHOLDER
from black.nodes import STARS, syms, is_simple_decorator_expression
-from black.nodes import is_string_token
+from black.nodes import is_string_token, is_number_token
from black.lines import Line, EmptyLineTracker
from black.linegen import transform_line, LineGenerator, LN
from black.comments import normalize_fmt_off
from _black_version import version as __version__
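+# `Executor` is used below only in type annotations, so the runtime import is
+# deferred to reformat_many().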
+if TYPE_CHECKING:
+    from concurrent.futures import Executor
+
COMPILED = Path(__file__).suffix in (".pyd", ".so")
# types
    workers: Optional[int],
) -> None:
    """Reformat multiple files using a ProcessPoolExecutor."""
+    from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
+
    executor: Executor
-    loop = asyncio.get_event_loop()
    worker_count = workers if workers is not None else DEFAULT_WORKERS
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        # any good due to the Global Interpreter Lock)
        executor = ThreadPoolExecutor(max_workers=1)
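+    # Create and install a dedicated event loop instead of reusing the implicit
+    # loop returned by asyncio.get_event_loop().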
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(
            schedule_formatting(
            )
        )
    finally:
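+        # Uninstall the loop even if shutdown() raises.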
+        try:
+            shutdown(loop)
+        finally:
+            asyncio.set_event_loop(None)
        if executor is not None:
            executor.shutdown()
    mode: Mode,
    report: "Report",
    loop: asyncio.AbstractEventLoop,
-    executor: Executor,
+    executor: "Executor",
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.
def _format_str_once(src_contents: str, *, mode: Mode) -> str:
    src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)
    dst_contents = []
-    future_imports = get_future_imports(src_node)
    if mode.target_versions:
        versions = mode.target_versions
    else:
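+        # Future imports only matter when the target versions are auto-detected.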
+        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)
    normalize_fmt_off(src_node, preview=mode.preview)
            if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                features.add(Feature.F_STRINGS)
-        elif n.type == token.NUMBER:
-            assert isinstance(n, Leaf)
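+        # is_number_token() narrows `n` to a Leaf, so the assert is no longer needed.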
+        elif is_number_token(n):
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)
        ):
            features.add(Feature.EXCEPT_STAR)
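+        # PEP 646 unpacking in subscripts, e.g. Tuple[int, *Ts].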
+        elif n.type in {syms.subscriptlist, syms.trailer} and any(
+            child.type == syms.star_expr for child in n.children
+        ):
+            features.add(Feature.VARIADIC_GENERICS)
+
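+        # PEP 646 unpacking in a *args annotation, e.g. def f(*args: *Ts).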
+        elif (
+            n.type == syms.tname_star
+            and len(n.children) == 3
+            and n.children[2].type == syms.star_expr
+        ):
+            features.add(Feature.VARIADIC_GENERICS)
+
    return features