-from functools import lru_cache
import io
import os
-from pathlib import Path
import sys
+from functools import lru_cache
+from pathlib import Path
from typing import (
+    TYPE_CHECKING,
    Any,
    Dict,
    Iterable,
    Iterator,
    List,
    Optional,
    Pattern,
    Sequence,
    Tuple,
    Union,
-    TYPE_CHECKING,
)
+from mypy_extensions import mypyc_attr
+from packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet
+from packaging.version import InvalidVersion, Version
from pathspec import PathSpec
-import toml
+from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
+if sys.version_info >= (3, 11):
+ try:
+ import tomllib
+ except ImportError:
+ # Help users on older alphas
+ if not TYPE_CHECKING:
+ import tomli as tomllib
+else:
+ import tomli as tomllib
+
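# Editorial note (sketch, not part of this change): tomllib and its tomli
# backport expose the same API and both require a binary file object, e.g.
#
#     with open("pyproject.toml", "rb") as f:
#         data = tomllib.load(f)
#
# which is why parse_pyproject_toml() below opens the config file with "rb".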
+from black.handle_ipynb_magics import jupyter_dependencies_are_installed
+from black.mode import TargetVersion
from black.output import err
from black.report import Report
@lru_cache()
-def find_project_root(srcs: Sequence[str]) -> Path:
+def find_project_root(
+ srcs: Sequence[str], stdin_filename: Optional[str] = None
+) -> Tuple[Path, str]:
"""Return a directory containing .git, .hg, or pyproject.toml.
    That directory will be a common parent of all files and directories
    passed in `srcs`.
If no directory in the tree contains a marker that would specify it's the
project root, the root of the file system is returned.
+
+ Returns a two-tuple with the first element as the project root path and
+ the second element as a string describing the method by which the
+ project root was discovered.
"""
+ if stdin_filename is not None:
+ srcs = tuple(stdin_filename if s == "-" else s for s in srcs)
if not srcs:
- return Path("/").resolve()
+ srcs = [str(Path.cwd().resolve())]
    path_srcs = [Path(Path.cwd(), src).resolve() for src in srcs]

    # A list of lists of parents for each 'src'. 'src' is included as a
    # "parent" of itself if it is a directory
    src_parents = [
        list(path.parents) + ([path] if path.is_dir() else []) for path in path_srcs
    ]

    common_base = max(
        set.intersection(*(set(parents) for parents in src_parents)),
        key=lambda path: path.parts,
    )

for directory in (common_base, *common_base.parents):
if (directory / ".git").exists():
- return directory
+ return directory, ".git directory"
if (directory / ".hg").is_dir():
- return directory
+ return directory, ".hg directory"
if (directory / "pyproject.toml").is_file():
- return directory
+ return directory, "pyproject.toml"
- return directory
+ return directory, "file system root"
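# Editorial sketch of the new return value (paths here are hypothetical):
#
#     root, method = find_project_root(("src/app.py", "tests"))
#     # root   -> Path("/home/user/project")
#     # method -> ".git directory" (or ".hg directory", "pyproject.toml",
#     #           "file system root")
#
# Callers that only need the path unpack and discard the second element, as
# find_pyproject_toml() does below.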
def find_pyproject_toml(path_search_start: Tuple[str, ...]) -> Optional[str]:
"""Find the absolute filepath to a pyproject.toml if it exists"""
- path_project_root = find_project_root(path_search_start)
+ path_project_root, _ = find_project_root(path_search_start)
path_pyproject_toml = path_project_root / "pyproject.toml"
if path_pyproject_toml.is_file():
return str(path_pyproject_toml)
    try:
        path_user_pyproject_toml = find_user_pyproject_toml()
        return (
            str(path_user_pyproject_toml)
            if path_user_pyproject_toml.is_file()
            else None
        )
- except PermissionError as e:
+ except (PermissionError, RuntimeError) as e:
# We do not have access to the user-level config directory, so ignore it.
err(f"Ignoring user configuration directory due to {e!r}")
return None
+@mypyc_attr(patchable=True)
def parse_pyproject_toml(path_config: str) -> Dict[str, Any]:
- """Parse a pyproject toml file, pulling out relevant parts for Black
+ """Parse a pyproject toml file, pulling out relevant parts for Black.
+
+ If parsing fails, will raise a tomllib.TOMLDecodeError.
+ """
+ with open(path_config, "rb") as f:
+ pyproject_toml = tomllib.load(f)
+ config: Dict[str, Any] = pyproject_toml.get("tool", {}).get("black", {})
+ config = {k.replace("--", "").replace("-", "_"): v for k, v in config.items()}
+
+ if "target_version" not in config:
+ inferred_target_version = infer_target_version(pyproject_toml)
+ if inferred_target_version is not None:
+ config["target_version"] = [v.name.lower() for v in inferred_target_version]
+
+ return config
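# Editorial sketch (hypothetical pyproject.toml, not from this change): given
#
#     [project]
#     requires-python = ">=3.9"
#
#     [tool.black]
#     line-length = 100
#
# parse_pyproject_toml() returns roughly
# {"line_length": 100, "target_version": ["py39", "py310", ...]}, with the
# exact list depending on the TargetVersion members this Black release knows.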
+
+
+def infer_target_version(
+ pyproject_toml: Dict[str, Any]
+) -> Optional[List[TargetVersion]]:
+ """Infer Black's target version from the project metadata in pyproject.toml.
+
+ Supports the PyPA standard format (PEP 621):
+ https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#requires-python
+
+ If the target version cannot be inferred, returns None.
+ """
+ project_metadata = pyproject_toml.get("project", {})
+ requires_python = project_metadata.get("requires-python", None)
+ if requires_python is not None:
+ try:
+ return parse_req_python_version(requires_python)
+ except InvalidVersion:
+ pass
+ try:
+ return parse_req_python_specifier(requires_python)
+ except (InvalidSpecifier, InvalidVersion):
+ pass
+
+ return None
+
+
+def parse_req_python_version(requires_python: str) -> Optional[List[TargetVersion]]:
+ """Parse a version string (i.e. ``"3.7"``) to a list of TargetVersion.
+
+ If parsing fails, will raise a packaging.version.InvalidVersion error.
+ If the parsed version cannot be mapped to a valid TargetVersion, returns None.
+ """
+ version = Version(requires_python)
+ if version.release[0] != 3:
+ return None
+ try:
+ return [TargetVersion(version.release[1])]
+ except (IndexError, ValueError):
+ return None
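# Editorial sketch: parse_req_python_version("3.7") maps the release tuple
# (3, 7) to [TargetVersion.PY37]; "2.7" returns None because the major version
# is not 3, and a specifier such as ">=3.7" raises InvalidVersion, which
# infer_target_version() catches before trying the specifier parser.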
+
+
+def parse_req_python_specifier(requires_python: str) -> Optional[List[TargetVersion]]:
+ """Parse a specifier string (i.e. ``">=3.7,<3.10"``) to a list of TargetVersion.
+
+ If parsing fails, will raise a packaging.specifiers.InvalidSpecifier error.
+ If the parsed specifier cannot be mapped to a valid TargetVersion, returns None.
+ """
+ specifier_set = strip_specifier_set(SpecifierSet(requires_python))
+ if not specifier_set:
+ return None
+
+ target_version_map = {f"3.{v.value}": v for v in TargetVersion}
+ compatible_versions: List[str] = list(specifier_set.filter(target_version_map))
+ if compatible_versions:
+ return [target_version_map[v] for v in compatible_versions]
+ return None
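# Editorial sketch: for requires-python = ">=3.9,<3.11" the stripped specifier
# set filters the "3.x" candidate strings built from TargetVersion down to
# "3.9" and "3.10", so the result is [TargetVersion.PY39, TargetVersion.PY310];
# an empty or unsatisfiable specifier set yields None instead.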
+
+
+def strip_specifier_set(specifier_set: SpecifierSet) -> SpecifierSet:
+ """Strip minor versions for some specifiers in the specifier set.
- If parsing fails, will raise a toml.TomlDecodeError
+ For background on version specifiers, see PEP 440:
+ https://peps.python.org/pep-0440/#version-specifiers
"""
- pyproject_toml = toml.load(path_config)
- config = pyproject_toml.get("tool", {}).get("black", {})
- return {k.replace("--", "").replace("-", "_"): v for k, v in config.items()}
+ specifiers = []
+ for s in specifier_set:
+ if "*" in str(s):
+ specifiers.append(s)
+ elif s.operator in ["~=", "==", ">=", "==="]:
+ version = Version(s.version)
+ stripped = Specifier(f"{s.operator}{version.major}.{version.minor}")
+ specifiers.append(stripped)
+ elif s.operator == ">":
+ version = Version(s.version)
+ if len(version.release) > 2:
+ s = Specifier(f">={version.major}.{version.minor}")
+ specifiers.append(s)
+ else:
+ specifiers.append(s)
+
+ return SpecifierSet(",".join(str(s) for s in specifiers))
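# Editorial sketch of the stripping rules above (example specifiers only):
#
#     ">=3.8.6"   ->  ">=3.8"    minor-only lower bound
#     "~=3.7.1"   ->  "~=3.7"
#     ">3.10.4"   ->  ">=3.10"   ">" with a micro release becomes ">="
#     ">3.10"     ->  ">3.10"    left alone; excluding 3.10 is intentional
#     "==3.9.*"   ->  "==3.9.*"  wildcards are kept as-is
#     "<3.11"     ->  "<3.11"    other operators pass through unchanged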
@lru_cache()
def find_user_pyproject_toml() -> Path:
    r"""Return the path to the top-level user configuration for black.

    This looks for ~\.black on Windows and ~/.config/black on Linux and other
Unix systems.
+
+ May raise:
+ - RuntimeError: if the current user has no homedir
+ - PermissionError: if the current process cannot access the user's homedir
"""
    if sys.platform == "win32":
        # Windows
        user_config_path = Path.home() / ".black"
    else:
        config_root = os.environ.get("XDG_CONFIG_HOME", "~/.config")
        user_config_path = Path(config_root).expanduser() / "black"
    return user_config_path.resolve()


@lru_cache()
def get_gitignore(root: Path) -> PathSpec:
    """Return a PathSpec matching gitignore content if present."""
    gitignore = root / ".gitignore"
    lines: List[str] = []
    if gitignore.is_file():
with gitignore.open(encoding="utf-8") as gf:
lines = gf.readlines()
- return PathSpec.from_lines("gitwildmatch", lines)
+ try:
+ return PathSpec.from_lines("gitwildmatch", lines)
+ except GitWildMatchPatternError as e:
+ err(f"Could not parse {gitignore}: {e}")
+ raise
def normalize_path_maybe_ignore(
- path: Path, root: Path, report: Report
+ path: Path,
+ root: Path,
+ report: Optional[Report] = None,
) -> Optional[str]:
"""Normalize `path`. May return `None` if `path` was ignored.
"""
try:
abspath = path if path.is_absolute() else Path.cwd() / path
- normalized_path = abspath.resolve().relative_to(root).as_posix()
+ normalized_path = abspath.resolve()
+ try:
+ root_relative_path = normalized_path.relative_to(root).as_posix()
+ except ValueError:
+ if report:
+ report.path_ignored(
+ path, f"is a symbolic link that points outside {root}"
+ )
+ return None
+
except OSError as e:
- report.path_ignored(path, f"cannot be read because {e}")
+ if report:
+ report.path_ignored(path, f"cannot be read because {e}")
return None
- except ValueError:
- if path.is_symlink():
- report.path_ignored(path, f"is a symbolic link that points outside {root}")
- return None
+ return root_relative_path
- raise
- return normalized_path
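# Editorial sketch: with root == Path("/repo") (hypothetical), a child at
# /repo/pkg/mod.py normalizes to "pkg/mod.py"; a symlink that resolves outside
# the root makes relative_to() raise ValueError, so the path is reported (when
# a report is supplied) and None is returned instead of raising.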
+def path_is_ignored(
+ path: Path, gitignore_dict: Dict[Path, PathSpec], report: Report
+) -> bool:
+ for gitignore_path, pattern in gitignore_dict.items():
+ relative_path = normalize_path_maybe_ignore(path, gitignore_path, report)
+ if relative_path is None:
+ break
+ if pattern.match_file(relative_path):
+ report.path_ignored(path, "matches a .gitignore file content")
+ return True
+ return False
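# Editorial sketch: gitignore_dict maps each directory to the PathSpec parsed
# from its .gitignore, e.g. {Path("/repo"): root_spec, Path("/repo/pkg"):
# pkg_spec} (hypothetical paths); a file is ignored as soon as any spec in the
# chain matches the file's path relative to that directory.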
def gen_python_files(
    paths: Iterable[Path],
    root: Path,
    include: Pattern[str],
    exclude: Pattern[str],
extend_exclude: Optional[Pattern[str]],
force_exclude: Optional[Pattern[str]],
report: Report,
- gitignore: Optional[PathSpec],
+ gitignore_dict: Optional[Dict[Path, PathSpec]],
+ *,
+ verbose: bool,
+ quiet: bool,
) -> Iterator[Path]:
"""Generate all files under `path` whose paths are not excluded by the
    `exclude_regex`, `extend_exclude`, or `force_exclude` regexes,
    but are included by the `include` regex.

    `report` is where output about exclusions goes.
"""
+
assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
for child in paths:
normalized_path = normalize_path_maybe_ignore(child, root, report)
        if normalized_path is None:
            continue
# First ignore files matching .gitignore, if passed
- if gitignore is not None and gitignore.match_file(normalized_path):
- report.path_ignored(child, "matches the .gitignore file content")
+ if gitignore_dict and path_is_ignored(child, gitignore_dict, report):
continue
# Then ignore with `--exclude` `--extend-exclude` and `--force-exclude` options.
if child.is_dir():
# If gitignore is None, gitignore usage is disabled, while a Falsey
# gitignore is when the directory doesn't have a .gitignore file.
+ if gitignore_dict is not None:
+ new_gitignore_dict = {
+ **gitignore_dict,
+ root / child: get_gitignore(child),
+ }
+ else:
+ new_gitignore_dict = None
yield from gen_python_files(
child.iterdir(),
root,
extend_exclude,
force_exclude,
report,
- gitignore + get_gitignore(child) if gitignore is not None else None,
+ new_gitignore_dict,
+ verbose=verbose,
+ quiet=quiet,
)
elif child.is_file():
+ if child.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
+ verbose=verbose, quiet=quiet
+ ):
+ continue
include_match = include.search(normalized_path) if include else True
if include_match:
yield child