import colorama # noqa: F401
DEFAULT_LINE_LENGTH = 88
-DEFAULT_EXCLUDES = r"/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist)/" # noqa: B950
+DEFAULT_EXCLUDES = r"/(\.direnv|\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist)/" # noqa: B950
DEFAULT_INCLUDES = r"\.pyi?$"
CACHE_DIR = Path(user_cache_dir("black", version=__version__))
return all(feature in VERSION_TO_FEATURES[version] for version in target_versions)
-def find_pyproject_toml(path_search_start: str) -> Optional[str]:
+def find_pyproject_toml(path_search_start: Iterable[str]) -> Optional[str]:
"""Find the absolute filepath to a pyproject.toml if it exists"""
path_project_root = find_project_root(path_search_start)
path_pyproject_toml = path_project_root / "pyproject.toml"
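# Illustrative usage sketch, not part of the patch: with the new signature the
# caller passes every input path at once rather than a single string, e.g.:
#
#     config_file = find_pyproject_toml(("src/", "setup.py"))
#     if config_file is not None:
#         print(f"reading configuration from {config_file}")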
if not config:
return None
+ else:
+ # Sanitize the values to be Click friendly. For more information please see:
+ # https://github.com/psf/black/issues/1458
+ # https://github.com/pallets/click/issues/1567
+ config = {
+ k: str(v) if not isinstance(v, (list, dict)) else v
+ for k, v in config.items()
+ }
target_version = config.get("target_version")
if target_version is not None and not isinstance(target_version, list):
raise click.BadOptionUsage(
- "target-version", f"Config key target-version must be a list"
+ "target-version", "Config key target-version must be a list"
)
default_map: Dict[str, Any] = {}
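# Illustrative sketch, not part of the patch: the sanitization above stringifies
# scalar TOML values so they can seed Click's default_map, while list/dict
# values such as target-version pass through unchanged, e.g.:
#
#     >>> raw = {"line_length": 79, "fast": True, "target_version": ["py37"]}
#     >>> {k: str(v) if not isinstance(v, (list, dict)) else v for k, v in raw.items()}
#     {'line_length': '79', 'fast': 'True', 'target_version': ['py37']}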
),
is_eager=True,
callback=read_pyproject_toml,
- help="Read configuration from PATH.",
+ help="Read configuration from FILE path.",
)
@click.pass_context
def main(
root = find_project_root(src)
sources: Set[Path] = set()
path_empty(src, "No Path provided. Nothing to do 😴", quiet, verbose, ctx)
- exclude_regexes = [exclude_regex]
- if force_exclude_regex is not None:
- exclude_regexes.append(force_exclude_regex)
+ gitignore = get_gitignore(root)
for s in src:
p = Path(s)
p.iterdir(),
root,
include_regex,
- exclude_regexes,
+ exclude_regex,
+ force_exclude_regex,
report,
- get_gitignore(root),
+ gitignore,
)
)
elif s == "-":
sources.add(p)
elif p.is_file():
- sources.update(
- gen_python_files(
- [p], root, None, exclude_regexes, report, get_gitignore(root)
- )
- )
+ normalized_path = normalize_path_maybe_ignore(p, root, report)
+ if normalized_path is None:
+ continue
+
+ normalized_path = "/" + normalized_path
+ # Hard-exclude any files that match the `--force-exclude` regex.
+ if force_exclude_regex:
+ force_exclude_match = force_exclude_regex.search(normalized_path)
+ else:
+ force_exclude_match = None
+ if force_exclude_match and force_exclude_match.group(0):
+ report.path_ignored(p, "matches the --force-exclude regular expression")
+ continue
+
+ sources.add(p)
else:
err(f"invalid path: {s}")
return sources
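# Illustrative sketch, not part of the patch: the leading "/" added above lets a
# directly passed file match the same anchored patterns used during directory
# traversal (cf. DEFAULT_EXCLUDES), e.g.:
#
#     >>> import re
#     >>> bool(re.compile(r"/(\.venv|build)/").search("/" + ".venv/lib/mod.py"))
#     True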
write_cache(cache, [src], mode)
report.done(src, changed)
except Exception as exc:
+ if report.verbose:
+ traceback.print_exc()
report.failed(src, str(exc))
# one line in the original code.
# Grab the first and last line numbers, skipping generated leaves
- first_line = next((l.lineno for l in self.leaves if l.lineno != 0), 0)
- last_line = next((l.lineno for l in reversed(self.leaves) if l.lineno != 0), 0)
+ first_line = next((leaf.lineno for leaf in self.leaves if leaf.lineno != 0), 0)
+ last_line = next(
+ (leaf.lineno for leaf in reversed(self.leaves) if leaf.lineno != 0), 0
+ )
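# Illustrative sketch, not part of the patch: next() with a default of 0 scans
# the leaves until one carries a real line number, e.g.:
#
#     >>> next((n for n in [0, 0, 12, 13] if n != 0), 0)
#     12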
if first_line == last_line:
# We look at the last two leaves since a comma or an
# split altogether.
result: List[Line] = []
try:
- for l in transform(line, features):
- if str(l).strip("\n") == line_str:
+ for transformed_line in transform(line, features):
+ if str(transformed_line).strip("\n") == line_str:
raise CannotTransform(
"Line transformer returned an unchanged result"
)
result.extend(
transform_line(
- l,
+ transformed_line,
line_length=line_length,
normalize_strings=normalize_strings,
features=features,
StringTransformer to be applied?
Transformations:
- If the given Line meets all of the above requirments, which string
+ If the given Line meets all of the above requirements, which string
transformations can you expect to be applied to it by this
StringTransformer?
new_line.append(string_leaf)
append_leaves(
- new_line, line, LL[string_idx + 1 : rpar_idx] + LL[rpar_idx + 1 :],
+ new_line, line, LL[string_idx + 1 : rpar_idx] + LL[rpar_idx + 1 :]
)
LL[rpar_idx].remove()
# We MAY choose to drop the 'f' prefix from substrings that don't
# contain any f-expressions, but ONLY if the original f-string
- # containes at least one f-expression. Otherwise, we will alter the AST
+ # contains at least one f-expression. Otherwise, we will alter the AST
# of the program.
drop_pointless_f_prefix = ("f" in prefix) and re.search(
self.RE_FEXPR, LL[string_idx].value, re.VERBOSE
max_bidx = max_break_idx - 2 if line_needs_plus() else max_break_idx
maybe_break_idx = self.__get_break_idx(rest_value, max_bidx)
if maybe_break_idx is None:
- # If we are unable to algorthmically determine a good split
+ # If we are unable to algorithmically determine a good split
# and this string has custom splits registered to it, we
# fall back to using them--which means we have to start
# over from the beginning.
class StringParser:
"""
A state machine that aids in parsing a string's "trailer", which can be
- either non-existant, an old-style formatting sequence (e.g. `% varX` or `%
+ either non-existent, an old-style formatting sequence (e.g. `% varX` or `%
(varX, varY)`), or a method-call / attribute access (e.g. `.format(varX,
varY)`).
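# Illustrative examples, not part of the patch, of the trailer shapes the state
# machine distinguishes (assuming x and y are in scope):
#
#     "no trailer"
#     "old style: %s and %s" % (x, y)
#     "new style: {}".format(x)
#     "attribute access".upper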
was directly after the string leaf in question (e.g. if our target
string is `line.leaves[i]` then the first call to this method must
be `line.leaves[i + 1]`).
- * On the next call to this function, the leaf paramater passed in
+ * On the next call to this function, the leaf parameter passed in
MUST be the leaf directly following @leaf.
Returns:
All of the leaves in @leaves are duplicated. The duplicates are then
appended to @new_line and used to replace their originals in the underlying
- Node structure. Any comments attatched to the old leaves are reattached to
+ Node structure. Any comments attached to the old leaves are reattached to
the new leaves.
Pre-conditions:
set(@leaves) is a subset of set(@old_line.leaves).
"""
for old_leaf in leaves:
- assert old_leaf in old_line.leaves
-
new_leaf = Leaf(old_leaf.type, old_leaf.value)
replace_child(old_leaf, new_leaf)
new_line.append(new_leaf)
no_commas = (
original.is_def
and opening_bracket.value == "("
- and not any(l.type == token.COMMA for l in leaves)
+ and not any(leaf.type == token.COMMA for leaf in leaves)
)
if original.is_import or no_commas:
@wraps(split_func)
def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
- for l in split_func(line, features):
- normalize_prefix(l.leaves[0], inside_brackets=True)
- yield l
+ for line in split_func(line, features):
+ normalize_prefix(line.leaves[0], inside_brackets=True)
+ yield line
return split_wrapper
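# Illustrative sketch, not part of the patch: the enclosing decorator
# (dont_increase_indentation in black.py) wraps the split functions, e.g.:
#
#     @dont_increase_indentation
#     def delimiter_split(
#         line: Line, features: Collection[Feature] = ()
#     ) -> Iterator[Line]:
#         ...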
return PathSpec.from_lines("gitwildmatch", lines)
+def normalize_path_maybe_ignore(
+ path: Path, root: Path, report: "Report"
+) -> Optional[str]:
+ """Normalize `path`. May return `None` if `path` was ignored.
+
+ `report` is where "path ignored" output goes.
+ """
+ try:
+ normalized_path = path.resolve().relative_to(root).as_posix()
+ except OSError as e:
+ report.path_ignored(path, f"cannot be read because {e}")
+ return None
+
+ except ValueError:
+ if path.is_symlink():
+ report.path_ignored(path, f"is a symbolic link that points outside {root}")
+ return None
+
+ raise
+
+ return normalized_path
+
+
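# Illustrative usage sketch, not part of the patch: the helper returns a
# root-relative POSIX path, or None when the path had to be reported as ignored.
#
#     relative = normalize_path_maybe_ignore(Path("src/mod.py"), root, report)
#     if relative is not None:
#         print(relative)  # e.g. "src/mod.py"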
def gen_python_files(
paths: Iterable[Path],
root: Path,
include: Optional[Pattern[str]],
- exclude_regexes: Iterable[Pattern[str]],
+ exclude: Pattern[str],
+ force_exclude: Optional[Pattern[str]],
report: "Report",
gitignore: PathSpec,
) -> Iterator[Path]:
"""Generate all files under `path` whose paths are not excluded by the
- `exclude` regex, but are included by the `include` regex.
+ `exclude` or `force_exclude` regexes, but are included by the `include` regex.
Symbolic links pointing outside of the `root` directory are ignored.
"""
assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
for child in paths:
- # Then ignore with `exclude` option.
- try:
- normalized_path = child.resolve().relative_to(root).as_posix()
- except OSError as e:
- report.path_ignored(child, f"cannot be read because {e}")
+ normalized_path = normalize_path_maybe_ignore(child, root, report)
+ if normalized_path is None:
continue
- except ValueError:
- if child.is_symlink():
- report.path_ignored(
- child, f"is a symbolic link that points outside {root}"
- )
- continue
-
- raise
# First ignore files matching .gitignore
if gitignore.match_file(normalized_path):
report.path_ignored(child, "matches the .gitignore file content")
continue
+ # Then ignore files matching the `--exclude` and `--force-exclude` options.
normalized_path = "/" + normalized_path
if child.is_dir():
normalized_path += "/"
- is_excluded = False
- for exclude in exclude_regexes:
- exclude_match = exclude.search(normalized_path) if exclude else None
- if exclude_match and exclude_match.group(0):
- report.path_ignored(child, "matches the --exclude regular expression")
- is_excluded = True
- break
- if is_excluded:
+ exclude_match = exclude.search(normalized_path) if exclude else None
+ if exclude_match and exclude_match.group(0):
+ report.path_ignored(child, "matches the --exclude regular expression")
+ continue
+
+ force_exclude_match = (
+ force_exclude.search(normalized_path) if force_exclude else None
+ )
+ if force_exclude_match and force_exclude_match.group(0):
+ report.path_ignored(child, "matches the --force-exclude regular expression")
continue
if child.is_dir():
yield from gen_python_files(
- child.iterdir(), root, include, exclude_regexes, report, gitignore
+ child.iterdir(),
+ root,
+ include,
+ exclude,
+ force_exclude,
+ report,
+ gitignore,
)
elif child.is_file():
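# Illustrative call sketch, not part of the patch, showing the new parameter
# order with force_exclude passed separately (None when --force-exclude is not
# given):
#
#     paths = gen_python_files(
#         root.iterdir(),
#         root,
#         re.compile(DEFAULT_INCLUDES),
#         re.compile(DEFAULT_EXCLUDES),
#         None,
#         report,
#         get_gitignore(root),
#     )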
def find_project_root(srcs: Iterable[str]) -> Path:
"""Return a directory containing .git, .hg, or pyproject.toml.
- That directory can be one of the directories passed in `srcs` or their
- common parent.
+ That directory will be a common parent of all files and directories
+ passed in `srcs`.
If no directory in the tree contains a marker that would specify it's the
project root, the root of the file system is returned.
if not srcs:
return Path("/").resolve()
- common_base = min(Path(src).resolve() for src in srcs)
- if common_base.is_dir():
- # Append a fake file so `parents` below returns `common_base_dir`, too.
- common_base /= "fake-file"
- for directory in common_base.parents:
+ path_srcs = [Path(Path.cwd(), src).resolve() for src in srcs]
+
+ # A list of lists of parents for each 'src'. 'src' is included as a
+ # "parent" of itself if it is a directory
+ src_parents = [
+ list(path.parents) + ([path] if path.is_dir() else []) for path in path_srcs
+ ]
+
+ common_base = max(
+ set.intersection(*(set(parents) for parents in src_parents)),
+ key=lambda path: path.parts,
+ )
+
+ for directory in (common_base, *common_base.parents):
if (directory / ".git").exists():
return directory
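# Illustrative sketch, not part of the patch: the deepest common parent is the
# "largest" member (by path parts) of the intersection of every source's parent
# chain, with a directory counting as its own parent, e.g.:
#
#     >>> a, b = Path("/repo/src/pkg/mod.py"), Path("/repo/tests")
#     >>> parents_a, parents_b = set(a.parents), set(b.parents) | {b}
#     >>> max(parents_a & parents_b, key=lambda p: p.parts)
#     PosixPath('/repo')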
and field == "value"
and isinstance(value, str)
):
- normalized = re.sub(r" *\n[ \t]+", "\n ", value).strip()
+ normalized = re.sub(r" *\n[ \t]*", "\n", value).strip()
else:
normalized = value
yield f"{' ' * (depth+2)}{normalized!r}, # {value.__class__.__name__}"
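# Illustrative sketch, not part of the patch: the widened pattern also collapses
# indentation after a newline inside string constants, so re-indented docstrings
# still compare equal during the AST equivalence check, e.g.:
#
#     >>> import re
#     >>> re.sub(r" *\n[ \t]*", "\n", "Summary.\n        Details.").strip()
#     'Summary.\nDetails.'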