diff --git a/src/black/__init__.py b/src/black/__init__.py
index c61bc8c1d60e2f2b681a663e2b94435ce1439bf0..eaf72f9c2b3f4d12ae275d01472112931af331e9 100644
--- a/src/black/__init__.py
+++ b/src/black/__init__.py
@@ -1,4 +1,6 @@
 import asyncio
+from json.decoder import JSONDecodeError
+import json
 from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
 from contextlib import contextmanager
 from datetime import datetime
@@ -7,7 +9,8 @@ import io
 from multiprocessing import Manager, freeze_support
 import os
 from pathlib import Path
-import regex as re
+from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
+import re
 import signal
 import sys
 import tokenize
@@ -18,34 +21,48 @@ from typing import (
     Generator,
     Iterator,
     List,
+    MutableMapping,
     Optional,
     Pattern,
+    Sequence,
     Set,
     Sized,
     Tuple,
     Union,
 )
 
-from dataclasses import replace
 import click
+from click.core import ParameterSource
+from dataclasses import replace
+from mypy_extensions import mypyc_attr
 
 from black.const import DEFAULT_LINE_LENGTH, DEFAULT_INCLUDES, DEFAULT_EXCLUDES
 from black.const import STDIN_PLACEHOLDER
 from black.nodes import STARS, syms, is_simple_decorator_expression
+from black.nodes import is_string_token
 from black.lines import Line, EmptyLineTracker
 from black.linegen import transform_line, LineGenerator, LN
 from black.comments import normalize_fmt_off
-from black.mode import Mode, TargetVersion
+from black.mode import FUTURE_FLAG_TO_FEATURE, Mode, TargetVersion
 from black.mode import Feature, supports_feature, VERSION_TO_FEATURES
 from black.cache import read_cache, write_cache, get_cache_info, filter_cached, Cache
-from black.concurrency import cancel, shutdown
-from black.output import dump_to_file, diff, color_diff, out, err
-from black.report import Report, Changed
+from black.concurrency import cancel, shutdown, maybe_install_uvloop
+from black.output import dump_to_file, ipynb_diff, diff, color_diff, out, err
+from black.report import Report, Changed, NothingChanged
 from black.files import find_project_root, find_pyproject_toml, parse_pyproject_toml
 from black.files import gen_python_files, get_gitignore, normalize_path_maybe_ignore
 from black.files import wrap_stream_for_windows
 from black.parsing import InvalidInput  # noqa F401
 from black.parsing import lib2to3_parse, parse_ast, stringify_ast
+from black.handle_ipynb_magics import (
+    mask_cell,
+    unmask_cell,
+    remove_trailing_semicolon,
+    put_trailing_semicolon_back,
+    TRANSFORMED_MAGICS,
+    PYTHON_CELL_MAGICS,
+    jupyter_dependencies_are_installed,
+)
 
 
 # lib2to3 fork
@@ -54,6 +71,7 @@ from blib2to3.pgen2 import token
 
 from _black_version import version as __version__
 
+COMPILED = Path(__file__).suffix in (".pyd", ".so")
 
 # types
 FileContent = str
@@ -61,10 +79,6 @@ Encoding = str
 NewLine = str
 
 
-class NothingChanged(UserWarning):
-    """Raised when reformatted code is the same as source."""
-
-
 class WriteBack(Enum):
     NO = 0
     YES = 1
@@ -88,6 +102,8 @@ class WriteBack(Enum):
 # Legacy name, left for integrations.
 FileMode = Mode
 
+DEFAULT_WORKERS = os.cpu_count()
+
 
 def read_pyproject_toml(
     ctx: click.Context, param: click.Parameter, value: Optional[str]
@@ -107,7 +123,7 @@ def read_pyproject_toml(
     except (OSError, ValueError) as e:
         raise click.FileError(
             filename=value, hint=f"Error reading configuration file: {e}"
-        )
+        ) from None
 
     if not config:
         return None
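
For reference, the callback above delegates to black.parse_pyproject_toml; a rough standalone sketch of what it surfaces (the path and the resulting values here are hypothetical):

    import black

    # Hypothetical config path. parse_pyproject_toml returns the [tool.black]
    # table with keys normalized, e.g. "line-length" becomes "line_length".
    config = black.parse_pyproject_toml("/path/to/pyproject.toml")
    print(config)  # e.g. {"line_length": 100, "target_version": ["py38"]}
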
@@ -161,14 +177,19 @@ def validate_regex(
     ctx: click.Context,
     param: click.Parameter,
     value: Optional[str],
-) -> Optional[Pattern]:
+) -> Optional[Pattern[str]]:
     try:
         return re_compile_maybe_verbose(value) if value is not None else None
-    except re.error:
-        raise click.BadParameter("Not a valid regular expression")
+    except re.error as e:
+        raise click.BadParameter(f"Not a valid regular expression: {e}") from None
 
 
-@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
+@click.command(
+    context_settings={"help_option_names": ["-h", "--help"]},
+    # While Click does set this field automatically using the docstring, mypyc
+    # (annoyingly) strips 'em so we need to set it here too.
+    help="The uncompromising code formatter.",
+)
 @click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
 @click.option(
     "-l",
@@ -197,6 +218,24 @@ def validate_regex(
         " when piping source on standard input)."
     ),
 )
+@click.option(
+    "--ipynb",
+    is_flag=True,
+    help=(
+        "Format all input files like Jupyter Notebooks regardless of file extension "
+        "(useful when piping source on standard input)."
+    ),
+)
+@click.option(
+    "--python-cell-magics",
+    multiple=True,
+    help=(
+        "When processing Jupyter Notebooks, add the given magic to the list"
+        f" of known python-magics ({', '.join(PYTHON_CELL_MAGICS)})."
+        " Useful for formatting cells with custom python magics."
+    ),
+    default=[],
+)
 @click.option(
     "-S",
     "--skip-string-normalization",
@@ -213,9 +252,14 @@ def validate_regex(
     "--experimental-string-processing",
     is_flag=True,
     hidden=True,
+    help="(DEPRECATED and now included in --preview) Normalize string literals.",
+)
+@click.option(
+    "--preview",
+    is_flag=True,
     help=(
-        "Experimental option that performs more normalization on string literals."
-        " Currently disabled because it leads to some crashes."
+        "Enable potentially disruptive style changes that will be added to Black's main"
+        " functionality in the next major release."
     ),
 )
 @click.option(
@@ -242,6 +286,14 @@ def validate_regex(
     is_flag=True,
     help="If --fast given, skip temporary sanity checks. [default: --safe]",
 )
+@click.option(
+    "--required-version",
+    type=str,
+    help=(
+        "Require a specific version of Black to be running (useful for unifying results"
+        " across many environments e.g. with a pyproject.toml file)."
+    ),
+)
 @click.option(
     "--include",
     type=str,
@@ -295,6 +347,14 @@ def validate_regex(
         "editors that rely on using stdin."
     ),
 )
+@click.option(
+    "-W",
+    "--workers",
+    type=click.IntRange(min=1),
+    default=DEFAULT_WORKERS,
+    show_default=True,
+    help="Number of parallel workers",
+)
 @click.option(
     "-q",
     "--quiet",
@@ -313,7 +373,10 @@ def validate_regex(
         " due to exclusion patterns."
     ),
 )
-@click.version_option(version=__version__)
+@click.version_option(
+    version=__version__,
+    message=f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})",
+)
 @click.argument(
     "src",
     nargs=-1,
@@ -321,6 +384,7 @@ def validate_regex(
         exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
     ),
     is_eager=True,
+    metavar="SRC ...",
 )
 @click.option(
     "--config",
@@ -347,20 +411,68 @@ def main(
     color: bool,
     fast: bool,
     pyi: bool,
+    ipynb: bool,
+    python_cell_magics: Sequence[str],
     skip_string_normalization: bool,
     skip_magic_trailing_comma: bool,
     experimental_string_processing: bool,
+    preview: bool,
     quiet: bool,
     verbose: bool,
-    include: Pattern,
-    exclude: Optional[Pattern],
-    extend_exclude: Optional[Pattern],
-    force_exclude: Optional[Pattern],
+    required_version: Optional[str],
+    include: Pattern[str],
+    exclude: Optional[Pattern[str]],
+    extend_exclude: Optional[Pattern[str]],
+    force_exclude: Optional[Pattern[str]],
     stdin_filename: Optional[str],
+    workers: int,
     src: Tuple[str, ...],
     config: Optional[str],
 ) -> None:
     """The uncompromising code formatter."""
+    ctx.ensure_object(dict)
+    root, method = find_project_root(src) if code is None else (None, None)
+    ctx.obj["root"] = root
+
+    if verbose:
+        if root:
+            out(
+                f"Identified `{root}` as project root containing a {method}.",
+                fg="blue",
+            )
+
+            normalized = [
+                (normalize_path_maybe_ignore(Path(source), root), source)
+                for source in src
+            ]
+            srcs_string = ", ".join(
+                [
+                    f'"{_norm}"'
+                    if _norm
+                    else f'\033[31m"{source} (skipping - invalid)"\033[34m'
+                    for _norm, source in normalized
+                ]
+            )
+            out(f"Sources to be formatted: {srcs_string}", fg="blue")
+
+        if config:
+            config_source = ctx.get_parameter_source("config")
+            if config_source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP):
+                out("Using configuration from project root.", fg="blue")
+            else:
+                out(f"Using configuration in '{config}'.", fg="blue")
+
+    error_msg = "Oh no! 💥 💔 💥"
+    if required_version and required_version != __version__:
+        err(
+            f"{error_msg} The required version `{required_version}` does not match"
+            f" the running version `{__version__}`!"
+        )
+        ctx.exit(1)
+    if ipynb and pyi:
+        err("Cannot pass both `pyi` and `ipynb` flags!")
+        ctx.exit(1)
+
     write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
     if target_version:
         versions = set(target_version)
@@ -371,53 +483,74 @@ def main(
         target_versions=versions,
         line_length=line_length,
         is_pyi=pyi,
+        is_ipynb=ipynb,
         string_normalization=not skip_string_normalization,
         magic_trailing_comma=not skip_magic_trailing_comma,
         experimental_string_processing=experimental_string_processing,
+        preview=preview,
+        python_cell_magics=set(python_cell_magics),
     )
-    if config and verbose:
-        out(f"Using configuration from {config}.", bold=False, fg="blue")
+
     if code is not None:
-        print(format_str(code, mode=mode))
-        ctx.exit(0)
-    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
-    sources = get_sources(
-        ctx=ctx,
-        src=src,
-        quiet=quiet,
-        verbose=verbose,
-        include=include,
-        exclude=exclude,
-        extend_exclude=extend_exclude,
-        force_exclude=force_exclude,
-        report=report,
-        stdin_filename=stdin_filename,
-    )
+        # Run in quiet mode by default with -c; the extra output isn't useful.
+        # You can still pass -v to get verbose output.
+        quiet = True
 
-    path_empty(
-        sources,
-        "No Python files are present to be formatted. Nothing to do 😴",
-        quiet,
-        verbose,
-        ctx,
-    )
+    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
 
-    if len(sources) == 1:
-        reformat_one(
-            src=sources.pop(),
-            fast=fast,
-            write_back=write_back,
-            mode=mode,
-            report=report,
+    if code is not None:
+        reformat_code(
+            content=code, fast=fast, write_back=write_back, mode=mode, report=report
         )
     else:
-        reformat_many(
-            sources=sources, fast=fast, write_back=write_back, mode=mode, report=report
+        try:
+            sources = get_sources(
+                ctx=ctx,
+                src=src,
+                quiet=quiet,
+                verbose=verbose,
+                include=include,
+                exclude=exclude,
+                extend_exclude=extend_exclude,
+                force_exclude=force_exclude,
+                report=report,
+                stdin_filename=stdin_filename,
+            )
+        except GitWildMatchPatternError:
+            ctx.exit(1)
+
+        path_empty(
+            sources,
+            "No Python files are present to be formatted. Nothing to do 😴",
+            quiet,
+            verbose,
+            ctx,
         )
 
+        if len(sources) == 1:
+            reformat_one(
+                src=sources.pop(),
+                fast=fast,
+                write_back=write_back,
+                mode=mode,
+                report=report,
+            )
+        else:
+            reformat_many(
+                sources=sources,
+                fast=fast,
+                write_back=write_back,
+                mode=mode,
+                report=report,
+                workers=workers,
+            )
+
     if verbose or not quiet:
-        out("Oh no! 💥 💔 💥" if report.return_code else "All done! ✨ 🍰 ✨")
-        click.secho(str(report), err=True)
+        if code is None and (verbose or report.change_count or report.failure_count):
+            out()
+        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
+        if code is None:
+            click.echo(str(report), err=True)
     ctx.exit(report.return_code)
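
The new --required-version handling above is a plain string comparison against __version__; as a standalone sketch (the pinned version below is made up):

    import black

    REQUIRED = "22.3.0"  # hypothetical pin, normally supplied via --required-version
    if black.__version__ != REQUIRED:
        raise SystemExit(
            f"required version `{REQUIRED}` does not match running version"
            f" `{black.__version__}`"
        )
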
 
 
@@ -435,14 +568,12 @@ def get_sources(
     stdin_filename: Optional[str],
 ) -> Set[Path]:
     """Compute the set of files to be formatted."""
-
-    root = find_project_root(src)
     sources: Set[Path] = set()
     path_empty(src, "No Path provided. Nothing to do 😴", quiet, verbose, ctx)
 
     if exclude is None:
         exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES)
-        gitignore = get_gitignore(root)
+        gitignore = get_gitignore(ctx.obj["root"])
     else:
         gitignore = None
 
@@ -455,7 +586,7 @@ def get_sources(
             is_stdin = False
 
         if is_stdin or p.is_file():
-            normalized_path = normalize_path_maybe_ignore(p, root, report)
+            normalized_path = normalize_path_maybe_ignore(p, ctx.obj["root"], report)
             if normalized_path is None:
                 continue
 
@@ -472,18 +603,25 @@ def get_sources(
             if is_stdin:
                 p = Path(f"{STDIN_PLACEHOLDER}{str(p)}")
 
+            if p.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
+                verbose=verbose, quiet=quiet
+            ):
+                continue
+
             sources.add(p)
         elif p.is_dir():
             sources.update(
                 gen_python_files(
                     p.iterdir(),
-                    root,
+                    ctx.obj["root"],
                     include,
                     exclude,
                     extend_exclude,
                     force_exclude,
                     report,
                     gitignore,
+                    verbose=verbose,
+                    quiet=quiet,
                 )
             )
         elif s == "-":
@@ -499,11 +637,36 @@ def path_empty(
     """
     Exit if there is no `src` provided for formatting
     """
-    if not src and (verbose or not quiet):
-        out(msg)
+    if not src:
+        if verbose or not quiet:
+            out(msg)
         ctx.exit(0)
 
 
+def reformat_code(
+    content: str, fast: bool, write_back: WriteBack, mode: Mode, report: Report
+) -> None:
+    """
+    Reformat and print out `content` without spawning child processes.
+    Similar to `reformat_one`, but for string content.
+
+    `fast`, `write_back`, and `mode` options are passed to
+    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
+    """
+    path = Path("<string>")
+    try:
+        changed = Changed.NO
+        if format_stdin_to_stdout(
+            content=content, fast=fast, write_back=write_back, mode=mode
+        ):
+            changed = Changed.YES
+        report.done(path, changed)
+    except Exception as exc:
+        if report.verbose:
+            traceback.print_exc()
+        report.failed(path, str(exc))
+
+
 def reformat_one(
     src: Path, fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
 ) -> None:
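
reformat_code is the in-process path behind `black -c`; stripped of the Report bookkeeping and stdout handling, it reduces to a format_str call, roughly:

    import black

    src = "x = {  'a':37,'b':42,\n'c':927}"
    print(black.format_str(src, mode=black.Mode()), end="")
    # x = {"a": 37, "b": 42, "c": 927}
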
@@ -528,6 +691,8 @@ def reformat_one(
         if is_stdin:
             if src.suffix == ".pyi":
                 mode = replace(mode, is_pyi=True)
+            elif src.suffix == ".ipynb":
+                mode = replace(mode, is_ipynb=True)
             if format_stdin_to_stdout(fast=fast, write_back=write_back, mode=mode):
                 changed = Changed.YES
         else:
@@ -553,19 +718,28 @@ def reformat_one(
         report.failed(src, str(exc))
 
 
+# diff-shades depends on being able to monkeypatch this function to operate. I know
+# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
+@mypyc_attr(patchable=True)
 def reformat_many(
-    sources: Set[Path], fast: bool, write_back: WriteBack, mode: Mode, report: "Report"
+    sources: Set[Path],
+    fast: bool,
+    write_back: WriteBack,
+    mode: Mode,
+    report: "Report",
+    workers: Optional[int],
 ) -> None:
     """Reformat multiple files using a ProcessPoolExecutor."""
     executor: Executor
     loop = asyncio.get_event_loop()
-    worker_count = os.cpu_count()
+    worker_count = workers if workers is not None else DEFAULT_WORKERS
     if sys.platform == "win32":
         # Work around https://bugs.python.org/issue26903
+        assert worker_count is not None
         worker_count = min(worker_count, 60)
     try:
         executor = ProcessPoolExecutor(max_workers=worker_count)
-    except (ImportError, OSError):
+    except (ImportError, NotImplementedError, OSError):
         # we arrive here if the underlying system does not support multi-processing
         # like in AWS Lambda or Termux, in which case we gracefully fallback to
         # a ThreadPoolExecutor with just a single worker (more workers would not do us
@@ -656,7 +830,10 @@ async def schedule_formatting(
                     sources_to_cache.append(src)
                 report.done(src, changed)
     if cancelled:
-        await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
+        if sys.version_info >= (3, 7):
+            await asyncio.gather(*cancelled, return_exceptions=True)
+        else:
+            await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
     if sources_to_cache:
         write_cache(cache, sources_to_cache, mode)
 
@@ -676,6 +853,8 @@ def format_file_in_place(
     """
     if src.suffix == ".pyi":
         mode = replace(mode, is_pyi=True)
+    elif src.suffix == ".ipynb":
+        mode = replace(mode, is_ipynb=True)
 
     then = datetime.utcfromtimestamp(src.stat().st_mtime)
     with open(src, "rb") as buf:
@@ -684,6 +863,10 @@ def format_file_in_place(
         dst_contents = format_file_contents(src_contents, fast=fast, mode=mode)
     except NothingChanged:
         return False
+    except JSONDecodeError:
+        raise ValueError(
+            f"File '{src}' cannot be parsed as valid Jupyter notebook."
+        ) from None
 
     if write_back == WriteBack.YES:
         with open(src, "w", encoding=encoding, newline=newline) as f:
@@ -692,7 +875,10 @@ def format_file_in_place(
         now = datetime.utcnow()
         src_name = f"{src}\t{then} +0000"
         dst_name = f"{src}\t{now} +0000"
-        diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
+        if mode.is_ipynb:
+            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
+        else:
+            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
 
         if write_back == WriteBack.COLOR_DIFF:
             diff_contents = color_diff(diff_contents)
@@ -712,16 +898,27 @@ def format_file_in_place(
 
 
 def format_stdin_to_stdout(
-    fast: bool, *, write_back: WriteBack = WriteBack.NO, mode: Mode
+    fast: bool,
+    *,
+    content: Optional[str] = None,
+    write_back: WriteBack = WriteBack.NO,
+    mode: Mode,
 ) -> bool:
     """Format file on stdin. Return True if changed.
 
+    If content is None, it's read from sys.stdin.
+
     If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
     write a diff to stdout. The `mode` argument is passed to
     :func:`format_file_contents`.
     """
     then = datetime.utcnow()
-    src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
+
+    if content is None:
+        src, encoding, newline = decode_bytes(sys.stdin.buffer.read())
+    else:
+        src, encoding, newline = content, "utf-8", ""
+
     dst = src
     try:
         dst = format_file_contents(src, fast=fast, mode=mode)
@@ -735,6 +932,9 @@ def format_stdin_to_stdout(
             sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
         )
         if write_back == WriteBack.YES:
+            # Make sure there's a newline after the content
+            if dst and dst[-1] != "\n":
+                dst += "\n"
             f.write(dst)
         elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
             now = datetime.utcnow()
@@ -748,6 +948,29 @@ def format_stdin_to_stdout(
         f.detach()
 
 
+def check_stability_and_equivalence(
+    src_contents: str, dst_contents: str, *, mode: Mode
+) -> None:
+    """Perform stability and equivalence checks.
+
+    Raise AssertionError if source and destination contents are not
+    equivalent, or if a second pass of the formatter would format the
+    content differently.
+    """
+    assert_equivalent(src_contents, dst_contents)
+
+    # Forced second pass to work around optional trailing commas (becoming
+    # forced trailing commas on pass 2) interacting differently with optional
+    # parentheses.  Admittedly ugly.
+    dst_contents_pass2 = format_str(dst_contents, mode=mode)
+    if dst_contents != dst_contents_pass2:
+        dst_contents = dst_contents_pass2
+        assert_equivalent(src_contents, dst_contents, pass_num=2)
+        assert_stable(src_contents, dst_contents, mode=mode)
+    # Note: no need to explicitly call `assert_stable` if `dst_contents` was
+    # the same as `dst_contents_pass2`.
+
+
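
check_stability_and_equivalence factors out the --safe sanity checks that used to live inline in format_file_contents; done by hand against the public helpers, it amounts to roughly:

    import black

    src = "print ( 'hello' )\n"
    mode = black.Mode()
    first_pass = black.format_str(src, mode=mode)
    second_pass = black.format_str(first_pass, mode=mode)
    assert first_pass == second_pass          # stable: a second pass changes nothing
    black.assert_equivalent(src, first_pass)  # equivalent: source and result ASTs match
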
 def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
     """Reformat contents of a file and return new contents.
 
@@ -758,26 +981,123 @@ def format_file_contents(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
     if not src_contents.strip():
         raise NothingChanged
 
-    dst_contents = format_str(src_contents, mode=mode)
+    if mode.is_ipynb:
+        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
+    else:
+        dst_contents = format_str(src_contents, mode=mode)
     if src_contents == dst_contents:
         raise NothingChanged
 
-    if not fast:
-        assert_equivalent(src_contents, dst_contents)
-
-        # Forced second pass to work around optional trailing commas (becoming
-        # forced trailing commas on pass 2) interacting differently with optional
-        # parentheses.  Admittedly ugly.
-        dst_contents_pass2 = format_str(dst_contents, mode=mode)
-        if dst_contents != dst_contents_pass2:
-            dst_contents = dst_contents_pass2
-            assert_equivalent(src_contents, dst_contents, pass_num=2)
-            assert_stable(src_contents, dst_contents, mode=mode)
-        # Note: no need to explicitly call `assert_stable` if `dst_contents` was
-        # the same as `dst_contents_pass2`.
+    if not fast and not mode.is_ipynb:
+        # Jupyter notebooks will already have been checked above.
+        check_stability_and_equivalence(src_contents, dst_contents, mode=mode)
     return dst_contents
 
 
+def validate_cell(src: str, mode: Mode) -> None:
+    """Check that cell does not already contain TransformerManager transformations,
+    or non-Python cell magics, which might cause tokenizer_rt to break because of
+    indentations.
+
+    If a cell contains ``!ls``, then it'll be transformed to
+    ``get_ipython().system('ls')``. However, if the cell originally contained
+    ``get_ipython().system('ls')``, then it would get transformed in the same way:
+
+        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
+        "get_ipython().system('ls')\n"
+        >>> TransformerManager().transform_cell("!ls")
+        "get_ipython().system('ls')\n"
+
+    Due to the impossibility of safely roundtripping in such situations, cells
+    containing transformed magics will be ignored.
+    """
+    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
+        raise NothingChanged
+    if (
+        src[:2] == "%%"
+        and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics
+    ):
+        raise NothingChanged
+
+
+def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
+    """Format code in given cell of Jupyter notebook.
+
+    General idea is:
+
+      - if cell has trailing semicolon, remove it;
+      - if cell has IPython magics, mask them;
+      - format cell;
+      - reinstate IPython magics;
+      - reinstate trailing semicolon (if originally present);
+      - strip trailing newlines.
+
+    Cells with syntax errors will not be processed, as they
+    could potentially be automagics or multi-line magics, which
+    are currently not supported.
+    """
+    validate_cell(src, mode)
+    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
+        src
+    )
+    try:
+        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
+    except SyntaxError:
+        raise NothingChanged from None
+    masked_dst = format_str(masked_src, mode=mode)
+    if not fast:
+        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
+    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
+    dst = put_trailing_semicolon_back(
+        dst_without_trailing_semicolon, has_trailing_semicolon
+    )
+    dst = dst.rstrip("\n")
+    if dst == src:
+        raise NothingChanged from None
+    return dst
+
+
+def validate_metadata(nb: MutableMapping[str, Any]) -> None:
+    """If notebook is marked as non-Python, don't format it.
+
+    All notebook metadata fields are optional, see
+    https://nbformat.readthedocs.io/en/latest/format_description.html. So
+    if a notebook has empty metadata, we will try to parse it anyway.
+    """
+    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
+    if language is not None and language != "python":
+        raise NothingChanged from None
+
+
+def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
+    """Format Jupyter notebook.
+
+    Operate cell-by-cell, only on code cells, only for Python notebooks.
+    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
+    """
+    trailing_newline = src_contents[-1] == "\n"
+    modified = False
+    nb = json.loads(src_contents)
+    validate_metadata(nb)
+    for cell in nb["cells"]:
+        if cell.get("cell_type", None) == "code":
+            try:
+                src = "".join(cell["source"])
+                dst = format_cell(src, fast=fast, mode=mode)
+            except NothingChanged:
+                pass
+            else:
+                cell["source"] = dst.splitlines(keepends=True)
+                modified = True
+    if modified:
+        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
+        if trailing_newline:
+            dst_contents = dst_contents + "\n"
+        return dst_contents
+    else:
+        raise NothingChanged
+
+
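
A minimal end-to-end sketch of the notebook path added above, assuming Black is installed with its `jupyter` extra (tokenize-rt and IPython); the single-cell notebook literal is made up:

    import json

    import black

    nb = {
        "cells": [
            {
                "cell_type": "code",
                "execution_count": None,
                "metadata": {},
                "outputs": [],
                "source": ["import os\n", "x = {  'a': 4}  ;"],
            }
        ],
        "metadata": {"language_info": {"name": "python"}},
        "nbformat": 4,
        "nbformat_minor": 5,
    }
    # Cells are formatted one at a time; the trailing semicolon and the overall
    # JSON structure of the notebook are preserved.
    print(black.format_ipynb_string(json.dumps(nb), fast=False, mode=black.Mode(is_ipynb=True)))
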
 def format_str(src_contents: str, *, mode: Mode) -> FileContent:
     """Reformat a string and return new contents.
 
@@ -814,13 +1134,10 @@ def format_str(src_contents: str, *, mode: Mode) -> FileContent:
     if mode.target_versions:
         versions = mode.target_versions
     else:
-        versions = detect_target_versions(src_node)
+        versions = detect_target_versions(src_node, future_imports=future_imports)
+
     normalize_fmt_off(src_node)
-    lines = LineGenerator(
-        mode=mode,
-        remove_u_prefix="unicode_literals" in future_imports
-        or supports_feature(versions, Feature.UNICODE_LITERALS),
-    )
+    lines = LineGenerator(mode=mode)
     elt = EmptyLineTracker(is_pyi=mode.is_pyi)
     empty_line = Line(mode=mode)
     after = 0
@@ -857,7 +1174,9 @@ def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]:
         return tiow.read(), encoding, newline
 
 
-def get_features_used(node: Node) -> Set[Feature]:
+def get_features_used(  # noqa: C901
+    node: Node, *, future_imports: Optional[Set[str]] = None
+) -> Set[Feature]:
     """Return a set of (relatively) new Python features used in this file.
 
     Currently looking for:
@@ -867,20 +1186,34 @@ def get_features_used(node: Node) -> Set[Feature]:
     - positional only arguments in function signatures and lambdas;
     - assignment expression;
     - relaxed decorator syntax;
+    - usage of __future__ flags (annotations);
+    - print / exec statements;
     """
     features: Set[Feature] = set()
+    if future_imports:
+        features |= {
+            FUTURE_FLAG_TO_FEATURE[future_import]
+            for future_import in future_imports
+            if future_import in FUTURE_FLAG_TO_FEATURE
+        }
+
     for n in node.pre_order():
-        if n.type == token.STRING:
-            value_head = n.value[:2]  # type: ignore
+        if is_string_token(n):
+            value_head = n.value[:2]
             if value_head in {'f"', 'F"', "f'", "F'", "rf", "fr", "RF", "FR"}:
                 features.add(Feature.F_STRINGS)
 
         elif n.type == token.NUMBER:
-            if "_" in n.value:  # type: ignore
+            assert isinstance(n, Leaf)
+            if "_" in n.value:
                 features.add(Feature.NUMERIC_UNDERSCORES)
 
         elif n.type == token.SLASH:
-            if n.parent and n.parent.type in {syms.typedargslist, syms.arglist}:
+            if n.parent and n.parent.type in {
+                syms.typedargslist,
+                syms.arglist,
+                syms.varargslist,
+            }:
                 features.add(Feature.POS_ONLY_ARGUMENTS)
 
         elif n.type == token.COLONEQUAL:
@@ -911,12 +1244,29 @@ def get_features_used(node: Node) -> Set[Feature]:
                         if argch.type in STARS:
                             features.add(feature)
 
+        elif (
+            n.type in {syms.return_stmt, syms.yield_expr}
+            and len(n.children) >= 2
+            and n.children[1].type == syms.testlist_star_expr
+            and any(child.type == syms.star_expr for child in n.children[1].children)
+        ):
+            features.add(Feature.UNPACKING_ON_FLOW)
+
+        elif (
+            n.type == syms.annassign
+            and len(n.children) >= 4
+            and n.children[3].type == syms.testlist_star_expr
+        ):
+            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
+
     return features
 
 
-def detect_target_versions(node: Node) -> Set[TargetVersion]:
+def detect_target_versions(
+    node: Node, *, future_imports: Optional[Set[str]] = None
+) -> Set[TargetVersion]:
     """Detect the version to target based on the nodes used."""
-    features = get_features_used(node)
+    features = get_features_used(node, future_imports=future_imports)
     return {
         version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
     }
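
get_features_used and detect_target_versions drive the auto-detection used when --target-version is not given; a quick sketch against the lib2to3 parser Black exposes:

    import black

    node = black.lib2to3_parse("def f(a, /, b):\n    return (x := a + b)\n")
    print(black.get_features_used(node))       # e.g. {POS_ONLY_ARGUMENTS, ASSIGNMENT_EXPRESSIONS}
    print(black.detect_target_versions(node))  # only the versions supporting those features
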
@@ -978,9 +1328,11 @@ def assert_equivalent(src: str, dst: str, *, pass_num: int = 1) -> None:
         src_ast = parse_ast(src)
     except Exception as exc:
         raise AssertionError(
-            "cannot use --safe with this file; failed to parse source file.  AST"
-            f" error message: {exc}"
-        )
+            f"cannot use --safe with this file; failed to parse source file AST: "
+            f"{exc}\n"
+            f"This could be caused by running Black with an older Python version "
+            f"that does not support new syntax used in your source file."
+        ) from exc
 
     try:
         dst_ast = parse_ast(dst)
@@ -1029,7 +1381,7 @@ def nullcontext() -> Iterator[None]:
 
 
 def patch_click() -> None:
-    """Make Click not crash.
+    """Make Click not crash on Python 3.6 with LANG=C.
 
     On certain misconfigured environments, Python 3 selects the ASCII encoding as the
     default which restricts paths that it can access during the lifetime of the
@@ -1041,16 +1393,19 @@ def patch_click() -> None:
     """
     try:
         from click import core
-        from click import _unicodefun  # type: ignore
+        from click import _unicodefun
     except ModuleNotFoundError:
         return
 
     for module in (core, _unicodefun):
         if hasattr(module, "_verify_python3_env"):
-            module._verify_python3_env = lambda: None
+            module._verify_python3_env = lambda: None  # type: ignore
+        if hasattr(module, "_verify_python_env"):
+            module._verify_python_env = lambda: None  # type: ignore
 
 
 def patched_main() -> None:
+    maybe_install_uvloop()
     freeze_support()
     patch_click()
     main()