+ def test_cache_broken_file(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir() as workspace:
+ cache_file = get_cache_file(mode)
+ with cache_file.open("w") as fobj:
+ fobj.write("this is not a pickle")
+ self.assertEqual(black.read_cache(mode), {})
+ src = (workspace / "test.py").resolve()
+ with src.open("w") as fobj:
+ fobj.write("print('hello')")
+ self.invokeBlack([str(src)])
+ cache = black.read_cache(mode)
+ self.assertIn(str(src), cache)
+
+ def test_cache_single_file_already_cached(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir() as workspace:
+ src = (workspace / "test.py").resolve()
+ with src.open("w") as fobj:
+ fobj.write("print('hello')")
+ black.write_cache({}, [src], mode)
+ self.invokeBlack([str(src)])
+ with src.open("r") as fobj:
+ self.assertEqual(fobj.read(), "print('hello')")
+
+ @event_loop()
+ def test_cache_multiple_files(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir() as workspace, patch(
+ "black.ProcessPoolExecutor", new=ThreadPoolExecutor
+ ):
+ one = (workspace / "one.py").resolve()
+ with one.open("w") as fobj:
+ fobj.write("print('hello')")
+ two = (workspace / "two.py").resolve()
+ with two.open("w") as fobj:
+ fobj.write("print('hello')")
+ black.write_cache({}, [one], mode)
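+ # `one` is already cached, so it should be left alone; only `two` gets reformatted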
+ self.invokeBlack([str(workspace)])
+ with one.open("r") as fobj:
+ self.assertEqual(fobj.read(), "print('hello')")
+ with two.open("r") as fobj:
+ self.assertEqual(fobj.read(), 'print("hello")\n')
+ cache = black.read_cache(mode)
+ self.assertIn(str(one), cache)
+ self.assertIn(str(two), cache)
+
+ def test_no_cache_when_writeback_diff(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir() as workspace:
+ src = (workspace / "test.py").resolve()
+ with src.open("w") as fobj:
+ fobj.write("print('hello')")
+ with patch("black.read_cache") as read_cache, patch(
+ "black.write_cache"
+ ) as write_cache:
+ self.invokeBlack([str(src), "--diff"])
+ cache_file = get_cache_file(mode)
+ self.assertFalse(cache_file.exists())
+ write_cache.assert_not_called()
+ read_cache.assert_not_called()
+
+ def test_no_cache_when_writeback_color_diff(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir() as workspace:
+ src = (workspace / "test.py").resolve()
+ with src.open("w") as fobj:
+ fobj.write("print('hello')")
+ with patch("black.read_cache") as read_cache, patch(
+ "black.write_cache"
+ ) as write_cache:
+ self.invokeBlack([str(src), "--diff", "--color"])
+ cache_file = get_cache_file(mode)
+ self.assertFalse(cache_file.exists())
+ write_cache.assert_not_called()
+ read_cache.assert_not_called()
+
+ @event_loop()
+ def test_output_locking_when_writeback_diff(self) -> None:
+ with cache_dir() as workspace:
+ for tag in range(0, 4):
+ src = (workspace / f"test{tag}.py").resolve()
+ with src.open("w") as fobj:
+ fobj.write("print('hello')")
+ with patch("black.Manager", wraps=multiprocessing.Manager) as mgr:
+ self.invokeBlack(["--diff", str(workspace)], exit_code=0)
+ # this isn't quite doing what we want, but if it _isn't_
+ # called then we cannot be using the lock it provides
+ mgr.assert_called()
+
+ @event_loop()
+ def test_output_locking_when_writeback_color_diff(self) -> None:
+ with cache_dir() as workspace:
+ for tag in range(0, 4):
+ src = (workspace / f"test{tag}.py").resolve()
+ with src.open("w") as fobj:
+ fobj.write("print('hello')")
+ with patch("black.Manager", wraps=multiprocessing.Manager) as mgr:
+ self.invokeBlack(["--diff", "--color", str(workspace)], exit_code=0)
+ # this isn't quite doing what we want, but if it _isn't_
+ # called then we cannot be using the lock it provides
+ mgr.assert_called()
+
+ def test_no_cache_when_stdin(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir():
+ result = CliRunner().invoke(
+ black.main, ["-"], input=BytesIO(b"print('hello')")
+ )
+ self.assertEqual(result.exit_code, 0)
+ cache_file = get_cache_file(mode)
+ self.assertFalse(cache_file.exists())
+
+ def test_read_cache_no_cachefile(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir():
+ self.assertEqual(black.read_cache(mode), {})
+
+ def test_write_cache_read_cache(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir() as workspace:
+ src = (workspace / "test.py").resolve()
+ src.touch()
+ black.write_cache({}, [src], mode)
+ cache = black.read_cache(mode)
+ self.assertIn(str(src), cache)
+ self.assertEqual(cache[str(src)], black.get_cache_info(src))
+
+ def test_filter_cached(self) -> None:
+ with TemporaryDirectory() as workspace:
+ path = Path(workspace)
+ uncached = (path / "uncached").resolve()
+ cached = (path / "cached").resolve()
+ cached_but_changed = (path / "changed").resolve()
+ uncached.touch()
+ cached.touch()
+ cached_but_changed.touch()
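+ # the (0.0, 0) mtime/size record below is deliberately stale, so the file counts as changed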
+ cache = {
+ str(cached): black.get_cache_info(cached),
+ str(cached_but_changed): (0.0, 0),
+ }
+ todo, done = black.filter_cached(
+ cache, {uncached, cached, cached_but_changed}
+ )
+ self.assertEqual(todo, {uncached, cached_but_changed})
+ self.assertEqual(done, {cached})
+
+ def test_write_cache_creates_directory_if_needed(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir(exists=False) as workspace:
+ self.assertFalse(workspace.exists())
+ black.write_cache({}, [], mode)
+ self.assertTrue(workspace.exists())
+
+ @event_loop()
+ def test_failed_formatting_does_not_get_cached(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir() as workspace, patch(
+ "black.ProcessPoolExecutor", new=ThreadPoolExecutor
+ ):
+ failing = (workspace / "failing.py").resolve()
+ with failing.open("w") as fobj:
+ fobj.write("not actually python")
+ clean = (workspace / "clean.py").resolve()
+ with clean.open("w") as fobj:
+ fobj.write('print("hello")\n')
+ self.invokeBlack([str(workspace)], exit_code=123)
+ cache = black.read_cache(mode)
+ self.assertNotIn(str(failing), cache)
+ self.assertIn(str(clean), cache)
+
+ def test_write_cache_write_fail(self) -> None:
+ mode = DEFAULT_MODE
+ with cache_dir(), patch.object(Path, "open") as mock:
+ mock.side_effect = OSError
+ black.write_cache({}, [], mode)
+
+ @event_loop()
+ @patch("black.ProcessPoolExecutor", MagicMock(side_effect=OSError))
+ def test_works_in_mono_process_only_environment(self) -> None:
+ with cache_dir() as workspace:
+ for f in [
+ (workspace / "one.py").resolve(),
+ (workspace / "two.py").resolve(),
+ ]:
+ f.write_text('print("hello")\n')
+ self.invokeBlack([str(workspace)])
+
+ @event_loop()
+ def test_check_diff_use_together(self) -> None:
+ with cache_dir():
+ # Files which will be reformatted.
+ src1 = (THIS_DIR / "data" / "string_quotes.py").resolve()
+ self.invokeBlack([str(src1), "--diff", "--check"], exit_code=1)
+ # Files which will not be reformatted.
+ src2 = (THIS_DIR / "data" / "composition.py").resolve()
+ self.invokeBlack([str(src2), "--diff", "--check"])
+ # Multi file command.
+ self.invokeBlack([str(src1), str(src2), "--diff", "--check"], exit_code=1)
+
+ def test_no_files(self) -> None:
+ with cache_dir():
+ # Without arguments, black exits with code 0.
+ self.invokeBlack([])
+
+ def test_broken_symlink(self) -> None:
+ with cache_dir() as workspace:
+ symlink = workspace / "broken_link.py"
+ try:
+ symlink.symlink_to("nonexistent.py")
+ except OSError as e:
+ self.skipTest(f"Can't create symlinks: {e}")
+ self.invokeBlack([str(workspace.resolve())])
+
+ def test_read_cache_line_lengths(self) -> None:
+ mode = DEFAULT_MODE
+ short_mode = replace(DEFAULT_MODE, line_length=1)
+ with cache_dir() as workspace:
+ path = (workspace / "file.py").resolve()
+ path.touch()
+ black.write_cache({}, [path], mode)
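+ # caches are keyed by mode, so a different line length reads a different cache file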
+ one = black.read_cache(mode)
+ self.assertIn(str(path), one)
+ two = black.read_cache(short_mode)
+ self.assertNotIn(str(path), two)
+
+ def test_single_file_force_pyi(self) -> None:
+ pyi_mode = replace(DEFAULT_MODE, is_pyi=True)
+ contents, expected = read_data("force_pyi")
+ with cache_dir() as workspace:
+ path = (workspace / "file.py").resolve()
+ with open(path, "w") as fh:
+ fh.write(contents)
+ self.invokeBlack([str(path), "--pyi"])
+ with open(path, "r") as fh:
+ actual = fh.read()
+ # verify cache with --pyi is separate
+ pyi_cache = black.read_cache(pyi_mode)
+ self.assertIn(str(path), pyi_cache)
+ normal_cache = black.read_cache(DEFAULT_MODE)
+ self.assertNotIn(str(path), normal_cache)
+ self.assertFormatEqual(expected, actual)
+ black.assert_equivalent(contents, actual)
+ black.assert_stable(contents, actual, pyi_mode)
+
+ @event_loop()
+ def test_multi_file_force_pyi(self) -> None:
+ reg_mode = DEFAULT_MODE
+ pyi_mode = replace(DEFAULT_MODE, is_pyi=True)
+ contents, expected = read_data("force_pyi")
+ with cache_dir() as workspace:
+ paths = [
+ (workspace / "file1.py").resolve(),
+ (workspace / "file2.py").resolve(),
+ ]
+ for path in paths:
+ with open(path, "w") as fh:
+ fh.write(contents)
+ self.invokeBlack([str(p) for p in paths] + ["--pyi"])
+ for path in paths:
+ with open(path, "r") as fh:
+ actual = fh.read()
+ self.assertEqual(actual, expected)
+ # verify cache with --pyi is separate
+ pyi_cache = black.read_cache(pyi_mode)
+ normal_cache = black.read_cache(reg_mode)
+ for path in paths:
+ self.assertIn(str(path), pyi_cache)
+ self.assertNotIn(str(path), normal_cache)
+
+ def test_pipe_force_pyi(self) -> None:
+ source, expected = read_data("force_pyi")
+ result = CliRunner().invoke(
+ black.main, ["-", "-q", "--pyi"], input=BytesIO(source.encode("utf8"))
+ )
+ self.assertEqual(result.exit_code, 0)
+ actual = result.output
+ self.assertFormatEqual(actual, expected)
+
+ def test_single_file_force_py36(self) -> None:
+ reg_mode = DEFAULT_MODE
+ py36_mode = replace(DEFAULT_MODE, target_versions=PY36_VERSIONS)
+ source, expected = read_data("force_py36")
+ with cache_dir() as workspace:
+ path = (workspace / "file.py").resolve()
+ with open(path, "w") as fh:
+ fh.write(source)
+ self.invokeBlack([str(path), *PY36_ARGS])
+ with open(path, "r") as fh:
+ actual = fh.read()
+ # verify cache with --target-version is separate
+ py36_cache = black.read_cache(py36_mode)
+ self.assertIn(str(path), py36_cache)
+ normal_cache = black.read_cache(reg_mode)
+ self.assertNotIn(str(path), normal_cache)
+ self.assertEqual(actual, expected)
+
+ @event_loop()
+ def test_multi_file_force_py36(self) -> None:
+ reg_mode = DEFAULT_MODE
+ py36_mode = replace(DEFAULT_MODE, target_versions=PY36_VERSIONS)
+ source, expected = read_data("force_py36")
+ with cache_dir() as workspace:
+ paths = [
+ (workspace / "file1.py").resolve(),
+ (workspace / "file2.py").resolve(),
+ ]
+ for path in paths:
+ with open(path, "w") as fh:
+ fh.write(source)
+ self.invokeBlack([str(p) for p in paths] + PY36_ARGS)
+ for path in paths:
+ with open(path, "r") as fh:
+ actual = fh.read()
+ self.assertEqual(actual, expected)
+ # verify cache with --target-version is separate
+ py36_cache = black.read_cache(py36_mode)
+ normal_cache = black.read_cache(reg_mode)
+ for path in paths:
+ self.assertIn(str(path), py36_cache)
+ self.assertNotIn(str(path), normal_cache)
+
+ def test_pipe_force_py36(self) -> None:
+ source, expected = read_data("force_py36")
+ result = CliRunner().invoke(
+ black.main,
+ ["-", "-q", "--target-version=py36"],
+ input=BytesIO(source.encode("utf8")),
+ )
+ self.assertEqual(result.exit_code, 0)
+ actual = result.output
+ self.assertFormatEqual(actual, expected)
+
+ def test_include_exclude(self) -> None:
+ path = THIS_DIR / "data" / "include_exclude_tests"
+ include = re.compile(r"\.pyi?$")
+ exclude = re.compile(r"/exclude/|/\.definitely_exclude/")
+ report = black.Report()
+ gitignore = PathSpec.from_lines("gitwildmatch", [])
+ sources: List[Path] = []
+ expected = [
+ Path(path / "b/dont_exclude/a.py"),
+ Path(path / "b/dont_exclude/a.pyi"),
+ ]
+ this_abs = THIS_DIR.resolve()
+ sources.extend(
+ black.gen_python_files(
+ path.iterdir(),
+ this_abs,
+ include,
+ exclude,
+ None,
+ None,
+ report,
+ gitignore,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ def test_gitignore_used_as_default(self) -> None:
+ path = Path(THIS_DIR / "data" / "include_exclude_tests")
+ include = re.compile(r"\.pyi?$")
+ extend_exclude = re.compile(r"/exclude/")
+ src = str(path / "b/")
+ report = black.Report()
+ expected: List[Path] = [
+ path / "b/.definitely_exclude/a.py",
+ path / "b/.definitely_exclude/a.pyi",
+ ]
+ sources = list(
+ black.get_sources(
+ ctx=FakeContext(),
+ src=(src,),
+ quiet=True,
+ verbose=False,
+ include=include,
+ exclude=None,
+ extend_exclude=extend_exclude,
+ force_exclude=None,
+ report=report,
+ stdin_filename=None,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ @patch("black.find_project_root", lambda *args: THIS_DIR.resolve())
+ def test_exclude_for_issue_1572(self) -> None:
+ # Exclude shouldn't touch files that were explicitly given to Black through the
+ # CLI. Exclude is supposed to only apply to the recursive discovery of files.
+ # https://github.com/psf/black/issues/1572
+ path = THIS_DIR / "data" / "include_exclude_tests"
+ include = ""
+ exclude = r"/exclude/|a\.py"
+ src = str(path / "b/exclude/a.py")
+ report = black.Report()
+ expected = [Path(path / "b/exclude/a.py")]
+ sources = list(
+ black.get_sources(
+ ctx=FakeContext(),
+ src=(src,),
+ quiet=True,
+ verbose=False,
+ include=re.compile(include),
+ exclude=re.compile(exclude),
+ extend_exclude=None,
+ force_exclude=None,
+ report=report,
+ stdin_filename=None,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ @patch("black.find_project_root", lambda *args: THIS_DIR.resolve())
+ def test_get_sources_with_stdin(self) -> None:
+ include = ""
+ exclude = r"/exclude/|a\.py"
+ src = "-"
+ report = black.Report()
+ expected = [Path("-")]
+ sources = list(
+ black.get_sources(
+ ctx=FakeContext(),
+ src=(src,),
+ quiet=True,
+ verbose=False,
+ include=re.compile(include),
+ exclude=re.compile(exclude),
+ extend_exclude=None,
+ force_exclude=None,
+ report=report,
+ stdin_filename=None,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ @patch("black.find_project_root", lambda *args: THIS_DIR.resolve())
+ def test_get_sources_with_stdin_filename(self) -> None:
+ include = ""
+ exclude = r"/exclude/|a\.py"
+ src = "-"
+ report = black.Report()
+ stdin_filename = str(THIS_DIR / "data/collections.py")
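+ # stdin sources are reported with the __BLACK_STDIN_FILENAME__ prefix so the given filename is preserved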
+ expected = [Path(f"__BLACK_STDIN_FILENAME__{stdin_filename}")]
+ sources = list(
+ black.get_sources(
+ ctx=FakeContext(),
+ src=(src,),
+ quiet=True,
+ verbose=False,
+ include=re.compile(include),
+ exclude=re.compile(exclude),
+ extend_exclude=None,
+ force_exclude=None,
+ report=report,
+ stdin_filename=stdin_filename,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ @patch("black.find_project_root", lambda *args: THIS_DIR.resolve())
+ def test_get_sources_with_stdin_filename_and_exclude(self) -> None:
+ # Exclude shouldn't exclude stdin_filename since it is mimicking the
+ # file being passed directly. This is the same as
+ # test_exclude_for_issue_1572
+ path = THIS_DIR / "data" / "include_exclude_tests"
+ include = ""
+ exclude = r"/exclude/|a\.py"
+ src = "-"
+ report = black.Report()
+ stdin_filename = str(path / "b/exclude/a.py")
+ expected = [Path(f"__BLACK_STDIN_FILENAME__{stdin_filename}")]
+ sources = list(
+ black.get_sources(
+ ctx=FakeContext(),
+ src=(src,),
+ quiet=True,
+ verbose=False,
+ include=re.compile(include),
+ exclude=re.compile(exclude),
+ extend_exclude=None,
+ force_exclude=None,
+ report=report,
+ stdin_filename=stdin_filename,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ @patch("black.find_project_root", lambda *args: THIS_DIR.resolve())
+ def test_get_sources_with_stdin_filename_and_extend_exclude(self) -> None:
+ # Extend exclude shouldn't exclude stdin_filename since it is mimicking the
+ # file being passed directly. This is the same as
+ # test_exclude_for_issue_1572
+ path = THIS_DIR / "data" / "include_exclude_tests"
+ include = ""
+ extend_exclude = r"/exclude/|a\.py"
+ src = "-"
+ report = black.Report()
+ stdin_filename = str(path / "b/exclude/a.py")
+ expected = [Path(f"__BLACK_STDIN_FILENAME__{stdin_filename}")]
+ sources = list(
+ black.get_sources(
+ ctx=FakeContext(),
+ src=(src,),
+ quiet=True,
+ verbose=False,
+ include=re.compile(include),
+ exclude=re.compile(""),
+ extend_exclude=re.compile(extend_exclude),
+ force_exclude=None,
+ report=report,
+ stdin_filename=stdin_filename,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ @patch("black.find_project_root", lambda *args: THIS_DIR.resolve())
+ def test_get_sources_with_stdin_filename_and_force_exclude(self) -> None:
+ # Force exclude should exclude the file when passing it through
+ # stdin_filename
+ path = THIS_DIR / "data" / "include_exclude_tests"
+ include = ""
+ force_exclude = r"/exclude/|a\.py"
+ src = "-"
+ report = black.Report()
+ stdin_filename = str(path / "b/exclude/a.py")
+ sources = list(
+ black.get_sources(
+ ctx=FakeContext(),
+ src=(src,),
+ quiet=True,
+ verbose=False,
+ include=re.compile(include),
+ exclude=re.compile(""),
+ extend_exclude=None,
+ force_exclude=re.compile(force_exclude),
+ report=report,
+ stdin_filename=stdin_filename,
+ )
+ )
+ self.assertEqual([], sorted(sources))
+
+ def test_reformat_one_with_stdin(self) -> None:
+ with patch(
+ "black.format_stdin_to_stdout",
+ return_value=lambda *args, **kwargs: black.Changed.YES,
+ ) as fsts:
+ report = MagicMock()
+ path = Path("-")
+ black.reformat_one(
+ path,
+ fast=True,
+ write_back=black.WriteBack.YES,
+ mode=DEFAULT_MODE,
+ report=report,
+ )
+ fsts.assert_called_once()
+ report.done.assert_called_with(path, black.Changed.YES)
+
+ def test_reformat_one_with_stdin_filename(self) -> None:
+ with patch(
+ "black.format_stdin_to_stdout",
+ return_value=lambda *args, **kwargs: black.Changed.YES,
+ ) as fsts:
+ report = MagicMock()
+ p = "foo.py"
+ path = Path(f"__BLACK_STDIN_FILENAME__{p}")
+ expected = Path(p)
+ black.reformat_one(
+ path,
+ fast=True,
+ write_back=black.WriteBack.YES,
+ mode=DEFAULT_MODE,
+ report=report,
+ )
+ fsts.assert_called_once_with(
+ fast=True, write_back=black.WriteBack.YES, mode=DEFAULT_MODE
+ )
+ # __BLACK_STDIN_FILENAME__ should have been stripped
+ report.done.assert_called_with(expected, black.Changed.YES)
+
+ def test_reformat_one_with_stdin_filename_pyi(self) -> None:
+ with patch(
+ "black.format_stdin_to_stdout",
+ return_value=lambda *args, **kwargs: black.Changed.YES,
+ ) as fsts:
+ report = MagicMock()
+ p = "foo.pyi"
+ path = Path(f"__BLACK_STDIN_FILENAME__{p}")
+ expected = Path(p)
+ black.reformat_one(
+ path,
+ fast=True,
+ write_back=black.WriteBack.YES,
+ mode=DEFAULT_MODE,
+ report=report,
+ )
+ fsts.assert_called_once_with(
+ fast=True,
+ write_back=black.WriteBack.YES,
+ mode=replace(DEFAULT_MODE, is_pyi=True),
+ )
+ # __BLACK_STDIN_FILENAME__ should have been stripped
+ report.done.assert_called_with(expected, black.Changed.YES)
+
+ def test_reformat_one_with_stdin_and_existing_path(self) -> None:
+ with patch(
+ "black.format_stdin_to_stdout",
+ return_value=lambda *args, **kwargs: black.Changed.YES,
+ ) as fsts:
+ report = MagicMock()
+ # Even with an existing file, since we are forcing stdin, black
+ # should output to stdout and not modify the file in place
+ p = Path(str(THIS_DIR / "data/collections.py"))
+ # Make sure is_file actually returns True
+ self.assertTrue(p.is_file())
+ path = Path(f"__BLACK_STDIN_FILENAME__{p}")
+ expected = Path(p)
+ black.reformat_one(
+ path,
+ fast=True,
+ write_back=black.WriteBack.YES,
+ mode=DEFAULT_MODE,
+ report=report,
+ )
+ fsts.assert_called_once()
+ # __BLACK_STDIN_FILENAME__ should have been stripped
+ report.done.assert_called_with(expected, black.Changed.YES)
+
+ def test_gitignore_exclude(self) -> None:
+ path = THIS_DIR / "data" / "include_exclude_tests"
+ include = re.compile(r"\.pyi?$")
+ exclude = re.compile(r"")
+ report = black.Report()
+ gitignore = PathSpec.from_lines(
+ "gitwildmatch", ["exclude/", ".definitely_exclude"]
+ )
+ sources: List[Path] = []
+ expected = [
+ Path(path / "b/dont_exclude/a.py"),
+ Path(path / "b/dont_exclude/a.pyi"),
+ ]
+ this_abs = THIS_DIR.resolve()
+ sources.extend(
+ black.gen_python_files(
+ path.iterdir(),
+ this_abs,
+ include,
+ exclude,
+ None,
+ None,
+ report,
+ gitignore,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ def test_empty_include(self) -> None:
+ path = THIS_DIR / "data" / "include_exclude_tests"
+ report = black.Report()
+ gitignore = PathSpec.from_lines("gitwildmatch", [])
+ empty = re.compile(r"")
+ sources: List[Path] = []
+ expected = [
+ Path(path / "b/exclude/a.pie"),
+ Path(path / "b/exclude/a.py"),
+ Path(path / "b/exclude/a.pyi"),
+ Path(path / "b/dont_exclude/a.pie"),
+ Path(path / "b/dont_exclude/a.py"),
+ Path(path / "b/dont_exclude/a.pyi"),
+ Path(path / "b/.definitely_exclude/a.pie"),
+ Path(path / "b/.definitely_exclude/a.py"),
+ Path(path / "b/.definitely_exclude/a.pyi"),
+ Path(path / ".gitignore"),
+ Path(path / "pyproject.toml"),
+ ]
+ this_abs = THIS_DIR.resolve()
+ sources.extend(
+ black.gen_python_files(
+ path.iterdir(),
+ this_abs,
+ empty,
+ re.compile(black.DEFAULT_EXCLUDES),
+ None,
+ None,
+ report,
+ gitignore,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ def test_extend_exclude(self) -> None:
+ path = THIS_DIR / "data" / "include_exclude_tests"
+ report = black.Report()
+ gitignore = PathSpec.from_lines("gitwildmatch", [])
+ sources: List[Path] = []
+ expected = [
+ Path(path / "b/exclude/a.py"),
+ Path(path / "b/dont_exclude/a.py"),
+ ]
+ this_abs = THIS_DIR.resolve()
+ sources.extend(
+ black.gen_python_files(
+ path.iterdir(),
+ this_abs,
+ re.compile(black.DEFAULT_INCLUDES),
+ re.compile(r"\.pyi$"),
+ re.compile(r"\.definitely_exclude"),
+ None,
+ report,
+ gitignore,
+ )
+ )
+ self.assertEqual(sorted(expected), sorted(sources))
+
+ def test_invalid_cli_regex(self) -> None:
+ for option in ["--include", "--exclude", "--extend-exclude", "--force-exclude"]:
+ self.invokeBlack(["-", option, "**()(!!*)"], exit_code=2)
+
+ def test_preserves_line_endings(self) -> None:
+ with TemporaryDirectory() as workspace:
+ test_file = Path(workspace) / "test.py"
+ for nl in ["\n", "\r\n"]:
+ contents = nl.join(["def f( ):", " pass"])
+ test_file.write_bytes(contents.encode())
+ ff(test_file, write_back=black.WriteBack.YES)
+ updated_contents: bytes = test_file.read_bytes()
+ self.assertIn(nl.encode(), updated_contents)
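+ # an LF-only file must not pick up CRLF line endings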
+ if nl == "\n":
+ self.assertNotIn(b"\r\n", updated_contents)
+
+ def test_preserves_line_endings_via_stdin(self) -> None:
+ for nl in ["\n", "\r\n"]:
+ contents = nl.join(["def f( ):", " pass"])
+ runner = BlackRunner()
+ result = runner.invoke(
+ black.main, ["-", "--fast"], input=BytesIO(contents.encode("utf8"))
+ )
+ self.assertEqual(result.exit_code, 0)
+ output = result.stdout_bytes
+ self.assertIn(nl.encode("utf8"), output)
+ if nl == "\n":
+ self.assertNotIn(b"\r\n", output)
+
+ def test_assert_equivalent_different_asts(self) -> None:
+ with self.assertRaises(AssertionError):
+ black.assert_equivalent("{}", "None")
+
+ def test_symlink_out_of_root_directory(self) -> None:
+ path = MagicMock()
+ root = THIS_DIR.resolve()
+ child = MagicMock()
+ include = re.compile(black.DEFAULT_INCLUDES)
+ exclude = re.compile(black.DEFAULT_EXCLUDES)
+ report = black.Report()
+ gitignore = PathSpec.from_lines("gitwildmatch", [])
+ # `child` should behave like a symlink whose resolved path is clearly
+ # outside the `root` directory.
+ path.iterdir.return_value = [child]
+ child.resolve.return_value = Path("/a/b/c")
+ child.as_posix.return_value = "/a/b/c"
+ child.is_symlink.return_value = True
+ try:
+ list(
+ black.gen_python_files(
+ path.iterdir(),
+ root,
+ include,
+ exclude,
+ None,
+ None,
+ report,
+ gitignore,
+ )
+ )
+ except ValueError as ve:
+ self.fail(f"`get_python_files_in_dir()` failed: {ve}")
+ path.iterdir.assert_called_once()
+ child.resolve.assert_called_once()
+ child.is_symlink.assert_called_once()
+ # `child` should behave like a strange file whose resolved path is clearly
+ # outside the `root` directory.
+ child.is_symlink.return_value = False
+ with self.assertRaises(ValueError):
+ list(
+ black.gen_python_files(
+ path.iterdir(),
+ root,
+ include,
+ exclude,
+ None,
+ None,
+ report,
+ gitignore,
+ )
+ )
+ path.iterdir.assert_called()
+ self.assertEqual(path.iterdir.call_count, 2)
+ child.resolve.assert_called()
+ self.assertEqual(child.resolve.call_count, 2)
+ child.is_symlink.assert_called()
+ self.assertEqual(child.is_symlink.call_count, 2)
+
+ def test_shhh_click(self) -> None:
+ try:
+ from click import _unicodefun # type: ignore
+ except ModuleNotFoundError:
+ self.skipTest("Incompatible Click version")
+ if not hasattr(_unicodefun, "_verify_python3_env"):
+ self.skipTest("Incompatible Click version")
+ # First, let's see if Click is crashing with a preferred ASCII charset.
+ with patch("locale.getpreferredencoding") as gpe:
+ gpe.return_value = "ASCII"
+ with self.assertRaises(RuntimeError):
+ _unicodefun._verify_python3_env()
+ # Now, let's silence Click...
+ black.patch_click()
+ # ...and confirm it's silent.
+ with patch("locale.getpreferredencoding") as gpe:
+ gpe.return_value = "ASCII"
+ try:
+ _unicodefun._verify_python3_env()
+ except RuntimeError as err:
+ self.fail(f"`patch_click()` failed, exception still raised: {err}")
+
+ def test_root_logger_not_used_directly(self) -> None:
+ def fail(*args: Any, **kwargs: Any) -> None:
+ self.fail("Record created with root logger")
+
+ with patch.multiple(
+ logging.root,
+ debug=fail,
+ info=fail,
+ warning=fail,
+ error=fail,
+ critical=fail,
+ log=fail,
+ ):
+ ff(THIS_DIR / "util.py")
+
+ def test_invalid_config_return_code(self) -> None:
+ tmp_file = Path(black.dump_to_file())
+ try:
+ tmp_config = Path(black.dump_to_file())
+ tmp_config.unlink()
+ args = ["--config", str(tmp_config), str(tmp_file)]
+ self.invokeBlack(args, exit_code=2, ignore_config=False)
+ finally:
+ tmp_file.unlink()
+
+ def test_parse_pyproject_toml(self) -> None:
+ test_toml_file = THIS_DIR / "test.toml"
+ config = black.parse_pyproject_toml(str(test_toml_file))
+ self.assertEqual(config["verbose"], 1)
+ self.assertEqual(config["check"], "no")
+ self.assertEqual(config["diff"], "y")
+ self.assertEqual(config["color"], True)
+ self.assertEqual(config["line_length"], 79)
+ self.assertEqual(config["target_version"], ["py36", "py37", "py38"])
+ self.assertEqual(config["exclude"], r"\.pyi?$")
+ self.assertEqual(config["include"], r"\.py?$")
+
+ def test_read_pyproject_toml(self) -> None:
+ test_toml_file = THIS_DIR / "test.toml"
+ fake_ctx = FakeContext()
+ black.read_pyproject_toml(fake_ctx, FakeParameter(), str(test_toml_file))
+ config = fake_ctx.default_map
+ self.assertEqual(config["verbose"], "1")
+ self.assertEqual(config["check"], "no")
+ self.assertEqual(config["diff"], "y")
+ self.assertEqual(config["color"], "True")
+ self.assertEqual(config["line_length"], "79")
+ self.assertEqual(config["target_version"], ["py36", "py37", "py38"])
+ self.assertEqual(config["exclude"], r"\.pyi?$")
+ self.assertEqual(config["include"], r"\.py?$")
+
+ def test_find_project_root(self) -> None:
+ with TemporaryDirectory() as workspace:
+ root = Path(workspace)
+ test_dir = root / "test"
+ test_dir.mkdir()
+
+ src_dir = root / "src"
+ src_dir.mkdir()
+
+ root_pyproject = root / "pyproject.toml"
+ root_pyproject.touch()
+ src_pyproject = src_dir / "pyproject.toml"
+ src_pyproject.touch()
+ src_python = src_dir / "foo.py"
+ src_python.touch()
+
+ self.assertEqual(
+ black.find_project_root((src_dir, test_dir)), root.resolve()
+ )
+ self.assertEqual(black.find_project_root((src_dir,)), src_dir.resolve())
+ self.assertEqual(black.find_project_root((src_python,)), src_dir.resolve())
+
+ @patch(
+ "black.files.find_user_pyproject_toml",
+ black.files.find_user_pyproject_toml.__wrapped__,
+ )
+ def test_find_user_pyproject_toml_linux(self) -> None:
+ if system() == "Windows":
+ return
+
+ # Test if XDG_CONFIG_HOME is checked
+ with TemporaryDirectory() as workspace:
+ tmp_user_config = Path(workspace) / "black"
+ with patch.dict("os.environ", {"XDG_CONFIG_HOME": workspace}):
+ self.assertEqual(
+ black.files.find_user_pyproject_toml(), tmp_user_config.resolve()
+ )
+
+ # Test fallback for XDG_CONFIG_HOME
+ with patch.dict("os.environ"):
+ os.environ.pop("XDG_CONFIG_HOME", None)
+ fallback_user_config = Path("~/.config").expanduser() / "black"
+ self.assertEqual(
+ black.files.find_user_pyproject_toml(), fallback_user_config.resolve()
+ )
+
+ def test_find_user_pyproject_toml_windows(self) -> None:
+ if system() != "Windows":
+ return
+
+ user_config_path = Path.home() / ".black"
+ self.assertEqual(
+ black.files.find_user_pyproject_toml(), user_config_path.resolve()
+ )
+
+ def test_bpo_33660_workaround(self) -> None:
+ if system() == "Windows":
+ return
+
+ # https://bugs.python.org/issue33660
+
+ old_cwd = Path.cwd()
+ try:
+ root = Path("/")
+ os.chdir(str(root))
+ path = Path("workspace") / "project"
+ report = black.Report(verbose=True)
+ normalized_path = black.normalize_path_maybe_ignore(path, root, report)
+ self.assertEqual(normalized_path, "workspace/project")
+ finally:
+ os.chdir(str(old_cwd))
+
+ def test_newline_comment_interaction(self) -> None:
+ source = "class A:\\\r\n# type: ignore\n pass\n"
+ output = black.format_str(source, mode=DEFAULT_MODE)
+ black.assert_stable(source, output, mode=DEFAULT_MODE)
+
+ def test_bpo_2142_workaround(self) -> None:
+
+ # https://bugs.python.org/issue2142
+
+ source, _ = read_data("missing_final_newline.py")
+ # read_data adds a trailing newline
+ source = source.rstrip()
+ expected, _ = read_data("missing_final_newline.diff")
+ tmp_file = Path(black.dump_to_file(source, ensure_final_newline=False))
+ diff_header = re.compile(
+ rf"{re.escape(str(tmp_file))}\t\d\d\d\d-\d\d-\d\d "
+ r"\d\d:\d\d:\d\d\.\d\d\d\d\d\d \+\d\d\d\d"
+ )
+ try:
+ result = BlackRunner().invoke(black.main, ["--diff", str(tmp_file)])
+ self.assertEqual(result.exit_code, 0)
+ finally:
+ os.unlink(tmp_file)
+ actual = result.output
+ actual = diff_header.sub(DETERMINISTIC_HEADER, actual)
+ self.assertEqual(actual, expected)
+
+ @pytest.mark.python2
+ def test_docstring_reformat_for_py27(self) -> None:
+ """
+ Check that stripping trailing whitespace from Python 2 docstrings
+ doesn't trigger a "not equivalent to source" error
+ """
+ source = (
+ b'def foo():\r\n """Testing\r\n Testing """\r\n print "Foo"\r\n'
+ )
+ expected = 'def foo():\n """Testing\n Testing"""\n print "Foo"\n'
+
+ result = CliRunner().invoke(
+ black.main,
+ ["-", "-q", "--target-version=py27"],
+ input=BytesIO(source),
+ )
+
+ self.assertEqual(result.exit_code, 0)
+ actual = result.output
+ self.assertFormatEqual(actual, expected)
+
+
+with open(black.__file__, "r", encoding="utf-8") as _bf:
+ black_source_lines = _bf.readlines()
+
+
+def tracefunc(frame: types.FrameType, event: str, arg: Any) -> Callable:
+ """Show function calls `from black/__init__.py` as they happen.
+
+ Register this with `sys.settrace()` in a test you're debugging.
+ """
+ if event != "call":
+ return tracefunc
+
+ stack = len(inspect.stack()) - 19
+ stack *= 2
+ filename = frame.f_code.co_filename
+ lineno = frame.f_lineno
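+ # f_lineno is 1-based; step past any decorator lines to reach the `def` itself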
+ func_sig_lineno = lineno - 1
+ funcname = black_source_lines[func_sig_lineno].strip()
+ while funcname.startswith("@"):
+ func_sig_lineno += 1
+ funcname = black_source_lines[func_sig_lineno].strip()
+ if "black/__init__.py" in filename:
+ print(f"{' ' * stack}{lineno}:{funcname}")
+ return tracefunc
+