diff --git a/tests/test_black.py b/tests/test_black.py
@patch("black.dump_to_file", dump_to_stderr)
def test_stub(self) -> None:
@patch("black.dump_to_file", dump_to_stderr)
def test_stub(self) -> None:
+ mode = black.FileMode.PYI
source, expected = read_data("stub.pyi")
- actual = fs(source, is_pyi=True)
+ actual = fs(source, mode=mode)
self.assertFormatEqual(expected, actual)
- black.assert_stable(source, actual, line_length=ll, is_pyi=True)
+ black.assert_stable(source, actual, line_length=ll, mode=mode)
@patch("black.dump_to_file", dump_to_stderr)
def test_fmtonoff(self) -> None:
@patch("black.dump_to_file", dump_to_stderr)
def test_fmtonoff(self) -> None:
self.assertEqual("".join(err_lines), "")
def test_cache_broken_file(self) -> None:
self.assertEqual("".join(err_lines), "")
def test_cache_broken_file(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir() as workspace:
- cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH)
+ cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH, mode)
with cache_file.open("w") as fobj:
fobj.write("this is not a pickle")
- self.assertEqual(black.read_cache(black.DEFAULT_LINE_LENGTH), {})
+ self.assertEqual(black.read_cache(black.DEFAULT_LINE_LENGTH, mode), {})
src = (workspace / "test.py").resolve()
with src.open("w") as fobj:
fobj.write("print('hello')")
result = CliRunner().invoke(black.main, [str(src)])
self.assertEqual(result.exit_code, 0)
- cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
+ cache = black.read_cache(black.DEFAULT_LINE_LENGTH, mode)
self.assertIn(src, cache)
def test_cache_single_file_already_cached(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir() as workspace:
src = (workspace / "test.py").resolve()
with src.open("w") as fobj:
fobj.write("print('hello')")
- black.write_cache({}, [src], black.DEFAULT_LINE_LENGTH)
+ black.write_cache({}, [src], black.DEFAULT_LINE_LENGTH, mode)
result = CliRunner().invoke(black.main, [str(src)])
self.assertEqual(result.exit_code, 0)
with src.open("r") as fobj:
@event_loop(close=False)
def test_cache_multiple_files(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir() as workspace, patch(
"black.ProcessPoolExecutor", new=ThreadPoolExecutor
):
two = (workspace / "two.py").resolve()
with two.open("w") as fobj:
fobj.write("print('hello')")
- black.write_cache({}, [one], black.DEFAULT_LINE_LENGTH)
+ black.write_cache({}, [one], black.DEFAULT_LINE_LENGTH, mode)
result = CliRunner().invoke(black.main, [str(workspace)])
self.assertEqual(result.exit_code, 0)
with one.open("r") as fobj:
self.assertEqual(fobj.read(), "print('hello')")
with two.open("r") as fobj:
self.assertEqual(fobj.read(), 'print("hello")\n')
- cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
+ cache = black.read_cache(black.DEFAULT_LINE_LENGTH, mode)
self.assertIn(one, cache)
self.assertIn(two, cache)
def test_no_cache_when_writeback_diff(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir() as workspace:
src = (workspace / "test.py").resolve()
with src.open("w") as fobj:
fobj.write("print('hello')")
result = CliRunner().invoke(black.main, [str(src), "--diff"])
self.assertEqual(result.exit_code, 0)
- cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH)
+ cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH, mode)
self.assertFalse(cache_file.exists())
def test_no_cache_when_stdin(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir():
result = CliRunner().invoke(black.main, ["-"], input="print('hello')")
self.assertEqual(result.exit_code, 0)
- cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH)
+ cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH, mode)
self.assertFalse(cache_file.exists())
def test_read_cache_no_cachefile(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
- self.assertEqual(black.read_cache(black.DEFAULT_LINE_LENGTH), {})
+ self.assertEqual(black.read_cache(black.DEFAULT_LINE_LENGTH, mode), {})
def test_write_cache_read_cache(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir() as workspace:
src = (workspace / "test.py").resolve()
src.touch()
- black.write_cache({}, [src], black.DEFAULT_LINE_LENGTH)
- cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
+ black.write_cache({}, [src], black.DEFAULT_LINE_LENGTH, mode)
+ cache = black.read_cache(black.DEFAULT_LINE_LENGTH, mode)
self.assertIn(src, cache)
self.assertEqual(cache[src], black.get_cache_info(src))
self.assertEqual(done, [cached])
def test_write_cache_creates_directory_if_needed(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir(exists=False) as workspace:
self.assertFalse(workspace.exists())
- black.write_cache({}, [], black.DEFAULT_LINE_LENGTH)
+ black.write_cache({}, [], black.DEFAULT_LINE_LENGTH, mode)
self.assertTrue(workspace.exists())
@event_loop(close=False)
def test_failed_formatting_does_not_get_cached(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir() as workspace, patch(
"black.ProcessPoolExecutor", new=ThreadPoolExecutor
):
fobj.write('print("hello")\n')
result = CliRunner().invoke(black.main, [str(workspace)])
self.assertEqual(result.exit_code, 123)
fobj.write('print("hello")\n')
result = CliRunner().invoke(black.main, [str(workspace)])
self.assertEqual(result.exit_code, 123)
- cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
+ cache = black.read_cache(black.DEFAULT_LINE_LENGTH, mode)
self.assertNotIn(failing, cache)
self.assertIn(clean, cache)
def test_write_cache_write_fail(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir(), patch.object(Path, "open") as mock:
mock.side_effect = OSError
- black.write_cache({}, [], black.DEFAULT_LINE_LENGTH)
+ black.write_cache({}, [], black.DEFAULT_LINE_LENGTH, mode)
@event_loop(close=False)
def test_check_diff_use_together(self) -> None:
self.assertEqual(result.exit_code, 0)
def test_read_cache_line_lengths(self) -> None:
+ mode = black.FileMode.AUTO_DETECT
with cache_dir() as workspace:
path = (workspace / "file.py").resolve()
path.touch()
- black.write_cache({}, [path], 1)
- one = black.read_cache(1)
+ black.write_cache({}, [path], 1, mode)
+ one = black.read_cache(1, mode)
- two = black.read_cache(2)
+ two = black.read_cache(2, mode)
self.assertNotIn(path, two)
def test_single_file_force_pyi(self) -> None:
+ reg_mode = black.FileMode.AUTO_DETECT
+ pyi_mode = black.FileMode.PYI
contents, expected = read_data("force_pyi")
with cache_dir() as workspace:
path = (workspace / "file.py").resolve()
with open(path, "r") as fh:
actual = fh.read()
# verify cache with --pyi is separate
with open(path, "r") as fh:
actual = fh.read()
# verify cache with --pyi is separate
- pyi_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, pyi=True)
+ pyi_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, pyi_mode)
self.assertIn(path, pyi_cache)
- normal_cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
+ normal_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, reg_mode)
self.assertNotIn(path, normal_cache)
self.assertEqual(actual, expected)
@event_loop(close=False)
def test_multi_file_force_pyi(self) -> None:
+ reg_mode = black.FileMode.AUTO_DETECT
+ pyi_mode = black.FileMode.PYI
contents, expected = read_data("force_pyi")
with cache_dir() as workspace:
paths = [
actual = fh.read()
self.assertEqual(actual, expected)
# verify cache with --pyi is separate
- pyi_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, pyi=True)
- normal_cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
+ pyi_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, pyi_mode)
+ normal_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, reg_mode)
for path in paths:
self.assertIn(path, pyi_cache)
self.assertNotIn(path, normal_cache)
self.assertFormatEqual(actual, expected)
def test_single_file_force_py36(self) -> None:
+ reg_mode = black.FileMode.AUTO_DETECT
+ py36_mode = black.FileMode.PYTHON36
source, expected = read_data("force_py36")
with cache_dir() as workspace:
path = (workspace / "file.py").resolve()
with open(path, "r") as fh:
actual = fh.read()
# verify cache with --py36 is separate
with open(path, "r") as fh:
actual = fh.read()
# verify cache with --py36 is separate
- py36_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, py36=True)
+ py36_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, py36_mode)
self.assertIn(path, py36_cache)
- normal_cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
+ normal_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, reg_mode)
self.assertNotIn(path, normal_cache)
self.assertEqual(actual, expected)
@event_loop(close=False)
def test_multi_file_force_py36(self) -> None:
+ reg_mode = black.FileMode.AUTO_DETECT
+ py36_mode = black.FileMode.PYTHON36
source, expected = read_data("force_py36")
with cache_dir() as workspace:
paths = [
actual = fh.read()
self.assertEqual(actual, expected)
# verify cache with --py36 is separate
- pyi_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, py36=True)
- normal_cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
+ pyi_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, py36_mode)
+ normal_cache = black.read_cache(black.DEFAULT_LINE_LENGTH, reg_mode)
for path in paths:
self.assertIn(path, pyi_cache)
self.assertNotIn(path, normal_cache)
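For context: the hunks above migrate black's formatting and cache helpers from boolean flags (is_pyi=True, py36=True) to a single black.FileMode argument. A minimal sketch of the updated cache API as exercised by these tests, assuming the black module from this revision is importable; the call sequence mirrors the test code and is not an official usage example:

    import black

    # AUTO_DETECT is the default mode; PYI and PYTHON36 keep separate caches,
    # which is what the force_pyi/force_py36 tests above verify.
    mode = black.FileMode.AUTO_DETECT
    cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH, mode)
    cache = black.read_cache(black.DEFAULT_LINE_LENGTH, mode)  # {} if no cache file yet
    black.write_cache(cache, [], black.DEFAULT_LINE_LENGTH, mode)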