+CACHE_DIR = Path(user_cache_dir("black", version=__version__))
+CACHE_FILE = CACHE_DIR / "cache.pickle"
+
+
+def read_cache() -> Cache:
+ """Read the cache if it exists and is well formed.
+
+ If it is not well formed, the call to write_cache later should resolve the issue.
+ """
+ if not CACHE_FILE.exists():
+ return {}
+
+ with CACHE_FILE.open("rb") as fobj:
+ try:
+ cache: Cache = pickle.load(fobj)
+ except pickle.UnpicklingError:
+ return {}
+
+ return cache
+
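+# Illustrative note, not part of this change: `Cache` is assumed here to be a
+# mapping from resolved Path to CacheInfo (roughly Dict[Path, CacheInfo]), so a
+# missing or unreadable cache file simply degrades to an empty mapping:
+#
+#     cache = read_cache()    # {} if cache.pickle is absent or corrupt
+#     cache.get(some_path)    # None for files never formatted (`some_path` is hypothetical)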
+
+def get_cache_info(path: Path) -> CacheInfo:
+ """Return the information used to check if a file is already formatted or not."""
+ stat = path.stat()
+ return stat.st_mtime, stat.st_size
+
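+# Illustrative note, not part of this change: `CacheInfo` is assumed to be a
+# (st_mtime, st_size) tuple, so e.g. get_cache_info(Path("setup.py")) would return
+# something like (1526144941.0, 1759); the file name and both values are hypothetical.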
+
+def filter_cached(
+    cache: Cache, sources: Iterable[Path]
+) -> Tuple[List[Path], List[Path]]:
+ """Split a list of paths into two.
+
+ The first list contains paths of files that modified on disk or are not in the
+ cache. The other list contains paths to non-modified files.
+ """
+    todo, done = [], []
+    for src in sources:
+        src = src.resolve()
+        if cache.get(src) != get_cache_info(src):
+            todo.append(src)
+        else:
+            done.append(src)
+    return todo, done
+
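+# Illustrative usage sketch, not part of this change (`some_sources` is hypothetical):
+#
+#     cache = read_cache()
+#     todo, done = filter_cached(cache, some_sources)
+#     # `todo` holds files that still need to be formatted; `done` can be reported
+#     # as already up to date without re-reading their contents.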
+
+def write_cache(cache: Cache, sources: List[Path]) -> None:
+ """Update the cache file."""
+ try:
+ if not CACHE_DIR.exists():
+ CACHE_DIR.mkdir(parents=True)
+ new_cache = {**cache, **{src.resolve(): get_cache_info(src) for src in sources}}
+ with CACHE_FILE.open("wb") as fobj:
+ pickle.dump(new_cache, fobj, protocol=pickle.HIGHEST_PROTOCOL)
+ except OSError:
+ pass
+
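+# Illustrative end-to-end sketch, not part of this change. The formatting step is
+# elided and `format_file` is a hypothetical helper:
+#
+#     cache = read_cache()
+#     todo, done = filter_cached(cache, sources)
+#     formatted = [src for src in todo if format_file(src)]
+#     write_cache(cache, formatted + done)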
+