+def detect_target_versions(node: Node) -> Set[TargetVersion]:
+ """Detect the version to target based on the nodes used."""
+ features = get_features_used(node)
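+    # A version is an acceptable target only if it supports every feature
+    # used in the file (`<=` is the subset check on feature sets).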
+    return {
+        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
+    }
+
+
+def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:
+ """Generate sets of closing bracket IDs that should be omitted in a RHS.
+
+ Brackets can be omitted if the entire trailer up to and including
+ a preceding closing bracket fits in one line.
+
+ Yielded sets are cumulative (contain results of previous yields, too). First
+ set is empty.
+ """
+
+    omit: Set[LeafID] = set()
+    yield omit
+
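+    # Account for indentation first: each depth level is four characters wide.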
+    length = 4 * line.depth
+    opening_bracket = None
+    closing_bracket = None
+    inner_brackets: Set[LeafID] = set()
+    for index, leaf, leaf_length in enumerate_with_length(line, reversed=True):
+        length += leaf_length
+        if length > line_length:
+            break
+
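+        # `leaf_length` also counts comments attached to the leaf, so exceeding
+        # the bare value plus prefix means an inline comment follows this leaf.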
+        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
+        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
+            break
+
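+        # The line is scanned right to left. While inside an already-tracked
+        # trailer, record any nested closing brackets as "inner" ones.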
+        if opening_bracket:
+            if leaf is opening_bracket:
+                opening_bracket = None
+            elif leaf.type in CLOSING_BRACKETS:
+                inner_brackets.add(id(leaf))
+        elif leaf.type in CLOSING_BRACKETS:
+            if index > 0 and line.leaves[index - 1].type in OPENING_BRACKETS:
+                # Empty brackets would fail a split, so treat them as "inner"
+                # brackets (i.e. only add them to the `omit` set if another
+                # pair of brackets was good enough).
+                inner_brackets.add(id(leaf))
+                continue
+
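+            # A new trailer begins at this closing bracket; the trailer ending at
+            # the previously tracked closing bracket fits on the line, so that
+            # bracket (and any inner brackets) can safely be omitted.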
+            if closing_bracket:
+                omit.add(id(closing_bracket))
+                omit.update(inner_brackets)
+                inner_brackets.clear()
+                yield omit
+
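+            # Leaves with an empty value are invisible parentheses; only start
+            # tracking a trailer at a visible bracket.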
+            if leaf.value:
+                opening_bracket = leaf.opening_bracket
+                closing_bracket = leaf
+
+
+def get_future_imports(node: Node) -> Set[str]:
+ """Return a set of __future__ imports in the file."""
+ imports: Set[str] = set()
+
+    def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]:
+        for child in children:
+            if isinstance(child, Leaf):
+                if child.type == token.NAME:
+                    yield child.value
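+            # "import x as y": yield the original name, not the alias.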
+            elif child.type == syms.import_as_name:
+                orig_name = child.children[0]
+                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
+                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
+                yield orig_name.value
+            elif child.type == syms.import_as_names:
+                yield from get_imports_from_children(child.children)
+            else:
+                raise AssertionError("Invalid syntax parsing imports")
+
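+    # __future__ imports may only be preceded by the module docstring, so stop
+    # at the first statement that is neither a docstring nor such an import.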
+    for child in node.children:
+        if child.type != syms.simple_stmt:
+            break
+        first_child = child.children[0]
+        if isinstance(first_child, Leaf):
+            # Continue looking if we see a docstring; otherwise stop.
+            if (
+                len(child.children) == 2
+                and first_child.type == token.STRING
+                and child.children[1].type == token.NEWLINE
+            ):
+                continue
+            else:
+                break
+        elif first_child.type == syms.import_from:
+            module_name = first_child.children[1]
+            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
+                break
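+            # children[3:] skips "from", the module name, and "import"; the rest
+            # are the imported names (possibly wrapped in parentheses).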
+            imports |= set(get_imports_from_children(first_child.children[3:]))
+        else:
+            break
+    return imports