git.madduck.net Git - etc/vim.git/commitdiff

madduck's git repository

Every one of the projects in this repository is available at the canonical URL git://git.madduck.net/madduck/pub/<projectpath> — see each project's metadata for the exact URL.

All patches and comments are welcome. Please squash your changes into logical commits before using git-format-patch and git-send-email to send them to patches@git.madduck.net. If you read over the Git project's submission guidelines and adhere to them, I'd be especially grateful.

SSH access, as well as push access, can be arranged individually.

If you use my repositories frequently, consider adding the following snippet to ~/.gitconfig and using the third clone URL listed for each project:

[url "git://git.madduck.net/madduck/"]
  insteadOf = madduck:
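
With that rewrite in place, the madduck: prefix expands to the full canonical URL. A purely illustrative example using the project path of this page (check the project's metadata for the exact path):

  # expands to git://git.madduck.net/madduck/pub/etc/vim.git
  git clone madduck:pub/etc/vim.git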

Restore ability to format code with legacy usage of `async` as a name
author Łukasz Langa <lukasz@langa.pl>
Wed, 21 Mar 2018 01:05:20 +0000 (18:05 -0700)
committer Łukasz Langa <lukasz@langa.pl>
Wed, 21 Mar 2018 01:54:01 +0000 (18:54 -0700)
Fixes #20
Fixes #42

README.md
black.py
blib2to3/Grammar.txt
blib2to3/Grammar3.6.4.final.0.pickle
blib2to3/README
blib2to3/pgen2/token.py
blib2to3/pgen2/tokenize.py

index a124429127a1f3e338fb956fff66f95455134823..1b833089bbe489591547a6056f0eb03e48f9f839 100644 (file)
--- a/README.md
+++ b/README.md
@@ -261,6 +261,10 @@ More details can be found in [CONTRIBUTING](CONTRIBUTING.md).
 
 * added ability to pipe formatting from stdin to stdin (#25)
 
+* restored ability to format code with legacy usage of `async` as
+  a name (#20, #42)
+
+
 ### 18.3a2
 
 * changed positioning of binary operators to occur at beginning of lines
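
For context, the changelog entry above refers to pre-3.7 code in which `async` (and `await`) are ordinary identifiers rather than keywords. A minimal, purely illustrative Python snippet of that legacy style (the names are made up for the example):

    # Valid on Python < 3.7, where 'async' and 'await' were not reserved words.
    async = make_scheduler()            # 'async' used as a variable name
    def run(async=None, await=None):    # ...and as parameter names
        return async or await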
index 6bfef500d6d44ea79d8d1eca2030127c7433339d..74329d2389901323d2c743f9a55758c6107417f5 100644 (file)
--- a/black.py
+++ b/black.py
@@ -486,8 +486,7 @@ class Line:
         return (
             (first_leaf.type == token.NAME and first_leaf.value == 'def')
             or (
-                first_leaf.type == token.NAME
-                and first_leaf.value == 'async'
+                first_leaf.type == token.ASYNC
                 and second_leaf is not None
                 and second_leaf.type == token.NAME
                 and second_leaf.value == 'def'
@@ -816,7 +815,7 @@ class LineGenerator(Visitor[Line]):
         for child in children:
             yield from self.visit(child)
 
-            if child.type == token.NAME and child.value == 'async':  # type: ignore
+            if child.type == token.ASYNC:
                 break
 
         internal_stmt = next(children)
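
In other words, black.py no longer pattern-matches on a NAME leaf whose value happens to be 'async'; it relies on the dedicated ASYNC token that the patched tokenizer emits only in genuine `async def` contexts. A rough sketch of the resulting check (simplified; the real code lives on black.py's Line class and operates on blib2to3 leaves, and the function name here is hypothetical):

    from blib2to3.pgen2 import token

    def is_def_header(first_leaf, second_leaf):
        # True for both `def ...` and `async def ...` headers.
        return (
            (first_leaf.type == token.NAME and first_leaf.value == 'def')
            or (
                first_leaf.type == token.ASYNC
                and second_leaf is not None
                and second_leaf.type == token.NAME
                and second_leaf.value == 'def'
            )
        )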
index b19b4a21fadd7211e5525a9f944a5b74396a48ff..4905c91e635e8a40a10b255258db4c96a303a56f 100644 (file)
@@ -15,7 +15,7 @@ eval_input: testlist NEWLINE* ENDMARKER
 decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
 decorators: decorator+
 decorated: decorators (classdef | funcdef | async_funcdef)
-async_funcdef: 'async' funcdef
+async_funcdef: ASYNC funcdef
 funcdef: 'def' NAME parameters ['->' test] ':' suite
 parameters: '(' [typedargslist] ')'
 typedargslist: ((tfpdef ['=' test] ',')*
@@ -66,7 +66,7 @@ exec_stmt: 'exec' expr ['in' test [',' test]]
 assert_stmt: 'assert' test [',' test]
 
 compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
-async_stmt: 'async' (funcdef | with_stmt | for_stmt)
+async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
 if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
 while_stmt: 'while' test ':' suite ['else' ':' suite]
 for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
@@ -105,7 +105,7 @@ shift_expr: arith_expr (('<<'|'>>') arith_expr)*
 arith_expr: term (('+'|'-') term)*
 term: factor (('*'|'@'|'/'|'%'|'//') factor)*
 factor: ('+'|'-'|'~') factor | power
-power: ['await'] atom trailer* ['**' factor]
+power: [AWAIT] atom trailer* ['**' factor]
 atom: ('(' [yield_expr|testlist_gexp] ')' |
        '[' [listmaker] ']' |
        '{' [dictsetmaker] '}' |
@@ -142,7 +142,7 @@ argument: ( test [comp_for] |
            star_expr )
 
 comp_iter: comp_for | comp_if
-comp_for: ['async'] 'for' exprlist 'in' or_test [comp_iter]
+comp_for: [ASYNC] 'for' exprlist 'in' or_test [comp_iter]
 comp_if: 'if' old_test [comp_iter]
 
 # As noted above, testlist_safe extends the syntax allowed in list
@@ -161,7 +161,7 @@ comp_if: 'if' old_test [comp_iter]
 #
 # See https://bugs.python.org/issue27494
 old_comp_iter: old_comp_for | old_comp_if
-old_comp_for: ['async'] 'for' exprlist 'in' testlist_safe [old_comp_iter]
+old_comp_for: [ASYNC] 'for' exprlist 'in' testlist_safe [old_comp_iter]
 old_comp_if: 'if' old_test [old_comp_iter]
 
 testlist1: test (',' test)*
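
The grammar now references dedicated ASYNC and AWAIT terminals instead of the string literals 'async' and 'await', so these productions only match tokens the tokenizer explicitly promotes inside an `async def`. An illustrative source fragment that exercises the touched productions (async_funcdef/async_stmt, AWAIT in power, and the optional ASYNC in comp_for):

    # 'async def' -> async_stmt/async_funcdef, 'await ...' -> power with AWAIT,
    # 'async for' -> comp_for with the optional ASYNC token.
    async def fetch_all(urls, session):
        return [await session.get(u) async for u in urls]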
index da228142c52b22d4e8d48b206b6741e972af18fd..4d1922bffc279845707eb71d7bd30ba0aa2ff81a 100644 (file)
Binary files a/blib2to3/Grammar3.6.4.final.0.pickle and b/blib2to3/Grammar3.6.4.final.0.pickle differ
index 9a9cf6152d559d34b7626bcdf1ed2533717bea1e..773aaf6d069a04cf32521f23cd30a0b8faa7643f 100644 (file)
@@ -5,5 +5,7 @@ Reasons for forking:
 - consistent handling of f-strings for users of Python < 3.6.2
 - backport of BPO-33064 that fixes parsing files with trailing commas after
   *args and **kwargs
+- backport of GH-6143 that restores the ability to reformat legacy usage of
+  `async`
 - better ability to debug (better reprs for starters)
 - ability to Cythonize
index 7599396611b232af2bd1ba60f4f6823f8161184f..1a679554d2db4e86c8d56eaca49b85113c1cb312 100755 (executable)
@@ -62,8 +62,10 @@ OP = 52
 COMMENT = 53
 NL = 54
 RARROW = 55
-ERRORTOKEN = 56
-N_TOKENS = 57
+AWAIT = 56
+ASYNC = 57
+ERRORTOKEN = 58
+N_TOKENS = 59
 NT_OFFSET = 256
 #--end constants--
 
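The new AWAIT and ASYNC constants shift ERRORTOKEN and N_TOKENS up by two. Assuming the forked token module keeps lib2to3's tok_name reverse mapping (built from these module-level integers), the additions can be inspected like this:

    # Assumption: blib2to3.pgen2.token exposes tok_name like lib2to3 does.
    from blib2to3.pgen2 import token

    print(token.AWAIT, token.tok_name[token.AWAIT])   # 56 AWAIT
    print(token.ASYNC, token.tok_name[token.ASYNC])   # 57 ASYNC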
index 14560e4fddff6224abbafd228229ed88605bd1a3..45afc5f4e53fcf7d0f1f602638804a93726d50c1 100644 (file)
@@ -234,7 +234,7 @@ class Untokenizer:
         for tok in iterable:
             toknum, tokval = tok[:2]
 
-            if toknum in (NAME, NUMBER):
+            if toknum in (NAME, NUMBER, ASYNC, AWAIT):
                 tokval += ' '
 
             if toknum == INDENT:
@@ -380,6 +380,12 @@ def generate_tokens(readline):
     contline = None
     indents = [0]
 
+    # 'stashed' and 'async_*' are used for async/await parsing
+    stashed = None
+    async_def = False
+    async_def_indent = 0
+    async_def_nl = False
+
     while 1:                                   # loop over lines in stream
         try:
             line = readline()
@@ -420,6 +426,10 @@ def generate_tokens(readline):
                 pos = pos + 1
             if pos == max: break
 
+            if stashed:
+                yield stashed
+                stashed = None
+
             if line[pos] in '#\r\n':           # skip comments or blank lines
                 if line[pos] == '#':
                     comment_token = line[pos:].rstrip('\r\n')
@@ -443,8 +453,18 @@ def generate_tokens(readline):
                         ("<tokenize>", lnum, pos, line))
                 indents = indents[:-1]
 
+                if async_def and async_def_indent >= indents[-1]:
+                    async_def = False
+                    async_def_nl = False
+                    async_def_indent = 0
+
                 yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
 
+            if async_def and async_def_nl and async_def_indent >= indents[-1]:
+                async_def = False
+                async_def_nl = False
+                async_def_indent = 0
+
         else:                                  # continued statement
             if not line:
                 raise TokenError("EOF in multi-line statement", (lnum, 0))
@@ -464,10 +484,18 @@ def generate_tokens(readline):
                     newline = NEWLINE
                     if parenlev > 0:
                         newline = NL
+                    elif async_def:
+                        async_def_nl = True
+                    if stashed:
+                        yield stashed
+                        stashed = None
                     yield (newline, token, spos, epos, line)
 
                 elif initial == '#':
                     assert not token.endswith("\n")
+                    if stashed:
+                        yield stashed
+                        stashed = None
                     yield (COMMENT, token, spos, epos, line)
                 elif token in triple_quoted:
                     endprog = endprogs[token]
@@ -475,6 +503,9 @@ def generate_tokens(readline):
                     if endmatch:                           # all on one line
                         pos = endmatch.end(0)
                         token = line[start:pos]
+                        if stashed:
+                            yield stashed
+                            stashed = None
                         yield (STRING, token, spos, (lnum, pos), line)
                     else:
                         strstart = (lnum, start)           # multiple lines
@@ -492,22 +523,63 @@ def generate_tokens(readline):
                         contline = line
                         break
                     else:                                  # ordinary string
+                        if stashed:
+                            yield stashed
+                            stashed = None
                         yield (STRING, token, spos, epos, line)
                 elif initial in namechars:                 # ordinary name
-                    yield (NAME, token, spos, epos, line)
+                    if token in ('async', 'await'):
+                        if async_def:
+                            yield (ASYNC if token == 'async' else AWAIT,
+                                   token, spos, epos, line)
+                            continue
+
+                    tok = (NAME, token, spos, epos, line)
+                    if token == 'async' and not stashed:
+                        stashed = tok
+                        continue
+
+                    if token == 'def':
+                        if (stashed
+                                and stashed[0] == NAME
+                                and stashed[1] == 'async'):
+
+                            async_def = True
+                            async_def_indent = indents[-1]
+
+                            yield (ASYNC, stashed[1],
+                                   stashed[2], stashed[3],
+                                   stashed[4])
+                            stashed = None
+
+                    if stashed:
+                        yield stashed
+                        stashed = None
+
+                    yield tok
                 elif initial == '\\':                      # continued stmt
                     # This yield is new; needed for better idempotency:
+                    if stashed:
+                        yield stashed
+                        stashed = None
                     yield (NL, token, spos, (lnum, pos), line)
                     continued = 1
                 else:
                     if initial in '([{': parenlev = parenlev + 1
                     elif initial in ')]}': parenlev = parenlev - 1
+                    if stashed:
+                        yield stashed
+                        stashed = None
                     yield (OP, token, spos, epos, line)
             else:
                 yield (ERRORTOKEN, line[pos],
                            (lnum, pos), (lnum, pos+1), line)
                 pos = pos + 1
 
+    if stashed:
+        yield stashed
+        stashed = None
+
     for indent in indents[1:]:                 # pop remaining indent levels
         yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
     yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
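
Taken together, the tokenizer changes buffer any 'async' NAME token in `stashed` and decide later how to emit it: if it is immediately followed by 'def', it is re-emitted as an ASYNC token and the `async_def`/`async_def_indent`/`async_def_nl` state marks the enclosing block, inside which 'async' and 'await' keep tokenizing as ASYNC and AWAIT; in any other position the stash is flushed as a plain NAME, which is what keeps legacy identifiers working. A rough demonstration of that intended behaviour (the import path follows this repository's layout, and the commented output is illustrative, not a captured run):

    import io

    from blib2to3.pgen2 import token, tokenize

    legacy = "async = 1\n"                        # 'async' as an ordinary name
    modern = "async def f():\n    await g()\n"    # a real async function

    for source in (legacy, modern):
        toks = tokenize.generate_tokens(io.StringIO(source).readline)
        names = [token.tok_name[t[0]]
                 for t in toks
                 if t[0] in (token.NAME, token.ASYNC, token.AWAIT)]
        print(names)   # legacy: only NAME; modern: ASYNC and AWAIT plus NAMEs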