Fix and speedup diff-shades integration (#2773)
author    Richard Si <63936253+ichard26@users.noreply.github.com>
Thu, 20 Jan 2022 03:05:58 +0000 (22:05 -0500)
committer GitHub <noreply@github.com>
Thu, 20 Jan 2022 03:05:58 +0000 (19:05 -0800)
.github/mypyc-requirements.txt [new file with mode: 0644]
.github/workflows/diff_shades.yml
.github/workflows/diff_shades_comment.yml
docs/contributing/gauging_changes.md
scripts/diff_shades_gha_helper.py
src/black/parsing.py

diff --git a/.github/mypyc-requirements.txt b/.github/mypyc-requirements.txt
new file mode 100644 (file)
index 0000000..4542673
--- /dev/null
@@ -0,0 +1,14 @@
+mypy == 0.920
+
+# A bunch of packages for type information
+mypy-extensions >= 0.4.3
+tomli >= 0.10.2
+types-typed-ast >= 1.4.2
+types-dataclasses >= 0.1.3
+typing-extensions > 3.10.0.1
+click >= 8.0.0
+platformdirs >= 2.1.0
+
+# And because build isolation is disabled, we'll need to pull this too
+setuptools-scm[toml] >= 6.3.1
+wheel
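
The workflow below installs this requirements file and then builds Black as a mypyc-compiled wheel. A rough local equivalent of those steps (a sketch only, assuming a checkout of this revision; not something this commit adds) would be:

    python -m pip install -r .github/mypyc-requirements.txt   # pinned mypyc build deps
    python setup.py --use-mypyc bdist_wheel                    # compile with mypyc
    python -m pip install dist/*.whl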
diff --git a/.github/workflows/diff_shades.yml b/.github/workflows/diff_shades.yml
index a8a443e2cce96a148fada9f706e7d1e7bb28ce0c..68cc2383306f05b9f929f8ee335db1d67ca1b761 100644 (file)
@@ -3,10 +3,10 @@ name: diff-shades
 on:
   push:
     branches: [main]
-    paths-ignore: ["docs/**", "tests/**", "*.md"]
+    paths-ignore: ["docs/**", "tests/**", "**.md", "**.rst"]
 
   pull_request:
-    paths-ignore: ["docs/**", "tests/**", "*.md"]
+    paths-ignore: ["docs/**", "tests/**", "**.md", "**.rst"]
 
   workflow_dispatch:
     inputs:
@@ -27,10 +27,18 @@ on:
         description: "Custom Black arguments (eg. -S)"
         required: false
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   analysis:
     name: analysis / linux
     runs-on: ubuntu-latest
+    env:
+      # Clang is less picky with the C code it's given than gcc (and may
+      # generate faster binaries too).
+      CC: clang-12
 
     steps:
       - name: Checkout this repository (full clone)
@@ -45,6 +53,7 @@ jobs:
           python -m pip install pip --upgrade
           python -m pip install https://github.com/ichard26/diff-shades/archive/stable.zip
           python -m pip install click packaging urllib3
+          python -m pip install -r .github/mypyc-requirements.txt
           # After checking out old revisions, this might not exist so we'll use a copy.
           cat scripts/diff_shades_gha_helper.py > helper.py
           git config user.name "diff-shades-gha"
@@ -66,11 +75,14 @@ jobs:
           path: ${{ steps.config.outputs.baseline-analysis }}
           key: ${{ steps.config.outputs.baseline-cache-key }}
 
-      - name: Install baseline revision
+      - name: Build and install baseline revision
         if: steps.baseline-cache.outputs.cache-hit != 'true'
         env:
           GITHUB_TOKEN: ${{ github.token }}
-        run: ${{ steps.config.outputs.baseline-setup-cmd }} && python -m pip install .
+        run: >
+          ${{ steps.config.outputs.baseline-setup-cmd }}
+          && python setup.py --use-mypyc bdist_wheel
+          && python -m pip install dist/*.whl && rm build dist -r
 
       - name: Analyze baseline revision
         if: steps.baseline-cache.outputs.cache-hit != 'true'
@@ -78,10 +90,13 @@ jobs:
           diff-shades analyze -v --work-dir projects-cache/
           ${{ steps.config.outputs.baseline-analysis }} -- ${{ github.event.inputs.baseline-args }}
 
-      - name: Install target revision
+      - name: Build and install target revision
         env:
           GITHUB_TOKEN: ${{ github.token }}
-        run: ${{ steps.config.outputs.target-setup-cmd }} && python -m pip install .
+        run: >
+          ${{ steps.config.outputs.target-setup-cmd }}
+          && python setup.py --use-mypyc bdist_wheel
+          && python -m pip install dist/*.whl
 
       - name: Analyze target revision
         run: >
@@ -118,13 +133,14 @@ jobs:
           python helper.py comment-body
           ${{ steps.config.outputs.baseline-analysis }} ${{ steps.config.outputs.target-analysis }}
           ${{ steps.config.outputs.baseline-sha }} ${{ steps.config.outputs.target-sha }}
+          ${{ github.event.pull_request.number }}
 
       - name: Upload summary file (PR only)
         if: github.event_name == 'pull_request'
         uses: actions/upload-artifact@v2
         with:
-          name: .pr-comment-body.md
-          path: .pr-comment-body.md
+          name: .pr-comment.json
+          path: .pr-comment.json
 
         # This is last so the diff-shades-comment workflow can still work even if we
         # end up detecting failed files and failing the run.
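
The comment-body step above now also passes the pull request number, and the helper bundles it together with the rendered Markdown into the uploaded .pr-comment.json artifact. A hypothetical manual invocation (the analysis paths and the SHA/PR values are placeholders, not from this commit) would look like:

    python helper.py comment-body \
        baseline-analysis.json target-analysis.json \
        <baseline-sha> <target-sha> <pr-number>
    # writes .pr-comment.json containing {"body": ..., "pr-number": ...}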
diff --git a/.github/workflows/diff_shades_comment.yml b/.github/workflows/diff_shades_comment.yml
index bdd903218008475278eb36df962fddefec587107..0433bbbf85ff5818d9f6d9b3d3c4a629a9087747 100644 (file)
@@ -31,7 +31,7 @@ jobs:
       - name: Try to find pre-existing PR comment
         if: steps.metadata.outputs.needs-comment == 'true'
         id: find-comment
-        uses: peter-evans/find-comment@v1
+        uses: peter-evans/find-comment@d2dae40ed151c634e4189471272b57e76ec19ba8
         with:
           issue-number: ${{ steps.metadata.outputs.pr-number }}
           comment-author: "github-actions[bot]"
@@ -39,7 +39,7 @@ jobs:
 
       - name: Create or update PR comment
         if: steps.metadata.outputs.needs-comment == 'true'
-        uses: peter-evans/create-or-update-comment@v1
+        uses: peter-evans/create-or-update-comment@a35cf36e5301d70b76f316e867e7788a55a31dae
         with:
           comment-id: ${{ steps.find-comment.outputs.comment-id }}
           issue-number: ${{ steps.metadata.outputs.pr-number }}
diff --git a/docs/contributing/gauging_changes.md b/docs/contributing/gauging_changes.md
index 3cfa98b3df8678312c33a723eaa8f10dccb5edda..9b38fe1b6287f98fe1b3464206693e855459af9f 100644 (file)
@@ -74,7 +74,7 @@ to further information. If there's a pre-existing diff-shades comment, it'll be
 instead the next time the workflow is triggered on the same PR.
 
 The workflow uploads 3-4 artifacts upon completion: the two generated analyses (they
-have the .json file extension), `diff.html`, and `.pr-comment-body.md` if triggered by a
+have the .json file extension), `diff.html`, and `.pr-comment.json` if triggered by a
 PR. The last one is downloaded by the `diff-shades-comment` workflow and shouldn't be
 downloaded locally. `diff.html` comes in handy for push-based or manually triggered
 runs. And the analyses exist just in case you want to do further analysis using the
diff --git a/scripts/diff_shades_gha_helper.py b/scripts/diff_shades_gha_helper.py
index 21e04a590a11bff6d597bd2f16a8e68bd4ab9c38..f1f7f2be91c3919bc000750f2ee0638b68cb551f 100644 (file)
@@ -23,7 +23,7 @@ import sys
 import zipfile
 from io import BytesIO
 from pathlib import Path
-from typing import Any, Dict, Optional, Tuple
+from typing import Any, Optional, Tuple
 
 import click
 import urllib3
@@ -34,7 +34,7 @@ if sys.version_info >= (3, 8):
 else:
     from typing_extensions import Final, Literal
 
-COMMENT_BODY_FILE: Final = ".pr-comment-body.md"
+COMMENT_FILE: Final = ".pr-comment.json"
 DIFF_STEP_NAME: Final = "Generate HTML diff report"
 DOCS_URL: Final = (
     "https://black.readthedocs.io/en/latest/"
@@ -55,19 +55,16 @@ def set_output(name: str, value: str) -> None:
     print(f"::set-output name={name}::{value}")
 
 
-def http_get(
-    url: str,
-    is_json: bool = True,
-    headers: Optional[Dict[str, str]] = None,
-    **kwargs: Any,
-) -> Any:
-    headers = headers or {}
+def http_get(url: str, is_json: bool = True, **kwargs: Any) -> Any:
+    headers = kwargs.get("headers") or {}
     headers["User-Agent"] = USER_AGENT
     if "github" in url:
         if GH_API_TOKEN:
             headers["Authorization"] = f"token {GH_API_TOKEN}"
         headers["Accept"] = "application/vnd.github.v3+json"
-    r = http.request("GET", url, headers=headers, **kwargs)
+    kwargs["headers"] = headers
+
+    r = http.request("GET", url, **kwargs)
     if is_json:
         data = json.loads(r.data.decode("utf-8"))
     else:
@@ -199,8 +196,9 @@ def config(
 @click.argument("target", type=click.Path(exists=True, path_type=Path))
 @click.argument("baseline-sha")
 @click.argument("target-sha")
+@click.argument("pr-num", type=int)
 def comment_body(
-    baseline: Path, target: Path, baseline_sha: str, target_sha: str
+    baseline: Path, target: Path, baseline_sha: str, target_sha: str, pr_num: int
 ) -> None:
     # fmt: off
     cmd = [
@@ -225,45 +223,43 @@ def comment_body(
         f"[**What is this?**]({DOCS_URL}) | [Workflow run]($workflow-run-url) |"
         " [diff-shades documentation](https://github.com/ichard26/diff-shades#readme)"
     )
-    print(f"[INFO]: writing half-completed comment body to {COMMENT_BODY_FILE}")
-    with open(COMMENT_BODY_FILE, "w", encoding="utf-8") as f:
-        f.write(body)
+    print(f"[INFO]: writing comment details to {COMMENT_FILE}")
+    with open(COMMENT_FILE, "w", encoding="utf-8") as f:
+        json.dump({"body": body, "pr-number": pr_num}, f)
 
 
 @main.command("comment-details", help="Get PR comment resources from a workflow run.")
 @click.argument("run-id")
 def comment_details(run_id: str) -> None:
     data = http_get(f"https://api.github.com/repos/{REPO}/actions/runs/{run_id}")
-    if data["event"] != "pull_request":
+    if data["event"] != "pull_request" or data["conclusion"] == "cancelled":
         set_output("needs-comment", "false")
         return
 
     set_output("needs-comment", "true")
-    pulls = data["pull_requests"]
-    assert len(pulls) == 1
-    pr_number = pulls[0]["number"]
-    set_output("pr-number", str(pr_number))
-
-    jobs_data = http_get(data["jobs_url"])
-    assert len(jobs_data["jobs"]) == 1, "multiple jobs not supported nor tested"
-    job = jobs_data["jobs"][0]
+    jobs = http_get(data["jobs_url"])["jobs"]
+    assert len(jobs) == 1, "multiple jobs not supported nor tested"
+    job = jobs[0]
     steps = {s["name"]: s["number"] for s in job["steps"]}
     diff_step = steps[DIFF_STEP_NAME]
     diff_url = job["html_url"] + f"#step:{diff_step}:1"
 
     artifacts_data = http_get(data["artifacts_url"])["artifacts"]
     artifacts = {a["name"]: a["archive_download_url"] for a in artifacts_data}
-    body_url = artifacts[COMMENT_BODY_FILE]
-    body_zip = BytesIO(http_get(body_url, is_json=False))
-    with zipfile.ZipFile(body_zip) as zfile:
-        with zfile.open(COMMENT_BODY_FILE) as rf:
-            body = rf.read().decode("utf-8")
+    comment_url = artifacts[COMMENT_FILE]
+    comment_zip = BytesIO(http_get(comment_url, is_json=False))
+    with zipfile.ZipFile(comment_zip) as zfile:
+        with zfile.open(COMMENT_FILE) as rf:
+            comment_data = json.loads(rf.read().decode("utf-8"))
+
+    set_output("pr-number", str(comment_data["pr-number"]))
+    body = comment_data["body"]
     # It's more convenient to fill in these fields after the first workflow is done
     # since this command can access the workflows API (doing it in the main workflow
     # while it's still in progress seems impossible).
     body = body.replace("$workflow-run-url", data["html_url"])
     body = body.replace("$job-diff-url", diff_url)
-    # https://github.community/t/set-output-truncates-multiline-strings/16852/3
+    # https://github.community/t/set-output-truncates-multiline-strings/16852/3
     escaped = body.replace("%", "%25").replace("\n", "%0A").replace("\r", "%0D")
     set_output("comment-body", escaped)
 
diff --git a/src/black/parsing.py b/src/black/parsing.py
index 13fa67ee84dabb07f183f8bee0bd8b8742fac490..6b63368871c80c29a4e353d4c126d141235d0f61 100644 (file)
@@ -206,7 +206,7 @@ def stringify_ast(node: Union[ast.AST, ast3.AST], depth: int = 0) -> Iterator[st
                 break
 
         try:
-            value = getattr(node, field)
+            value: object = getattr(node, field)
         except AttributeError:
             continue
 
@@ -237,6 +237,7 @@ def stringify_ast(node: Union[ast.AST, ast3.AST], depth: int = 0) -> Iterator[st
             yield from stringify_ast(value, depth + 2)
 
         else:
+            normalized: object
             # Constant strings may be indented across newlines, if they are
             # docstrings; fold spaces after newlines when comparing. Similarly,
             # trailing and leading space may be removed.