Просмотр исходного кода

Try using clangd-tidy (#5763)

Run clangd-tidy in parallel with clang-tidy, to experimentally see
whether it works reasonably well. These may produce slightly different
results, and it's not clear that clangd-tidy will be better, so being
cautious about switching.

A real possibility here is this is slower in some cases (building
compile commands takes ~6m below), but faster in the extremely slow
cases (when clang-tidy takes >10m).

For contrast:

- clang-tidy:
https://github.com/carbon-language/carbon-lang/actions/runs/16038096026/job/45254162180?pr=5763
- clangd-tidy:
https://github.com/carbon-language/carbon-lang/actions/runs/16038096427/job/45254164217?pr=5763
Jon Ross-Perkins 9 месяцев назад
Родитель
Commit
6db13532ca

+ 1 - 1
.github/actions/build-setup-common/action.yml

@@ -68,7 +68,7 @@ runs:
 
         # Enable remote cache for our CI but minimize downloads.
         build --remote_cache=https://storage.googleapis.com/carbon-builds-github-v${CACHE_VERSION}
-        build --remote_download_minimal
+        build --remote_download_outputs=minimal
 
         # We import a special key into every action in order to key the Bazel
         # remote cache in a way that avoids collisions between different

+ 4 - 0
.github/actions/test-setup/action.yml

@@ -19,6 +19,8 @@ inputs:
 outputs:
   has_code:
     value: ${{ steps.filter.outputs.has_code}}
+  has_cpp_files:
+    value: ${{ steps.filter.outputs.has_cpp_files}}
 
 runs:
   using: composite
@@ -32,6 +34,8 @@ runs:
         filters: |
           has_code:
             - '!{**/*.md,LICENSE,CODEOWNERS,.git*}'
+          has_cpp_files:
+            - '{**/*.cpp,**/*.h}'
 
     # Disable uploads when the remote cache is read-only.
     - name: Set up remote cache access (read-only)

+ 83 - 0
.github/workflows/clangd_tidy.yaml

@@ -0,0 +1,83 @@
+# Part of the Carbon Language project, under the Apache License v2.0 with LLVM
+# Exceptions. See /LICENSE for license information.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+
+name: 'Clang Tidy (clangd)'
+
+on:
+  push:
+    branches: [trunk, action-test]
+  pull_request:
+  merge_group:
+
+permissions:
+  contents: read # For actions/checkout.
+  pull-requests: read # For dorny/paths-filter to read pull requests.
+
+# Cancel previous workflows on the PR when there are multiple fast commits.
+# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#concurrency
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  clang-tidy:
+    runs-on: ubuntu-22.04
+
+    steps:
+      - name: Harden Runner
+        uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1
+        with:
+          egress-policy: block
+          # When adding endpoints, see README.md.
+          # prettier-ignore
+          allowed-endpoints: >
+            *.dl.sourceforge.net:443
+            api.github.com:443
+            bcr.bazel.build:443
+            downloads.sourceforge.net:443
+            github.com:443
+            mirrors.kernel.org:443
+            nodejs.org:443
+            oauth2.googleapis.com:443
+            objects.githubusercontent.com:443
+            pypi.org:443
+            releases.bazel.build:443
+            sourceforge.net:443
+            storage.googleapis.com:443
+
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+
+      - id: filter
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        with:
+          base:
+            ${{ github.event_name == 'pull_request' &&
+            github.event.pull_request.base.sha || (github.event_name ==
+            'merge_group' && github.event.merge_group.base_sha ||
+            github.event.push.before ) }}
+          filters: |
+            has_cpp:
+              - '{**/*.cpp,**/*.h}'
+          list-files: 'shell'
+
+      - uses: ./.github/actions/build-setup-common
+        if: steps.filter.outputs.has_cpp == 'true'
+        with:
+          matrix_runner: 'ubuntu-22.04'
+          remote_cache_upload: '--remote_upload_local_results=false'
+
+      - name: Create compile commands
+        if: steps.filter.outputs.has_cpp == 'true'
+        run: ./scripts/create_compdb.py
+
+      - name: Install clangd-tidy
+        if: steps.filter.outputs.has_cpp == 'true'
+        run: pip install clangd-tidy==1.1.0.post2
+
+      - name: Run clangd-tidy
+        if: steps.filter.outputs.has_cpp == 'true'
+        env:
+          FILTER_FILES: ${{ steps.filter.outputs.has_cpp_files }}
+        run: |
+          clangd-tidy -p . -j 10 $FILTER_FILES

+ 9 - 4
scripts/create_compdb.py

@@ -37,9 +37,10 @@ def _build_generated_files(bazel: str, logtostderr: bool) -> None:
     kinds_query = (
         "filter("
         ' ".*\\.(h|cpp|cc|c|cxx|def|inc)$",'
+        ' kind("(generated file|manifest_as_cpp)",'
        # tree_sitter is excluded here because it causes the query to fail on
        # `@platforms`.
-        ' kind("generated file", deps(//... except //utils/tree_sitter/...))'
+        "      deps(//... except //utils/tree_sitter/...))"
         ")"
     )
     log_to = None
@@ -50,13 +51,14 @@ def _build_generated_files(bazel: str, logtostderr: bool) -> None:
         stderr=log_to,
         encoding="utf-8",
     ).splitlines()
-    print(f"Found {len(generated_file_labels)} generated files...")
+    print(f"Found {len(generated_file_labels)} generated files...", flush=True)
 
     # Directly build these labels so that indexing can find them. Allow this to
     # fail in case there are build errors in the client, and just warn the user
     # that they may be missing generated files.
     subprocess.check_call(
-        [bazel, "build", "--keep_going"] + generated_file_labels
+        [bazel, "build", "--keep_going", "--remote_download_outputs=toplevel"]
+        + generated_file_labels
     )
 
 
@@ -77,7 +79,10 @@ def main() -> None:
 
     _build_generated_files(bazel, args.alsologtostderr)
 
-    print("Generating compile_commands.json (may take a few minutes)...")
+    print(
+        "Generating compile_commands.json (may take a few minutes)...",
+        flush=True,
+    )
     subprocess.run([bazel, "run", "@hedron_compile_commands//:refresh_all"])