# Reusable PR lint workflow: validates PR title/description, commit signature
# verification, and copyright headers on changed files.
name: PR Lint

on:
  workflow_call:

jobs:
  pr-lint:
    runs-on: ubuntu-latest
    steps:
      - name: Validate PR title + description + commit signatures
        uses: actions/github-script@v7
        with:
          script: |
            const pr = context.payload.pull_request;
            // Guard: a reusable (workflow_call) workflow can be invoked from a
            // non-PR event, in which case there is no pull_request payload.
            // Fail with a clear message instead of throwing on pr.title below.
            if (!pr) {
              core.setFailed("This reusable workflow must be called from a pull_request event.");
              return;
            }
            const title = (pr.title || "").trim();
            const body = (pr.body || "").trim();

            // 1) Title pattern: SDKS-1234 <something>
            //    - allow multiple spaces, but require at least one space after ticket
            const titleRe = /^SDKS-\d+\s+\S.+/;
            if (!titleRe.test(title)) {
              core.setFailed(
                `PR title must match "SDKS-#### <description>". Got: "${title}"`
              );
              return;
            }

            // 2) Description exists
            if (!body || body.length < 10) {
              core.setFailed("PR description must be present and meaningful (>= 10 chars).");
              return;
            }

            // 3) All commits are signed (GitHub verification)
            // Note: "verified" is GitHub's verification result for the commit signature.
            const { owner, repo } = context.repo;

            // paginate commits in PR
            const commits = await github.paginate(
              github.rest.pulls.listCommits,
              { owner, repo, pull_number: pr.number, per_page: 100 }
            );

            const failures = [];
            for (const c of commits) {
              const sha = c.sha;
              // Fetch commit detail to reliably get verification state
              const commitResp = await github.rest.repos.getCommit({ owner, repo, ref: sha });
              const v = commitResp.data?.commit?.verification;

              const verified = v?.verified === true;
              const reason = v?.reason || "unknown";

              if (!verified) {
                failures.push(`${sha.substring(0, 7)} (${reason})`);
              }
            }

            if (failures.length) {
              core.setFailed(
                `All commits must be signed (Verified). Unsigned/unverified commits:\n- ` +
                failures.join("\n- ")
              );
              return;
            }

            core.info("PR metadata + commit signature checks passed.");

      - name: Checkout PR head (for copyright header checks)
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Validate copyright headers on changed files
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PR_NUMBER: ${{ github.event.pull_request.number }}
          REPO: ${{ github.repository }}
        run: |
          set -euo pipefail
          # Compute the year once and export it so the Python child process can
          # read it from the environment instead of shelling out to `date` again.
          YEAR="$(date +'%Y')"
          export YEAR
          echo "Current year: ${YEAR}"

          python3 - << 'PY'
          import json, os, re, subprocess, sys

          repo = os.environ["REPO"]
          pr_number = os.environ["PR_NUMBER"]
          # Prefer the year exported by the shell; fall back to `date` for safety.
          year = os.environ.get("YEAR") or subprocess.check_output(["date", "+%Y"], text=True).strip()

          # Fetch changed files JSON (paginated) via gh.
          # NOTE: with --paginate alone, gh concatenates one JSON array per page
          # ("[...][...]"), which json.loads cannot parse once a PR has >100
          # changed files. --slurp wraps all pages in a single outer array.
          cmd = [
              "gh", "api",
              "-H", "Accept: application/vnd.github+json",
              f"/repos/{repo}/pulls/{pr_number}/files?per_page=100",
              "--paginate", "--slurp",
          ]
          raw = subprocess.check_output(cmd, text=True).strip()
          if not raw:
              print("❌ gh api returned empty response; cannot validate files.")
              sys.exit(1)

          # --slurp yields a list of pages; flatten to a flat list of file objects.
          files = [f for page in json.loads(raw) for f in page]

          # Binary/asset extensions that never carry a text copyright header.
          skip_ext = {
              ".png", ".jpg", ".jpeg", ".gif", ".webp", ".ico",
              ".pdf", ".zip", ".jar", ".aar", ".so", ".dylib",
              ".keystore", ".jks", ".p12", ".mobileprovision",
              ".ttf", ".otf", ".mp4", ".mov", ".wav", ".mp3",
              ".lock",
          }
          # Generated/vendored/tooling paths exempt from the header requirement.
          skip_paths_re = re.compile(
              r"^(?:\.github/|\.idea/|\.gradle/|build/|dist/|DerivedData/|Pods/|Carthage/|\.swiftpm/|node_modules/)"
          )

          # Adjust this to your canonical header text.
          # Accepts "Copyright (c) <YEAR>" or "Copyright (c) <start>-<YEAR>",
          # requiring the CURRENT year to appear.
          header_re = re.compile(
              rf"Copyright\s*\(c\)\s*(?:\d{{4}}\s*-\s*)?{re.escape(year)}\s+Ping Identity Corporation\.",
              re.IGNORECASE
          )

          bad = []      # files missing a current-year header
          checked = []  # files actually inspected

          for f in files:
              status = f.get("status")
              path = f.get("filename")
              if status == "removed" or not path:
                  continue
              if skip_paths_re.search(path):
                  continue

              # os.path.splitext handles dotted directory names correctly
              # (e.g. "foo.d/README" has no extension, not ".d/readme").
              ext = os.path.splitext(path)[1].lower()
              if ext in skip_ext:
                  continue

              if status not in ("added", "modified", "renamed"):
                  continue

              try:
                  data = open(path, "rb").read()
              except FileNotFoundError:
                  # If renamed, the new path should exist; if not, skip.
                  continue

              # Skip binary-ish
              if b"\x00" in data[:4096]:
                  continue

              text = data.decode("utf-8", errors="replace")
              head = text[:3000]

              checked.append(path)
              if not header_re.search(head):
                  bad.append(path)

          if bad:
              print("\n❌ Copyright header check failed.")
              print(f"Expected current year {year} in header for these changed files:")
              for p in bad:
                  print(f"  - {p}")
              print("\nExpected header examples:")
              print(f"  Copyright (c) {year} Ping Identity Corporation. All rights reserved.")
              print(f"  Copyright (c) 2024-{year} Ping Identity Corporation. All rights reserved.")
              sys.exit(1)

          print(f"\n✅ Copyright header check passed for {len(checked)} file(s).")
          PY
        shell: bash