Compare commits
No commits in common. "main" and "test/mobile-palette-context-coverage" have entirely different histories.
main
...
test/mobil
@ -49,11 +49,6 @@ if [ "$MERGED" != "true" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# NOTE: no || true — with set -euo pipefail, jq parse failures (e.g. field
|
||||
# missing from API response) propagate as hard errors. Use jq's // operator
|
||||
# for graceful defaults instead of bash || true guards. This was re-added by
|
||||
# 8c343e3a ("fix(gitea): add || true guards to jq pipelines") — reverted
|
||||
# here because the guards mask silent failures that hide malformed API responses.
|
||||
MERGE_SHA=$(echo "$PR" | jq -r '.merge_commit_sha // empty')
|
||||
MERGED_BY=$(echo "$PR" | jq -r '.merged_by.login // "unknown"')
|
||||
TITLE=$(echo "$PR" | jq -r '.title // ""')
|
||||
@ -102,9 +97,6 @@ fi
|
||||
|
||||
# 5. Emit structured audit event.
|
||||
NOW=$(date -u +%Y-%m-%dT%H:%M:%SZ)
|
||||
# jq -R (raw input) converts each line to a JSON string; jq -s wraps into array.
|
||||
# If FAILED_CHECKS is unexpectedly empty (shouldn't happen — we exit above),
|
||||
# this produces []. No || true needed.
|
||||
FAILED_JSON=$(printf '%s\n' "${FAILED_CHECKS[@]}" | jq -R . | jq -s .)
|
||||
|
||||
# Print as a single-line JSON so Vector's parse_json transform can pick
|
||||
|
||||
@ -301,19 +301,7 @@ def expected_context(job_key: str, workflow_name: str = "ci") -> str:
|
||||
# Drift detection
|
||||
# --------------------------------------------------------------------------
|
||||
def detect_drift(branch: str) -> tuple[list[str], dict]:
|
||||
"""Returns (findings, debug). Empty findings == no drift.
|
||||
|
||||
Raises:
|
||||
ApiError: propagated from the protection fetch only when the
|
||||
failure is likely a transient Gitea outage (5xx).
|
||||
403/404 from the protection endpoint is treated as
|
||||
"cannot determine drift for this branch" — a token-
|
||||
scope issue (missing repo-admin on DRIFT_BOT_TOKEN) or
|
||||
a repo with no protection set should not turn the
|
||||
hourly cron red. The workflow continues to the next
|
||||
branch; no [ci-drift] issue is filed for a branch
|
||||
whose protection cannot be read.
|
||||
"""
|
||||
"""Returns (findings, debug). Empty findings == no drift."""
|
||||
findings: list[str] = []
|
||||
|
||||
ci_doc = load_yaml(CI_WORKFLOW_PATH)
|
||||
@ -325,50 +313,9 @@ def detect_drift(branch: str) -> tuple[list[str], dict]:
|
||||
env_set = required_checks_env(audit_doc)
|
||||
|
||||
# Protection
|
||||
# api() raises ApiError on non-2xx. Transient 5xx should fail loud.
|
||||
# 403/404 means the token lacks repo-admin scope (Gitea 1.22.6's
|
||||
# branch_protections endpoint requires it — see DRIFT_BOT_TOKEN
|
||||
# provisioning trail in ci-required-drift.yml). Treat as
|
||||
# "cannot determine drift for this branch" — skip without turning
|
||||
# the workflow red. Surface a clear diagnostic so the operator
|
||||
# knows what to fix.
|
||||
contexts: set[str] = set()
|
||||
protection_path = f"/repos/{OWNER}/{NAME}/branch_protections/{branch}"
|
||||
try:
|
||||
_, protection = api("GET", protection_path)
|
||||
except ApiError as e:
|
||||
# Isolate the HTTP status from the error message.
|
||||
http_status: int | None = None
|
||||
msg = str(e)
|
||||
# ApiError message format: "{method} {path} → HTTP {status}: {body}"
|
||||
import re as _re
|
||||
|
||||
m = _re.search(r"HTTP (\d{3})", msg)
|
||||
if m:
|
||||
http_status = int(m.group(1))
|
||||
if http_status in (403, 404):
|
||||
# Token lacks scope OR branch has no protection. Cannot
|
||||
# determine drift — skip this branch. Do NOT exit non-zero;
|
||||
# the issue IS the alarm, not a red workflow.
|
||||
sys.stderr.write(
|
||||
f"::error::GET {protection_path} returned HTTP {http_status} — "
|
||||
f"DRIFT_BOT_TOKEN lacks repo-admin scope (Gitea 1.22.6 "
|
||||
f"requires it for this endpoint) OR branch has no protection "
|
||||
f"configured. Cannot determine drift for {branch}; "
|
||||
f"skipping. Fix: grant repo-admin to mc-drift-bot or "
|
||||
f"configure protection on {branch}.\n"
|
||||
)
|
||||
debug = {
|
||||
"branch": branch,
|
||||
"ci_jobs": sorted(jobs),
|
||||
"sentinel_needs": sorted(needs),
|
||||
"protection_contexts_skipped": True,
|
||||
"protection_http_status": http_status,
|
||||
"audit_env_checks": sorted(env_set),
|
||||
}
|
||||
return [], debug
|
||||
# 5xx — propagate (transient outage, fail loud per design).
|
||||
raise
|
||||
# api() raises ApiError on non-2xx; let it propagate so a transient
|
||||
# 500 fails the run loudly rather than producing a "no drift" lie.
|
||||
_, protection = api("GET", f"/repos/{OWNER}/{NAME}/branch_protections/{branch}")
|
||||
if not isinstance(protection, dict):
|
||||
sys.stderr.write(
|
||||
f"::error::protection response for {branch} not a JSON object\n"
|
||||
|
||||
@ -1,369 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""gitea-merge-queue — conservative serialized merge bot for Gitea.
|
||||
|
||||
Gitea 1.22.6 has auto-merge (`pull_auto_merge`) but no GitHub-style merge
|
||||
queue. This script provides the missing serialized policy in user space:
|
||||
|
||||
1. Pick the oldest open PR carrying QUEUE_LABEL.
|
||||
2. Refuse to act unless main is green.
|
||||
3. Refuse fork PRs; the queue may only mutate same-repo branches.
|
||||
4. If the PR branch does not contain current main, call Gitea's
|
||||
/pulls/{n}/update endpoint and stop. CI must rerun on the updated head.
|
||||
5. If the updated PR head has all required contexts green, merge with the
|
||||
non-bypass merge actor token.
|
||||
|
||||
The script is intentionally one-PR-per-run. Workflow/cron concurrency should
|
||||
serialize invocations so two green PRs cannot merge against the same main.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import dataclasses
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from typing import Any
|
||||
|
||||
|
||||
def _env(key: str, *, default: str = "") -> str:
|
||||
return os.environ.get(key, default)
|
||||
|
||||
|
||||
# Runtime configuration, all sourced from the environment. Empty defaults
# keep import side-effect free; _require_runtime_env() enforces the
# contract before any work is done.
GITEA_TOKEN = _env("GITEA_TOKEN")  # API token of the non-bypass merge actor
GITEA_HOST = _env("GITEA_HOST")  # host only, no scheme (URL is built below)
REPO = _env("REPO")  # "owner/name"
WATCH_BRANCH = _env("WATCH_BRANCH", default="main")  # branch the queue serializes onto
QUEUE_LABEL = _env("QUEUE_LABEL", default="merge-queue")  # label that enqueues a PR
HOLD_LABEL = _env("HOLD_LABEL", default="merge-queue-hold")  # label that parks a PR
UPDATE_STYLE = _env("UPDATE_STYLE", default="merge")  # "merge" or "rebase"; validated later
# Comma-separated status-check contexts that must all be green to merge.
REQUIRED_CONTEXTS_RAW = _env(
    "REQUIRED_CONTEXTS",
    default=(
        "CI / all-required (pull_request),"
        "sop-checklist / all-items-acked (pull_request)"
    ),
)

# Split "owner/name"; the padding trick yields ("", "") when REPO is unset
# so importing the module never raises.
OWNER, NAME = (REPO.split("/", 1) + [""])[:2] if REPO else ("", "")
API = f"https://{GITEA_HOST}/api/v1" if GITEA_HOST else ""
|
||||
|
||||
|
||||
class ApiError(RuntimeError):
    """Raised when a Gitea API call fails or returns an untrusted body."""
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class MergeDecision:
    """Outcome of evaluate_merge_readiness: what the queue does next."""

    # True only when action == "merge".
    ready: bool
    # One of "pause" (main red), "update" (stale base), "wait" (CI pending
    # or red), "merge" (all gates green).
    action: str
    # Human-readable explanation surfaced in workflow logs.
    reason: str
|
||||
|
||||
|
||||
def _require_runtime_env() -> None:
    """Exit(2) with a ::error:: line unless the env contract is satisfied."""
    mandatory = ("GITEA_TOKEN", "GITEA_HOST", "REPO", "WATCH_BRANCH", "QUEUE_LABEL")
    for key in mandatory:
        if os.environ.get(key):
            continue
        sys.stderr.write(f"::error::missing required env var: {key}\n")
        sys.exit(2)
    if UPDATE_STYLE not in ("merge", "rebase"):
        sys.stderr.write("::error::UPDATE_STYLE must be merge or rebase\n")
        sys.exit(2)
|
||||
|
||||
|
||||
def api(
    method: str,
    path: str,
    *,
    body: dict | None = None,
    query: dict[str, str] | None = None,
    expect_json: bool = True,
) -> tuple[int, Any]:
    """Call the Gitea REST API and return (status, parsed_body).

    Raises:
        ApiError: on any non-2xx status, and on a JSON-decode failure
            when expect_json is True — so a normal return can be
            trusted to be a successful, well-formed response.
    """
    url = f"{API}{path}"
    if query:
        url = f"{url}?{urllib.parse.urlencode(query)}"
    data = None
    headers = {
        "Authorization": f"token {GITEA_TOKEN}",
        "Accept": "application/json",
    }
    if body is not None:
        data = json.dumps(body).encode("utf-8")
        headers["Content-Type"] = "application/json"
    req = urllib.request.Request(url, method=method, data=data, headers=headers)
    try:
        with urllib.request.urlopen(req, timeout=30) as resp:
            raw = resp.read()
            status = resp.status
    except urllib.error.HTTPError as exc:
        # HTTPError still carries a readable body; capture it so the
        # non-2xx branch below can include a snippet in the error.
        raw = exc.read()
        status = exc.code

    if not (200 <= status < 300):
        # Cap the snippet so a large HTML error page cannot flood logs.
        snippet = raw[:500].decode("utf-8", errors="replace") if raw else ""
        raise ApiError(f"{method} {path} -> HTTP {status}: {snippet}")
    if not raw:
        # Empty body (e.g. 204 No Content) — nothing to decode.
        return status, None
    try:
        return status, json.loads(raw)
    except json.JSONDecodeError as exc:
        if expect_json:
            raise ApiError(f"{method} {path} -> HTTP {status} non-JSON: {exc}") from exc
        # Caller opted out of strict JSON; hand back the raw text.
        return status, {"_raw": raw.decode("utf-8", errors="replace")}
|
||||
|
||||
|
||||
def required_contexts(raw: str) -> list[str]:
    """Split a comma-separated context list, dropping blanks and whitespace."""
    stripped = (piece.strip() for piece in raw.split(","))
    return [piece for piece in stripped if piece]
|
||||
|
||||
|
||||
def status_state(status: dict) -> str:
    """Normalize a commit-status object to a lowercase state string.

    Gitea exposes the state under "status" in some payloads and "state"
    in others; check both, preferring "status".
    """
    raw = status.get("status") or status.get("state") or ""
    return str(raw).lower()
|
||||
|
||||
|
||||
def latest_statuses_by_context(statuses: list[dict]) -> dict[str, dict]:
    """Keep the first status seen per context string.

    "First seen" is treated as the latest — NOTE(review): this assumes
    the API returns statuses newest-first; confirm against Gitea's
    commit-status ordering.
    """
    first_seen: dict[str, dict] = {}
    for entry in statuses:
        ctx = entry.get("context")
        if not isinstance(ctx, str):
            continue  # malformed entry: no usable context key
        first_seen.setdefault(ctx, entry)
    return first_seen
|
||||
|
||||
|
||||
def required_contexts_green(
    latest_statuses: dict[str, dict],
    contexts: list[str],
) -> tuple[bool, list[str]]:
    """Check that every required context reports success.

    Returns (all_green, problems); problems lists each context that is
    missing or not green, formatted "context=state" ("missing" when the
    context has no status at all).
    """
    problems: list[str] = []
    for ctx in contexts:
        entry = latest_statuses.get(ctx) or {}
        state = str(entry.get("status") or entry.get("state") or "").lower()
        if state != "success":
            problems.append(f"{ctx}={state or 'missing'}")
    return (len(problems) == 0, problems)
|
||||
|
||||
|
||||
def label_names(issue: dict) -> set[str]:
    """Collect the string names of an issue's labels, skipping malformed entries."""
    names: set[str] = set()
    for label in issue.get("labels", []):
        if not isinstance(label, dict):
            continue
        name = label.get("name")
        if isinstance(name, str):
            names.add(name)
    return names
|
||||
|
||||
|
||||
def choose_next_queued_issue(
|
||||
issues: list[dict],
|
||||
*,
|
||||
queue_label: str,
|
||||
hold_label: str = "",
|
||||
) -> dict | None:
|
||||
candidates = []
|
||||
for issue in issues:
|
||||
labels = label_names(issue)
|
||||
if queue_label not in labels:
|
||||
continue
|
||||
if hold_label and hold_label in labels:
|
||||
continue
|
||||
if "pull_request" not in issue:
|
||||
continue
|
||||
candidates.append(issue)
|
||||
candidates.sort(key=lambda issue: (issue.get("created_at") or "", int(issue["number"])))
|
||||
return candidates[0] if candidates else None
|
||||
|
||||
|
||||
def pr_contains_base_sha(commits: list[dict], base_sha: str) -> bool:
    """True when any PR commit matches base_sha (by "sha", falling back to "id")."""
    return any((commit.get("sha") or commit.get("id")) == base_sha for commit in commits)
|
||||
|
||||
|
||||
def pr_has_current_base(pr: dict, commits: list[dict], main_sha: str) -> bool:
    """True when the PR already contains current main.

    Fast path: the PR's merge_base equals main's head sha. Slow path:
    scan the PR's commit list for the sha itself.
    """
    if pr.get("merge_base") == main_sha:
        return True
    return any((commit.get("sha") or commit.get("id")) == main_sha for commit in commits)
|
||||
|
||||
|
||||
def evaluate_merge_readiness(
    *,
    main_status: dict,
    pr_status: dict,
    required_contexts: list[str],
    pr_has_current_base: bool,
) -> MergeDecision:
    """Decide the queue's next action for the head PR.

    Branch order is the contract: a red main pauses the whole queue
    before anything else; a stale base forces an update before CI
    results are even considered; only a fully green, current PR merges.
    """
    main_state = str(main_status.get("state") or "").lower()
    if main_state != "success":
        return MergeDecision(False, "pause", f"main status is {main_state or 'missing'}")
    if not pr_has_current_base:
        return MergeDecision(False, "update", "PR head does not contain current main")

    # Combined state must be green before drilling into per-context checks.
    pr_state = str(pr_status.get("state") or "").lower()
    if pr_state != "success":
        return MergeDecision(False, "wait", f"PR combined status is {pr_state or 'missing'}")

    # Even with a green combined state, every REQUIRED context must
    # individually be green (a missing required context is not success).
    latest = latest_statuses_by_context(pr_status.get("statuses") or [])
    ok, missing_or_bad = required_contexts_green(latest, required_contexts)
    if not ok:
        return MergeDecision(False, "wait", "required contexts not green: " + ", ".join(missing_or_bad))
    return MergeDecision(True, "merge", "ready")
|
||||
|
||||
|
||||
def get_branch_head(branch: str) -> str:
    """Return the head commit sha of *branch*; ApiError on malformed payload."""
    _, payload = api("GET", f"/repos/{OWNER}/{NAME}/branches/{branch}")
    commit = payload.get("commit") if isinstance(payload, dict) else None
    sha = commit.get("id") if isinstance(commit, dict) else None
    if isinstance(sha, str) and len(sha) >= 7:
        return sha
    raise ApiError(f"branch {branch} response missing commit id")
|
||||
|
||||
|
||||
def get_combined_status(sha: str) -> dict:
    """Fetch the combined commit status for *sha*; the body must be an object."""
    _, payload = api("GET", f"/repos/{OWNER}/{NAME}/commits/{sha}/status")
    if isinstance(payload, dict):
        return payload
    raise ApiError(f"status for {sha} response not object")
|
||||
|
||||
|
||||
def list_queued_issues() -> list[dict]:
    """List open PR-issues carrying QUEUE_LABEL (first 50, API order)."""
    params = {
        "state": "open",
        "type": "pulls",
        "labels": QUEUE_LABEL,
        "limit": "50",
    }
    _, payload = api("GET", f"/repos/{OWNER}/{NAME}/issues", query=params)
    if isinstance(payload, list):
        return payload
    raise ApiError("queued issues response not list")
|
||||
|
||||
|
||||
def get_pull(pr_number: int) -> dict:
    """Fetch PR #pr_number; the body must be a JSON object."""
    _, payload = api("GET", f"/repos/{OWNER}/{NAME}/pulls/{pr_number}")
    if isinstance(payload, dict):
        return payload
    raise ApiError(f"PR #{pr_number} response not object")
|
||||
|
||||
|
||||
def get_pull_commits(pr_number: int) -> list[dict]:
    """Fetch the commit list of PR #pr_number; the body must be a JSON list."""
    _, payload = api("GET", f"/repos/{OWNER}/{NAME}/pulls/{pr_number}/commits")
    if isinstance(payload, list):
        return payload
    raise ApiError(f"PR #{pr_number} commits response not list")
|
||||
|
||||
|
||||
def post_comment(pr_number: int, body: str, *, dry_run: bool) -> None:
    """Post *body* as a comment on PR #pr_number; in dry-run only log intent.

    Fix: the log line previously did `body.splitlines()[0]`, which raises
    IndexError when body is empty — guard with an empty-string fallback.
    """
    first_line = body.splitlines()[0] if body else ""
    print(f"::notice::comment PR #{pr_number}: {first_line[:160]}")
    if dry_run:
        return
    api("POST", f"/repos/{OWNER}/{NAME}/issues/{pr_number}/comments", body={"body": body})
|
||||
|
||||
|
||||
def update_pull(pr_number: int, *, dry_run: bool) -> None:
    """Ask Gitea to update the PR branch with its base, using UPDATE_STYLE."""
    print(f"::notice::updating PR #{pr_number} with base branch via style={UPDATE_STYLE}")
    if dry_run:
        return
    endpoint = f"/repos/{OWNER}/{NAME}/pulls/{pr_number}/update"
    # The update endpoint returns no JSON body on success.
    api("POST", endpoint, query={"style": UPDATE_STYLE}, expect_json=False)
|
||||
|
||||
|
||||
def merge_pull(pr_number: int, *, dry_run: bool) -> None:
    """Merge PR #pr_number via Gitea's merge endpoint (no JSON reply expected)."""
    print(f"::notice::merging PR #{pr_number}")
    if dry_run:
        return
    payload = {
        "Do": "merge",
        "MergeTitleField": f"Merge PR #{pr_number} via Gitea merge queue",
        "MergeMessageField": (
            "Serialized merge by gitea-merge-queue after current-main, "
            "SOP, and required CI checks were green."
        ),
    }
    api("POST", f"/repos/{OWNER}/{NAME}/pulls/{pr_number}/merge", body=payload, expect_json=False)
|
||||
|
||||
|
||||
def process_once(*, dry_run: bool = False) -> int:
    """Run one merge-queue tick: advance at most one PR by one step.

    Mirrors the module docstring's policy: require green main, pick the
    oldest queued same-repo PR, update a stale branch OR merge a fully
    green one. Always returns 0 — "nothing to do" is not an error.
    """
    contexts = required_contexts(REQUIRED_CONTEXTS_RAW)
    main_sha = get_branch_head(WATCH_BRANCH)
    main_status = get_combined_status(main_sha)
    # Gate 1: a red main pauses the entire queue.
    if str(main_status.get("state") or "").lower() != "success":
        print(f"::notice::queue paused: {WATCH_BRANCH}@{main_sha[:8]} is not green")
        return 0

    issue = choose_next_queued_issue(
        list_queued_issues(),
        queue_label=QUEUE_LABEL,
        hold_label=HOLD_LABEL,
    )
    if not issue:
        print("::notice::merge queue empty")
        return 0

    pr_number = int(issue["number"])
    pr = get_pull(pr_number)
    if pr.get("state") != "open":
        print(f"::notice::PR #{pr_number} is not open; skipping")
        return 0
    if pr.get("base", {}).get("ref") != WATCH_BRANCH:
        post_comment(pr_number, f"merge-queue: skipped; base branch is not `{WATCH_BRANCH}`.", dry_run=dry_run)
        return 0
    # Gate 2: refuse fork PRs — the queue may only mutate same-repo branches.
    if pr.get("head", {}).get("repo_id") != pr.get("base", {}).get("repo_id"):
        post_comment(pr_number, "merge-queue: skipped; fork PRs are not supported by the serialized queue.", dry_run=dry_run)
        return 0

    head_sha = pr.get("head", {}).get("sha")
    if not isinstance(head_sha, str) or len(head_sha) < 7:
        raise ApiError(f"PR #{pr_number} missing head sha")
    commits = get_pull_commits(pr_number)
    current_base = pr_has_current_base(pr, commits, main_sha)
    pr_status = get_combined_status(head_sha)
    decision = evaluate_merge_readiness(
        main_status=main_status,
        pr_status=pr_status,
        required_contexts=contexts,
        pr_has_current_base=current_base,
    )

    print(f"::notice::PR #{pr_number} decision={decision.action}: {decision.reason}")
    if decision.action == "update":
        # Stop after updating: CI must rerun on the refreshed head
        # before the next tick can consider merging.
        update_pull(pr_number, dry_run=dry_run)
        post_comment(
            pr_number,
            (
                f"merge-queue: updated this branch with `{WATCH_BRANCH}` at "
                f"`{main_sha[:12]}`. Waiting for CI on the refreshed head."
            ),
            dry_run=dry_run,
        )
        return 0
    if decision.ready:
        # Re-read main immediately before merging: if it moved during
        # this tick, defer rather than merge against a stale base.
        latest_main_sha = get_branch_head(WATCH_BRANCH)
        if latest_main_sha != main_sha:
            print(
                f"::notice::main moved {main_sha[:8]} -> {latest_main_sha[:8]}; "
                "deferring to next tick"
            )
            return 0
        merge_pull(pr_number, dry_run=dry_run)
        return 0
    return 0
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: parse --dry-run, validate env, run one queue tick."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--dry-run", action="store_true")
    options = parser.parse_args()
    _require_runtime_env()
    return process_once(dry_run=options.dry_run)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate the tick's exit code to the workflow runner.
    sys.exit(main())
|
||||
@ -1,113 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Lint workflow bash for curl status-code capture pollution.
|
||||
|
||||
The bad shape is:
|
||||
|
||||
HTTP_CODE=$(curl ... -w '%{http_code}' ... || echo "000")
|
||||
|
||||
`curl -w` writes the HTTP code to stdout before returning non-zero, so
|
||||
fallback output inside the same command substitution appends another code.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import glob
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import NamedTuple
|
||||
|
||||
|
||||
SELF = ".gitea/workflows/lint-curl-status-capture.yml"
|
||||
|
||||
|
||||
class Finding(NamedTuple):
    """One lint hit: the offending file plus a trimmed snippet of the match."""

    path: str  # workflow file containing the bad capture
    snippet: str  # whitespace-collapsed match text, truncated by scan_content
|
||||
|
||||
|
||||
# Matches a $(...) command substitution that (a) runs curl, (b) captures
# the HTTP code via -w '%{http_code}', and (c) appends a fallback
# `|| echo 000` / `|| printf 000` INSIDE the same substitution — the
# polluting shape this lint exists to catch. `[^)]*` keeps the match
# within a single substitution; DOTALL lets it span content that was a
# multi-line command before _logical_shell collapsed continuations.
BAD_STATUS_CAPTURE = re.compile(
    r"""
    \$\(\s*
    curl\b
    [^)]*
    -w\s*['"]%\{http_code\}['"]
    [^)]*
    \|\|\s*
    (?:
        echo\s+['"]?000['"]?
        |
        printf\s+['"]000['"]
    )
    \s*\)
    """,
    re.DOTALL | re.VERBOSE,
)
|
||||
|
||||
|
||||
def _logical_shell(content: str) -> str:
|
||||
"""Collapse bash line continuations so one curl command is one string."""
|
||||
return re.sub(r"\\\s*\n\s*", " ", content)
|
||||
|
||||
|
||||
def scan_content(path: str, content: str) -> list[Finding]:
    """Scan one file's text and return a Finding per polluting capture site."""
    flat = _logical_shell(content)
    findings: list[Finding] = []
    for match in BAD_STATUS_CAPTURE.finditer(flat):
        snippet = re.sub(r"\s+", " ", match.group(0)).strip()[:160]
        findings.append(Finding(path=path, snippet=snippet))
    return findings
|
||||
|
||||
|
||||
def scan_paths(paths: list[str]) -> list[Finding]:
    """Scan every path, skipping the lint's own workflow file (SELF)."""
    collected: list[Finding] = []
    for candidate in paths:
        if candidate == SELF:
            continue  # never lint ourselves
        text = Path(candidate).read_text(encoding="utf-8")
        collected.extend(scan_content(candidate, text))
    return collected
|
||||
|
||||
|
||||
def default_paths() -> list[str]:
    """All Gitea workflow YAMLs, sorted for a deterministic scan order."""
    return sorted(glob.glob(".gitea/workflows/*.yml"))
|
||||
|
||||
|
||||
def print_report(findings: list[Finding]) -> None:
|
||||
if not findings:
|
||||
print("OK No curl-status-capture pollution patterns detected")
|
||||
return
|
||||
|
||||
print(f"::error::Found {len(findings)} curl-status-capture pollution site(s):")
|
||||
for finding in findings:
|
||||
print(
|
||||
f"::error file={finding.path}::Curl status-capture pollution: "
|
||||
"'|| echo/printf 000' inside a $(curl ... -w '%{http_code}' ...) "
|
||||
"subshell. On non-2xx or connection failure, curl's -w writes a "
|
||||
"status, then exits non-zero, then the fallback appends another "
|
||||
"status. Fix: route -w into a tempfile so the exit code cannot "
|
||||
"pollute stdout."
|
||||
)
|
||||
print(f" matched: {finding.snippet}...")
|
||||
|
||||
print()
|
||||
print("Fix template:")
|
||||
print(" set +e")
|
||||
print(" curl ... -w '%{http_code}' >code.txt 2>/dev/null")
|
||||
print(" set -e")
|
||||
print(' HTTP_CODE=$(cat code.txt 2>/dev/null)')
|
||||
print(' [ -z "$HTTP_CODE" ] && HTTP_CODE="000"')
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
    """Lint entry point; exit 1 when any pollution site is found, else 0."""
    parser = argparse.ArgumentParser()
    parser.add_argument("paths", nargs="*", help="workflow files to scan")
    args = parser.parse_args(argv)

    targets = args.paths or default_paths()
    findings = scan_paths(targets)
    print_report(findings)
    return 1 if findings else 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # SystemExit carries the lint's exit code (1 when findings exist).
    raise SystemExit(main())
|
||||
@ -1,404 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""lint-required-no-paths — structural enforcement of
|
||||
`feedback_path_filtered_workflow_cant_be_required`.
|
||||
|
||||
For every workflow whose status-check context appears in
|
||||
`branch_protections/<branch>.status_check_contexts`, assert that the
|
||||
workflow's `on:` block has NO `paths:` and NO `paths-ignore:` filter.
|
||||
|
||||
A required-check workflow with a paths filter silently degrades the
|
||||
merge gate:
|
||||
|
||||
- If the PR's diff doesn't match the `paths:` glob, the workflow
|
||||
never fires.
|
||||
- Gitea (1.22.6) reports the required context as `pending` (never as
|
||||
`skipped == success`), so the PR cannot merge.
|
||||
- For a docs-only PR against `paths: ['**.go']`, the PR is
|
||||
blocked forever — no human action can produce a green.
|
||||
|
||||
The class was previously prevented only by reviewer vigilance + the
|
||||
saved memory `feedback_path_filtered_workflow_cant_be_required`. This
|
||||
script makes it a hard CI gate so a future PR adding `paths:` to a
|
||||
required workflow fails fast at PR time, not after merge when the next
|
||||
docs PR wedges main.
|
||||
|
||||
The lint runs as `.gitea/workflows/lint-required-no-paths.yml` on every
|
||||
PR. The lint workflow ITSELF must not have a paths-filter (otherwise it
|
||||
could be circumvented by a paths-non-matching PR) — that's enforced by
|
||||
self-reference and by the workflow's own `on:` block deliberately
|
||||
omitting filters.
|
||||
|
||||
Sources of truth:
|
||||
- `branch_protections/<branch>` `status_check_contexts` (the merge gate)
|
||||
- `.gitea/workflows/*.yml` `name:` + `on:` (the workflow set)
|
||||
|
||||
Context-format note (Gitea 1.22.6):
|
||||
Status-check contexts are formatted `{workflow_name} / {job_name_or_key} ({event})`.
|
||||
We parse the workflow_name prefix and walk `.gitea/workflows/*.yml` for
|
||||
a file whose `name:` attr matches. (The filename is NOT the source of
|
||||
truth; `name:` is, because Gitea formats the context from `name:`.)
|
||||
|
||||
Exit codes:
|
||||
0 — no required workflow has a paths/paths-ignore filter (clean) OR
|
||||
branch_protections endpoint returned 403/404 (token-scope issue;
|
||||
surfaced via ::error:: but non-fatal so a missing scope doesn't
|
||||
red-X every PR — fix the token, not the lint).
|
||||
1 — at least one required workflow has a paths/paths-ignore filter
|
||||
(the gate-degrading defect class).
|
||||
2 — env contract violation (missing GITEA_TOKEN/HOST/REPO/BRANCH).
|
||||
3 — workflows directory missing or workflow YAML unparseable.
|
||||
4 — protection response shape unexpected (non-dict body on 2xx).
|
||||
|
||||
Auth note: `GET /repos/.../branch_protections/{branch}` requires
|
||||
repo-admin role in Gitea 1.22.6. The workflow-default `GITHUB_TOKEN`
|
||||
is non-admin; we re-use `DRIFT_BOT_TOKEN` (same persona that powers
|
||||
ci-required-drift.yml). If `DRIFT_BOT_TOKEN` is unavailable in a future
|
||||
context, the script falls through gracefully (exit 0 + ::error::).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml # PyYAML 6.0.2 — installed by the workflow before this runs.
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Environment
|
||||
# --------------------------------------------------------------------------
|
||||
def _env(key: str, *, required: bool = True, default: str | None = None) -> str:
|
||||
val = os.environ.get(key, default)
|
||||
if required and not val:
|
||||
sys.stderr.write(f"::error::missing required env var: {key}\n")
|
||||
sys.exit(2)
|
||||
return val or ""
|
||||
|
||||
|
||||
# All optional at import time (required=False) so tests can import
# individual functions without the full env; run() enforces the contract
# via _require_runtime_env().
GITEA_TOKEN = _env("GITEA_TOKEN", required=False)
GITEA_HOST = _env("GITEA_HOST", required=False)  # host only; URL built below
REPO = _env("REPO", required=False)  # "owner/name"
BRANCH = _env("BRANCH", required=False, default="main")
WORKFLOWS_DIR = _env(
    "WORKFLOWS_DIR", required=False, default=".gitea/workflows"
)

# Split "owner/name"; padding yields ("", "") when REPO is unset so the
# module still imports cleanly.
OWNER, NAME = (REPO.split("/", 1) + [""])[:2] if REPO else ("", "")
API = f"https://{GITEA_HOST}/api/v1" if GITEA_HOST else ""
|
||||
|
||||
|
||||
def _require_runtime_env() -> None:
|
||||
"""Enforce env contract — called from `run()` only. Tests import
|
||||
individual functions without setting the full env contract."""
|
||||
for key in ("GITEA_TOKEN", "GITEA_HOST", "REPO", "BRANCH"):
|
||||
if not os.environ.get(key):
|
||||
sys.stderr.write(f"::error::missing required env var: {key}\n")
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Tiny HTTP helper (mirrors ci-required-drift.py contract:
|
||||
# raise on non-2xx and on JSON-decode-fail when JSON expected, per
|
||||
# `feedback_api_helper_must_raise_not_return_dict`).
|
||||
# --------------------------------------------------------------------------
|
||||
class ApiError(RuntimeError):
    """A Gitea API call could not be trusted to have succeeded."""
|
||||
|
||||
|
||||
def api(
    method: str,
    path: str,
    *,
    body: dict | None = None,
    query: dict[str, str] | None = None,
    expect_json: bool = True,
) -> tuple[int, Any]:
    """Call the Gitea REST API; return (status, parsed_json_or_None).

    Raises ApiError on non-2xx and on JSON-decode failure when
    expect_json is True, so callers never have to inspect the status
    for error handling (per `feedback_api_helper_must_raise_not_return_dict`).
    """
    url = f"{API}{path}"
    if query:
        url = f"{url}?{urllib.parse.urlencode(query)}"
    data = None
    headers = {
        "Authorization": f"token {GITEA_TOKEN}",
        "Accept": "application/json",
    }
    if body is not None:
        data = json.dumps(body).encode("utf-8")
        headers["Content-Type"] = "application/json"
    req = urllib.request.Request(url, method=method, data=data, headers=headers)
    try:
        with urllib.request.urlopen(req, timeout=30) as resp:
            raw = resp.read()
            status = resp.status
    except urllib.error.HTTPError as e:
        # Non-2xx responses arrive here; keep the body for the snippet.
        raw = e.read()
        status = e.code

    if not (200 <= status < 300):
        # Cap the snippet so a large HTML error page cannot flood logs.
        snippet = raw[:500].decode("utf-8", errors="replace") if raw else ""
        raise ApiError(f"{method} {path} → HTTP {status}: {snippet}")

    if not raw:
        # Empty body (e.g. 204 No Content) — nothing to decode.
        return status, None
    try:
        return status, json.loads(raw)
    except json.JSONDecodeError as e:
        if expect_json:
            raise ApiError(
                f"{method} {path} → HTTP {status} but body is not JSON: {e}"
            ) from e
        # Caller opted out of strict JSON; hand back the raw text.
        return status, {"_raw": raw.decode("utf-8", errors="replace")}
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Status-check context parser
|
||||
# --------------------------------------------------------------------------
|
||||
# Format: "<workflow_name> / <job_name_or_key> (<event>)"
|
||||
# Examples observed on molecule-core/main:
|
||||
# "Secret scan / Scan diff for credential-shaped strings (pull_request)"
|
||||
# "sop-tier-check / tier-check (pull_request)"
|
||||
#
|
||||
# Split strategy: peel off the trailing ` (<event>)` first, then split
|
||||
# the leading `<workflow> / <rest>` on the FIRST ` / ` (workflow names
|
||||
# come from `name:` attrs which conventionally don't embed ' / '; job
|
||||
# names CAN, so we keep the rest of the slash-divided text as the job
|
||||
# name). This matches Gitea's `name: ` semantics.
|
||||
_CONTEXT_RE = re.compile(r"^(?P<workflow>.+?) / (?P<job>.+) \((?P<event>[^)]+)\)$")
|
||||
|
||||
|
||||
def parse_context(ctx: str) -> tuple[str, str, str] | None:
|
||||
"""Parse `<workflow> / <job> (<event>)` → (workflow, job, event) or None."""
|
||||
if not ctx:
|
||||
return None
|
||||
m = _CONTEXT_RE.match(ctx)
|
||||
if not m:
|
||||
return None
|
||||
return m.group("workflow"), m.group("job"), m.group("event")
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# workflow-name → file resolution
|
||||
# --------------------------------------------------------------------------
|
||||
def _iter_workflow_files() -> list[Path]:
    """Sorted list of workflow YAMLs; exit(3) when the directory is absent."""
    root = Path(WORKFLOWS_DIR)
    if not root.is_dir():
        sys.stderr.write(f"::error::workflows directory not found: {root}\n")
        sys.exit(3)
    # Accept both extensions — Gitea does, and a future port might use .yaml.
    candidates = [*root.glob("*.yml"), *root.glob("*.yaml")]
    return sorted(candidates)
|
||||
|
||||
|
||||
def resolve_workflow_file(workflow_name: str) -> Path | None:
    """Find the workflow YAML whose `name:` attr equals *workflow_name*.

    Filename is deliberately NOT a fallback: Gitea derives the status
    context from `name:`, so only `name:` can match a protection
    context. Exits(3) on unparseable YAML; returns None when no file
    matches.
    """
    for candidate in _iter_workflow_files():
        try:
            parsed = yaml.safe_load(candidate.read_text(encoding="utf-8"))
        except yaml.YAMLError as err:
            sys.stderr.write(f"::error::YAML parse error in {candidate}: {err}\n")
            sys.exit(3)
        if isinstance(parsed, dict) and parsed.get("name") == workflow_name:
            return candidate
    return None
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# paths-filter detection
|
||||
# --------------------------------------------------------------------------
|
||||
# Triggers that accept `paths:` / `paths-ignore:` (per GitHub Actions /
|
||||
# Gitea Actions docs): pull_request, pull_request_target, push.
|
||||
# We don't enumerate — any sub-key named `paths` or `paths-ignore`
|
||||
# inside an event mapping is flagged.
|
||||
# Filter keys that degrade a required check: an event carrying either can
# silently skip the workflow while its protection context stays pending.
_PATHS_KEYS = ("paths", "paths-ignore")
|
||||
|
||||
|
||||
def detect_paths_filters(workflow_path: Path) -> list[str]:
    """Walk the workflow's `on:` block and return a list of findings, one
    per offending `paths`/`paths-ignore` key.

    Args:
        workflow_path: path to the workflow YAML file to inspect.

    Returns:
        Empty list if the workflow has no paths/paths-ignore filter
        anywhere in its `on:` block. Otherwise, a list of human-readable
        strings naming the event and filter key + the filter contents.

    Exits 3 (fail-loud) on a YAML parse error — an unparseable required
    workflow cannot be linted.
    """
    try:
        doc = yaml.safe_load(workflow_path.read_text(encoding="utf-8"))
    except yaml.YAMLError as e:
        sys.stderr.write(f"::error::YAML parse error in {workflow_path}: {e}\n")
        sys.exit(3)
    # Non-mapping documents (None, lists, scalars) have no `on:` block.
    if not isinstance(doc, dict):
        return []

    on_block = doc.get("on") or doc.get(True)  # PyYAML 6 quirk: `on:`
    # under default constructor sometimes becomes the bool key `True`
    # because YAML 1.1 treats `on` as a boolean. Tolerate both.
    if on_block is None:
        return []

    findings: list[str] = []

    # Shape A: `on: pull_request` (string shorthand) — cannot carry filters.
    if isinstance(on_block, str):
        return []
    # Shape B: `on: [pull_request, push]` (list shorthand) — cannot carry filters.
    if isinstance(on_block, list):
        return []
    # Shape C: `on: { event: { ... } }` — the standard mapping case.
    if isinstance(on_block, dict):
        # Defensive: top-level malformed `on.paths` (someone wrote
        # `on: { paths: ['x'] }` thinking it's a workflow-level filter).
        # This is invalid syntax, but if present, flag it — it might
        # not block the workflow from registering (Gitea may ignore the
        # unknown key) and would create a false sense of "filter exists"
        # the lint should still surface.
        for k in _PATHS_KEYS:
            if k in on_block:
                v = on_block[k]
                findings.append(
                    f"top-level `on.{k}` filter (malformed but present): {v!r}"
                )
        for event, event_body in on_block.items():
            if event in _PATHS_KEYS:
                continue  # already handled above
            if not isinstance(event_body, dict):
                # `pull_request: null` / `pull_request: [opened]` shapes —
                # no place for a paths filter to live; skip.
                continue
            # Event mapping: the standard home for `paths:` / `paths-ignore:`.
            for k in _PATHS_KEYS:
                if k in event_body:
                    v = event_body[k]
                    findings.append(
                        f"`on.{event}.{k}` filter present: {v!r}"
                    )
    return findings
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Driver
|
||||
# --------------------------------------------------------------------------
|
||||
def run() -> int:
    """Main lint entrypoint. Returns the process exit code.

    Exit semantics (see module docstring for full table):
      0 — clean (no offending paths-filter on any required workflow),
          OR protection unreadable (403/404) — surfaced as ::error::
          but treated as non-fatal so token-scope issues don't red-X
          every PR.
      1 — at least one required workflow carries a paths/paths-ignore
          filter — the regression class this lint exists to prevent.
    """
    _require_runtime_env()

    protection_path = f"/repos/{OWNER}/{NAME}/branch_protections/{BRANCH}"
    try:
        # api() returns a 2-tuple; the first element is unused here.
        _, protection = api("GET", protection_path)
    except ApiError as e:
        # The HTTP status is only recoverable from the error text.
        msg = str(e)
        m = re.search(r"HTTP (\d{3})", msg)
        http_status = int(m.group(1)) if m else None
        if http_status in (403, 404):
            # Graceful-degrade: token-scope / missing-protection problems
            # are reported loudly but must not fail every PR.
            sys.stderr.write(
                f"::error::GET {protection_path} returned HTTP {http_status} — "
                f"DRIFT_BOT_TOKEN lacks repo-admin scope (Gitea 1.22.6 "
                f"requires it for this endpoint) OR branch '{BRANCH}' has "
                f"no protection configured. Cannot enumerate required "
                f"checks; skipping lint with exit 0 to avoid red-X on "
                f"every PR. Fix: grant repo-admin to mc-drift-bot.\n"
            )
            return 0
        raise

    if not isinstance(protection, dict):
        sys.stderr.write(
            f"::error::protection response for {BRANCH} not a JSON object\n"
        )
        return 4

    # `or []` also covers an explicit JSON null for the field.
    contexts: list[str] = list(protection.get("status_check_contexts") or [])
    if not contexts:
        print(
            f"::notice::branch_protections/{BRANCH} has 0 required "
            f"status_check_contexts; nothing to lint. (no required contexts)"
        )
        return 0

    print(f"::notice::Linting {len(contexts)} required context(s) for paths-filter regressions:")
    for c in contexts:
        print(f"  - {c}")

    offenders: list[tuple[str, Path, list[str]]] = []
    unresolved: list[str] = []

    for ctx in contexts:
        parsed = parse_context(ctx)
        if parsed is None:
            # Unparseable context — warn-not-fail and record it.
            print(
                f"::warning::could not parse context '{ctx}' "
                f"(expected `<workflow> / <job> (<event>)`); skipping"
            )
            unresolved.append(ctx)
            continue
        workflow_name, _job, _event = parsed
        wf_path = resolve_workflow_file(workflow_name)
        if wf_path is None:
            print(
                f"::warning::no workflow file in {WORKFLOWS_DIR} has "
                f"`name: {workflow_name}` (required context '{ctx}'); "
                f"skipping paths-filter check. "
                f"(orphaned-context detection is ci-required-drift's job.)"
            )
            unresolved.append(ctx)
            continue
        findings = detect_paths_filters(wf_path)
        if findings:
            offenders.append((workflow_name, wf_path, findings))
        else:
            print(f"::notice::OK {wf_path.name} ({workflow_name}) — no paths filter")

    if offenders:
        print("")
        print(f"::error::Found {len(offenders)} required workflow(s) with paths/paths-ignore filters:")
        for workflow_name, wf_path, findings in offenders:
            for finding in findings:
                # ::error file=... lets Gitea Actions surface a per-file
                # annotation in the PR UI (when annotations are wired).
                print(
                    f"::error file={wf_path}::Required workflow "
                    f"'{workflow_name}' ({wf_path.name}) has a paths "
                    f"filter that would degrade the merge gate to a "
                    f"silent indefinite pending: {finding}. "
                    f"See feedback_path_filtered_workflow_cant_be_required. "
                    f"Fix: remove the filter and instead gate per-step "
                    f"inside the job with `if: contains(steps.changed.outputs.files, ...)` "
                    f"or refactor to a single-job-with-per-step-if shape."
                )
        return 1

    print("")
    print(
        f"::notice::OK — all {len(contexts) - len(unresolved)} resolvable "
        f"required workflow(s) clean (no paths/paths-ignore filters)."
    )
    if unresolved:
        print(
            f"::notice::{len(unresolved)} required context(s) were not "
            f"resolved to a workflow file (warn-not-fail); see warnings above."
        )
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate the lint's exit code directly to the CI step.
    sys.exit(run())
|
||||
@ -1,519 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""lint-workflow-yaml — catch Gitea-1.22.6-hostile workflow YAML shapes.
|
||||
|
||||
This script enforces ten structural rules that have historically caused
|
||||
silent CI failures on Gitea Actions (1.22.6) — workflows that the server's
|
||||
YAML parser rejects with `[W] ignore invalid workflow ...` and registers
|
||||
for zero events, or shape conventions that produce ambiguous status
|
||||
contexts. Each rule maps to a documented incident in saved memory.
|
||||
|
||||
Rules (8 fatal + 1 fatal cross-file + 1 heuristic-warn):
|
||||
1. `workflow_dispatch.inputs:` block — Gitea 1.22.6 mis-parses the
|
||||
`inputs` keys as sibling event types and rejects the whole file.
|
||||
Memory: feedback_gitea_workflow_dispatch_inputs_unsupported.
|
||||
Origin: 2026-05-11 PyPI freeze (publish-runtime).
|
||||
2. `on: workflow_run:` event — not enumerated in Gitea 1.22.6's
|
||||
supported event list (verified via modules/actions/workflows.go
|
||||
enumeration; task #81). Workflow registers, fires for 0 events.
|
||||
3. `name:` containing `/` — breaks the
|
||||
`<workflow> / <job> (<event>)` commit-status context convention;
|
||||
downstream parsers (sop-tier-check, status-reaper) tokenize on `/`.
|
||||
4. `name:` collision across files — Gitea routes commit-status updates
|
||||
by `name` and behavior on collision is undefined (status-reaper
|
||||
rev1 fail-loud).
|
||||
5. Cross-repo `uses: org/repo/path@ref` — blocked while
|
||||
`[actions].DEFAULT_ACTIONS_URL=github` is the server default;
|
||||
resolves to github.com/<org-suspended>/... and 404s.
|
||||
Memory: feedback_gitea_cross_repo_uses_blocked. Cross-link: task #109.
|
||||
6. (HEURISTIC, warn-not-fail) Steps reference `https://api.github.com`
|
||||
or `https://github.com/.../releases/download` without a
|
||||
workflow-level `env.GITHUB_SERVER_URL` set to the Gitea instance.
|
||||
Memory: feedback_act_runner_github_server_url.
|
||||
7. Production deploy/redeploy workflows may not rely on Gitea
|
||||
`concurrency.cancel-in-progress: false` for serialization. Gitea
|
||||
1.22.6 can cancel queued runs despite that setting.
|
||||
8. Production deploy/redeploy workflows may not dump raw CP responses or
|
||||
raw `.error` fields into CI logs/summaries.
|
||||
9. Production deploy/redeploy workflows must expose an operational control:
|
||||
kill switch for auto deploys or rollback tag for manual deploys.
|
||||
10. Docker health checks must not run `docker info | head` under pipefail.
|
||||
`head` closes the pipe early, `docker info` can exit nonzero from
|
||||
SIGPIPE, and the step can falsely report Docker daemon failure.
|
||||
|
||||
Per `feedback_smoke_test_vendor_truth_not_shape_match`: fixtures used to
|
||||
validate this lint must mirror real Gitea 1.22.6 YAML semantics, not
|
||||
Python yaml-parser quirks. The test suite at tests/test_lint_workflow_yaml.py
|
||||
includes a vendor-truth fixture (the exact publish-runtime regression).
|
||||
|
||||
Usage:
|
||||
python3 .gitea/scripts/lint-workflow-yaml.py
|
||||
Lint every `*.yml` in `.gitea/workflows/`.
|
||||
|
||||
python3 .gitea/scripts/lint-workflow-yaml.py --workflow-dir <path>
|
||||
Lint a custom directory (used by tests/test_lint_workflow_yaml.py).
|
||||
|
||||
Exit codes:
|
||||
0 — clean OR only heuristic-warnings emitted.
|
||||
    1 — at least one fatal rule (1-5, 7-10) violated.
|
||||
    2 — argv usage error or missing PyYAML. (A YAML parse error in a
        workflow file is reported as a fatal violation and exits 1.)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import collections
|
||||
import glob
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, Iterable
|
||||
|
||||
try:
|
||||
import yaml
|
||||
except ImportError:
|
||||
print("::error::PyYAML is required. Install with: pip install PyYAML", file=sys.stderr)
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
# YAML quirk: bare `on:` at the top level parses to the Python `True`
|
||||
# (because `on` is a YAML 1.1 boolean alias). Handle both keys.
|
||||
def _get_on(d: dict) -> Any:
|
||||
if not isinstance(d, dict):
|
||||
return None
|
||||
if "on" in d:
|
||||
return d["on"]
|
||||
if True in d:
|
||||
return d[True]
|
||||
return None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rule 1 — workflow_dispatch.inputs block (Gitea 1.22.6 parser rejects)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def check_workflow_dispatch_inputs(filename: str, doc: Any) -> list[str]:
    """Return per-violation error lines if `workflow_dispatch.inputs` is set.

    Args:
        filename: repo-relative path of the workflow file, used to anchor
            the `::error file=...` annotation in the PR UI.
        doc: parsed YAML document (non-dict shapes are ignored).
    """
    errors: list[str] = []
    on = _get_on(doc)
    if not isinstance(on, dict):
        return errors
    wd = on.get("workflow_dispatch")
    if isinstance(wd, dict) and wd.get("inputs"):
        # FIX: the annotation previously hard-coded `file=(unknown)` and
        # left the `filename` parameter unused; anchor to the real file so
        # the PR UI can place the annotation.
        errors.append(
            f"::error file={filename}::Rule 1 (FATAL): "
            f"`on.workflow_dispatch.inputs:` block detected. Gitea 1.22.6 "
            f"silently rejects the entire workflow with `[W] ignore invalid "
            f"workflow: unknown on type: map[...]`. Drop the `inputs:` block "
            f"and derive parameters from tag name / env / external query. "
            f"Memory: feedback_gitea_workflow_dispatch_inputs_unsupported."
        )
    return errors
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rule 2 — on: workflow_run (not supported on Gitea 1.22.6)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def check_workflow_run_event(filename: str, doc: Any) -> list[str]:
    """Return per-violation error lines if `on: workflow_run:` is used.

    Args:
        filename: repo-relative path of the workflow file, used to anchor
            the `::error file=...` annotation in the PR UI.
        doc: parsed YAML document (non-dict shapes are ignored).
    """
    errors: list[str] = []
    on = _get_on(doc)
    # FIX (both branches): annotations previously hard-coded
    # `file=(unknown)` and left `filename` unused; anchor to the real file.
    if isinstance(on, dict) and "workflow_run" in on:
        errors.append(
            f"::error file={filename}::Rule 2 (FATAL): `on: workflow_run:` "
            f"event used. Gitea 1.22.6 does NOT support `workflow_run` "
            f"(verified via modules/actions/workflows.go enumeration; "
            f"task #81). Workflow will fire for zero events. Use a "
            f"`schedule:` cron OR a `push:` trigger with `paths:` filter "
            f"on the upstream workflow file as the cross-workflow gate."
        )
    elif isinstance(on, list) and "workflow_run" in on:
        # List shorthand: `on: [workflow_run, ...]`.
        errors.append(
            f"::error file={filename}::Rule 2 (FATAL): `on: workflow_run` "
            f"in event list. Not supported on Gitea 1.22.6 — task #81."
        )
    return errors
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rule 3 — name: contains "/" (breaks status-context tokenization)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def check_name_with_slash(filename: str, doc: Any) -> list[str]:
    """Return per-violation error lines if workflow `name:` contains a slash.

    Args:
        filename: repo-relative path of the workflow file, used to anchor
            the `::error file=...` annotation in the PR UI.
        doc: parsed YAML document (non-dict shapes are ignored).
    """
    errors: list[str] = []
    if not isinstance(doc, dict):
        return errors
    name = doc.get("name")
    if isinstance(name, str) and "/" in name:
        # FIX: the annotation previously hard-coded `file=(unknown)` and
        # left the `filename` parameter unused; anchor to the real file.
        errors.append(
            f"::error file={filename}::Rule 3 (FATAL): workflow `name: "
            f"{name!r}` contains `/`. The commit-status context convention "
            f"is `<workflow> / <job> (<event>)`; embedding `/` in the "
            f"workflow name makes downstream parsers (sop-tier-check, "
            f"status-reaper) tokenize ambiguously. Rename to use `-` or "
            f"` ` instead."
        )
    return errors
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rule 4 — cross-file name collision
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def check_name_collision_across_files(
    docs_by_file: dict[str, Any],
) -> list[str]:
    """Return per-collision error lines if two files share the same `name:`.

    Only non-empty string `name:` values participate; files whose parsed
    document is not a mapping are ignored.
    """
    # Invert the mapping: workflow name -> every file that declares it.
    files_for_name: dict[str, list[str]] = {}
    for fname, parsed in docs_by_file.items():
        if not isinstance(parsed, dict):
            continue
        wf_name = parsed.get("name")
        if isinstance(wf_name, str) and wf_name:
            files_for_name.setdefault(wf_name, []).append(fname)

    errors: list[str] = []
    for wf_name, files in sorted(files_for_name.items()):
        if len(files) < 2:
            continue
        errors.append(
            f"::error::Rule 4 (FATAL): workflow `name: {wf_name!r}` collision "
            f"across {len(files)} files: {files}. Gitea routes "
            f"commit-status updates by `name`; collision yields "
            f"undefined behavior. Give each workflow a unique `name:`."
        )
    return errors
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rule 5 — cross-repo `uses: org/repo/path@ref`
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# `uses: <foo>@<ref>` — match the value form Gitea/act actually parse.
|
||||
# We need to distinguish:
|
||||
# - `actions/checkout@<sha>` OK (bare org/repo@ref, no subpath)
|
||||
# - `./.gitea/actions/foo` OK (local path)
|
||||
# - `docker://image:tag` OK (docker-image form)
|
||||
# - `molecule-ai/molecule-ci/.gitea/actions/audit-force-merge@main` BAD
|
||||
# Cross-repo composite/reusable reference: `owner/repo/<subpath>@ref`.
# The mandatory third `/` distinguishes this from the benign bare
# `owner/repo@ref` action form (see the examples above).
USES_CROSS_REPO_RE = re.compile(
    r"""^
    (?P<owner>[A-Za-z0-9_.\-]+)
    /
    (?P<repo>[A-Za-z0-9_.\-]+)
    /  # mandatory subpath separator => cross-repo composite/reusable
    (?P<path>[^@\s]+)
    @
    (?P<ref>\S+)
    $""",
    re.VERBOSE,
)
|
||||
|
||||
|
||||
def _iter_uses(doc: Any) -> Iterable[str]:
|
||||
"""Yield every `uses:` string from job steps in a workflow document."""
|
||||
if not isinstance(doc, dict):
|
||||
return
|
||||
jobs = doc.get("jobs")
|
||||
if not isinstance(jobs, dict):
|
||||
return
|
||||
for job in jobs.values():
|
||||
if not isinstance(job, dict):
|
||||
continue
|
||||
# reusable workflow: `uses:` at the job level
|
||||
if isinstance(job.get("uses"), str):
|
||||
yield job["uses"]
|
||||
steps = job.get("steps")
|
||||
if not isinstance(steps, list):
|
||||
continue
|
||||
for step in steps:
|
||||
if isinstance(step, dict) and isinstance(step.get("uses"), str):
|
||||
yield step["uses"]
|
||||
|
||||
|
||||
def _iter_run_blocks(doc: Any) -> Iterable[str]:
|
||||
"""Yield every shell `run:` block from job steps in a workflow document."""
|
||||
if not isinstance(doc, dict):
|
||||
return
|
||||
jobs = doc.get("jobs")
|
||||
if not isinstance(jobs, dict):
|
||||
return
|
||||
for job in jobs.values():
|
||||
if not isinstance(job, dict):
|
||||
continue
|
||||
steps = job.get("steps")
|
||||
if not isinstance(steps, list):
|
||||
continue
|
||||
for step in steps:
|
||||
if isinstance(step, dict) and isinstance(step.get("run"), str):
|
||||
yield step["run"]
|
||||
|
||||
|
||||
def check_cross_repo_uses(filename: str, doc: Any) -> list[str]:
    """Return per-violation error lines for cross-repo `uses:` references.

    Args:
        filename: repo-relative path of the workflow file, used to anchor
            the `::error file=...` annotation in the PR UI.
        doc: parsed YAML document.
    """
    errors: list[str] = []
    for uses in _iter_uses(doc):
        # Skip docker:// and local ./ — those never resolve via GitHub.
        if uses.startswith(("docker://", "./", "../")):
            continue
        m = USES_CROSS_REPO_RE.match(uses.strip())
        if m:
            # FIX: the annotation previously hard-coded `file=(unknown)`
            # and left the `filename` parameter unused; anchor to the file.
            errors.append(
                f"::error file={filename}::Rule 5 (FATAL): cross-repo "
                f"`uses: {uses}` detected. Gitea 1.22.6 with "
                f"`[actions].DEFAULT_ACTIONS_URL=github` resolves this to "
                f"github.com/{m.group('owner')}/{m.group('repo')} which "
                f"404s (org suspended 2026-05-06). Inline the shared bash "
                f"into `.gitea/scripts/` until task #109 (actions mirror) "
                f"ships. Memory: feedback_gitea_cross_repo_uses_blocked."
            )
    return errors
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rule 6 — heuristic: github.com/api refs without workflow-level
|
||||
# GITHUB_SERVER_URL (WARN-not-FAIL per halt-condition 3)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Match `https://api.github.com/...` (API call) — that's the actionable
|
||||
# pattern. We intentionally do NOT match `https://github.com/.../releases/
|
||||
# download/...` (jq-release pin) nor `https://github.com/${{ github.repository
|
||||
# }}` (OCI label) because those are documented benign references on current
|
||||
# main and would 100% false-positive (3 hits, per Phase 1 audit).
|
||||
GITHUB_API_REF_RE = re.compile(
    r"https://api\.github\.com\b|https://github\.com/api/",
    re.IGNORECASE,
)


# Production control-plane host; its presence (together with the
# redeploy-fleet path below) marks a workflow as production-affecting.
PROD_CP_URL_RE = re.compile(r"https://api\.moleculesai\.app\b")
# Fleet-wide redeploy endpoint path.
REDEPLOY_FLEET_RE = re.compile(r"\b/cp/admin/tenants/redeploy-fleet\b")
# A `set -...o pipefail` line (any combined flag string, e.g. `set -euo pipefail`).
RUN_SETS_PIPEFAIL_RE = re.compile(r"(?m)^\s*set\s+-[^\n]*o\s+pipefail\b")
# `docker info | head` on a single line — the Rule 10 SIGPIPE hazard.
DOCKER_INFO_HEAD_PIPE_RE = re.compile(
    r"(?m)^\s*docker\s+info\b[^\n|]*\|\s*head\b"
)
# Raw production CP response leaks (Rule 8): `jq . $HTTP_RESPONSE`,
# `cat $HTTP_RESPONSE`, or piping into `.error`.
RAW_CP_RESPONSE_RE = re.compile(
    r"""(?x)
    (?:\bjq\s+\.\s+["']?\$HTTP_RESPONSE["']?)
    |
    (?:\bcat\s+["']?\$HTTP_RESPONSE["']?)
    |
    (?:\|\s*\.error\b)
    """
)
|
||||
|
||||
|
||||
def _has_workflow_level_server_url(doc: Any) -> bool:
|
||||
if not isinstance(doc, dict):
|
||||
return False
|
||||
env = doc.get("env")
|
||||
if isinstance(env, dict) and "GITHUB_SERVER_URL" in env:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def check_github_server_url_missing(filename: str, doc: Any, raw: str) -> list[str]:
    """Return warn-lines (NOT errors) if api.github.com is referenced without
    workflow-level GITHUB_SERVER_URL. Heuristic — false-positives possible.

    Args:
        filename: repo-relative path, used to anchor the `::warning file=...`
            annotation in the PR UI.
        doc: parsed YAML document (checked for a workflow-level env).
        raw: the raw file text (regex-scanned for the API reference).
    """
    warns: list[str] = []
    if not GITHUB_API_REF_RE.search(raw):
        return warns
    if _has_workflow_level_server_url(doc):
        return warns
    # FIX: the annotation previously hard-coded `file=(unknown)` and left
    # the `filename` parameter unused; anchor to the real file.
    warns.append(
        f"::warning file={filename}::Rule 6 (WARN, heuristic): file "
        f"references `https://api.github.com` without a workflow-level "
        f"`env.GITHUB_SERVER_URL: https://git.moleculesai.app`. The "
        f"act_runner default for `${{{{ github.server_url }}}}` is "
        f"github.com, which can break actions that auth-condition on "
        f"server_url (e.g. actions/setup-go). If this curl is "
        f"intentionally hitting GitHub (e.g. public release pin), ignore. "
        f"Memory: feedback_act_runner_github_server_url."
    )
    return warns
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rule 7-9 — production CI/CD hardening rules
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _is_production_redeploy_workflow(raw: str) -> bool:
    """Heuristic production-side-effect detector.

    We intentionally key on the production CP host plus the redeploy-fleet
    endpoint. Staging workflows call the same endpoint on staging-api and are
    governed by looser staging verification policy.
    """
    if not PROD_CP_URL_RE.search(raw):
        return False
    return REDEPLOY_FLEET_RE.search(raw) is not None
|
||||
|
||||
|
||||
def _iter_concurrency_blocks(doc: Any) -> Iterable[dict[str, Any]]:
|
||||
if not isinstance(doc, dict):
|
||||
return
|
||||
top = doc.get("concurrency")
|
||||
if isinstance(top, dict):
|
||||
yield top
|
||||
jobs = doc.get("jobs")
|
||||
if not isinstance(jobs, dict):
|
||||
return
|
||||
for job in jobs.values():
|
||||
if isinstance(job, dict) and isinstance(job.get("concurrency"), dict):
|
||||
yield job["concurrency"]
|
||||
|
||||
|
||||
def check_production_concurrency(filename: str, doc: Any, raw: str) -> list[str]:
    """Rule 7: flag `cancel-in-progress: false` on production deploy workflows.

    Args:
        filename: repo-relative path, used to anchor the `::error file=...`
            annotation in the PR UI.
        doc: parsed YAML document (its concurrency blocks are inspected).
        raw: raw file text (used only for the production-workflow heuristic).
    """
    errors: list[str] = []
    if not _is_production_redeploy_workflow(raw):
        return errors
    for block in _iter_concurrency_blocks(doc):
        if block.get("cancel-in-progress") is False:
            # FIX: the annotation previously hard-coded `file=(unknown)`
            # and left the `filename` parameter unused; anchor to the file.
            errors.append(
                f"::error file={filename}::Rule 7 (FATAL): production deploy "
                f"workflow uses `concurrency.cancel-in-progress: false`. "
                f"Gitea 1.22.6 can cancel queued runs despite that setting, "
                f"so this is not a safe production serialization primitive. "
                f"Use an external queue/lock or make the deploy idempotent."
            )
    return errors
|
||||
|
||||
|
||||
def check_production_raw_response_logging(filename: str, raw: str) -> list[str]:
    """Rule 8: flag raw production CP responses / `.error` dumps in CI logs.

    Args:
        filename: repo-relative path, used to anchor the `::error file=...`
            annotation in the PR UI.
        raw: raw file text (regex-scanned for the leak patterns).
    """
    errors: list[str] = []
    if not _is_production_redeploy_workflow(raw):
        return errors
    if RAW_CP_RESPONSE_RE.search(raw):
        # FIX: the annotation previously hard-coded `file=(unknown)` and
        # left the `filename` parameter unused; anchor to the real file.
        errors.append(
            f"::error file={filename}::Rule 8 (FATAL): production deploy "
            f"workflow appears to print a raw production CP response or raw "
            f"`.error` field. CI logs are persistent and broad-read. Redact "
            f"runtime/SSM error details; print counts, booleans, status "
            f"codes, and links to restricted observability instead."
        )
    return errors
|
||||
|
||||
|
||||
def check_production_operational_control(filename: str, raw: str) -> list[str]:
    """Rule 9: production deploy workflows must carry an operational control.

    Either a `PROD_AUTO_DEPLOY_DISABLED` kill switch (auto deploys) or a
    `PROD_MANUAL_REDEPLOY_TARGET_TAG` rollback/pin path (manual deploys)
    must appear somewhere in the file text.

    Args:
        filename: repo-relative path, used to anchor the `::error file=...`
            annotation in the PR UI.
        raw: raw file text (substring-scanned for the control markers).
    """
    errors: list[str] = []
    if not _is_production_redeploy_workflow(raw):
        return errors
    has_kill_switch = "PROD_AUTO_DEPLOY_DISABLED" in raw
    has_rollback = "PROD_MANUAL_REDEPLOY_TARGET_TAG" in raw
    if not (has_kill_switch or has_rollback):
        # FIX: the annotation previously hard-coded `file=(unknown)` and
        # left the `filename` parameter unused; anchor to the real file.
        errors.append(
            f"::error file={filename}::Rule 9 (FATAL): production deploy "
            f"workflow calls redeploy-fleet without an operational control. "
            f"Auto deploys need a `PROD_AUTO_DEPLOY_DISABLED` kill switch; "
            f"manual deploys need a `PROD_MANUAL_REDEPLOY_TARGET_TAG` "
            f"rollback/pin path."
        )
    return errors
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rule 10 — docker info piped to head under pipefail
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def check_docker_info_head_pipefail(filename: str, doc: Any) -> list[str]:
    """Rule 10: flag `docker info | head` in a `run:` block that sets pipefail.

    Args:
        filename: repo-relative path, used to anchor the `::error file=...`
            annotation in the PR UI.
        doc: parsed YAML document (its `run:` blocks are scanned).
    """
    errors: list[str] = []
    for run_block in _iter_run_blocks(doc):
        if not (
            RUN_SETS_PIPEFAIL_RE.search(run_block)
            and DOCKER_INFO_HEAD_PIPE_RE.search(run_block)
        ):
            continue
        # FIX: the annotation previously hard-coded `file=(unknown)` and
        # left the `filename` parameter unused; anchor to the real file.
        errors.append(
            f"::error file={filename}::Rule 10 (FATAL): workflow runs "
            f"`docker info | head` after enabling `pipefail`. `head` can "
            f"close the pipe early, making `docker info` exit nonzero and "
            f"falsely fail the Docker daemon health check. Capture "
            f"`docker_info=\"$(docker info 2>&1)\"` first, then print a "
            f"bounded preview with `printf ... | sed -n '1,5p'`."
        )
        break  # one annotation per file is enough — stop at the first hit
    return errors
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Driver
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
    """Lint every workflow YAML under --workflow-dir; return the exit code.

    Returns 0 when clean (heuristic warnings allowed), 1 on any fatal
    violation; argparse itself exits 2 on usage errors.
    """
    p = argparse.ArgumentParser(
        description="Lint Gitea Actions workflow YAML for 1.22.6-hostile shapes."
    )
    p.add_argument(
        "--workflow-dir",
        default=".gitea/workflows",
        help="Directory of workflow *.yml files (default: .gitea/workflows).",
    )
    args = p.parse_args(argv)

    wf_dir = Path(args.workflow_dir)
    if not wf_dir.exists():
        # Empty / missing dir = nothing to lint, not a failure.
        print(f"::notice::No workflow directory at {wf_dir}; skipping.")
        return 0

    yml_paths = sorted(
        glob.glob(str(wf_dir / "*.yml")) + glob.glob(str(wf_dir / "*.yaml"))
    )
    if not yml_paths:
        print(f"::notice::No workflow files in {wf_dir}; nothing to lint.")
        return 0

    fatal_errors: list[str] = []
    warnings: list[str] = []
    docs_by_file: dict[str, Any] = {}

    for path in yml_paths:
        rel = os.path.relpath(path)
        try:
            # FIX: read with explicit UTF-8 — a runner host with a
            # non-UTF-8 default locale would otherwise crash on non-ASCII
            # workflow comments (matches the sibling lints' read calls).
            raw = Path(path).read_text(encoding="utf-8")
            doc = yaml.safe_load(raw)
        except yaml.YAMLError as e:
            fatal_errors.append(
                f"::error file={rel}::YAML parse error: {e}. Cannot lint "
                f"a file the parser rejects."
            )
            continue
        docs_by_file[rel] = doc

        # Per-file checks
        fatal_errors.extend(check_workflow_dispatch_inputs(rel, doc))
        fatal_errors.extend(check_workflow_run_event(rel, doc))
        fatal_errors.extend(check_name_with_slash(rel, doc))
        fatal_errors.extend(check_cross_repo_uses(rel, doc))
        fatal_errors.extend(check_production_concurrency(rel, doc, raw))
        fatal_errors.extend(check_production_raw_response_logging(rel, raw))
        fatal_errors.extend(check_production_operational_control(rel, raw))
        fatal_errors.extend(check_docker_info_head_pipefail(rel, doc))
        warnings.extend(check_github_server_url_missing(rel, doc, raw))

    # Cross-file checks
    fatal_errors.extend(check_name_collision_across_files(docs_by_file))

    # Emit warnings first (non-blocking)
    for w in warnings:
        print(w)

    if not fatal_errors:
        n = len(yml_paths)
        print(
            f"::notice::lint-workflow-yaml: {n} workflow file(s) checked, "
            f"no fatal Gitea-1.22.6-hostile shapes. "
            f"({len(warnings)} heuristic warning(s) emitted.)"
        )
        return 0

    # Emit fatal errors
    print(
        f"::error::lint-workflow-yaml: {len(fatal_errors)} fatal violation(s) "
        f"across {len(yml_paths)} workflow file(s). See rule documentation "
        f"in .gitea/scripts/lint-workflow-yaml.py docstring."
    )
    for e in fatal_errors:
        print(e)
    return 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate the lint's exit code directly to the CI step.
    sys.exit(main())
|
||||
@ -1,509 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""lint_bp_context_emit_match — Tier 2f per internal#350.
|
||||
|
||||
Rule
|
||||
----
|
||||
For a given protected branch, every context in
|
||||
`branch_protections/<branch>.status_check_contexts` MUST be emitted
|
||||
by at least one workflow in `.gitea/workflows/*.yml`. Two contexts
|
||||
match when:
|
||||
|
||||
1. The workflow's `name:` equals the context's workflow-part (the
|
||||
prefix before ` / `).
|
||||
2. Some job in that workflow has a `name:` (or default-fallback
|
||||
job-key) equal to the context's job-part (between ` / ` and
|
||||
` (`).
|
||||
3. The workflow's `on:` block includes the context's event-part
|
||||
(in parens at the end), with Gitea's event-name mapping:
|
||||
- `pull_request` and `pull_request_target` BOTH emit
|
||||
`(pull_request)` contexts (verified empirically on
|
||||
molecule-core/main).
|
||||
- `push` emits `(push)`.
|
||||
|
||||
A BP context with no emitter blocks merges forever — Gitea treats
|
||||
absent-as-`pending`, NOT absent-as-`skipped`-as-`success`. This is
|
||||
the phantom-required-check class
|
||||
(`feedback_phantom_required_check_after_gitea_migration`).
|
||||
|
||||
The inverse direction (emitter without BP context) is INFORMATIONAL
|
||||
only — Tier 2g handles that direction at PR-time. Flagging it here
|
||||
on a daily schedule would falsely surface every transitional state
|
||||
during a BP rollout.
|
||||
|
||||
How the gate works
|
||||
------------------
|
||||
Daily scheduled run + workflow_dispatch:
|
||||
|
||||
1. GET `branch_protections/{BRANCH}` (needs DRIFT_BOT_TOKEN with
|
||||
repo-admin scope; same persona as ci-required-drift.yml).
|
||||
Graceful-degrade on 403/404 per Tier 2a contract.
|
||||
|
||||
2. Walk `.gitea/workflows/*.yml` via PyYAML AST. For each workflow,
|
||||
enumerate its emitted contexts: `{workflow.name} / {job.name or
|
||||
job-key} ({event})` for each event in `on:` that emits a status.
|
||||
|
||||
3. For each BP context, look for an emitter match. Aggregate
|
||||
orphans.
|
||||
|
||||
4. If orphans exist:
|
||||
- File or PATCH a `[ci-bp-drift]` issue (idempotency contract:
|
||||
search for exact title prefix, edit existing if open).
|
||||
- Apply labels `tier:high` + `ci-bp-drift` (lookup IDs per
|
||||
repo; per `feedback_tier_label_ids_are_per_repo`).
|
||||
- Exit 1.
|
||||
|
||||
5. If no orphans:
|
||||
- Close any existing `[ci-bp-drift]` issue with a clean-state
|
||||
comment.
|
||||
- Exit 0.
|
||||
|
||||
Exit codes
|
||||
----------
|
||||
0 — clean OR API 403/404 (graceful-degrade, surfaces ::error::).
|
||||
1 — at least one BP context has no emitter.
|
||||
2 — env contract violation, workflows-dir missing, or YAML parse
|
||||
error.
|
||||
|
||||
Env
|
||||
---
|
||||
GITEA_TOKEN — DRIFT_BOT_TOKEN (repo-admin for branch_protections)
|
||||
GITEA_HOST — e.g. git.moleculesai.app
|
||||
REPO — owner/name
|
||||
BRANCH — defaults to `main`
|
||||
WORKFLOWS_DIR — defaults to `.gitea/workflows`
|
||||
DRIFT_LABEL — defaults to `ci-bp-drift`
|
||||
|
||||
Memory cross-links
|
||||
------------------
|
||||
- internal#350 (the RFC that specs this lint)
|
||||
- feedback_phantom_required_check_after_gitea_migration
|
||||
- feedback_tier_label_ids_are_per_repo
|
||||
- reference_post_suspension_pipeline
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
try:
|
||||
import yaml
|
||||
except ImportError:
|
||||
sys.stderr.write(
|
||||
"::error::PyYAML is required. Install with: pip install PyYAML\n"
|
||||
)
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
# Status-check context regex (mirrors lint-required-no-paths.py).
# Shape: "<workflow> / <job> (<event>)". The workflow group is non-greedy
# so a literal " / " inside the job name binds to the job part.
_CONTEXT_RE = re.compile(
    r"^(?P<workflow>.+?) / (?P<job>.+) \((?P<event>[^)]+)\)$"
)

# Map a workflow `on:` event-key to the context's event-part. Gitea's
# emitter convention (verified on molecule-core):
#   - pull_request        → `(pull_request)`
#   - pull_request_target → `(pull_request)` (same surface)
#   - push                → `(push)`
#   - schedule            → no PR status; scheduled runs don't post
#     commit-statuses unless the workflow itself does so explicitly.
#   - workflow_dispatch   → manually dispatched runs may or may not
#     emit; safest to treat as "no PR status" (informational notice
#     only).
# Event keys absent from this map are dropped by `_on_events`.
_EVENT_MAP = {
    "pull_request": "pull_request",
    "pull_request_target": "pull_request",
    "push": "push",
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Env
|
||||
# ---------------------------------------------------------------------------
|
||||
def _env(key: str, default: str | None = None) -> str:
|
||||
v = os.environ.get(key, default)
|
||||
return v if v is not None else ""
|
||||
|
||||
|
||||
def _require_env(key: str) -> str:
|
||||
v = os.environ.get(key)
|
||||
if not v:
|
||||
sys.stderr.write(f"::error::missing required env var: {key}\n")
|
||||
sys.exit(2)
|
||||
return v
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# API helper. Mirrors lint-required-no-paths.py's contract: returns
# (status, payload) with status ∈ {"ok", "not_found", "forbidden",
# "error"}.
# ---------------------------------------------------------------------------
def api(
    method: str,
    path: str,
    *,
    body: dict | None = None,
    query: dict[str, str] | None = None,
) -> tuple[str, Any]:
    """Issue one Gitea v1 API call and classify the outcome.

    Returns (status, payload): "ok" with decoded JSON (or None on an
    empty body), "not_found" for 404, "forbidden" for 401/403, and
    "error" for any other HTTP error, network failure, timeout, or
    undecodable response body.
    """
    url = f"https://{_env('GITEA_HOST')}/api/v1{path}"
    if query:
        url = url + "?" + urllib.parse.urlencode(query)
    headers = {
        "Authorization": f"token {_env('GITEA_TOKEN')}",
        "Accept": "application/json",
    }
    payload_bytes = None
    if body is not None:
        payload_bytes = json.dumps(body).encode("utf-8")
        headers["Content-Type"] = "application/json"
    request = urllib.request.Request(
        url, method=method, data=payload_bytes, headers=headers
    )
    try:
        with urllib.request.urlopen(request, timeout=30) as resp:
            raw = resp.read()
        return ("ok", json.loads(raw)) if raw else ("ok", None)
    except urllib.error.HTTPError as err:
        if err.code == 404:
            return ("not_found", None)
        if err.code in (401, 403):
            return ("forbidden", None)
        return ("error", None)
    except (urllib.error.URLError, TimeoutError, json.JSONDecodeError):
        return ("error", None)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
def _get_on(d: Any) -> Any:
|
||||
"""YAML 1.1 boolean quirk: bare `on:` may parse to True. Handle both."""
|
||||
if not isinstance(d, dict):
|
||||
return None
|
||||
if "on" in d:
|
||||
return d["on"]
|
||||
if True in d:
|
||||
return d[True]
|
||||
return None
|
||||
|
||||
|
||||
def _on_events(doc: Any) -> set[str]:
    """Collect the workflow's `on:` event keys, mapped via `_EVENT_MAP`.

    All three YAML shapes are accepted (string / list / mapping).
    String/list shapes can't carry filters but they DO emit statuses,
    so they count. Events not present in `_EVENT_MAP` are dropped.
    """
    trigger = _get_on(doc)
    names: set[str] = set()
    if isinstance(trigger, str):
        names.add(trigger)
    elif isinstance(trigger, list):
        names.update(e for e in trigger if isinstance(e, str))
    elif isinstance(trigger, dict):
        names.update(k for k in trigger if isinstance(k, str))
    return {_EVENT_MAP[n] for n in names if n in _EVENT_MAP}
|
||||
|
||||
|
||||
def _job_display(jbody: dict, jkey: str) -> str:
|
||||
"""Return job's `name:` if set, else fall back to the job-key.
|
||||
|
||||
Gitea formats status contexts with the job's `name:` when set;
|
||||
when unset it uses the job key. Matches lint-required-no-paths
|
||||
convention.
|
||||
"""
|
||||
n = jbody.get("name") if isinstance(jbody, dict) else None
|
||||
if isinstance(n, str) and n:
|
||||
return n
|
||||
return jkey
|
||||
|
||||
|
||||
def workflow_contexts(doc: Any) -> set[str]:
    """Enumerate every status context `{name} / {job} ({event})` a
    workflow can emit.

    A workflow without a non-empty `name:`, with no mapped events, or
    with no `jobs:` mapping yields the empty set — it has no
    addressable contexts.
    """
    if not isinstance(doc, dict):
        return set()
    wf_name = doc.get("name")
    if not (isinstance(wf_name, str) and wf_name):
        return set()  # no name => no addressable context
    events = _on_events(doc)
    jobs = doc.get("jobs")
    if not events or not isinstance(jobs, dict):
        return set()
    emitted: set[str] = set()
    for jkey, jbody in jobs.items():
        # `__lines__` is the line-tracking annotation some loaders add.
        if jkey == "__lines__" or not isinstance(jbody, dict):
            continue
        label = _job_display(jbody, jkey)
        emitted.update(f"{wf_name} / {label} ({ev})" for ev in events)
    return emitted
|
||||
|
||||
|
||||
def parse_context(ctx: str) -> tuple[str, str, str] | None:
    """Split "workflow / job (event)" into its parts; None if malformed."""
    match = _CONTEXT_RE.match(ctx)
    if match is None:
        return None
    return (
        match.group("workflow"),
        match.group("job"),
        match.group("event"),
    )
|
||||
|
||||
|
||||
def _iter_workflow_files(wf_dir: Path) -> list[Path]:
|
||||
return sorted(list(wf_dir.glob("*.yml")) + list(wf_dir.glob("*.yaml")))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Issue idempotency — search for an open issue with the canonical
|
||||
# title prefix; PATCH if found, POST if not. Mirrors ci-required-drift.
|
||||
# ---------------------------------------------------------------------------
|
||||
def _canonical_title(repo: str, branch: str) -> str:
|
||||
return f"[ci-bp-drift] {repo}/{branch}: BP→emitter mismatch"
|
||||
|
||||
|
||||
def _ensure_labels(repo: str, names: list[str]) -> list[int]:
    """Resolve label *names* to their per-repo numeric IDs.

    Gitea label IDs are repo-scoped (feedback_tier_label_ids_are_per_repo),
    so the lookup runs against this repo on every call. Best-effort by
    design: an API failure, malformed payload, or missing label simply
    drops that label from the result — labelling is cosmetic and must
    never fail the lint.

    Returns the IDs of the labels that were found, in *names* order.
    """
    status, labels = api("GET", f"/repos/{repo}/labels", query={"limit": "50"})
    if status != "ok" or not isinstance(labels, list):
        return []
    # Guard each field access: a payload entry missing "name"/"id"
    # (API drift) previously raised KeyError and crashed the lint.
    by_name: dict[str, int] = {}
    for lbl in labels:
        if not isinstance(lbl, dict):
            continue
        name = lbl.get("name")
        label_id = lbl.get("id")
        if isinstance(name, str) and isinstance(label_id, int):
            by_name[name] = label_id
    return [by_name[n] for n in names if n in by_name]
|
||||
|
||||
|
||||
def file_or_update_issue(
    repo: str, branch: str, orphans: list[str], emitter_orphans: list[str]
) -> None:
    """File — or update in place — the canonical `[ci-bp-drift]` issue.

    Idempotency contract: search open issues for the canonical title
    prefix; PATCH the first exact-prefix match, POST a new issue
    otherwise. Label resolution is best-effort; when the lookup fails
    the issue is filed/updated without labels.

    Args:
        repo: `owner/name` of the repository.
        branch: protected branch whose BP contexts drifted.
        orphans: BP contexts with no emitting workflow (the failure).
        emitter_orphans: workflow contexts absent from BP (informational).
    """
    title = _canonical_title(repo, branch)
    # Issue body: actionable orphan list first, then the informational
    # emitter-side list, then fix instructions.
    body_lines = [
        f"BP→emitter drift detected on `{branch}` at "
        f"{os.environ.get('GITHUB_RUN_URL', '(run url unavailable)')}.",
        "",
        f"## Orphan BP contexts ({len(orphans)})",
        "",
        "These contexts are required by branch protection but NO workflow "
        "emits them. PRs merging into this branch will wait forever for a "
        "status that never arrives (Gitea treats absent-as-`pending`, NOT "
        "absent-as-`skipped`). See "
        "`feedback_phantom_required_check_after_gitea_migration`.",
        "",
    ]
    for o in orphans:
        body_lines.append(f"- `{o}`")
    if emitter_orphans:
        body_lines += [
            "",
            f"## Workflows emitting contexts NOT in BP ({len(emitter_orphans)})",
            "",
            "Informational — Tier 2g handles this direction at PR-time. "
            "Listed here for completeness.",
            "",
        ]
        for o in emitter_orphans:
            body_lines.append(f"- `{o}`")
    body_lines += [
        "",
        "Fix options:",
        " 1. PATCH `branch_protections/{branch}.status_check_contexts` "
        " to remove the orphan.",
        " 2. Restore the emitting workflow (if it was deleted/renamed).",
        "",
        "Linted by `.gitea/workflows/lint-bp-context-emit-match.yml` "
        "(Tier 2f, internal#350).",
    ]
    body = "\n".join(body_lines)

    # Idempotency search — find an open issue with the canonical title.
    status, hits = api(
        "GET",
        f"/repos/{repo}/issues",
        query={
            "type": "issues",
            "state": "open",
            "q": title,
        },
    )
    existing = None
    if status == "ok" and isinstance(hits, list):
        for h in hits:
            # `q` search is fuzzy; require an exact-prefix, still-open hit.
            if (
                isinstance(h, dict)
                and h.get("state") == "open"
                and isinstance(h.get("title"), str)
                and h["title"].startswith(title)
            ):
                existing = h
                break

    # Per-repo label IDs (best-effort; may come back empty on failure).
    label_ids = _ensure_labels(repo, ["ci-bp-drift", "tier:high"])

    if existing:
        # PATCH the refreshed body (and labels only when resolved) onto
        # the already-open issue; PATCH result is intentionally ignored.
        api(
            "PATCH",
            f"/repos/{repo}/issues/{existing['number']}",
            body={"body": body, "labels": label_ids} if label_ids else {"body": body},
        )
        print(
            f"::notice::Updated existing drift issue "
            f"#{existing['number']}: {existing.get('html_url', '')}"
        )
    else:
        status, posted = api(
            "POST",
            f"/repos/{repo}/issues",
            body={"title": title, "body": body, "labels": label_ids},
        )
        # Only log on success; a failed POST is silent here — the caller
        # still exits 1 on drift, so the lint outcome is unaffected.
        if status == "ok" and isinstance(posted, dict):
            print(
                f"::notice::Filed new drift issue "
                f"#{posted.get('number')}: {posted.get('html_url', '')}"
            )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Driver
# ---------------------------------------------------------------------------
def run() -> int:
    """Drive the BP→emitter lint; returns the process exit code.

    0 — clean, no BP configured, zero required contexts, or
        graceful-degrade on 403/unexpected API responses.
    1 — at least one required BP context has no emitting workflow.
    2 — env contract violation or workflows dir missing.
    """
    _require_env("GITEA_TOKEN")
    _require_env("GITEA_HOST")
    repo = _require_env("REPO")
    branch = _env("BRANCH", "main")
    wf_dir = Path(_env("WORKFLOWS_DIR", ".gitea/workflows"))

    if not wf_dir.is_dir():
        sys.stderr.write(f"::error::workflows directory not found: {wf_dir}\n")
        return 2

    # 1. Pull BP.
    status, bp = api("GET", f"/repos/{repo}/branch_protections/{branch}")
    if status == "forbidden":
        # Token-scope problem, not drift — exit 0 per Tier 2a contract.
        sys.stderr.write(
            f"::error::GET branch_protections/{branch} returned HTTP 403 — "
            f"DRIFT_BOT_TOKEN lacks repo-admin scope (Gitea 1.22.6 requires "
            f"it for this endpoint). Skipping lint with exit 0 to avoid "
            f"red-X on every run. Fix: grant repo-admin to mc-drift-bot. "
            f"Per Tier 2a contract.\n"
        )
        return 0
    if status == "not_found":
        print(
            f"::notice::branch '{branch}' has no protection configured; "
            f"nothing to lint."
        )
        return 0
    if status != "ok" or not isinstance(bp, dict):
        # Transient outage or malformed payload — don't red-X the run.
        sys.stderr.write(
            f"::error::branch_protections/{branch} response unexpected; "
            f"status={status}. Treating as transient; exit 0.\n"
        )
        return 0

    bp_contexts: list[str] = list(bp.get("status_check_contexts") or [])
    if not bp_contexts:
        print(
            f"::notice::branch_protections/{branch} has 0 required "
            f"status_check_contexts; nothing to lint."
        )
        return 0

    # 2. Enumerate emitter contexts from all workflows.
    all_emitter: set[str] = set()
    for path in _iter_workflow_files(wf_dir):
        try:
            doc = yaml.safe_load(path.read_text(encoding="utf-8"))
        except yaml.YAMLError as e:
            # A broken workflow file shouldn't kill the whole lint.
            sys.stderr.write(
                f"::error file={path}::YAML parse error: {e}; skipping.\n"
            )
            continue
        all_emitter |= workflow_contexts(doc)

    print(
        f"::notice::Linting {len(bp_contexts)} BP context(s) for {branch} "
        f"against {len(all_emitter)} workflow-emitted context(s)."
    )

    bp_set = set(bp_contexts)

    # 3. Find orphans (BP-side: required but no emitter).
    bp_orphans = sorted(bp_set - all_emitter)

    # Informational: workflow emits but BP doesn't list. Tier 2g
    # territory at PR-time. We list these as NOTICE only.
    emitter_orphans = sorted(all_emitter - bp_set)

    if bp_orphans:
        print(
            f"::error::Found {len(bp_orphans)} BP context(s) with no "
            f"emitter — these would block merges forever (Gitea treats "
            f"absent-as-pending, not skipped):"
        )
        for o in bp_orphans:
            # Closest-match hint: name a workflow whose name-part is a
            # near-match (lev-1 typo, or same workflow with a different
            # event).
            parsed = parse_context(o)
            hint = ""
            if parsed:
                wf, _job, _ev = parsed
                candidates = sorted(
                    {c for c in all_emitter if c.startswith(wf + " / ")}
                )
                if candidates:
                    hint = (
                        f" — closest emitter(s): {', '.join(candidates[:3])}"
                    )
            print(f"::error:: - {o}{hint}")
        if emitter_orphans:
            print(
                f"::notice::Also: {len(emitter_orphans)} workflow-emitted "
                f"context(s) not in BP (informational; Tier 2g handles at "
                f"PR-time):"
            )
            for o in emitter_orphans:
                print(f"::notice:: - {o}")
        # File / patch tracking issue. Issue filing is best-effort; a
        # failure is logged but the run still exits 1 for the drift.
        try:
            file_or_update_issue(repo, branch, bp_orphans, emitter_orphans)
        except Exception as e:
            sys.stderr.write(
                f"::error::failed to file drift issue: {e}\n"
            )
        return 1

    # Clean path: still surface the informational emitter-side list.
    if emitter_orphans:
        print(
            f"::notice::{len(emitter_orphans)} workflow-emitted context(s) "
            f"not in BP (informational; Tier 2g handles at PR-time):"
        )
        for o in emitter_orphans:
            print(f"::notice:: - {o}")

    print(
        f"::notice::BP/emitter match clean: all {len(bp_contexts)} required "
        f"context(s) have an emitter."
    )
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate run()'s exit code (0 clean / 1 drift / 2 env error).
    sys.exit(run())
|
||||
@ -1,438 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""lint_continue_on_error_tracking — Tier 2e per internal#350.
|
||||
|
||||
Rule
|
||||
----
|
||||
Every `continue-on-error: true` directive in `.gitea/workflows/*.yml`
|
||||
must be accompanied by a tracker reference comment within 2 lines
|
||||
(above OR below the directive's line). The reference is one of:
|
||||
|
||||
* `# mc#NNNN` — molecule-core issue
|
||||
* `# internal#NNNN` — molecule-ai/internal issue
|
||||
|
||||
The referenced issue must satisfy ALL of:
|
||||
|
||||
1. Exists (HTTP 200 on `/repos/{owner}/{name}/issues/{num}`)
|
||||
2. `state == "open"`
|
||||
3. `created_at` is ≤ MAX_AGE_DAYS days ago (default 14)
|
||||
|
||||
A passing reference establishes an audit trail and a forced renewal
|
||||
cadence — after 14 days the issue must either be CLOSED (the masked
|
||||
defect was fixed) or the comment must point at a NEW tracker
|
||||
(deliberate decision to keep masking, requires a paper-trail).
|
||||
|
||||
The class this prevents
|
||||
-----------------------
|
||||
Phase-3-masked failures. `continue-on-error: true` on `platform-build`
|
||||
had been hiding mc#664-class regressions for ~3 weeks before #656
|
||||
surfaced them on 2026-05-12. A 14-day cap forces a tracker review
|
||||
cycle and surfaces mask-drift within at most 14 days of the original
|
||||
defect.
|
||||
|
||||
Behaviour-based gate
|
||||
--------------------
|
||||
We parse via PyYAML AST (per `feedback_behavior_based_ast_gates`) to
|
||||
detect `continue-on-error: <truthy>` at job-key level, then map each
|
||||
location back to its source line via PyYAML's line-tracking loader.
|
||||
Comments are scanned from the raw text within a 2-line window of
|
||||
that source line. Reformatting (block-scalar vs flow-style) does not
|
||||
break the rule because the source-line anchor is the directive's
|
||||
own line.
|
||||
|
||||
Exit codes
|
||||
----------
|
||||
0 — every `continue-on-error: true` has a passing tracker, OR
|
||||
the issue-API endpoint returned 403/404 (token-scope; graceful
|
||||
degrade per Tier 2a contract — surface via ::error:: on stderr
|
||||
but don't red-X every PR over auth).
|
||||
1 — at least one violation (missing/closed/too-old/non-existent
|
||||
tracker).
|
||||
2 — env contract violation, YAML parse error, or workflows-dir
|
||||
missing.
|
||||
|
||||
Env
|
||||
---
|
||||
GITEA_TOKEN — read scope on the configured repos.
|
||||
Auto-injected `GITHUB_TOKEN` works for same-repo
|
||||
issue reads; for `internal#NNN` we need a token
|
||||
with `molecule-ai/internal` read scope. Use
|
||||
DRIFT_BOT_TOKEN (same persona as other Tier 2
|
||||
lints).
|
||||
GITEA_HOST — e.g. git.moleculesai.app
|
||||
REPO — `owner/name` for `mc#NNNN` lookups
|
||||
INTERNAL_REPO — `owner/name` for `internal#NNNN` lookups
|
||||
(defaults to derived `molecule-ai/internal`)
|
||||
WORKFLOWS_DIR — defaults to `.gitea/workflows`
|
||||
MAX_AGE_DAYS — defaults to 14
|
||||
|
||||
Memory cross-links
|
||||
------------------
|
||||
- internal#350 (the RFC that specs this lint)
|
||||
- mc#664 (the masked-3-weeks empirical case)
|
||||
- feedback_chained_defects_in_never_tested_workflows
|
||||
- feedback_behavior_based_ast_gates
|
||||
- feedback_strict_root_only_after_class_a
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
try:
|
||||
import yaml
|
||||
except ImportError:
|
||||
sys.stderr.write(
|
||||
"::error::PyYAML is required. Install with: pip install PyYAML\n"
|
||||
)
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Tracker comment regex.
# Matches: `# mc#1234`, `# internal#42`, `# mc#1234 - description`
# Also matches trackers embedded mid-sentence: `# see mc#1234 for details`
# Does NOT match: `# mc1234` (missing inner #), `# MC#1234` (case-sensitive).
# The search is line-wide, not just at the comment-marker prefix — fixes
# false-negative when the tracker appears mid-sentence (e.g. `internal#350`
# after prose). NOTE(review): because the search is line-wide, a bare
# `mc#1234` OUTSIDE a comment would also match — accepted trade-off of the
# mid-sentence fix.
# ---------------------------------------------------------------------------
TRACKER_RE = re.compile(
    r"(?P<slug>mc|internal)#(?P<num>\d+)\b"
)
|
||||
|
||||
# Truthy continue-on-error values we treat as "true". PyYAML decodes
|
||||
# `continue-on-error: true` to Python `True`. `continue-on-error: "true"`
|
||||
# decodes to the string "true" — Gitea's evaluator coerces strings,
|
||||
# so we treat string-`"true"` (case-insensitive) as truthy too.
|
||||
def _is_truthy_coe(v: Any) -> bool:
|
||||
if v is True:
|
||||
return True
|
||||
if isinstance(v, str) and v.strip().lower() == "true":
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Env contract
|
||||
# ---------------------------------------------------------------------------
|
||||
def _env(key: str, default: str | None = None) -> str:
|
||||
v = os.environ.get(key, default)
|
||||
return v if v is not None else ""
|
||||
|
||||
|
||||
def _require_env(key: str) -> str:
|
||||
v = os.environ.get(key)
|
||||
if not v:
|
||||
sys.stderr.write(f"::error::missing required env var: {key}\n")
|
||||
sys.exit(2)
|
||||
return v
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# PyYAML line-tracking loader. yaml.SafeLoader nodes carry
# `start_mark.line` (0-based); using construct_mapping with `deep=True`
# preserves that on every node. We need the line of each
# `continue-on-error` key so we can scan the source for comments
# near it.
# ---------------------------------------------------------------------------
class _LineLoader(yaml.SafeLoader):
    """SafeLoader that annotates every dict with `__lines__: {key: line}`."""
||||
|
||||
|
||||
def _construct_mapping(loader: yaml.SafeLoader, node: yaml.MappingNode) -> dict:
    """Mapping constructor that records each key's 1-based source line.

    Per-key lines are stashed under the synthetic "__lines__" entry so
    later passes can anchor `continue-on-error` back to the raw text.
    Keys that fail to construct, or that aren't str/int/bool, are
    skipped without error.
    """
    mapping = loader.construct_mapping(node, deep=True)
    key_lines: dict[str, int] = {}
    for key_node, _value_node in node.value:
        try:
            key_obj = loader.construct_object(key_node, deep=True)
        except Exception:
            continue
        if isinstance(key_obj, (str, int, bool)):
            # start_mark.line is 0-based; store 1-based.
            key_lines[str(key_obj)] = key_node.start_mark.line + 1
    if isinstance(mapping, dict):
        mapping["__lines__"] = key_lines
    return mapping
|
||||
|
||||
|
||||
# Register on the default mapping tag so every mapping (nested included)
# is built via `_construct_mapping` and carries the `__lines__` annotation.
_LineLoader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _construct_mapping
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Issue lookup
# ---------------------------------------------------------------------------
def fetch_issue(slug_kind: str, num: int) -> tuple[str, dict | None]:
    """GET issue *num* from the repo implied by *slug_kind* ("mc"/"internal").

    Returns (status, payload) with status ∈ {"ok", "not_found",
    "forbidden", "error"}. An unset repo env var is reported as
    "forbidden" so the caller takes the same graceful-degrade path
    as a token-scope failure.
    """
    repo = (
        _env("REPO") if slug_kind == "mc" else _env("INTERNAL_REPO")
    )
    if not repo:
        # Fall through gracefully — caller treats as 403 (token-scope).
        return ("forbidden", None)
    request = urllib.request.Request(
        f"https://{_env('GITEA_HOST')}/api/v1/repos/{repo}/issues/{num}",
        headers={
            "Authorization": f"token {_env('GITEA_TOKEN')}",
            "Accept": "application/json",
        },
    )
    try:
        with urllib.request.urlopen(request, timeout=20) as resp:
            return ("ok", json.loads(resp.read()))
    except urllib.error.HTTPError as err:
        if err.code == 404:
            return ("not_found", None)
        if err.code in (401, 403):
            return ("forbidden", None)
        return ("error", None)
    except (urllib.error.URLError, TimeoutError, json.JSONDecodeError):
        return ("error", None)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Locate every continue-on-error: <truthy> in a workflow doc, with line.
# ---------------------------------------------------------------------------
def find_coe_truthies(
    doc: Any, raw_lines: list[str]
) -> list[tuple[str, int]]:
    """List (job_key, 1-based line) for each job-level truthy directive.

    *doc* is the _LineLoader-parsed mapping. Only JOB-level
    `continue-on-error` counts: the step-level form suppresses
    step-failure rollup only, which is not the masking class this
    lint targets. When the loader's `__lines__` annotation is missing,
    fall back to grepping *raw_lines* (line 1 as a last resort).
    """
    found: list[tuple[str, int]] = []
    jobs = doc.get("jobs") if isinstance(doc, dict) else None
    if not isinstance(jobs, dict):
        return found
    for jkey, jbody in jobs.items():
        if jkey == "__lines__" or not isinstance(jbody, dict):
            continue
        if not _is_truthy_coe(jbody.get("continue-on-error")):
            continue
        line = jbody.get("__lines__", {}).get("continue-on-error")
        if not line:
            # Line-tracking shouldn't miss, but a gap must not crash
            # the lint — grep the raw text instead.
            line = _grep_first_coe_line(raw_lines, jkey) or 1
        found.append((str(jkey), int(line)))
    return found
|
||||
|
||||
|
||||
def _grep_first_coe_line(raw_lines: list[str], jkey: str) -> int | None:
|
||||
"""Fallback: find the first `continue-on-error:` line after a `jkey:` line."""
|
||||
saw_job = False
|
||||
for i, line in enumerate(raw_lines, start=1):
|
||||
if re.match(rf"^\s*{re.escape(jkey)}\s*:", line):
|
||||
saw_job = True
|
||||
continue
|
||||
if saw_job and "continue-on-error" in line:
|
||||
return i
|
||||
return None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Scan window for tracker comment
# ---------------------------------------------------------------------------
# Half-width of the tracker search window around the directive's own line.
WINDOW = 2  # lines above OR below the directive's line (inclusive)
|
||||
|
||||
|
||||
def find_tracker_in_window(
    raw_lines: list[str], line_1based: int
) -> tuple[str, int] | None:
    """Return (slug, num) if an `mc#NNN`/`internal#NNN` reference appears
    in raw_lines within ±WINDOW lines of `line_1based`. None otherwise.

    We scan the directive's own line (it may carry an inline comment
    like `continue-on-error: true # mc#3`) plus ±WINDOW.
    """
    lo = max(1, line_1based - WINDOW)
    hi = min(len(raw_lines), line_1based + WINDOW)
    for i in range(lo, hi + 1):
        line = raw_lines[i - 1]
        # TRACKER_RE is searched over the WHOLE line — not just a
        # trailing-comment portion — per the regex's line-wide contract;
        # that is what makes inline comments on the directive line match.
        m = TRACKER_RE.search(line)
        if m:
            return (m.group("slug"), int(m.group("num")))
    return None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Tracker validation
# ---------------------------------------------------------------------------
def validate_tracker(
    slug: str, num: int, max_age_days: int
) -> tuple[bool, str]:
    """Validate a tracker reference; return (ok?, human-readable reason).

    Passing requires the issue to exist, be open, and be at most
    *max_age_days* old. On 403 (token scope) and on fetch errors,
    ok=True is returned with a reason explaining the graceful-degrade —
    the caller treats both as a non-fatal skip (Tier 2a contract).
    """
    status, payload = fetch_issue(slug, num)
    if status == "forbidden":
        sys.stderr.write(
            f"::error::issue {slug}#{num} unreadable (HTTP 403 — token "
            f"scope). Cannot validate; skipping this check to avoid "
            f"red-X on every PR. Fix the token, not the lint.\n"
        )
        return (True, "forbidden — skipped")
    if status == "not_found":
        return (False, f"{slug}#{num} does not exist (404)")
    if status == "error":
        sys.stderr.write(
            f"::error::issue {slug}#{num} fetch errored — treating as "
            f"unverified, skipping this check.\n"
        )
        return (True, "fetch-error — skipped")

    # status == "ok" here, so fetch_issue must have returned a payload.
    assert payload is not None
    state = payload.get("state", "")
    if state != "open":
        return (False, f"{slug}#{num} state={state!r} (must be open)")

    created = payload.get("created_at", "")
    try:
        # Gitea returns ISO-8601 with timezone; Python 3.11+
        # fromisoformat handles `Z` suffix natively from 3.11. Older
        # runtimes need explicit replace.
        created_dt = datetime.fromisoformat(created.replace("Z", "+00:00"))
    except ValueError:
        return (False, f"{slug}#{num} created_at unparseable: {created!r}")

    age = datetime.now(timezone.utc) - created_dt
    # Inclusive boundary at MAX_AGE_DAYS: `age.days` truncates to a
    # whole-day floor, so an issue created 14d 0h 5m ago has
    # `age.days == 14` and passes; one created 15d 0h 0m ago has
    # `age.days == 15` and fails. This is the convention specified
    # in internal#350 ("≤14 days old").
    if age.days > max_age_days:
        return (
            False,
            f"{slug}#{num} is {age.days} days old (>{max_age_days}d cap). "
            f"Close-or-renew the tracker.",
        )
    return (True, f"{slug}#{num} open, {age.days}d old, ≤{max_age_days}d")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Driver
|
||||
# ---------------------------------------------------------------------------
|
||||
def _iter_workflow_files(wf_dir: Path) -> list[Path]:
|
||||
return sorted(list(wf_dir.glob("*.yml")) + list(wf_dir.glob("*.yaml")))
|
||||
|
||||
|
||||
def run() -> int:
    """Drive the continue-on-error tracking lint.

    Returns the process exit code: 0 when every job-level
    `continue-on-error: true` has a valid tracker (or validation was
    gracefully skipped), 1 on any violation, 2 on env-contract
    violation or missing workflows dir.
    """
    wf_dir = Path(_env("WORKFLOWS_DIR", ".gitea/workflows"))
    # Env contract: a malformed MAX_AGE_DAYS previously raised an
    # uncaught ValueError (traceback) instead of the documented exit-2
    # env-contract path.
    raw_max_age = _env("MAX_AGE_DAYS", "14")
    try:
        max_age = int(raw_max_age)
    except ValueError:
        sys.stderr.write(
            f"::error::MAX_AGE_DAYS must be an integer, got {raw_max_age!r}\n"
        )
        return 2
    # Defaults for INTERNAL_REPO when unset (best-effort guess based on
    # the convention `mc#` = same repo, `internal#` = molecule-ai/internal).
    if not os.environ.get("INTERNAL_REPO"):
        os.environ["INTERNAL_REPO"] = "molecule-ai/internal"

    if not wf_dir.is_dir():
        sys.stderr.write(
            f"::error::workflows directory not found: {wf_dir}\n"
        )
        return 2

    yml_files = _iter_workflow_files(wf_dir)
    if not yml_files:
        print(f"::notice::no workflow files under {wf_dir}; nothing to lint.")
        return 0

    violations: list[str] = []
    notices: list[str] = []
    total_coe_true = 0

    for path in yml_files:
        raw = path.read_text(encoding="utf-8")
        raw_lines = raw.splitlines()
        try:
            doc = yaml.load(raw, Loader=_LineLoader)
        except yaml.YAMLError as e:
            # Unparseable workflow: skip rather than crash; the YAML
            # lint owns reporting parse errors as failures.
            sys.stderr.write(
                f"::error file={path}::YAML parse error: {e}. Skipping "
                f"this file (lint-workflow-yaml will catch separately).\n"
            )
            continue

        coe_locs = find_coe_truthies(doc, raw_lines)
        for jkey, line in coe_locs:
            total_coe_true += 1
            tracker = find_tracker_in_window(raw_lines, line)
            if tracker is None:
                # No tracker comment near the directive at all.
                violations.append(
                    f"::error file={path},line={line}::lint-continue-on-error-"
                    f"tracking (Tier 2e): job '{jkey}' has "
                    f"`continue-on-error: true` at line {line} with no "
                    f"`# mc#NNNN` or `# internal#NNNN` tracker comment "
                    f"within {WINDOW} lines. Add a tracker reference so "
                    f"this mask has a forced 14-day renewal cycle. "
                    f"Memory: feedback_chained_defects_in_never_tested_workflows."
                )
                continue
            slug, num = tracker
            ok, reason = validate_tracker(slug, num, max_age)
            if ok:
                notices.append(
                    f"::notice::{path.name} job '{jkey}' (line {line}): "
                    f"{reason}"
                )
            else:
                violations.append(
                    f"::error file={path},line={line}::lint-continue-on-error-"
                    f"tracking (Tier 2e): job '{jkey}' "
                    f"`continue-on-error: true` references {slug}#{num}, "
                    f"but {reason}. FIX: close/fix the underlying defect "
                    f"and flip continue-on-error: false, OR file a fresh "
                    f"tracker and update the comment."
                )

    for n in notices:
        print(n)

    if violations:
        print(
            f"::error::lint-continue-on-error-tracking: "
            f"{len(violations)} violation(s) across {len(yml_files)} "
            f"workflow file(s) (of {total_coe_true} `continue-on-error: "
            f"true` directives in total)."
        )
        for v in violations:
            print(v)
        return 1

    print(
        f"::notice::lint-continue-on-error-tracking: "
        f"all {total_coe_true} `continue-on-error: true` directive(s) "
        f"have valid trackers (open, ≤{max_age}d old)."
    )
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate run()'s exit code (0 clean / 1 violation / 2 env error).
    sys.exit(run())
|
||||
@ -1,361 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""lint_mask_pr_atomicity — Tier 2d structural enforcement per internal#350.
|
||||
|
||||
Rule
|
||||
----
|
||||
A PR whose diff touches `.gitea/workflows/ci.yml` AND modifies EITHER:
|
||||
|
||||
- any `continue-on-error:` value, OR
|
||||
- the `all-required` sentinel job's `needs:` block
|
||||
|
||||
must EITHER:
|
||||
|
||||
- Touch BOTH atomically in the same PR (preferred), OR
|
||||
- Cross-link the paired PR via a literal `Paired: #NNN` reference in
|
||||
the PR body OR in any commit message between BASE_SHA and HEAD_SHA.
|
||||
|
||||
The class this prevents
|
||||
-----------------------
|
||||
PR#665 (interim `continue-on-error: true` on `platform-build`) and
|
||||
PR#668 (sentinel-`needs` demotion of the same job) were designed as a
|
||||
pair but merged solo — #665 landed at 04:47Z 2026-05-12, #668 was still
|
||||
open at 05:07Z when the main-red watchdog (#674) fired. Result: ~20
|
||||
minutes of `main` red and a cascade of false-positives on unrelated PRs.
|
||||
|
||||
The lint operates on the YAML AST (PyYAML), not grep, per
|
||||
`feedback_behavior_based_ast_gates`: a refactor that moves `continue-on-error`
|
||||
between job keys, or renames the `all-required` job, would still be
|
||||
detected because we walk the parsed structure.
|
||||
|
||||
Why this works on Gitea 1.22.6
|
||||
------------------------------
|
||||
We don't use any 1.22.6-missing endpoints (no `/actions/runs/*`, no
|
||||
`branch_protections/*` — Tier 2f/g need those; Tier 2d does not). All
|
||||
required inputs come from the workflow `pull_request` event payload
|
||||
(BASE_SHA, HEAD_SHA, PR_BODY) and from local git via `git show`/`git log`.
|
||||
The auto-injected `GITHUB_TOKEN` is enough; we don't need
|
||||
DRIFT_BOT_TOKEN.
|
||||
|
||||
Exit codes
|
||||
----------
|
||||
0 — ci.yml not in diff, OR diff is no-op for the rule predicates,
|
||||
OR atomicity satisfied (both touched), OR a valid `Paired: #NNN`
|
||||
reference is present.
|
||||
1 — exactly ONE of {coe, sentinel-needs} touched AND no valid
|
||||
`Paired: #NNN` reference. The split-pair regression class.
|
||||
2 — env contract violation (BASE_SHA / HEAD_SHA missing) or YAML
|
||||
parse error on either side.
|
||||
|
||||
Env
|
||||
---
|
||||
BASE_SHA — PR base (pull_request.base.sha)
|
||||
HEAD_SHA — PR head (pull_request.head.sha)
|
||||
PR_BODY — pull_request.body (may be empty)
|
||||
CI_WORKFLOW_PATH — defaults to `.gitea/workflows/ci.yml`
|
||||
SENTINEL_JOB_KEY — defaults to `all-required`
|
||||
|
||||
Memory cross-links
|
||||
------------------
|
||||
- internal#350 (the RFC that specs this lint)
|
||||
- PR#665 / PR#668 (the empirical split-pair)
|
||||
- mc#664 (the main-red incident)
|
||||
- feedback_strict_root_only_after_class_a
|
||||
- feedback_behavior_based_ast_gates
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Any
|
||||
|
||||
# PyYAML is a hard dependency (the lint walks the parsed YAML AST, not
# raw text). Exit 2 — the env/contract-violation code — with a GitHub-
# Actions-style ::error:: annotation when it is missing.
try:
    import yaml
except ImportError:
    sys.stderr.write(
        "::error::PyYAML is required. Install with: pip install PyYAML\n"
    )
    sys.exit(2)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# YAML quirk: bare `on:` at the top level becomes Python `True` because
|
||||
# `on` is a YAML 1.1 boolean. Not used here but documented for future
|
||||
# editors who copy from this module.
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
# `Paired: #NNN` reference. `#` is mandatory, NNN must be digits. Any
# surrounding markdown/whitespace is fine. The match is case-sensitive
# on `Paired:` because lower-case `paired:` collides with conversational
# prose ("paired: see comment above") and the convention is the exact
# capitalisation. The PR number is captured in the named group `num`.
PAIRED_RE = re.compile(r"\bPaired:\s*#(?P<num>\d+)\b")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Env contract
|
||||
# ---------------------------------------------------------------------------
|
||||
def _env(key: str, default: str | None = None) -> str:
|
||||
v = os.environ.get(key, default)
|
||||
return v if v is not None else ""
|
||||
|
||||
|
||||
def _require_env(key: str) -> str:
|
||||
v = os.environ.get(key)
|
||||
if not v:
|
||||
sys.stderr.write(f"::error::missing required env var: {key}\n")
|
||||
sys.exit(2)
|
||||
return v
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# git-show helper. Returns None when the path doesn't exist on that side
|
||||
# (new file, deleted file, or rename — git returns exit 128 with "fatal:
|
||||
# path not in tree"). We treat None as "no rule predicate triggered on
|
||||
# that side".
|
||||
# ---------------------------------------------------------------------------
|
||||
def git_show(sha: str, path: str) -> str | None:
    """Contents of *path* at *sha* via `git show`.

    Returns None when git fails (new/deleted/renamed file: exit 128,
    "fatal: path not in tree") — callers treat that as "no rule
    predicate triggered on that side".
    """
    proc = subprocess.run(
        ["git", "show", f"{sha}:{path}"],
        capture_output=True,
        text=True,
    )
    return proc.stdout if proc.returncode == 0 else None
|
||||
|
||||
|
||||
def git_log_messages(base_sha: str, head_sha: str) -> str:
    """Concatenated commit messages (%B) in base..head; "" on git failure."""
    proc = subprocess.run(
        ["git", "log", "--format=%B", f"{base_sha}..{head_sha}"],
        capture_output=True,
        text=True,
    )
    return "" if proc.returncode != 0 else proc.stdout
|
||||
|
||||
|
||||
def git_diff_paths(base_sha: str, head_sha: str) -> list[str]:
    """Changed file paths in base..head; empty list on git failure."""
    proc = subprocess.run(
        ["git", "diff", "--name-only", f"{base_sha}..{head_sha}"],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        return []
    return [line for line in proc.stdout.splitlines() if line.strip()]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Predicate 1 — any `continue-on-error` value changed between base and head
|
||||
# ---------------------------------------------------------------------------
|
||||
def _collect_coe(doc: Any) -> dict[str, Any]:
|
||||
"""Walk every job in `jobs.*` and collect its continue-on-error value.
|
||||
|
||||
Returns a dict {job_key: coe_value}. Missing keys are absent from
|
||||
the dict (NOT `False` — distinguishes "added the key" from
|
||||
"unchanged absent"). Job-step `continue-on-error` is NOT considered
|
||||
— only job-level, because that's the value that masks job status
|
||||
rollup, which is the class this lint targets.
|
||||
"""
|
||||
out: dict[str, Any] = {}
|
||||
if not isinstance(doc, dict):
|
||||
return out
|
||||
jobs = doc.get("jobs")
|
||||
if not isinstance(jobs, dict):
|
||||
return out
|
||||
for k, j in jobs.items():
|
||||
if not isinstance(j, dict):
|
||||
continue
|
||||
if "continue-on-error" in j:
|
||||
out[k] = j["continue-on-error"]
|
||||
return out
|
||||
|
||||
|
||||
def coe_changed(base_doc: Any, head_doc: Any) -> tuple[bool, list[str]]:
    """Diff per-job continue-on-error values; return (changed?, reasons)."""
    before = _collect_coe(base_doc)
    after = _collect_coe(head_doc)
    diffs = [
        f"job '{key}' continue-on-error: "
        f"{before.get(key, '<absent>')!r} → {after.get(key, '<absent>')!r}"
        for key in sorted(set(before) | set(after))
        if before.get(key, "<absent>") != after.get(key, "<absent>")
    ]
    return (bool(diffs), diffs)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Predicate 2 — sentinel job's `needs:` changed
|
||||
# ---------------------------------------------------------------------------
|
||||
def _collect_needs(doc: Any, sentinel_key: str) -> list[str] | None:
|
||||
"""Return the sentinel job's needs list (sorted) or None if absent."""
|
||||
if not isinstance(doc, dict):
|
||||
return None
|
||||
jobs = doc.get("jobs")
|
||||
if not isinstance(jobs, dict):
|
||||
return None
|
||||
j = jobs.get(sentinel_key)
|
||||
if not isinstance(j, dict):
|
||||
return None
|
||||
needs = j.get("needs")
|
||||
if needs is None:
|
||||
return []
|
||||
if isinstance(needs, str):
|
||||
return [needs]
|
||||
if isinstance(needs, list):
|
||||
# Sort because `needs:` is order-insensitive at the engine
|
||||
# level; a reorder is not a semantic change and shouldn't
|
||||
# trip the lint.
|
||||
return sorted(str(x) for x in needs)
|
||||
return None
|
||||
|
||||
|
||||
def sentinel_needs_changed(
    base_doc: Any, head_doc: Any, sentinel_key: str
) -> tuple[bool, str]:
    """Compare the sentinel job's needs on both sides; (changed?, reason)."""
    before = _collect_needs(base_doc, sentinel_key)
    after = _collect_needs(head_doc, sentinel_key)
    if before != after:
        return (True, f"sentinel '{sentinel_key}'.needs: {before!r} → {after!r}")
    return (False, "")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Predicate 3 — `Paired: #NNN` present in body or any commit message
|
||||
# ---------------------------------------------------------------------------
|
||||
def find_paired_refs(pr_body: str, commit_log: str) -> list[str]:
    """Numbers of every `Paired: #NNN` found in the PR body or commit
    log, deduped and sorted."""
    numbers = {
        match.group("num")
        for source in (pr_body, commit_log)
        for match in PAIRED_RE.finditer(source or "")
    }
    return sorted(numbers)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Driver
|
||||
# ---------------------------------------------------------------------------
|
||||
def _parse(content: str | None, label: str) -> Any:
    """safe_load *content*; None passes through; a parse error exits 2
    with a ::error:: annotation naming *label* (path@sha)."""
    if content is None:
        return None
    try:
        doc = yaml.safe_load(content)
    except yaml.YAMLError as exc:
        sys.stderr.write(f"::error::YAML parse error on {label}: {exc}\n")
        sys.exit(2)
    return doc
|
||||
|
||||
|
||||
def run() -> int:
    """Drive the Tier 2d atomicity check; return the process exit code.

    0 — lint not applicable, atomic change, or valid `Paired:` reference.
    1 — split-pair regression (one predicate touched, no pairing).
    2 — env-contract / parse errors (raised inside the helpers).
    """
    base_sha = _require_env("BASE_SHA")
    head_sha = _require_env("HEAD_SHA")
    pr_body = _env("PR_BODY", "")
    ci_path = _env("CI_WORKFLOW_PATH", ".gitea/workflows/ci.yml")
    sentinel_key = _env("SENTINEL_JOB_KEY", "all-required")

    # Step 0 — is ci.yml even in the diff? If not, the lint doesn't apply.
    changed_paths = git_diff_paths(base_sha, head_sha)
    if ci_path not in changed_paths:
        print(
            f"::notice::{ci_path} not in PR diff; lint-mask-pr-atomicity "
            f"skipped (no atomicity risk)."
        )
        return 0

    # Snapshot both sides of the workflow file without checking out.
    base_yml = git_show(base_sha, ci_path)
    head_yml = git_show(head_sha, ci_path)

    base_doc = _parse(base_yml, f"{ci_path}@{base_sha}")
    head_doc = _parse(head_yml, f"{ci_path}@{head_sha}")

    # If the file is newly added (no base), no flip is possible — every
    # value is "newly introduced", not "changed". Tier 2e covers the
    # tracking-issue check for new continue-on-error: true. Exit 0.
    if base_doc is None:
        print(
            f"::notice::{ci_path} newly added in this PR; no flip to "
            f"analyse — lint-mask-pr-atomicity skipped."
        )
        return 0

    # If the file is deleted on head, ditto — no atomicity question.
    if head_doc is None:
        print(
            f"::notice::{ci_path} deleted in this PR; "
            f"lint-mask-pr-atomicity skipped."
        )
        return 0

    # Evaluate both rule predicates against the parsed ASTs.
    coe_yes, coe_reasons = coe_changed(base_doc, head_doc)
    needs_yes, needs_reason = sentinel_needs_changed(
        base_doc, head_doc, sentinel_key
    )

    # Neither predicate fired — ci.yml changed in some unrelated way.
    if not coe_yes and not needs_yes:
        print(
            f"::notice::{ci_path} touched but neither continue-on-error "
            f"nor sentinel '{sentinel_key}'.needs changed — no atomicity "
            f"risk. OK."
        )
        return 0

    # Both fired in the same PR — the preferred, atomic shape.
    if coe_yes and needs_yes:
        print(
            f"::notice::Atomic change detected: both continue-on-error "
            f"AND sentinel '{sentinel_key}'.needs touched in same PR. OK."
        )
        for r in coe_reasons:
            print(f" - {r}")
        print(f" - {needs_reason}")
        return 0

    # Exactly one side touched — require Paired: #NNN reference.
    commit_log = git_log_messages(base_sha, head_sha)
    paired = find_paired_refs(pr_body, commit_log)

    one_side = "continue-on-error" if coe_yes else f"sentinel '{sentinel_key}'.needs"
    other_side = (
        f"sentinel '{sentinel_key}'.needs" if coe_yes else "continue-on-error"
    )

    # A cross-linked paired PR satisfies the rule.
    if paired:
        print(
            f"::notice::Split-pair detected ({one_side} changed without "
            f"{other_side}), but Paired reference(s) present: "
            f"{', '.join('#' + n for n in paired)}. OK."
        )
        for r in coe_reasons:
            print(f" - {r}")
        if needs_reason:
            print(f" - {needs_reason}")
        return 0

    # The failure mode this lint exists to prevent.
    print(
        f"::error file={ci_path}::lint-mask-pr-atomicity (Tier 2d): "
        f"PR touches {one_side} in {ci_path} but NOT {other_side}, "
        f"and no `Paired: #NNN` reference was found in the PR body or "
        f"in commit messages between {base_sha[:8]}..{head_sha[:8]}. "
        f"This is the PR#665+#668 split-pair regression class "
        f"(see internal#350, mc#664). FIX: either (a) include the "
        f"matching {other_side} change in the same PR (preferred), or "
        f"(b) add `Paired: #NNN` (literal, capital P, with `#`) to the "
        f"PR body or a commit message referencing the paired PR."
    )
    for r in coe_reasons:
        print(f" - {r}")
    if needs_reason:
        print(f" - {needs_reason}")
    return 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: propagate run()'s exit code (0/1/2).
    sys.exit(run())
|
||||
@ -1,681 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""lint-pre-flip-continue-on-error — block a PR that flips a job from
|
||||
``continue-on-error: true`` to ``continue-on-error: false`` (or removes
|
||||
the key while the base had it ``true``) without proof that the job's
|
||||
recent runs on the target branch are actually green.
|
||||
|
||||
Empirical class — PR #656 / mc#664:
|
||||
PR #656 (RFC internal#219 Phase 4) flipped 5 ``platform-build``-class
|
||||
jobs ``continue-on-error: true → false`` on the basis of a
|
||||
"verified green on main via combined-status check". But that "green"
|
||||
was the LIE produced by the prior ``continue-on-error: true``:
|
||||
Gitea Quirk #10 (internal#342 + dup #287) — when a step inside a
|
||||
job marked ``continue-on-error: true`` fails, the job-level status
|
||||
is still rolled up as ``success``. So the precondition the PR
|
||||
claimed to verify was structurally fooled by the bug being
|
||||
flipped.
|
||||
|
||||
mc#664 then captured the surfaced defects (2 unrelated, mutually-
|
||||
masked regressions):
|
||||
|
||||
Class 1: sqlmock helper drift since 2f36bb9a (24 days old)
|
||||
Class 2: OFFSEC-001 contract collision since 7d1a189f (1 day old)
|
||||
|
||||
Codified 04:35Z as hongming-pc2 charter §SOP-N rule (e)
|
||||
"run-log-grep-before-flip": pull the actual run log + grep for
|
||||
``--- FAIL`` / ``FAIL\\s`` BEFORE flipping; don't trust the masked
|
||||
combined-status.
|
||||
|
||||
This script structurally enforces that rule at PR time.
|
||||
|
||||
How it works (one PR tick):
|
||||
1. Parse the diff: compare ``.gitea/workflows/*.yml`` at PR base
|
||||
vs PR head. For each file present in both, parse the YAML AST
|
||||
and walk ``jobs.<key>.continue-on-error`` on each side. A
|
||||
"flip" is base ∈ {true} AND head ∈ {false, None/absent}. We
|
||||
coerce truthy/falsy per YAML semantics (PyYAML normalizes
|
||||
``true``/``True``/``yes`` to ``True``).
|
||||
2. For each flipped job, derive its commit-status context name as
|
||||
``"{workflow.name} / {job.name or job.key} (push)"`` — that's
|
||||
how Gitea Actions emits the context for runs on
|
||||
``main``/``staging`` (push event, see also expected_context()
|
||||
in ci-required-drift.py).
|
||||
3. Pull the last N commits of the target branch (PR base), fetch
|
||||
combined commit-status per commit, scan ``statuses[]`` for
|
||||
contexts matching ANY of the flipped jobs. For each match,
|
||||
fetch the actual run log via the web-UI route
|
||||
``{server_url}/{repo}/actions/runs/{run_id}/jobs/{job_idx}/logs``
|
||||
(per memory ``reference_gitea_actions_log_fetch`` — Gitea 1.22.6
|
||||
lacks REST ``/actions/runs/*`` endpoints; the web-UI route is the
|
||||
only working path; see ``reference_gitea_1_22_6_lacks_rest_rerun_endpoints``).
|
||||
4. Grep each log for the Go-test failure markers ``--- FAIL`` /
|
||||
``FAIL\\s+<package>`` AND the bash-step error sentinel
|
||||
``::error::``. If ANY recent log shows any of these AND the
|
||||
status itself reads ``success``, the job was masked. ``::error::``
|
||||
the flip with the offending test name + offending run URL +
|
||||
the regression commit (HEAD of the run).
|
||||
5. Exit 1 if any flips have at least one masked run; exit 0
|
||||
otherwise.
|
||||
|
||||
Halt-on-noise contract:
|
||||
- If a recent log fetch 404s (already-pruned-via-act_runner-gc,
|
||||
transient gitea-web outage): emit ``::warning::`` and treat the
|
||||
run as "log unavailable" — does NOT block the flip; logged so
|
||||
a curious reviewer can re-run.
|
||||
- If a flipped job has ZERO recent runs on the target branch (newly
|
||||
added workflow): emit ``::warning::`` "no run history to verify"
|
||||
and allow the flip. This is the only way a NEW workflow can ever
|
||||
ship with ``continue-on-error: false``; otherwise we'd have a
|
||||
chicken-and-egg.
|
||||
|
||||
Behavior-based AST gate per ``feedback_behavior_based_ast_gates``:
|
||||
- YAML parsed via PyYAML safe_load on BOTH sides of the diff
|
||||
- No grep-by-line — formatting changes (comment churn, key order)
|
||||
don't false-positive a flip
|
||||
- Job-key match — so a rename ``platform-build → core-be-build``
|
||||
appears as a DELETE + an ADD, not a flip (the delete side has no
|
||||
new value to compare against; the add side has no base side).
|
||||
|
||||
Run locally (works against this repo, requires PyYAML + Gitea token
|
||||
that can read combined-commit-status):
|
||||
|
||||
GITEA_TOKEN=... GITEA_HOST=git.moleculesai.app \\
|
||||
REPO=molecule-ai/molecule-core BASE_REF=main \\
|
||||
BASE_SHA=$(git rev-parse origin/main) \\
|
||||
HEAD_SHA=$(git rev-parse HEAD) \\
|
||||
python3 .gitea/scripts/lint_pre_flip_continue_on_error.py \\
|
||||
--dry-run
|
||||
|
||||
Cross-links: PR#656, mc#664, PR#665 (the interim re-mask),
|
||||
Quirk #10 (internal#342 + dup #287), hongming-pc2 charter §SOP-N
|
||||
rule (e), feedback_strict_root_only_after_class_a,
|
||||
feedback_no_shared_persona_token_use.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from typing import Any
|
||||
|
||||
import yaml # PyYAML 6.0.2 — installed by the workflow before this runs.
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Environment (read at module-import; runtime contract enforced in main())
|
||||
# --------------------------------------------------------------------------
|
||||
def _env(key: str, *, default: str = "") -> str:
|
||||
return os.environ.get(key, default)
|
||||
|
||||
|
||||
# Runtime configuration, read once at import. The hard contract
# (non-empty values) is enforced later by _require_runtime_env() so the
# module stays importable for --dry-run / unit use.
GITEA_TOKEN = _env("GITEA_TOKEN")
GITEA_HOST = _env("GITEA_HOST")
REPO = _env("REPO")
BASE_REF = _env("BASE_REF", default="main")
BASE_SHA = _env("BASE_SHA")
HEAD_SHA = _env("HEAD_SHA")
# How many recent commits to scan on the target branch. 5 by default;
# enough to catch a job that only fails intermittently, not so many
# that the script paginates needlessly. Per spec.
RECENT_COMMITS_N = int(_env("RECENT_COMMITS_N", default="5"))

# REPO is "owner/name"; pad with "" so a malformed value degrades to
# empty strings instead of raising at import time.
OWNER, NAME = (REPO.split("/", 1) + [""])[:2] if REPO else ("", "")
API = f"https://{GITEA_HOST}/api/v1" if GITEA_HOST else ""
WEB = f"https://{GITEA_HOST}" if GITEA_HOST else ""

# Failure markers we grep for in the run log.
# --- FAIL  — Go test failure marker
# FAIL\s    — `FAIL github.com/x/y` package-level rollup (tab + space forms)
# ::error:: — bash-step `::error::` lines (the lint-curl-status-capture
#             pattern: a `python3 <<PY` block writing `::error::` then
#             sys.exit(1); also any shell `echo "::error::..."` from
#             jobs that wrap pytest/eslint/etc. and convert
#             non-zero exits into masked-by-CoE status)
FAIL_PATTERNS = (
    "--- FAIL",
    "FAIL\t",
    "FAIL ",
    "::error::",
)
|
||||
|
||||
|
||||
def _require_runtime_env() -> None:
|
||||
for key in ("GITEA_TOKEN", "GITEA_HOST", "REPO", "BASE_REF", "BASE_SHA", "HEAD_SHA"):
|
||||
if not os.environ.get(key):
|
||||
sys.stderr.write(f"::error::missing required env var: {key}\n")
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Tiny HTTP helper (no requests dependency)
|
||||
# Mirrors the api()/ApiError contract in ci-required-drift.py +
|
||||
# main-red-watchdog.py per feedback_api_helper_must_raise_not_return_dict.
|
||||
# --------------------------------------------------------------------------
|
||||
class ApiError(RuntimeError):
    """Raised when a Gitea API/web call cannot be trusted to have succeeded.

    Soft-failure on non-2xx is the duplicate-write bug factory in
    find-or-create flows (PR #112 Five-Axis). Here it would mean a
    transient gitea-web 502 silently allows a flip whose recent runs
    we couldn't actually verify — exactly the regression class this
    lint exists to close. Callers catch this single type; see http().
    """
|
||||
|
||||
|
||||
def http(
    method: str,
    url: str,
    *,
    body: dict | None = None,
    headers: dict[str, str] | None = None,
    expect_json: bool = True,
    timeout: int = 30,
) -> tuple[int, Any, bytes]:
    """Tiny HTTP helper around urllib.

    Returns (status, parsed_or_None, raw_bytes). Raises ApiError on any
    non-2xx response AND on transport-level failures (DNS resolution,
    refused connection, timeout), so callers only ever need to catch
    ApiError. ``expect_json=False`` returns raw bytes in the parsed
    slot (for log-fetch from the web-UI which returns text/plain).
    """
    final_headers = {
        "Authorization": f"token {GITEA_TOKEN}",
        "Accept": "application/json" if expect_json else "text/plain",
    }
    if headers:
        final_headers.update(headers)
    data = None
    if body is not None:
        data = json.dumps(body).encode("utf-8")
        final_headers["Content-Type"] = "application/json"
    req = urllib.request.Request(url, method=method, data=data, headers=final_headers)
    try:
        with urllib.request.urlopen(req, timeout=timeout) as resp:
            raw = resp.read()
            status = resp.status
    except urllib.error.HTTPError as e:
        # Non-2xx with a response body — fold into the status check below.
        raw = e.read() or b""
        status = e.code
    except urllib.error.URLError as e:
        # BUGFIX: transport failures (DNS, connection refused, timeout)
        # previously escaped as raw URLError, bypassing every
        # `except ApiError` caller — notably fetch_log()'s documented
        # warn-not-fail contract. Normalize them into ApiError.
        raise ApiError(f"{method} {url} → transport error: {e.reason}") from e

    if not (200 <= status < 300):
        snippet = raw[:500].decode("utf-8", errors="replace") if raw else ""
        raise ApiError(f"{method} {url} → HTTP {status}: {snippet}")

    if not expect_json:
        return status, raw, raw
    if not raw:
        return status, None, raw
    try:
        return status, json.loads(raw), raw
    except json.JSONDecodeError as e:
        raise ApiError(f"{method} {url} → HTTP {status} but body is not JSON: {e}") from e
|
||||
|
||||
|
||||
def api(method: str, path: str, *, body: dict | None = None, query: dict[str, str] | None = None) -> tuple[int, Any]:
    """Read-shaped Gitea REST helper. Path is API-relative (``/repos/...``)."""
    url = f"{API}{path}"
    if query:
        url += "?" + urllib.parse.urlencode(query)
    status, parsed, _raw = http(method, url, body=body, expect_json=True)
    return status, parsed
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# YAML parsing — coerce truthy/falsy for continue-on-error
|
||||
# --------------------------------------------------------------------------
|
||||
def _coerce_coe(val: Any) -> bool:
|
||||
"""Coerce a continue-on-error YAML value to bool.
|
||||
|
||||
PyYAML safe_load normalizes ``true``/``True``/``yes``/``on`` to
|
||||
Python ``True`` and ``false``/``False``/``no``/``off`` / absence
|
||||
to ``False`` (we treat absence/None as False here too — that's the
|
||||
GitHub Actions default semantics).
|
||||
|
||||
Edge cases:
|
||||
- String ``"true"`` (quoted in YAML) — kept as the string
|
||||
``"true"``, falsy under bool() but a flip we DO care about
|
||||
catching. Normalize string forms case-insensitively to bool
|
||||
so the diff is consistent with the runtime behavior of
|
||||
Gitea Actions, which YAML-parses the same way.
|
||||
"""
|
||||
if isinstance(val, bool):
|
||||
return val
|
||||
if val is None:
|
||||
return False
|
||||
if isinstance(val, str):
|
||||
return val.strip().lower() in ("true", "yes", "on", "1")
|
||||
return bool(val)
|
||||
|
||||
|
||||
def jobs_coe_map(workflow_doc: dict) -> dict[str, bool]:
    """{job_key: coerced job-level continue-on-error} for every job.

    Job-level only — per-step ``continue-on-error`` is a separate
    masking class handled by the test suite + reviewer, not this gate
    (see Future Work in the workflow YAML).
    """
    jobs = workflow_doc.get("jobs")
    if not isinstance(jobs, dict):
        return {}
    return {
        key: _coerce_coe(job.get("continue-on-error"))
        for key, job in jobs.items()
        if isinstance(job, dict)
    }
|
||||
|
||||
|
||||
def workflow_name(workflow_doc: dict, *, fallback: str = "") -> str:
    """Top-level ``name:`` of the workflow, stripped; otherwise
    *fallback* (the filename sans extension, per Gitea Actions)."""
    candidate = workflow_doc.get("name")
    if not isinstance(candidate, str) or not candidate.strip():
        return fallback
    return candidate.strip()
|
||||
|
||||
|
||||
def job_display_name(workflow_doc: dict, job_key: str) -> str:
    """``jobs.<key>.name`` when present and non-blank, else the key.
    Mirrors expected_context() in ci-required-drift.py."""
    job = workflow_doc.get("jobs", {}).get(job_key)
    label = job.get("name") if isinstance(job, dict) else None
    if isinstance(label, str) and label.strip():
        return label.strip()
    return job_key
|
||||
|
||||
|
||||
def context_name(workflow_name_str: str, job_name_str: str, event: str = "push") -> str:
    """Render the commit-status context the way Gitea Actions emits it:
    ``"<workflow> / <job> (<event>)"``. ``event`` defaults to "push"
    because recent-runs-on-main are push events; pass "pull_request"
    for PR-context lookups."""
    return " / ".join((workflow_name_str, f"{job_name_str} ({event})"))
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Diff detection — flips, not arbitrary changes
|
||||
# --------------------------------------------------------------------------
|
||||
def detect_flips(
    base_workflows: dict[str, str],
    head_workflows: dict[str, str],
) -> list[dict]:
    """Compare per-file CoE maps; return a list of flip records.

    Inputs are ``{path: yaml_text}`` for both sides. Output records
    have the shape::

        {
            "workflow_path": ".gitea/workflows/ci.yml",
            "workflow_name": "CI",
            "job_key": "platform-build",
            "job_name": "Platform (Go)",
            "context": "CI / Platform (Go) (push)",
        }

    A flip is base[CoE] is True AND head[CoE] is False. Files only
    present on one side are skipped — adding a new workflow with
    ``CoE: false`` is fine (no history to mask), and removing a
    workflow can't possibly flip anything.
    """
    flips: list[dict] = []
    for path, base_text in base_workflows.items():
        if path not in head_workflows:
            continue
        try:
            # `or {}` guards the empty-file case: safe_load("") is None.
            base_doc = yaml.safe_load(base_text) or {}
            head_doc = yaml.safe_load(head_workflows[path]) or {}
        except yaml.YAMLError as e:
            # Don't block on a parse error — the YAML lint workflows
            # catch invalid YAML separately. Just warn so the failing
            # file is visible.
            sys.stderr.write(f"::warning file={path}::YAML parse error: {e}\n")
            continue
        if not isinstance(base_doc, dict) or not isinstance(head_doc, dict):
            continue
        base_map = jobs_coe_map(base_doc)
        head_map = jobs_coe_map(head_doc)
        # Workflow display name falls back to the filename sans extension,
        # matching Gitea Actions' own behavior for unnamed workflows.
        wf_name = workflow_name(head_doc, fallback=os.path.basename(path).rsplit(".", 1)[0])
        for job_key, base_val in base_map.items():
            if job_key not in head_map:
                continue  # job removed — not a flip
            if base_val is True and head_map[job_key] is False:
                flips.append({
                    "workflow_path": path,
                    "workflow_name": wf_name,
                    "job_key": job_key,
                    "job_name": job_display_name(head_doc, job_key),
                    # Context rendered for push events: recent runs on the
                    # target branch are push runs.
                    "context": context_name(wf_name, job_display_name(head_doc, job_key), "push"),
                })
    return flips
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Git: snapshot every .gitea/workflows/*.yml at a SHA (no checkout)
|
||||
# --------------------------------------------------------------------------
|
||||
def _git(*args: str, cwd: str | None = None) -> str:
    """Run ``git <args>`` and return stdout; RuntimeError on failure."""
    proc = subprocess.run(
        ["git", *args],
        capture_output=True,
        text=True,
        check=False,
        cwd=cwd,
    )
    if proc.returncode:
        raise RuntimeError(f"git {args!r} failed: {proc.stderr.strip()}")
    return proc.stdout
|
||||
|
||||
|
||||
def workflows_at_sha(sha: str, *, repo_dir: str | None = None) -> dict[str, str]:
    """Snapshot every ``.gitea/workflows/*.yml|yaml`` blob at *sha*.

    Uses ``git ls-tree`` + ``git show`` so the SHA never needs to be
    checked out (the workflow runs on the PR head; the base SHA is
    fetched, not checked out).
    """
    snapshot: dict[str, str] = {}
    listing = _git("ls-tree", "-r", "--name-only", sha, ".gitea/workflows/", cwd=repo_dir)
    for raw_name in listing.splitlines():
        name = raw_name.strip()
        if not name.endswith((".yml", ".yaml")):
            continue
        try:
            snapshot[name] = _git("show", f"{sha}:{name}", cwd=repo_dir)
        except RuntimeError:
            # Symlink or other non-blob; skip.
            continue
    return snapshot
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Gitea: recent commits + per-commit combined status + log fetch
|
||||
# --------------------------------------------------------------------------
|
||||
def recent_commits_on_branch(branch: str, n: int) -> list[str]:
    """Last *n* commit SHAs on *branch* via REST ``/commits`` with
    ``sha=branch&limit=n`` (callers treat the result as a set, so
    ordering is irrelevant)."""
    _, payload = api(
        "GET",
        f"/repos/{OWNER}/{NAME}/commits",
        query={"sha": branch, "limit": str(n)},
    )
    if not isinstance(payload, list):
        raise ApiError(f"/commits for {branch} returned non-list: {type(payload).__name__}")
    shas: list[str] = []
    for entry in payload:
        if not isinstance(entry, dict):
            continue
        sha = entry.get("sha") or (entry.get("commit", {}) or {}).get("id")
        if isinstance(sha, str) and len(sha) >= 7:
            shas.append(sha)
    return shas
|
||||
|
||||
|
||||
def combined_status(sha: str) -> dict:
    """Combined commit status for a SHA. Same shape as
    ``main-red-watchdog.get_combined_status``.

    Raises:
        ApiError: when the endpoint returns a non-dict body.
    """
    _, doc = api("GET", f"/repos/{OWNER}/{NAME}/commits/{sha}/status")
    if isinstance(doc, dict):
        return doc
    raise ApiError(f"combined-status for {sha} not a dict")
|
||||
|
||||
|
||||
def _entry_state(s: dict) -> str:
    """Per-entry state of a combined-status item.

    Gitea 1.22.6 schema asymmetry: the top-level document uses ``state``
    while per-entry items use ``status``. Prefer ``status``, fall back to
    ``state``, and collapse anything falsy to "".
    """
    for key in ("status", "state"):
        value = s.get(key)
        if value:
            return value
    return ""
|
||||
|
||||
|
||||
def fetch_log(target_url: str) -> str | None:
    """Fetch a job log given its web-UI ``target_url`` (e.g.
    ``/molecule-ai/molecule-core/actions/runs/13494/jobs/0``).

    Per ``reference_gitea_actions_log_fetch``: append ``/logs`` to the
    job route. Per ``reference_gitea_1_22_6_lacks_rest_rerun_endpoints``:
    Gitea 1.22.6 lacks the REST ``/api/v1/.../actions/runs/*`` path; the
    web-UI route is the only working endpoint until 1.24+.

    Returns the log text on success, ``None`` on 404 / log-pruned /
    network error (caller treats None as "log unavailable, warn-not-fail").
    """
    if not target_url:
        return None
    # target_url may be relative ("/owner/repo/...") or absolute; both
    # forms need "/logs" appended to the job sub-path.
    full = f"{WEB}{target_url}" if target_url.startswith("/") else target_url
    if not full.endswith("/logs"):
        full = f"{full}/logs"
    try:
        _, payload, _ = http("GET", full, expect_json=False, timeout=60)
    except ApiError as exc:
        sys.stderr.write(f"::warning::log fetch failed for {full}: {exc}\n")
        return None
    if not isinstance(payload, bytes):
        return None
    return payload.decode("utf-8", errors="replace")
|
||||
|
||||
|
||||
def grep_fail_markers(log_text: str) -> list[str]:
    """Return up to 5 sample lines hitting any FAIL_PATTERNS marker.

    Empty list = clean log. At most one sample is recorded per line,
    and each sample is truncated to keep error output bounded.
    """
    samples: list[str] = []
    for raw in log_text.splitlines():
        if any(marker in raw for marker in FAIL_PATTERNS):
            samples.append(raw.strip()[:240])
            if len(samples) >= 5:
                break
    return samples
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Verification: for one flip, scan recent runs on BASE_REF
|
||||
# --------------------------------------------------------------------------
|
||||
def verify_flip(flip: dict, branch: str, n: int) -> dict:
    """Scan the last ``n`` commits on ``branch``. For each commit whose
    combined status contains a context matching ``flip["context"]``,
    fetch the run log and grep for FAIL markers.

    Returns::

        {
            "flip": flip,
            "checked_commits": int, # how many commits had a matching context
            "masked_runs": [ # runs where log shows FAIL despite status==success
                {"sha": "...", "status": "success", "target_url": "...", "samples": [...]},
                ...
            ],
            "fail_runs": [ # runs where status itself is failure/error
                {"sha": "...", "status": "failure", "target_url": "...", "samples": [...]},
                ...
            ],
            "warnings": [str], # log-unavailable warnings (not blocking)
        }

    Blocking condition: ``masked_runs`` OR ``fail_runs`` non-empty.
    A ``success`` status with a clean log is the only "OK to flip"
    outcome (per hongming-pc2 §SOP-N rule (e)).
    """
    target_context = flip["context"]
    # Result accumulator; every early-exit path returns this same dict.
    result = {
        "flip": flip,
        "checked_commits": 0,
        "masked_runs": [],
        "fail_runs": [],
        "warnings": [],
    }

    shas = recent_commits_on_branch(branch, n)
    if not shas:
        # Nothing to scan — warn but do not block (caller decides).
        result["warnings"].append(
            f"no recent commits on {branch} (cannot verify flip)"
        )
        return result

    for sha in shas:
        try:
            status_doc = combined_status(sha)
        except ApiError as e:
            # Per-commit API failure is non-fatal; record and move on.
            result["warnings"].append(f"combined-status for {sha}: {e}")
            continue
        statuses = status_doc.get("statuses") or []
        # First entry matching the context name. Newest SHAs come
        # first; one entry per context per SHA is the usual shape.
        for s in statuses:
            if not isinstance(s, dict):
                continue
            if s.get("context") != target_context:
                continue
            result["checked_commits"] += 1
            state = _entry_state(s)
            target_url = s.get("target_url") or ""
            log_text = fetch_log(target_url)
            if log_text is None:
                result["warnings"].append(
                    f"log unavailable for {sha} {target_context}"
                )
                # Still record the status itself if it's red — that's
                # a hard signal that doesn't need log access.
                if state in ("failure", "error"):
                    result["fail_runs"].append({
                        "sha": sha,
                        "status": state,
                        "target_url": target_url,
                        "samples": ["[log unavailable; status itself is " + state + "]"],
                    })
                # NOTE: log-unavailable + status==success is NOT treated
                # as masked — we simply can't tell; the warning stands.
                break
            samples = grep_fail_markers(log_text)
            if state in ("failure", "error"):
                result["fail_runs"].append({
                    "sha": sha,
                    "status": state,
                    "target_url": target_url,
                    "samples": samples or ["[no FAIL markers found but status is " + state + "]"],
                })
            elif samples and state == "success":
                # The bug class: status==success while log shows FAIL.
                # That's exactly Quirk #10 (continue-on-error masking).
                result["masked_runs"].append({
                    "sha": sha,
                    "status": state,
                    "target_url": target_url,
                    "samples": samples,
                })
            # Either way, we matched one context entry for this SHA;
            # don't keep looping `statuses[]`.
            break

    if result["checked_commits"] == 0:
        # No evidence either way: surfaced as a warning, not a block.
        result["warnings"].append(
            f"no runs of {target_context!r} found in the last {n} commits on "
            f"{branch} — cannot verify; allowing flip with warning"
        )
    return result
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Report rendering
|
||||
# --------------------------------------------------------------------------
|
||||
def render_flip_report(verdict: dict) -> str:
    """Human-readable multi-line detail block for one flip verdict."""
    flip = verdict["flip"]
    out: list[str] = [
        f"job: {flip['job_key']} ({flip['context']})",
        f"  workflow: {flip['workflow_path']}",
        f"  checked_commits: {verdict['checked_commits']}",
    ]

    def _clickable(url: str) -> str:
        # target_url may be relative; render the absolute form for
        # click-through.
        return f"{WEB}{url}" if url.startswith("/") else url

    for run in verdict["fail_runs"]:
        url = _clickable(run["target_url"])
        out.append(f"  fail run {run['sha'][:10]} (status={run['status']}): {url}")
        out.extend(f"    | {sample}" for sample in run["samples"])

    for run in verdict["masked_runs"]:
        url = _clickable(run["target_url"])
        out.append(
            f"  MASKED run {run['sha'][:10]} (status=success, log shows FAIL): {url}"
        )
        out.extend(f"    | {sample}" for sample in run["samples"])

    out.extend(f"  warning: {w}" for w in verdict["warnings"])
    return "\n".join(out)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Main
|
||||
# --------------------------------------------------------------------------
|
||||
def _parse_args(argv: list[str] | None = None) -> argparse.Namespace:
|
||||
p = argparse.ArgumentParser(
|
||||
prog="lint-pre-flip-continue-on-error",
|
||||
description="Block a PR that flips continue-on-error true→false "
|
||||
"without proof recent runs are actually green.",
|
||||
)
|
||||
p.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Detect + print findings to stdout; never exit non-zero. "
|
||||
"Useful for local testing.",
|
||||
)
|
||||
return p.parse_args(argv)
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
    """Entry point for the pre-flip lint.

    Returns 0 when there are no flips, all flips verify clean, or
    ``--dry-run`` suppresses the failure; returns 1 when at least one
    flip fails verification in non-dry-run mode.

    Fix: the original fell off the end (implicit ``return None``) when
    flips existed but none were bad — ``sys.exit(None)`` happens to exit
    with status 0, but it violated the declared ``-> int`` contract.
    Every path now returns an explicit int.
    """
    args = _parse_args(argv)
    _require_runtime_env()

    base_workflows = workflows_at_sha(BASE_SHA)
    head_workflows = workflows_at_sha(HEAD_SHA)
    flips = detect_flips(base_workflows, head_workflows)

    if not flips:
        print("::notice::no continue-on-error true→false flips in this PR")
        return 0

    print(f"::notice::detected {len(flips)} continue-on-error true→false flip(s); verifying recent runs on {BASE_REF}")
    bad_flips: list[dict] = []
    for flip in flips:
        verdict = verify_flip(flip, BASE_REF, RECENT_COMMITS_N)
        report = render_flip_report(verdict)
        if verdict["fail_runs"] or verdict["masked_runs"]:
            print(f"::error file={flip['workflow_path']}::flip of {flip['job_key']} "
                  f"({flip['context']}) blocked — recent runs on {BASE_REF} show "
                  f"FAIL markers OR are red. Pull each run log below + grep "
                  f"`--- FAIL` / `FAIL ` / `::error::` — DON'T trust the masked "
                  f"combined-status. See hongming-pc2 charter §SOP-N rule (e). "
                  f"PR#656 / mc#664 reference class.")
            bad_flips.append(verdict)
        else:
            print(f"::notice::flip of {flip['job_key']} ({flip['context']}) is safe — "
                  f"{verdict['checked_commits']} recent run(s), no FAIL markers")
        # Always print the per-flip detail block so the human-readable
        # report is in the run log for both safe and unsafe flips.
        print(f"::group::flip detail: {flip['job_key']}")
        print(report)
        print("::endgroup::")

    if bad_flips and not args.dry_run:
        print(f"::error::{len(bad_flips)}/{len(flips)} flip(s) failed pre-flip verification")
        return 1
    if bad_flips and args.dry_run:
        print(f"::warning::[dry-run] {len(bad_flips)}/{len(flips)} flip(s) WOULD fail; exit 0 forced")
    # Explicit success return (previously fell through with None).
    return 0
|
||||
|
||||
|
||||
# Script entry point: the process exit code IS the lint verdict
# (0 = pass / skipped, 1 = blocked).
if __name__ == "__main__":
    sys.exit(main())
|
||||
@ -1,526 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""lint_required_context_exists_in_bp — Tier 2g per internal#350.
|
||||
|
||||
Rule
|
||||
----
|
||||
When a PR adds a NEW commit-status emission (a context that didn't
|
||||
exist on the base side), the workflow file must carry one of three
|
||||
directive comments adjacent to the new job:
|
||||
|
||||
(a) `# bp-required: yes`
|
||||
The new context MUST already be in
|
||||
`branch_protections/<branch>.status_check_contexts`. Verified
|
||||
via Gitea API at PR time.
|
||||
|
||||
(b) `# bp-required: pending #NNN`
|
||||
Acknowledged asymmetry; references an OPEN tracking issue that
|
||||
will follow up with the BP PATCH.
|
||||
|
||||
(c) `# bp-exempt: <free-text reason>`
|
||||
Informational job, not intended to be a required gate.
|
||||
|
||||
No directive on a new emitter → FAIL with a 3-option fix-hint.
|
||||
|
||||
The class this prevents
|
||||
-----------------------
|
||||
PR#656 added `CI / all-required (pull_request)` as a sentinel context
|
||||
that workflows emit, but BP did NOT list it. When `platform-build`
|
||||
failed, `all-required` failed, but BP let the PR merge anyway →
|
||||
cascade to mc#664. With this lint, PR#656 would have been blocked
|
||||
until either the BP PATCH ran alongside OR the author added a
|
||||
`bp-required: pending` directive.
|
||||
|
||||
Why directives MUST live in the workflow YAML
|
||||
---------------------------------------------
|
||||
The directive comment lives with the emitter so a scheduled
|
||||
audit (Tier 2f, daily) can read the same source. PR-body-only
|
||||
directives invisibly evaporate on merge — the asymmetry would
|
||||
return to undetected. PR-body claims are advisory; workflow-file
|
||||
comments are the contract.
|
||||
|
||||
How "new emission" is detected
|
||||
------------------------------
|
||||
Diff base..head over `.gitea/workflows/*.yml`. For each YAML file
|
||||
that's added or modified:
|
||||
- Parse both base-side and head-side via PyYAML AST.
|
||||
- Enumerate emitted contexts on each side using the same rules as
|
||||
Tier 2f (workflow.name + job.name|key + event-mapping).
|
||||
- `new_contexts = head_contexts - base_contexts`.
|
||||
|
||||
If `new_contexts` is empty after de-dup, no rule applies → pass.
|
||||
|
||||
Per `feedback_behavior_based_ast_gates`: comment scanning uses raw
|
||||
text in a small window around the job-key line, NOT regex over the
|
||||
full file. This avoids matching `bp-required:` mentioned in a
|
||||
comment unrelated to the new job.
|
||||
|
||||
Exit codes
|
||||
----------
|
||||
0 — no new emissions, all new emissions have valid directives,
|
||||
or BP read errored (graceful-degrade per Tier 2a contract).
|
||||
1 — at least one new emission lacks a directive, or has
|
||||
`bp-required: yes` but the context is missing from BP.
|
||||
2 — env contract violation or YAML parse error.
|
||||
|
||||
Env
|
||||
---
|
||||
BASE_SHA — PR base SHA
|
||||
HEAD_SHA — PR head SHA
|
||||
GITEA_TOKEN — DRIFT_BOT_TOKEN (repo-admin for BP read)
|
||||
GITEA_HOST — e.g. git.moleculesai.app
|
||||
REPO — owner/name
|
||||
BRANCH — defaults to `main`
|
||||
WORKFLOWS_DIR — defaults to `.gitea/workflows`
|
||||
|
||||
Memory cross-links
|
||||
------------------
|
||||
- internal#350 (the RFC that specs this lint)
|
||||
- PR#656 (the empirical case that prompted Tier 2g)
|
||||
- mc#664 (the surfaced cascade)
|
||||
- feedback_phantom_required_check_after_gitea_migration (Tier 2f cousin)
|
||||
- feedback_behavior_based_ast_gates
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from typing import Any
|
||||
|
||||
# PyYAML is a third-party dependency; fail fast with an actionable
# install hint. Exit 2 = env/contract violation per the module docstring.
try:
    import yaml
except ImportError:
    sys.stderr.write(
        "::error::PyYAML is required. Install with: pip install PyYAML\n"
    )
    sys.exit(2)
|
||||
|
||||
|
||||
# Directive comment patterns. All three allow optional whitespace after
# the `#` and around the colon. NOTE: the old comment here claimed the
# `bp-` prefix match was case-sensitive, but every pattern compiles with
# re.IGNORECASE — matching is case-insensitive end to end.
BP_REQUIRED_YES_RE = re.compile(
    r"#\s*bp-required:\s*yes\b", re.IGNORECASE
)
# `pending #NNN` captures the tracking-issue number as named group "num".
BP_REQUIRED_PENDING_RE = re.compile(
    r"#\s*bp-required:\s*pending\s*#(?P<num>\d+)\b", re.IGNORECASE
)
# Exempt requires at least one non-space character of free-text reason.
BP_EXEMPT_RE = re.compile(
    r"#\s*bp-exempt:\s*\S", re.IGNORECASE
)
|
||||
|
||||
|
||||
# Gitea event-mapping (same as Tier 2f): maps workflow `on:` trigger
# names to the event suffix used in status-context strings.
# `pull_request_target` collapses to "pull_request"; any trigger not in
# this map contributes no context (filtered out in _on_events).
_EVENT_MAP = {
    "pull_request": "pull_request",
    "pull_request_target": "pull_request",
    "push": "push",
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Env
|
||||
# ---------------------------------------------------------------------------
|
||||
def _env(key: str, default: str | None = None) -> str:
|
||||
v = os.environ.get(key, default)
|
||||
return v if v is not None else ""
|
||||
|
||||
|
||||
def _require_env(key: str) -> str:
    """Return env var ``key``; missing/empty exits 2 with a ::error line."""
    value = os.environ.get(key)
    if value:
        return value
    sys.stderr.write(f"::error::missing required env var: {key}\n")
    sys.exit(2)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# API helper (same contract as Tier 2f).
|
||||
# ---------------------------------------------------------------------------
|
||||
def api(
    method: str,
    path: str,
    *,
    body: dict | None = None,
    query: dict[str, str] | None = None,
) -> tuple[str, Any]:
    """Call the Gitea REST API and collapse failures into a status word.

    Returns ``(status, payload)`` where status is one of:
      "ok"        — 2xx; payload is the parsed JSON body (None if empty)
      "not_found" — HTTP 404
      "forbidden" — HTTP 401 or 403
      "error"     — any other HTTP error, network/timeout failure, or
                    unparseable JSON body
    Never raises — callers branch on the status word instead.
    """
    host = _env("GITEA_HOST")
    token = _env("GITEA_TOKEN")
    url = f"https://{host}/api/v1{path}"
    if query:
        url = f"{url}?{urllib.parse.urlencode(query)}"
    data = None
    headers = {
        "Authorization": f"token {token}",
        "Accept": "application/json",
    }
    if body is not None:
        data = json.dumps(body).encode("utf-8")
        headers["Content-Type"] = "application/json"
    req = urllib.request.Request(url, method=method, data=data, headers=headers)
    try:
        with urllib.request.urlopen(req, timeout=30) as resp:
            raw = resp.read()
            # Empty body (e.g. 204) is still a success.
            if not raw:
                return ("ok", None)
            return ("ok", json.loads(raw))
    except urllib.error.HTTPError as e:
        if e.code == 404:
            return ("not_found", None)
        if e.code in (401, 403):
            return ("forbidden", None)
        return ("error", None)
    except (urllib.error.URLError, TimeoutError, json.JSONDecodeError):
        # Network failure / timeout / malformed JSON all degrade to "error".
        return ("error", None)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# git helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
def git_show(sha: str, path: str) -> str | None:
    """Blob content at ``sha:path``; None when git cannot resolve it
    (missing file, unknown SHA, etc.)."""
    proc = subprocess.run(
        ["git", "show", f"{sha}:{path}"], capture_output=True, text=True
    )
    return proc.stdout if proc.returncode == 0 else None
|
||||
|
||||
|
||||
def git_diff_paths(base: str, head: str) -> list[str]:
    """Paths changed between two SHAs; [] when the diff command fails."""
    proc = subprocess.run(
        ["git", "diff", "--name-only", f"{base}..{head}"],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        return []
    return [line for line in proc.stdout.splitlines() if line.strip()]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Workflow context enumeration (mirror Tier 2f).
|
||||
# ---------------------------------------------------------------------------
|
||||
def _get_on(d: Any) -> Any:
    """Fetch the workflow trigger value.

    PyYAML parses a bare ``on:`` key as the boolean True (YAML 1.1), so
    the trigger may live under either the string "on" or the key True.
    """
    if not isinstance(d, dict):
        return None
    for key in ("on", True):
        if key in d:
            return d[key]
    return None
|
||||
|
||||
|
||||
def _on_events(doc: Any) -> set[str]:
    """Normalized trigger events for a workflow doc.

    Accepts the three YAML shapes of ``on:`` (string, list, mapping),
    collects string event names, then maps them through _EVENT_MAP —
    unmapped triggers are dropped.
    """
    on = _get_on(doc)
    names: set[str] = set()
    if isinstance(on, str):
        names = {on}
    elif isinstance(on, list):
        names = {e for e in on if isinstance(e, str)}
    elif isinstance(on, dict):
        names = {k for k in on if isinstance(k, str)}
    return {_EVENT_MAP[name] for name in names if name in _EVENT_MAP}
|
||||
|
||||
|
||||
def _job_display(jbody: dict, jkey: str) -> str:
    """Display name for a job: its non-empty ``name:`` field, else the key."""
    if isinstance(jbody, dict):
        name = jbody.get("name")
        if isinstance(name, str) and name:
            return name
    return jkey
|
||||
|
||||
|
||||
def workflow_contexts(doc: Any) -> set[str]:
    """All commit-status contexts this workflow doc would emit.

    Context shape: ``"<workflow name> / <job display> (<event>)"``.
    Returns an empty set when the doc is malformed, has no workflow
    name, no mapped trigger events, or no jobs mapping.
    """
    if not isinstance(doc, dict):
        return set()
    wf_name = doc.get("name")
    if not (isinstance(wf_name, str) and wf_name):
        return set()
    events = _on_events(doc)
    jobs = doc.get("jobs")
    if not events or not isinstance(jobs, dict):
        return set()
    contexts: set[str] = set()
    for jkey, jbody in jobs.items():
        # "__lines__" is a synthetic bookkeeping key, not a real job.
        if jkey == "__lines__" or not isinstance(jbody, dict):
            continue
        disp = _job_display(jbody, jkey)
        contexts.update(f"{wf_name} / {disp} ({ev})" for ev in events)
    return contexts
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Find the source line of a job-key in a workflow YAML's raw text.
|
||||
# Used to scan for nearby directive comments.
|
||||
# ---------------------------------------------------------------------------
|
||||
def _find_job_key_line(raw_lines: list[str], jkey: str) -> int | None:
|
||||
"""Return 1-based line of `<jkey>:` under jobs:."""
|
||||
in_jobs = False
|
||||
jobs_indent = -1
|
||||
for i, line in enumerate(raw_lines, start=1):
|
||||
stripped = line.lstrip()
|
||||
if stripped.startswith("jobs:"):
|
||||
in_jobs = True
|
||||
jobs_indent = len(line) - len(stripped)
|
||||
continue
|
||||
if in_jobs:
|
||||
# Job key is the next indent level under `jobs:`.
|
||||
indent = len(line) - len(stripped)
|
||||
if stripped and indent <= jobs_indent:
|
||||
# Left the jobs: block
|
||||
in_jobs = False
|
||||
continue
|
||||
if re.match(rf"^\s*{re.escape(jkey)}\s*:", line):
|
||||
return i
|
||||
return None
|
||||
|
||||
|
||||
_DIRECTIVE_WINDOW = 3  # how many lines ABOVE the job-key line are scanned for a directive (the key line itself is not scanned)
|
||||
|
||||
|
||||
def find_directive_for_job(
    raw_text: str, jkey: str
) -> tuple[str, str | None] | None:
    """Return (kind, value) tuple for the first directive in a small
    window above the job-key line.

    kind ∈ {"required-yes", "required-pending", "exempt"}.
    value is the pending-issue number for required-pending, else None.
    Returns None if no directive found (including when the job key
    itself cannot be located in the raw text).

    We scan ABOVE the line only (the convention is the directive
    precedes the job — matches how `# mc#NNN` comments are placed
    above `continue-on-error: true`). We don't scan inside the job
    body because steps can produce false positives.
    """
    lines = raw_text.splitlines()
    line_no = _find_job_key_line(lines, jkey)
    if line_no is None:
        return None
    lo = max(1, line_no - _DIRECTIVE_WINDOW)
    # range(lo, line_no) is exclusive of line_no: the scan covers the
    # _DIRECTIVE_WINDOW lines directly above the job-key line, never the
    # key line itself. Pending is checked first because its pattern is a
    # superset-shaped match of the plain "bp-required:" prefix.
    for i in range(lo, line_no):
        line = lines[i - 1]  # lines[] is 0-based; i is 1-based
        m = BP_REQUIRED_PENDING_RE.search(line)
        if m:
            return ("required-pending", m.group("num"))
        if BP_REQUIRED_YES_RE.search(line):
            return ("required-yes", None)
        if BP_EXEMPT_RE.search(line):
            return ("exempt", None)
    return None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Map a context back to its emitting (workflow_path, job_key) pair so
|
||||
# we know WHERE to look for the directive comment.
|
||||
# ---------------------------------------------------------------------------
|
||||
def _resolve_emitter(
    ctx: str, head_workflows: dict[str, tuple[str, Any]]
) -> tuple[str, str] | None:
    """Return (file_path, job_key) emitting ctx, or None.

    Reverses the context format "<wf> / <job> (<event>)" and searches
    every head-side workflow doc for a matching workflow name + job
    display name.
    """
    parsed = re.match(r"^(?P<wf>.+?) / (?P<job>.+) \((?P<event>[^)]+)\)$", ctx)
    if parsed is None:
        return None
    wf_name = parsed.group("wf")
    job_disp = parsed.group("job")
    for path, (_raw, doc) in head_workflows.items():
        if not isinstance(doc, dict) or doc.get("name") != wf_name:
            continue
        jobs = doc.get("jobs") or {}
        if not isinstance(jobs, dict):
            continue
        for jkey, jbody in jobs.items():
            if jkey == "__lines__" or not isinstance(jbody, dict):
                continue
            if _job_display(jbody, jkey) == job_disp:
                return (path, jkey)
    return None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Driver
|
||||
# ---------------------------------------------------------------------------
|
||||
def run() -> int:
    """Driver for the Tier 2g lint.

    Steps: (1) list workflow files changed base..head, (2) compute the
    set of newly-emitted status contexts, (3) fetch the branch-protection
    context list, (4) validate each new emission's directive comment.

    Exit codes (per module docstring): 0 = pass or graceful skip,
    1 = directive violations, 2 = env contract / head-side YAML error.
    """
    base_sha = _require_env("BASE_SHA")
    head_sha = _require_env("HEAD_SHA")
    _require_env("GITEA_TOKEN")
    _require_env("GITEA_HOST")
    repo = _require_env("REPO")
    branch = _env("BRANCH", "main")
    wf_dir = _env("WORKFLOWS_DIR", ".gitea/workflows")

    # Step 1 — find workflow files changed in the PR.
    changed = git_diff_paths(base_sha, head_sha)
    changed_workflows = [
        p
        for p in changed
        if p.startswith(wf_dir + "/")
        and (p.endswith(".yml") or p.endswith(".yaml"))
    ]
    if not changed_workflows:
        print(
            "::notice::no workflow file changes in this PR; "
            "lint-required-context-exists-in-bp skipped."
        )
        return 0

    # Step 2 — load base+head + compute new contexts.
    head_workflows: dict[str, tuple[str, Any]] = {}
    new_contexts: set[str] = set()
    for path in changed_workflows:
        base_raw = git_show(base_sha, path)
        head_raw = git_show(head_sha, path)
        if head_raw is None:
            # File deleted on head — no new emission contribution.
            continue
        try:
            head_doc = yaml.safe_load(head_raw)
        except yaml.YAMLError as e:
            # A broken head-side workflow is a hard error (exit 2): we
            # cannot reason about what it emits.
            sys.stderr.write(
                f"::error file={path}::YAML parse error on head: {e}\n"
            )
            return 2
        head_workflows[path] = (head_raw, head_doc)
        head_ctx = workflow_contexts(head_doc)
        base_ctx: set[str] = set()
        if base_raw is not None:
            try:
                base_doc = yaml.safe_load(base_raw)
            except yaml.YAMLError:
                # Broken base side is tolerated: base_doc stays None and
                # base_ctx stays empty, so every head context counts as
                # "new" for this file (conservative direction).
                base_doc = None
            if base_doc is not None:
                base_ctx = workflow_contexts(base_doc)
        new_contexts |= (head_ctx - base_ctx)

    if not new_contexts:
        print(
            "::notice::no new context emissions detected in this PR; "
            "lint-required-context-exists-in-bp skipped."
        )
        return 0

    # Step 3 — fetch BP context list. All non-"ok" outcomes degrade to
    # exit 0 (graceful skip) per the Tier 2a contract — see each branch.
    status, bp = api("GET", f"/repos/{repo}/branch_protections/{branch}")
    bp_contexts: set[str] = set()
    if status == "forbidden":
        sys.stderr.write(
            f"::error::GET branch_protections/{branch} returned HTTP 403 — "
            f"DRIFT_BOT_TOKEN lacks repo-admin scope. Cannot verify "
            f"bp-required directives; skipping lint with exit 0 per "
            f"Tier 2a contract. Fix the token, not the lint.\n"
        )
        return 0
    elif status == "not_found":
        # Branch has no protection — nothing to verify against; the
        # bp-required: yes directive can't be satisfied. Treat as
        # graceful-skip rather than red-X.
        print(
            f"::notice::branch '{branch}' has no protection; cannot verify "
            f"bp-required directives. Skipping (exit 0)."
        )
        return 0
    elif status == "ok" and isinstance(bp, dict):
        bp_contexts = set(bp.get("status_check_contexts") or [])
    else:
        sys.stderr.write(
            f"::error::branch_protections/{branch} response unexpected; "
            f"status={status}. Treating as transient; exit 0.\n"
        )
        return 0

    # Step 4 — validate each new emission's directive.
    violations: list[str] = []
    for ctx in sorted(new_contexts):
        emitter = _resolve_emitter(ctx, head_workflows)
        if emitter is None:
            # Shouldn't happen — we just derived ctx from head_workflows.
            # Belt-and-suspenders fallback.
            violations.append(
                f"::error::new emission '{ctx}' (could not resolve emitter "
                f"file/job — bug in lint?)"
            )
            continue
        file_path, jkey = emitter
        raw_text, _ = head_workflows[file_path]
        directive = find_directive_for_job(raw_text, jkey)
        if directive is None:
            violations.append(
                f"::error file={file_path}::lint-required-context-exists-in-bp "
                f"(Tier 2g): NEW emission `{ctx}` (job '{jkey}') has no "
                f"directive comment. Add ONE of these comments on the line "
                f"directly above `{jkey}:` (within {_DIRECTIVE_WINDOW} lines):\n"
                f"  - `# bp-required: yes` — and ensure the context is "
                f"already in branch_protections/{branch}.status_check_contexts.\n"
                f"  - `# bp-required: pending #NNN` — acknowledged asymmetry, "
                f"references the tracking issue for the BP PATCH.\n"
                f"  - `# bp-exempt: <reason>` — informational job, not a gate.\n"
                f"Memory: internal#350 (PR#656 + mc#664 empirical case)."
            )
            continue
        kind, value = directive
        if kind == "exempt":
            print(f"::notice::{ctx}: bp-exempt directive present, OK.")
            continue
        if kind == "required-pending":
            print(
                f"::notice::{ctx}: bp-required: pending #{value} — "
                f"acknowledged asymmetry, OK."
            )
            continue
        if kind == "required-yes":
            # "yes" is only valid when the context is ALREADY in BP.
            if ctx in bp_contexts:
                print(
                    f"::notice::{ctx}: bp-required: yes, and context is in "
                    f"BP, OK."
                )
            else:
                violations.append(
                    f"::error file={file_path}::lint-required-context-exists-in-bp "
                    f"(Tier 2g): job '{jkey}' has `bp-required: yes` "
                    f"directive but its emitted context `{ctx}` is NOT in "
                    f"`branch_protections/{branch}.status_check_contexts`. "
                    f"FIX: either (a) add `{ctx}` to BP (Owners-tier PATCH), "
                    f"or (b) downgrade the directive to "
                    f"`# bp-required: pending #NNN` referencing the tracker "
                    f"for the pending BP PATCH."
                )

    if violations:
        print(
            f"::error::lint-required-context-exists-in-bp: "
            f"{len(violations)} violation(s) across "
            f"{len(changed_workflows)} changed workflow file(s)."
        )
        for v in violations:
            print(v)
        return 1

    print(
        f"::notice::lint-required-context-exists-in-bp: "
        f"{len(new_contexts)} new emission(s) all directive-validated."
    )
    return 0
|
||||
|
||||
|
||||
# Script entry point: exit code is the lint verdict (0/1/2 per docstring).
if __name__ == "__main__":
    sys.exit(run())
|
||||
@ -222,20 +222,9 @@ def is_red(status: dict) -> tuple[bool, list[dict]]:
|
||||
combined = status.get("state")
|
||||
statuses = status.get("statuses") or []
|
||||
red_states = {"failure", "error"}
|
||||
# Schema asymmetry: top-level combined uses `state`, but per-entry
|
||||
# items in `statuses[]` use `status` in Gitea 1.22.6. Prefer
|
||||
# `status`; fall back to `state` defensively. Verified empirically
|
||||
# 2026-05-12 03:42Z. Pre-rev4 code only read `state` from per-entry
|
||||
# items → failed[] always empty → render_body always showed the
|
||||
# "no per-context entries were in a red state" fallback even when
|
||||
# the combined-state correctly flagged red. See
|
||||
# `feedback_smoke_test_vendor_truth_not_shape_match`.
|
||||
def _entry_state(s: dict) -> str:
|
||||
return s.get("status") or s.get("state") or ""
|
||||
|
||||
failed = [
|
||||
s for s in statuses
|
||||
if isinstance(s, dict) and _entry_state(s) in red_states
|
||||
if isinstance(s, dict) and s.get("state") in red_states
|
||||
]
|
||||
return (combined in red_states or bool(failed), failed)
|
||||
|
||||
@ -324,9 +313,7 @@ def render_body(sha: str, failed: list[dict], debug: dict) -> str:
|
||||
else:
|
||||
for s in failed:
|
||||
ctx = s.get("context", "(no context)")
|
||||
# Per-entry key is `status` in Gitea 1.22.6, not `state`
|
||||
# (see _entry_state in is_red). Fallback for forward-compat.
|
||||
state = s.get("status") or s.get("state") or "(no state)"
|
||||
state = s.get("state", "(no state)")
|
||||
url = s.get("target_url") or ""
|
||||
desc = (s.get("description") or "").strip()
|
||||
entry = f"- **{ctx}** — `{state}`"
|
||||
@ -559,11 +546,7 @@ def run_once(*, dry_run: bool = False) -> int:
|
||||
"combined_state": status.get("state"),
|
||||
"failed_contexts": [s.get("context") for s in failed],
|
||||
"all_contexts": [
|
||||
# Per-entry key is `status` in Gitea 1.22.6, not `state`.
|
||||
# Pre-rev4 debug output reported `state: None` for every
|
||||
# context, making run logs useless for triage.
|
||||
{"context": s.get("context"),
|
||||
"state": s.get("status") or s.get("state")}
|
||||
{"context": s.get("context"), "state": s.get("state")}
|
||||
for s in (status.get("statuses") or [])
|
||||
if isinstance(s, dict)
|
||||
],
|
||||
|
||||
@ -1,251 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Production auto-deploy helpers for Gitea Actions.
|
||||
|
||||
The workflow keeps network side effects in shell/curl, but centralizes the
|
||||
release decision shape here so it has unit coverage: disable flag parsing,
|
||||
target tag selection, CP payload construction, and status-context selection.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
from urllib.parse import quote
|
||||
|
||||
|
||||
TRUE_VALUES = {"1", "true", "yes", "on", "disabled", "disable"}
|
||||
PROD_CP_URL = "https://api.moleculesai.app"
|
||||
DEFAULT_REQUIRED_CONTEXTS = [
|
||||
"CI / Platform (Go) (push)",
|
||||
"CI / Canvas (Next.js) (push)",
|
||||
"CI / Shellcheck (E2E scripts) (push)",
|
||||
"CI / Python Lint & Test (push)",
|
||||
"CI / all-required (push)",
|
||||
"Secret scan / Scan diff for credential-shaped strings (push)",
|
||||
]
|
||||
TERMINAL_FAILURE_STATES = {"failure", "error", "cancelled", "canceled", "skipped"}
|
||||
|
||||
|
||||
def truthy_flag(value: str | None) -> bool:
|
||||
if value is None:
|
||||
return False
|
||||
return value.strip().lower() in TRUE_VALUES
|
||||
|
||||
|
||||
def _int_env(env: dict[str, str], name: str, default: int, minimum: int = 1) -> int:
|
||||
raw = env.get(name, "")
|
||||
if not raw:
|
||||
return default
|
||||
try:
|
||||
value = int(raw)
|
||||
except ValueError as exc:
|
||||
raise ValueError(f"{name} must be an integer, got {raw!r}") from exc
|
||||
if value < minimum:
|
||||
raise ValueError(f"{name} must be >= {minimum}, got {value}")
|
||||
return value
|
||||
|
||||
|
||||
def build_plan(env: dict[str, str]) -> dict:
    """Derive the production deploy plan from environment variables.

    Returns {"enabled": False, ...} with a disabled_reason when the
    PROD_AUTO_DEPLOY_DISABLED kill switch is set; otherwise the full plan
    (sha, target tag, CP URL, and the request body for the control plane).

    Raises:
        ValueError: GITHUB_SHA missing, a numeric knob is malformed, or
            CP_URL points off-prod without PROD_ALLOW_NON_PROD_CP_URL.
    """
    sha = env.get("GITHUB_SHA", "").strip()
    if not sha:
        raise ValueError("GITHUB_SHA is required")

    kill_switch = env.get("PROD_AUTO_DEPLOY_DISABLED", "")
    if truthy_flag(kill_switch):
        return {
            "enabled": False,
            "sha": sha,
            "disabled_reason": f"PROD_AUTO_DEPLOY_DISABLED={kill_switch}",
        }

    abbreviated = sha[:7]
    # Default target is the staging image built for this exact commit.
    tag = env.get("PROD_AUTO_DEPLOY_TARGET_TAG", "").strip() or f"staging-{abbreviated}"
    canary = env.get("PROD_AUTO_DEPLOY_CANARY_SLUG", "hongming").strip()
    request_body = {
        "target_tag": tag,
        "soak_seconds": _int_env(env, "PROD_AUTO_DEPLOY_SOAK_SECONDS", 60, minimum=0),
        "batch_size": _int_env(env, "PROD_AUTO_DEPLOY_BATCH_SIZE", 3),
        "dry_run": truthy_flag(env.get("PROD_AUTO_DEPLOY_DRY_RUN", "")),
    }
    if canary:
        request_body["canary_slug"] = canary

    cp_url = env.get("CP_URL", "").strip() or PROD_CP_URL
    # Guard rail: only the real prod control plane unless explicitly overridden.
    if cp_url != PROD_CP_URL and not truthy_flag(env.get("PROD_ALLOW_NON_PROD_CP_URL", "")):
        raise ValueError(
            f"Refusing production deploy to CP_URL={cp_url!r}; "
            f"set PROD_ALLOW_NON_PROD_CP_URL=true for an explicit non-prod drill"
        )

    return {
        "enabled": True,
        "sha": sha,
        "short_sha": abbreviated,
        "target_tag": tag,
        "cp_url": cp_url,
        "body": request_body,
    }
|
||||
|
||||
|
||||
def latest_status_for_context(statuses: list[dict], context: str) -> dict | None:
|
||||
"""Return the first matching status.
|
||||
|
||||
Gitea's combined-status response is newest-first in practice. The merge
|
||||
queue relies on the same contract; keeping the selector explicit makes
|
||||
stale duplicate contexts easy to test.
|
||||
"""
|
||||
|
||||
for status in statuses:
|
||||
if status.get("context") == context:
|
||||
return status
|
||||
return None
|
||||
|
||||
|
||||
def ci_context_state(statuses: list[dict], context: str) -> str:
    """Lower-cased state of *context* within *statuses*, or "missing".

    Per-entry key is `status` in Gitea 1.22.6; `state` is kept as a
    forward-compat fallback.
    """
    entry = latest_status_for_context(statuses, context)
    if not entry:
        return "missing"
    return str(entry.get("status") or entry.get("state") or "missing").lower()
|
||||
|
||||
|
||||
def context_is_satisfied(state: str) -> bool:
    """A context gates the deploy until its state is exactly "success"."""
    return state in ("success",)
|
||||
|
||||
|
||||
def context_is_terminal_failure(state: str) -> bool:
    """True when *state* can never become success (see TERMINAL_FAILURE_STATES)."""
    terminal = state in TERMINAL_FAILURE_STATES
    return terminal
|
||||
|
||||
|
||||
def required_contexts(env: dict[str, str]) -> list[str]:
    """Contexts that must be green before deploy.

    PROD_AUTO_DEPLOY_REQUIRED_CONTEXTS overrides the defaults; entries may
    be separated by commas and/or newlines, blanks are dropped.
    """
    raw = env.get("PROD_AUTO_DEPLOY_REQUIRED_CONTEXTS", "")
    if not raw.strip():
        return DEFAULT_REQUIRED_CONTEXTS
    entries = raw.replace(",", "\n").splitlines()
    return [entry.strip() for entry in entries if entry.strip()]
|
||||
|
||||
|
||||
def _api_json(url: str, token: str) -> dict:
    """GET *url* with token auth and decode the JSON response body.

    Raises:
        RuntimeError: any HTTP error, with a 500-char response snippet.
    """
    request = urllib.request.Request(url, headers={"Authorization": f"token {token}"})
    try:
        with urllib.request.urlopen(request, timeout=20) as response:
            payload = response.read()
    except urllib.error.HTTPError as exc:
        body = exc.read().decode("utf-8", errors="replace")[:500]
        raise RuntimeError(f"GET {url} -> HTTP {exc.code}: {body}") from exc
    return json.loads(payload)
|
||||
|
||||
|
||||
def _api_json_optional(url: str, token: str) -> tuple[int, dict | None]:
    """GET *url*; return (status, body) on success, (status, None) on HTTP error.

    404 is silent (expected for absent resources); other HTTP errors emit a
    ::warning:: annotation on stderr but never raise.
    """
    request = urllib.request.Request(url, headers={"Authorization": f"token {token}"})
    try:
        with urllib.request.urlopen(request, timeout=20) as response:
            return response.status, json.loads(response.read())
    except urllib.error.HTTPError as exc:
        if exc.code != 404:
            snippet = exc.read().decode("utf-8", errors="replace")[:300]
            print(f"::warning::GET {url} -> HTTP {exc.code}: {snippet}", file=sys.stderr)
        return exc.code, None
|
||||
|
||||
|
||||
def live_disable_flag(env: dict[str, str]) -> str:
    """Return a live disable value from Gitea variables when readable.

    Gitea evaluates `${{ vars.* }}` once when the job starts. This API read is
    the emergency re-check immediately before production side effects.
    Returns "" when no token is configured or the variable is unreadable.
    """
    token = env.get("GITEA_TOKEN", "").strip()
    if not token:
        return ""
    variable = quote("PROD_AUTO_DEPLOY_DISABLED", safe="")
    host = env.get("GITEA_HOST", "git.moleculesai.app")
    repo = env.get("GITHUB_REPOSITORY", "molecule-ai/molecule-core")
    url = f"https://{host}/api/v1/repos/{repo}/actions/variables/{variable}"
    code, payload = _api_json_optional(url, token)
    if code == 200 and isinstance(payload, dict):
        # Gitea has shipped the value under both "data" and "value" keys.
        return str(payload.get("data") or payload.get("value") or "")
    return ""
|
||||
|
||||
|
||||
def assert_not_disabled(env: dict[str, str]) -> None:
    """Raise RuntimeError when the deploy is disabled (env snapshot OR live variable)."""
    plan = build_plan(env)
    if not plan.get("enabled"):
        raise RuntimeError(plan.get("disabled_reason", "production auto-deploy disabled"))
    # Second check against the live Gitea variable, not the job-start snapshot.
    live = live_disable_flag(env)
    if truthy_flag(live):
        raise RuntimeError(f"PROD_AUTO_DEPLOY_DISABLED={live} (live Gitea variable)")
|
||||
|
||||
|
||||
def wait_for_ci_context(env: dict[str, str]) -> str:
    """Poll the commit-status API until every required context is green.

    Returns "success" once all required contexts report success.

    Raises:
        ValueError: GITHUB_SHA or GITEA_TOKEN missing from *env*.
        RuntimeError: a required context reached a terminal failure state.
        TimeoutError: the deadline elapsed before all contexts were green.
    """
    host = env.get("GITEA_HOST", "git.moleculesai.app")
    repo = env.get("GITHUB_REPOSITORY", "molecule-ai/molecule-core")
    sha = env.get("GITHUB_SHA", "").strip()
    token = env.get("GITEA_TOKEN", "").strip()
    contexts = required_contexts(env)
    interval = _int_env(env, "CI_STATUS_POLL_INTERVAL_SECONDS", 15)
    timeout = _int_env(env, "CI_STATUS_TIMEOUT_SECONDS", 1800)

    if not sha:
        raise ValueError("GITHUB_SHA is required")
    if not token:
        raise ValueError("GITEA_TOKEN is required to wait for CI status")

    url = f"https://{host}/api/v1/repos/{repo}/commits/{sha}/status"
    deadline = time.time() + timeout
    # Last observed state per context; used to log only transitions, not
    # every poll iteration.
    last_states: dict[str, str] = {}
    while time.time() <= deadline:
        body = _api_json(url, token)
        statuses = body.get("statuses") or []
        states = {context: ci_context_state(statuses, context) for context in contexts}
        for context, state in states.items():
            if state != last_states.get(context):
                print(f"CI context {context!r}: {state}", file=sys.stderr)
        last_states = states

        # Fail fast: any terminal state (failure/error/cancelled/skipped)
        # can never become success, so further polling is pointless.
        failures = [
            f"{context}={state}"
            for context, state in states.items()
            if context_is_terminal_failure(state)
        ]
        if failures:
            raise RuntimeError(
                "Required CI context failed; refusing production deploy: "
                + ", ".join(failures)
            )
        if all(context_is_satisfied(state) for state in states.values()):
            return "success"
        time.sleep(interval)
    # Include the last snapshot so a timeout is diagnosable from the log.
    last = ", ".join(f"{context}={state}" for context, state in last_states.items()) or "none"
    raise TimeoutError(f"Timed out waiting {timeout}s for required CI contexts; last_states={last}")
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point.

    Subcommands:
      plan           — print the production deploy plan as JSON on stdout.
      assert-enabled — exit non-zero when the deploy is currently disabled.
      wait-ci        — block until every required CI context is green.

    Returns a process exit code; failures are rendered as ::error::
    annotations on stderr rather than tracebacks.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    commands = parser.add_subparsers(dest="command", required=True)
    commands.add_parser("plan", help="print production deploy plan as JSON")
    commands.add_parser("assert-enabled", help="fail if production deploy is currently disabled")
    commands.add_parser("wait-ci", help="block until required CI context is green")
    chosen = parser.parse_args().command

    try:
        if chosen == "plan":
            print(json.dumps(build_plan(dict(os.environ)), sort_keys=True))
            return 0
        if chosen == "assert-enabled":
            assert_not_disabled(dict(os.environ))
            return 0
        if chosen == "wait-ci":
            wait_for_ci_context(dict(os.environ))
            return 0
    except Exception as exc:  # noqa: BLE001 - CLI should render operator-friendly errors.
        print(f"::error::{exc}", file=sys.stderr)
        return 1
    # Unreachable while the three subcommands above stay exhaustive
    # (required=True); kept as a defensive fallback.
    return 2
|
||||
|
||||
|
||||
# Script entry point: process exit status mirrors main()'s return code.
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
@ -60,7 +60,6 @@
|
||||
# Optional:
|
||||
# REVIEW_CHECK_DEBUG=1 — per-API-call diagnostic lines
|
||||
# REVIEW_CHECK_STRICT=1 — also require review.commit_id == pr.head.sha
|
||||
# DEFAULT_BRANCH=main — branch this gate protects; non-default-base PRs no-op
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
@ -92,7 +91,7 @@ API="https://${GITEA_HOST}/api/v1"
|
||||
# secret token value in the process table for any process to read via
|
||||
# /proc/<pid>/cmdline or ps -ef). The curl config file is read by curl
|
||||
# itself and never appears in the argv of the curl subprocess.
|
||||
CURL_AUTH_FILE=$(mktemp "${TMPDIR:-/tmp}/curl-auth.XXXXXX")
|
||||
CURL_AUTH_FILE=$(mktemp -p /tmp curl-auth.XXXXXX)
|
||||
chmod 600 "$CURL_AUTH_FILE"
|
||||
printf 'header = "Authorization: token %s"\n' "$GITEA_TOKEN" > "$CURL_AUTH_FILE"
|
||||
|
||||
@ -101,10 +100,9 @@ printf 'header = "Authorization: token %s"\n' "$GITEA_TOKEN" > "$CURL_AUTH_FILE"
|
||||
PR_JSON=$(mktemp)
|
||||
REVIEWS_JSON=$(mktemp)
|
||||
TEAM_PROBE_TMP=$(mktemp)
|
||||
NA_STATUSES_TMP="" # declared here so cleanup() always has the var
|
||||
|
||||
cleanup() {
|
||||
rm -f "$CURL_AUTH_FILE" "$PR_JSON" "$REVIEWS_JSON" "$TEAM_PROBE_TMP" "${NA_STATUSES_TMP-}"
|
||||
rm -f "$CURL_AUTH_FILE" "$PR_JSON" "$REVIEWS_JSON" "$TEAM_PROBE_TMP"
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
@ -126,60 +124,18 @@ if [ "$HTTP_CODE" != "200" ]; then
|
||||
fi
|
||||
PR_AUTHOR=$(jq -r '.user.login // ""' "$PR_JSON")
|
||||
PR_HEAD_SHA=$(jq -r '.head.sha // ""' "$PR_JSON")
|
||||
PR_BASE_REF=$(jq -r '.base.ref // ""' "$PR_JSON")
|
||||
PR_STATE=$(jq -r '.state // ""' "$PR_JSON")
|
||||
DEFAULT_BRANCH="${DEFAULT_BRANCH:-main}"
|
||||
debug "pr_author=${PR_AUTHOR} pr_head=${PR_HEAD_SHA:0:7} pr_base=${PR_BASE_REF} pr_state=${PR_STATE}"
|
||||
debug "pr_author=${PR_AUTHOR} pr_head=${PR_HEAD_SHA:0:7} pr_state=${PR_STATE}"
|
||||
|
||||
if [ "$PR_STATE" != "open" ]; then
|
||||
echo "::notice::PR ${PR_NUMBER} is ${PR_STATE} — exiting 0 (closed PRs do not gate)"
|
||||
exit 0
|
||||
fi
|
||||
if [ "$PR_BASE_REF" != "$DEFAULT_BRANCH" ]; then
|
||||
echo "::notice::PR ${PR_NUMBER} targets ${PR_BASE_REF:-<unknown>} not ${DEFAULT_BRANCH} — ${TEAM}-review gate not applicable"
|
||||
exit 0
|
||||
fi
|
||||
if [ -z "$PR_AUTHOR" ] || [ -z "$PR_HEAD_SHA" ]; then
|
||||
echo "::error::PR ${PR_NUMBER} missing user.login or head.sha — webhook payload malformed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# --- RFC#324 §N/A follow-up: check N/A declarations status ---
|
||||
# sop-checklist.py posts `sop-checklist / na-declarations (pull_request)`
|
||||
# status when a peer posts /sop-n/a <gate>. If our gate is declared N/A,
|
||||
# the requirement for a Gitea APPROVE review is waived.
|
||||
NA_STATUSES_TMP=$(mktemp)
|
||||
HTTP_CODE=$(curl -sS -o "$NA_STATUSES_TMP" -w '%{http_code}' \
|
||||
-K "$CURL_AUTH_FILE" "${API}/repos/${OWNER}/${NAME}/statuses/${PR_HEAD_SHA}")
|
||||
debug "statuses/${PR_HEAD_SHA} → HTTP ${HTTP_CODE}"
|
||||
|
||||
if [ "$HTTP_CODE" = "200" ]; then
|
||||
# Gitea returns statuses as array; look for the na-declarations context.
|
||||
# jq: find all statuses where context == "sop-checklist / na-declarations (pull_request)"
|
||||
# and state == "success". Extract the description field.
|
||||
NA_DESC=$(jq -r '
|
||||
.[] |
|
||||
select(.context == "sop-checklist / na-declarations (pull_request)") |
|
||||
select(.state == "success") |
|
||||
.description
|
||||
' "$NA_STATUSES_TMP" 2>/dev/null | head -1)
|
||||
|
||||
if [ -n "$NA_DESC" ] && [ "$NA_DESC" != "null" ]; then
|
||||
debug "na-declarations status found: ${NA_DESC}"
|
||||
# Check if our gate appears in the N/A description.
|
||||
# The description format is "N/A: qa-review, security-review" or similar.
|
||||
if echo "$NA_DESC" | grep -iq "\\b${TEAM}-review\\b"; then
|
||||
echo "::notice::${TEAM}-review N/A — gate declared not-applicable via /sop-n/a: ${NA_DESC}"
|
||||
echo "::notice::PR ${PR_NUMBER} passes ${TEAM}-review via N/A declaration"
|
||||
rm -f "$NA_STATUSES_TMP"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
else
|
||||
debug "could not fetch statuses (HTTP ${HTTP_CODE}) — proceeding with normal eval"
|
||||
fi
|
||||
rm -f "$NA_STATUSES_TMP"
|
||||
|
||||
# --- Fetch all reviews on the PR ---
|
||||
HTTP_CODE=$(curl -sS -o "$REVIEWS_JSON" -w '%{http_code}' \
|
||||
-K "$CURL_AUTH_FILE" "${API}/repos/${OWNER}/${NAME}/pulls/${PR_NUMBER}/reviews")
|
||||
|
||||
@ -1,81 +0,0 @@
|
||||
#!/usr/bin/env bash
# Re-run review-check.sh for a slash-command refire and post the protected
# pull_request status context to the PR head SHA.
#
# Required env: GITEA_TOKEN, GITEA_HOST, REPO (owner/name), PR_NUMBER, TEAM.
# Optional env: COMMENT_AUTHOR — recorded in the status description.
# Exit code mirrors review-check.sh so the caller sees the gate result.

set -euo pipefail

: "${GITEA_TOKEN:?GITEA_TOKEN required}"
: "${GITEA_HOST:?GITEA_HOST required}"
: "${REPO:?REPO required}"
: "${PR_NUMBER:?PR_NUMBER required}"
: "${TEAM:?TEAM required}"

OWNER="${REPO%%/*}"
NAME="${REPO##*/}"
API="https://${GITEA_HOST}/api/v1"
CONTEXT="${TEAM}-review / approved (pull_request)"
TARGET_URL="https://${GITEA_HOST}/${OWNER}/${NAME}/pulls/${PR_NUMBER}"

authfile=$(mktemp)
prfile=$(mktemp)
postfile=$(mktemp)
# shellcheck disable=SC2329 # invoked by EXIT trap
cleanup() {
    rm -f "$authfile" "$prfile" "$postfile"
}
trap cleanup EXIT

# Token goes through a curl config file so it never appears in argv
# (readable by any process via /proc/<pid>/cmdline).
chmod 600 "$authfile"
printf 'header = "Authorization: token %s"\n' "$GITEA_TOKEN" > "$authfile"

# Resolve PR head SHA and state before doing anything else.
code=$(curl -sS -o "$prfile" -w '%{http_code}' -K "$authfile" \
    "${API}/repos/${OWNER}/${NAME}/pulls/${PR_NUMBER}")
if [ "$code" != "200" ]; then
    echo "::error::GET /pulls/${PR_NUMBER} returned HTTP ${code}"
    head -c 200 "$prfile" >&2 || true
    exit 1
fi

head_sha=$(jq -r '.head.sha // ""' "$prfile")
state=$(jq -r '.state // ""' "$prfile")
if [ -z "$head_sha" ] || [ "$head_sha" = "null" ]; then
    echo "::error::Could not resolve PR head SHA for PR ${PR_NUMBER}"
    exit 1
fi
if [ "$state" != "open" ]; then
    echo "::notice::PR ${PR_NUMBER} is ${state}; ${TEAM}-review refire is a no-op"
    exit 0
fi

# Run the real gate. Disable -e around it so we can capture the exit code
# and still post a failure status instead of dying silently.
set +e
bash .gitea/scripts/review-check.sh
rc=$?
set -e

if [ "$rc" -eq 0 ]; then
    status_state="success"
    description="Refired via /${TEAM}-recheck by ${COMMENT_AUTHOR:-unknown}"
else
    status_state="failure"
    description="Refired via /${TEAM}-recheck; ${TEAM}-review failed"
fi

# jq -nc builds the JSON payload safely (proper escaping of all fields).
body=$(jq -nc \
    --arg state "$status_state" \
    --arg context "$CONTEXT" \
    --arg description "$description" \
    --arg target_url "$TARGET_URL" \
    '{state:$state, context:$context, description:$description, target_url:$target_url}')

# Post the commit status that branch protection requires.
code=$(curl -sS -o "$postfile" -w '%{http_code}' -X POST \
    -K "$authfile" -H "Content-Type: application/json" \
    -d "$body" \
    "${API}/repos/${OWNER}/${NAME}/statuses/${head_sha}")
if [ "$code" != "200" ] && [ "$code" != "201" ]; then
    echo "::error::POST /statuses/${head_sha} returned HTTP ${code}"
    head -c 200 "$postfile" >&2 || true
    exit 1
fi

echo "::notice::posted ${status_state} for context=\"${CONTEXT}\" on sha=${head_sha}"
exit "$rc"
|
||||
@ -1,973 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# sop-checklist — evaluate whether a PR has peer-acked each
|
||||
# SOP-checklist item. Posts a commit-status that branch protection
|
||||
# can require.
|
||||
#
|
||||
# RFC#351 Step 2 of 6 (implementation MVP).
|
||||
#
|
||||
# Invoked by .gitea/workflows/sop-checklist.yml on:
|
||||
# - pull_request_target: [opened, edited, synchronize, reopened]
|
||||
# - issue_comment: [created, edited, deleted]
|
||||
#
|
||||
# Flow:
|
||||
# 1. Load .gitea/sop-checklist-config.yaml (from BASE ref — trusted).
|
||||
# 2. GET /repos/{R}/pulls/{N} — author, head.sha, tier label
|
||||
# 3. GET /repos/{R}/issues/{N}/comments — extract /sop-ack and /sop-revoke
|
||||
# 4. For each checklist item:
|
||||
# a. Is the section marker present in PR body? (author answered)
|
||||
# b. Is there ≥1 unrevoked /sop-ack from a non-author whose
|
||||
# team-membership matches required_teams?
|
||||
# 5. POST /repos/{R}/statuses/{sha} — context
|
||||
# `sop-checklist / all-items-acked (pull_request)`,
|
||||
# state=success | failure | pending, description=`acked: N/M …`.
|
||||
#
|
||||
# Trust boundary (mirrors RFC#324 §A4):
|
||||
# This script is loaded from the BASE branch. The workflow's
|
||||
# actions/checkout step pins ref=base.sha. PR-HEAD code is never
|
||||
# executed. We only HTTP-call the Gitea API.
|
||||
#
|
||||
# Token scope:
|
||||
# - read:repository / read:organization to enumerate PR + comments
|
||||
# + team membership (Gitea 1.22.6 quirk: team-membership endpoint
|
||||
# returns 403 if token owner is not in the team; see review-check.sh
|
||||
# for the same gotcha — we surface the same fail-closed message).
|
||||
# - write:repository for `POST /repos/{R}/statuses/{sha}`. Unlike
|
||||
# RFC#324's pattern (which uses the JOB's own pass/fail as the
|
||||
# status), we POST the status explicitly because the gate posts
|
||||
# a single multi-item status with a richer description than a
|
||||
# bare success/failure context can carry.
|
||||
#
|
||||
# Slug normalization rules (canonical form: kebab-case):
|
||||
# - Lowercase
|
||||
# - Whitespace + underscores → single dash
|
||||
# - Strip non [a-z0-9-] characters
|
||||
# - Collapse adjacent dashes
|
||||
# - Strip leading/trailing dashes
|
||||
# - If the result is a digit string (e.g. "1"), look up via
|
||||
# config.items[*].numeric_alias to get the kebab-case slug.
|
||||
#
|
||||
# Examples:
|
||||
# "Comprehensive_Testing" → "comprehensive-testing"
|
||||
# "comprehensive testing" → "comprehensive-testing"
|
||||
# "1" → "comprehensive-testing"
|
||||
# "Five-Axis-Review" → "five-axis-review"
|
||||
#
|
||||
# Revoke semantics:
|
||||
# /sop-revoke <slug> [reason] — most-recent comment per (slug, user)
|
||||
# wins. So if Alice posts /sop-ack X then later /sop-revoke X, her ack
|
||||
# for X is invalidated. Bob's prior /sop-ack X is unaffected. If Alice
|
||||
# posts /sop-revoke X then later /sop-ack X again, the ack is restored.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from typing import Any
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Slug normalization
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_NORMALIZE_REPLACE_RE = re.compile(r"[\s_]+")
|
||||
_NORMALIZE_STRIP_RE = re.compile(r"[^a-z0-9-]")
|
||||
_NORMALIZE_DASH_RE = re.compile(r"-+")
|
||||
|
||||
|
||||
def normalize_slug(raw: str, numeric_aliases: dict[int, str] | None = None) -> str:
|
||||
"""Normalize a user-supplied slug to canonical kebab-case form.
|
||||
|
||||
See module header for the rules.
|
||||
|
||||
If the input is a pure digit string AND numeric_aliases is provided,
|
||||
the alias mapping is consulted. Unknown digits return "" so the caller
|
||||
can flag the comment as unparseable.
|
||||
"""
|
||||
if raw is None:
|
||||
return ""
|
||||
s = raw.strip().lower()
|
||||
s = _NORMALIZE_REPLACE_RE.sub("-", s)
|
||||
s = _NORMALIZE_STRIP_RE.sub("", s)
|
||||
s = _NORMALIZE_DASH_RE.sub("-", s)
|
||||
s = s.strip("-")
|
||||
if s.isdigit() and numeric_aliases is not None:
|
||||
return numeric_aliases.get(int(s), "")
|
||||
return s
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Comment parsing — /sop-ack and /sop-revoke
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# A directive must be on its own line. Permits leading whitespace.
|
||||
# Optional trailing note after the slug for /sop-ack and required reason
|
||||
# for /sop-revoke (RFC#351 open question 4 — reason is captured but not
|
||||
# yet validated; future iteration may require a min-length).
|
||||
#
|
||||
# /sop-n/a <gate> [reason] — declares a gate as not-applicable.
|
||||
# <gate> is a canonical gate name (qa-review, security-review).
|
||||
# The declaring user must be in one of the gate's required_teams.
|
||||
# Most-recent per-user declaration wins (revoke semantics mirror ack).
|
||||
# A directive must be on its own line; leading whitespace is permitted.
# Group 2 is the slug text, group 3 the optional trailing note/reason.
_DIRECTIVE_RE = re.compile(
    r"^[ \t]*/(sop-ack|sop-revoke)[ \t]+([A-Za-z0-9_\- ]+?)(?:[ \t]+(.*))?[ \t]*$",
    re.MULTILINE,
)
# /sop-n/a <gate> [reason] — also accepts the "/sop-na" spelling.
_NA_DIRECTIVE_RE = re.compile(
    r"^[ \t]*/sop-n/?a[ \t]+([A-Za-z0-9_\-]+)(?:[ \t]+(.*))?[ \t]*$",
    re.MULTILINE,
)


def parse_directives(
    comment_body: str,
    numeric_aliases: dict[int, str],
) -> tuple[list[tuple[str, str, str]], list[tuple[str, str, str]]]:
    """Extract /sop-ack, /sop-revoke, and /sop-n/a directives from a comment body.

    Returns a tuple of two lists:
      0. list of (kind, canonical_slug, note) for sop-ack/sop-revoke
      1. list of (kind, gate_name, reason) for sop-n/a

    canonical_slug is the normalized form (or "" if unparseable).
    note/reason is the trailing free-text (may be "").
    """
    ack_revoke: list[tuple[str, str, str]] = []
    na: list[tuple[str, str, str]] = []
    if not comment_body:
        return ack_revoke, na

    for match in _DIRECTIVE_RE.finditer(comment_body):
        slug_text = (match.group(2) or "").strip()
        words = slug_text.split()
        if not words:
            continue
        # A multi-word capture is normalized whole; a single word on its
        # own (normalize_slug collapses internal whitespace either way).
        source = slug_text if len(words) > 1 else words[0]
        ack_revoke.append(
            (
                match.group(1),
                normalize_slug(source, numeric_aliases),
                (match.group(3) or "").strip(),
            )
        )

    for match in _NA_DIRECTIVE_RE.finditer(comment_body):
        na.append(
            (
                "sop-n/a",
                (match.group(1) or "").strip().lower(),
                (match.group(2) or "").strip(),
            )
        )

    return ack_revoke, na
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PR body section detection
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def section_marker_present(body: str, marker: str) -> bool:
    """Return True if *marker* appears in *body* (case-insensitively) with
    actual content on its line or the next one.

    Requiring non-whitespace, non-checkbox content prevents a trivially
    empty checklist such as:

        ## SOP-Checklist
        - [ ] **Comprehensive testing performed**:
        - [ ] **Local-postgres E2E run**:

    from auto-passing the section-present check. The peer-ack is still
    required; this only verifies the author wrote something.
    """
    if not body or not marker:
        return False
    idx = body.lower().find(marker.lower())
    if idx < 0:
        return False

    def _has_content(segment: str) -> bool:
        # Drop checkbox/punctuation scaffolding (space, *, :, -, [, ]);
        # anything left counts as an actual answer.
        return bool(re.sub(r"[\s\*:\-\[\]]+", "", segment))

    line_end = body.find("\n", idx)
    if line_end < 0:
        line_end = len(body)
    if _has_content(body[idx + len(marker):line_end]):
        return True
    # Fall through: multi-line answers may start on the following line.
    next_end = body.find("\n", line_end + 1)
    if next_end < 0:
        next_end = len(body)
    return _has_content(body[line_end + 1:next_end])
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Ack-state computation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def compute_ack_state(
    comments: list[dict[str, Any]],
    pr_author: str,
    items_by_slug: dict[str, dict[str, Any]],
    numeric_aliases: dict[int, str],
    team_membership_probe: "callable[[str, list[str]], list[str]]",
) -> dict[str, dict[str, Any]]:
    """Compute per-item ack state.

    Each comment is processed in chronological order. The most-recent
    directive per (commenter, slug) wins.

    NOTE: mutates items_by_slug in place — sets "_required_resolved" on
    every item whose slug received at least one candidate ack.

    Returns a dict keyed by canonical slug:
        {
            "comprehensive-testing": {
                "ackers": ["bob"],         # non-author, team-verified
                "rejected": {
                    "self_ack": ["alice"],
                    "not_in_team": ["eve"],
                }
            },
            ...
        }
    """
    # Step 1: collapse directives per (commenter, slug) — most recent wins.
    # comments are expected to come in chronological order from the
    # API (Gitea returns oldest-first by default for issues/{N}/comments).
    latest_directive: dict[tuple[str, str], str] = {}  # (user, slug) → kind
    # NOTE(review): unparseable_per_user is tallied but never returned or
    # read afterwards — dead accounting, kept verbatim; confirm intent.
    unparseable_per_user: dict[str, int] = {}
    for c in comments:
        body = c.get("body", "") or ""
        user = (c.get("user") or {}).get("login", "")
        if not user:
            # Comment without a resolvable author — cannot attribute acks.
            continue
        directives, _na_directives = parse_directives(body, numeric_aliases)
        for kind, slug, _note in directives:
            if not slug:
                unparseable_per_user[user] = unparseable_per_user.get(user, 0) + 1
                continue
            latest_directive[(user, slug)] = kind

    # Step 2: build candidate ackers per slug.
    # Filter out self-acks and unknown slugs.
    ackers_per_slug: dict[str, list[str]] = {s: [] for s in items_by_slug}
    rejected_self: dict[str, list[str]] = {s: [] for s in items_by_slug}
    pending_team_check: dict[str, list[str]] = {s: [] for s in items_by_slug}

    for (user, slug), kind in latest_directive.items():
        if kind != "sop-ack":
            continue  # revokes leave the (user,slug) state as "no ack"
        if slug not in items_by_slug:
            # Slug not in config — silently ignored (no rejection bucket).
            continue
        if user == pr_author:
            rejected_self[slug].append(user)
            continue
        pending_team_check[slug].append(user)

    # Step 3: team membership probe per slug.
    rejected_not_in_team: dict[str, list[str]] = {s: [] for s in items_by_slug}
    for slug, candidates in pending_team_check.items():
        if not candidates:
            continue
        required = items_by_slug[slug]["required_teams"]
        approved = team_membership_probe(slug, candidates)  # returns subset
        rejected_not_in_team[slug] = [u for u in candidates if u not in approved]
        ackers_per_slug[slug] = approved
        # Side effect: record which teams were consulted for this slug.
        items_by_slug[slug]["_required_resolved"] = required

    return {
        slug: {
            "ackers": ackers_per_slug[slug],
            "rejected": {
                "self_ack": rejected_self[slug],
                "not_in_team": rejected_not_in_team[slug],
            },
        }
        for slug in items_by_slug
    }
|
||||
|
||||
|
||||
def compute_na_state(
    comments: list[dict[str, Any]],
    pr_author: str,
    na_gates: dict[str, dict[str, Any]],
    numeric_aliases: dict[int, str],
    # NOTE(review): team_membership_probe is accepted but never used here —
    # membership is checked via client.is_team_member below. Confirm whether
    # the parameter is vestigial.
    team_membership_probe: "callable[[str, list[str]], list[str]]",
    client: "GiteaClient",
    org: str,
) -> dict[str, dict[str, Any]]:
    """Compute per-gate N/A declaration state.

    Returns a dict keyed by gate name:
        {
            "qa-review": {
                "declared": ["alice"],  # non-author, team-verified, not revoked
                "rejected": ["eve (not-in-team)", "bob (self-decl)"],
                "reason": "pure-infra change — no qa surface",
            },
            ...
        }
    A gate is N/A-satisfied when at least one declaration from a valid
    team member exists and has not been revoked by the same user.
    """
    if not na_gates:
        return {}

    # Collapse directives per (commenter, gate) — most recent wins.
    latest_na: dict[tuple[str, str], str] = {}  # (user, gate) → "sop-n/a"
    latest_na_reason: dict[tuple[str, str], str] = {}  # (user, gate) → reason
    for c in comments:
        body = c.get("body", "") or ""
        user = (c.get("user") or {}).get("login", "")
        if not user:
            continue
        _directives, na_directives = parse_directives(body, numeric_aliases)
        for _kind, gate, reason in na_directives:
            if gate not in na_gates:
                # Unknown gate name — ignored, no rejection bucket.
                continue
            latest_na[(user, gate)] = "sop-n/a"
            latest_na_reason[(user, gate)] = reason

    # Determine candidate declarers per gate.
    na_state: dict[str, dict[str, Any]] = {
        gate: {"declared": [], "rejected": [], "reason": ""}
        for gate in na_gates
    }
    pending_per_gate: dict[str, list[str]] = {gate: [] for gate in na_gates}

    for (user, gate), kind in latest_na.items():
        if kind != "sop-n/a":
            continue
        if user == pr_author:
            # An author cannot waive their own gate.
            na_state[gate]["rejected"].append(f"{user} (self-decl)")
            continue
        pending_per_gate[gate].append(user)

    # Probe team membership per gate using that gate's required_teams.
    for gate, candidates in pending_per_gate.items():
        if not candidates:
            continue
        required_teams = na_gates[gate].get("required_teams", [])
        # Resolve team names → ids using the client's resolver.
        team_ids: list[int] = []
        for tn in required_teams:
            tid = client.resolve_team_id(org, tn)
            if tid is not None:
                team_ids.append(tid)
        if not team_ids:
            # No resolvable team — fail-closed: nobody can declare N/A.
            na_state[gate]["rejected"].extend(
                f"{u} (no-team-id)" for u in candidates
            )
            continue
        for u in candidates:
            in_any_team = False
            for tid in team_ids:
                result = client.is_team_member(tid, u)
                if result is True:
                    in_any_team = True
                    break
                if result is None:
                    # 403 — token owner not in team. Fail-closed.
                    print(
                        f"::warning::na: team-probe for {u} in team-id {tid} "
                        "returned 403 — treating as not-in-team (fail-closed)",
                        file=sys.stderr,
                    )
            if in_any_team:
                na_state[gate]["declared"].append(u)
            else:
                na_state[gate]["rejected"].append(f"{u} (not-in-team)")

    # Build per-gate reason string from declared users.
    for gate in na_gates:
        decl = na_state[gate]["declared"]
        if decl:
            reasons: list[str] = []
            for u in decl:
                r = latest_na_reason.get((u, gate), "")
                if r:
                    reasons.append(f"{u}: {r}")
                else:
                    reasons.append(u)
            na_state[gate]["reason"] = "; ".join(reasons)

    return na_state
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Gitea API client
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class GiteaClient:
    """Thin Gitea REST v1 client used by the SOP-checklist gate.

    Wraps urllib only — no third-party HTTP dependency. All calls go
    through `_req`, which returns (status_code, parsed_body) instead of
    raising on HTTP errors; each endpoint wrapper decides how to treat
    non-2xx codes.
    """

    def __init__(self, host: str, token: str):
        # `host` is the bare hostname (no scheme); `token` is a Gitea
        # personal-access token sent as `Authorization: token …`.
        self.base = f"https://{host}/api/v1"
        self.token = token
        # Cache team-name → team-id resolutions per org.
        self._team_id_cache: dict[tuple[str, str], int | None] = {}

    def _req(
        self,
        method: str,
        path: str,
        body: dict[str, Any] | None = None,
        ok_codes: tuple[int, ...] = (200, 201, 204),
    ) -> tuple[int, Any]:
        """Issue one HTTP request; return (status_code, parsed_body).

        HTTPError is caught and folded into the same (code, body) shape,
        so callers never see an exception for a non-2xx response.
        NOTE(review): `ok_codes` is accepted but never consulted anywhere
        in this body — callers (e.g. `post_status`) perform their own
        code checks. Either wire it up or drop it.
        """
        url = self.base + path
        data = None
        headers = {
            "Authorization": f"token {self.token}",
            "Accept": "application/json",
        }
        if body is not None:
            data = json.dumps(body).encode("utf-8")
            headers["Content-Type"] = "application/json"
        req = urllib.request.Request(url, method=method, data=data, headers=headers)
        try:
            with urllib.request.urlopen(req, timeout=20) as r:
                raw = r.read()
                code = r.getcode()
        except urllib.error.HTTPError as e:
            # Non-2xx: keep the status code and the error body.
            code = e.code
            raw = e.read()
        try:
            # Prefer JSON; fall back to raw text when the body is not
            # valid JSON (e.g. an HTML error page from a proxy).
            parsed = json.loads(raw.decode("utf-8")) if raw else None
        except json.JSONDecodeError:
            parsed = raw.decode("utf-8", errors="replace") if raw else None
        return code, parsed

    def get_pr(self, owner: str, repo: str, pr: int) -> dict[str, Any]:
        """Fetch one pull request; raise RuntimeError on any non-200."""
        code, data = self._req("GET", f"/repos/{owner}/{repo}/pulls/{pr}")
        if code != 200:
            raise RuntimeError(f"GET pulls/{pr} → HTTP {code}: {data!r}")
        return data

    def get_issue_comments(
        self, owner: str, repo: str, issue: int
    ) -> list[dict[str, Any]]:
        """Fetch ALL comments on an issue/PR, following pagination.

        Raises RuntimeError when any page returns non-200.
        """
        # Paginate. Gitea default page size 50.
        out: list[dict[str, Any]] = []
        page = 1
        while True:
            code, data = self._req(
                "GET",
                f"/repos/{owner}/{repo}/issues/{issue}/comments?limit=50&page={page}",
            )
            if code != 200:
                raise RuntimeError(
                    f"GET issues/{issue}/comments page={page} → HTTP {code}: {data!r}"
                )
            if not data:
                break
            out.extend(data)
            if len(data) < 50:
                # Short page ⇒ last page; avoid one extra empty request.
                break
            page += 1
        return out

    def resolve_team_id(self, org: str, team_name: str) -> int | None:
        """Resolve a team name to its numeric id via the org team-search
        endpoint; return None when not found. Results — including misses —
        are cached per (org, team_name)."""
        key = (org, team_name)
        if key in self._team_id_cache:
            return self._team_id_cache[key]
        code, data = self._req("GET", f"/orgs/{org}/teams/search?q={urllib.parse.quote(team_name)}")
        team_id = None
        # The response envelope differs by Gitea version: either
        # {"data": [...]} or a bare list — handle both shapes.
        if code == 200 and isinstance(data, dict):
            for t in data.get("data", []):
                if t.get("name") == team_name:
                    team_id = t.get("id")
                    break
        if team_id is None and code == 200 and isinstance(data, list):
            for t in data:
                if t.get("name") == team_name:
                    team_id = t.get("id")
                    break
        self._team_id_cache[key] = team_id
        return team_id

    def is_team_member(self, team_id: int, login: str) -> bool | None:
        """Return True / False / None (unknown — 403 from API)."""
        code, _ = self._req(
            "GET", f"/teams/{team_id}/members/{urllib.parse.quote(login)}"
        )
        if code in (200, 204):
            return True
        if code == 404:
            return False
        # 403 means the token owner isn't in this team, so the API
        # refuses to confirm membership. Fail-closed at the caller.
        return None

    def post_status(
        self,
        owner: str,
        repo: str,
        sha: str,
        state: str,
        context: str,
        description: str,
        target_url: str = "",
    ) -> None:
        """POST a commit status for `sha`; raise RuntimeError on failure."""
        body = {
            "state": state,
            "context": context,
            "description": description[:140],  # Gitea truncates to 255 but be safe
            "target_url": target_url or "",
        }
        code, data = self._req(
            "POST",
            f"/repos/{owner}/{repo}/statuses/{sha}",
            body=body,
            # NOTE(review): `_req` ignores `ok_codes`; the effective
            # acceptance check is the (200, 201) test below.
            ok_codes=(201,),
        )
        if code not in (200, 201):
            raise RuntimeError(
                f"POST statuses/{sha} → HTTP {code}: {data!r}"
            )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Config loader (PyYAML-free — config file is intentionally tiny + flat)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def load_config(path: str) -> dict[str, Any]:
    """Load .gitea/sop-checklist-config.yaml.

    Uses PyYAML if available, otherwise falls back to a built-in
    minimal parser sufficient for our flat config shape. Bundling
    PyYAML on the runner is one apt install away but we avoid the
    dep by keeping the config shape constrained.

    Fixes vs the previous revision:
      - `yaml.safe_load` returns None for an empty file; coerce that to
        {} so callers using cfg.get(...) / cfg["items"] fail predictably
        instead of with a TypeError on None.
      - The `try` now wraps only the import, so an OSError from open()
        or a YAML parse error is no longer sitting inside the
        ImportError-handling block.
    """
    try:
        import yaml  # type: ignore[import-not-found]
    except ImportError:
        return _load_config_minimal(path)
    with open(path) as f:
        return yaml.safe_load(f) or {}
|
||||
|
||||
|
||||
def _load_config_minimal(path: str) -> dict[str, Any]:
    """Minimal YAML subset parser for our config shape.

    Supports: top-level scalar:value, top-level map-of-map (e.g.
    tier_failure_mode), top-level list of maps (items:), and within an
    item map: scalars + lists of scalars. Does NOT support nested lists,
    YAML anchors, multi-doc, or flow style.
    """
    with open(path) as handle:
        raw_lines = handle.readlines()
    return _parse_minimal_yaml(raw_lines)
|
||||
|
||||
|
||||
def _parse_minimal_yaml(lines: list[str]) -> dict[str, Any]:  # noqa: C901
    """Hand-rolled subset parser. See _load_config_minimal docstring.

    Single forward pass over pre-cleaned (indent, text) tuples with a
    manually-advanced index `i`; the increments are interleaved with the
    branch logic, so statement order is load-bearing throughout.
    """
    # Strip comments + blank lines but preserve indentation.
    cleaned: list[tuple[int, str]] = []
    for raw in lines:
        # Don't strip a "#" that is inside a quoted value.
        body = raw.rstrip("\n")
        # Remove trailing comment. Only treat "#" as a comment start when
        # it is at column 0 or preceded by whitespace.
        idx = body.find("#")
        if idx >= 0 and (idx == 0 or body[idx - 1] in " \t"):
            body = body[:idx].rstrip()
        if not body.strip():
            continue
        indent = len(body) - len(body.lstrip(" "))
        cleaned.append((indent, body.strip()))

    root: dict[str, Any] = {}
    i = 0
    n = len(cleaned)

    def parse_scalar(s: str) -> Any:
        # Quoted string → unquoted; true/yes / false/no → bool;
        # integer-looking → int; anything else stays a string.
        s = s.strip()
        if s.startswith('"') and s.endswith('"'):
            return s[1:-1]
        if s.startswith("'") and s.endswith("'"):
            return s[1:-1]
        if s.lower() in ("true", "yes"):
            return True
        if s.lower() in ("false", "no"):
            return False
        try:
            return int(s)
        except ValueError:
            pass
        return s

    def parse_inline_list(s: str) -> list[Any]:
        # "[a, b]" → [a, b]; a non-bracketed value becomes a 1-elem list.
        s = s.strip()
        if not (s.startswith("[") and s.endswith("]")):
            return [parse_scalar(s)]
        inner = s[1:-1]
        if not inner.strip():
            return []
        return [parse_scalar(x.strip()) for x in inner.split(",")]

    while i < n:
        indent, line = cleaned[i]
        # Only top-level (indent 0) "key:" lines start constructs; any
        # deeper line reached here was not consumed by a block below and
        # is skipped.
        if indent != 0:
            i += 1
            continue
        if ":" not in line:
            i += 1
            continue
        key, _, rest = line.partition(":")
        key = key.strip()
        rest = rest.strip()
        if rest == "":
            # Block — could be map or list.
            i += 1
            # Look ahead for first child.
            if i < n and cleaned[i][1].startswith("- "):
                # List of items. NOTE(review): the loop condition requires
                # cleaned[i][0] > indent, so "- " entries must be indented
                # under the key — a column-0 "- " yields an empty list.
                items: list[Any] = []
                while i < n and cleaned[i][0] > indent and cleaned[i][1].startswith("- "):
                    item_indent = cleaned[i][0]
                    first_kv = cleaned[i][1][2:].strip()  # strip "- "
                    item: dict[str, Any] = {}
                    if ":" in first_kv:
                        k, _, v = first_kv.partition(":")
                        k = k.strip()
                        v = v.strip()
                        if v == "":
                            item[k] = ""
                        elif v.startswith(">-") or v.startswith(">"):
                            # Folded scalar continues on subsequent indented lines
                            # NOTE(review): this consumes EVERY deeper-indented
                            # line, including later k:v lines of the same item —
                            # assumes a folded value is the item's last key;
                            # confirm against the real config.
                            collected: list[str] = []
                            i += 1
                            while i < n and cleaned[i][0] > item_indent:
                                collected.append(cleaned[i][1])
                                i += 1
                            item[k] = " ".join(collected)
                            items.append(item)
                            continue
                        elif v.startswith("["):
                            item[k] = parse_inline_list(v)
                        else:
                            item[k] = parse_scalar(v)
                    i += 1
                    # Subsequent k:v lines at deeper indent belong to this item.
                    while i < n and cleaned[i][0] > item_indent and not cleaned[i][1].startswith("- "):
                        sub_indent, sub_line = cleaned[i]
                        if ":" in sub_line:
                            k, _, v = sub_line.partition(":")
                            k = k.strip()
                            v = v.strip()
                            if v == "":
                                item[k] = ""
                                i += 1
                            elif v.startswith(">-") or v.startswith(">"):
                                # Folded scalar: the inner while already
                                # advances i past the continuation lines.
                                collected = []
                                i += 1
                                while i < n and cleaned[i][0] > sub_indent:
                                    collected.append(cleaned[i][1])
                                    i += 1
                                item[k] = " ".join(collected)
                            elif v.startswith("["):
                                item[k] = parse_inline_list(v)
                                i += 1
                            else:
                                item[k] = parse_scalar(v)
                                i += 1
                        else:
                            # Non-"k: v" line inside an item: ignored.
                            i += 1
                    items.append(item)
                root[key] = items
            else:
                # Sub-map.
                submap: dict[str, Any] = {}
                while i < n and cleaned[i][0] > indent:
                    sub_indent, sub_line = cleaned[i]
                    if ":" in sub_line:
                        k, _, v = sub_line.partition(":")
                        # Sub-map keys may be quoted (e.g. "tier:low") —
                        # strip surrounding quotes here only.
                        k = k.strip().strip('"').strip("'")
                        v = v.strip()
                        if v.startswith("[") and v.endswith("]"):
                            submap[k] = parse_inline_list(v)
                        else:
                            submap[k] = parse_scalar(v)
                    i += 1
                root[key] = submap
        else:
            # Inline scalar or list.
            if rest.startswith("[") and rest.endswith("]"):
                root[key] = parse_inline_list(rest)
            else:
                root[key] = parse_scalar(rest)
            i += 1
    return root
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Main entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def render_status(
    items: list[dict[str, Any]],
    ack_state: dict[str, dict[str, Any]],
    body_state: dict[str, bool],
) -> tuple[str, str]:
    """Return (state, description) for the commit-status post.

    state is "success" only when EVERY item has at least one valid ack
    AND every item's PR-body section marker is present; otherwise
    "failure". (The previous docstring claimed body-section presence was
    informational only and that tier:low handling happened here — the
    code has always gated on `missing_body`, and the tier:low soft-fail
    rewrite is applied by the caller. Docstring corrected to match.)

    description lists the ack count plus up to three missing /
    body-unfilled slugs each, to stay within the status-description
    budget (post_status truncates at 140 chars).
    """
    def _preview(slugs: list[str], limit: int = 3) -> str:
        # Show at most `limit` slugs; summarize the rest as "+K".
        shown = ", ".join(slugs[:limit])
        if len(slugs) > limit:
            shown += f", +{len(slugs) - limit}"
        return shown

    n = len(items)
    missing = [it["slug"] for it in items if not ack_state[it["slug"]]["ackers"]]
    missing_body = [it["slug"] for it in items if not body_state.get(it["slug"], False)]

    # len(items) - len(missing) == number of fully-acked items.
    desc_parts = [f"acked: {n - len(missing)}/{n}"]
    if missing:
        desc_parts.append(f"missing: {_preview(missing)}")
    if missing_body:
        desc_parts.append(f"body-unfilled: {_preview(missing_body)}")
    state = "success" if not missing and not missing_body else "failure"
    return state, " — ".join(desc_parts)
|
||||
|
||||
|
||||
def get_tier_mode(pr: dict[str, Any], cfg: dict[str, Any]) -> str:
    """Map the PR's first recognized `tier:*` label to a failure mode.

    Looks each tier-prefixed label up in cfg["tier_failure_mode"] in
    label order; returns the first hit, else cfg["default_mode"]
    (falling back to "hard").
    """
    mode_map = cfg.get("tier_failure_mode") or {}
    fallback = cfg.get("default_mode", "hard")
    for label in pr.get("labels") or []:
        name = label.get("name", "") or ""
        if name.startswith("tier:") and name in mode_map:
            return mode_map[name]
    return fallback
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
    """Entry point for the SOP-checklist gate.

    Fetches the PR and its comments, computes ack / body-section /
    N/A-gate state, and POSTs commit statuses that branch protection
    reads. Returns a process exit code: 0 on normal completion (even
    when the posted state is "failure" — see the comment at the bottom),
    1 on a malformed PR payload, 2 on a missing token/client.
    """
    p = argparse.ArgumentParser()
    p.add_argument("--owner", required=True)
    p.add_argument("--repo", required=True)
    p.add_argument("--pr", type=int, required=True)
    p.add_argument("--config", default=".gitea/sop-checklist-config.yaml")
    p.add_argument("--gitea-host", default="git.moleculesai.app")
    p.add_argument(
        "--dry-run",
        action="store_true",
        help="Compute state but do not POST the status.",
    )
    p.add_argument(
        "--status-context",
        default="sop-checklist / all-items-acked (pull_request)",
    )
    p.add_argument(
        "--exit-on-state",
        action="store_true",
        help=(
            "If set, exit non-zero when state=failure. Default OFF so the "
            "job-level conclusion is independent of ack-state — the only "
            "thing BP sees is the POSTed status. Useful for local debugging."
        ),
    )
    args = p.parse_args(argv)

    token = os.environ.get("GITEA_TOKEN", "")
    if not token and not args.dry_run:
        print("::error::GITEA_TOKEN env required", file=sys.stderr)
        return 2

    # Config: checklist items plus optional numeric aliases and N/A gates.
    cfg = load_config(args.config)
    items: list[dict[str, Any]] = cfg["items"]
    items_by_slug = {it["slug"]: it for it in items}
    numeric_aliases = {
        int(it["numeric_alias"]): it["slug"] for it in items if it.get("numeric_alias")
    }
    na_gates: dict[str, dict[str, Any]] = cfg.get("n/a_gates") or {}

    client = GiteaClient(args.gitea_host, token) if token else None
    if not client:
        print("::error::No client (dry-run without token has nothing to do)", file=sys.stderr)
        return 2

    pr = client.get_pr(args.owner, args.repo, args.pr)
    if pr.get("state") != "open":
        print(f"::notice::PR #{args.pr} is {pr.get('state')} — gate is a no-op")
        return 0

    author = (pr.get("user") or {}).get("login", "")
    head_sha = (pr.get("head") or {}).get("sha", "")
    body = pr.get("body", "") or ""

    if not author or not head_sha:
        print("::error::PR payload missing user.login or head.sha", file=sys.stderr)
        return 1

    target_url = f"https://{args.gitea_host}/{args.owner}/{args.repo}/pulls/{args.pr}"

    comments = client.get_issue_comments(args.owner, args.repo, args.pr)

    # Build team-membership probe closure that caches results per
    # (user, team-id) so a user acking multiple items only triggers
    # one membership lookup per team.
    team_member_cache: dict[tuple[str, int], bool | None] = {}

    def probe(slug: str, users: list[str]) -> list[str]:
        # Return the subset of `users` who belong to at least one of the
        # item's required teams. Unresolvable teams are skipped (logged),
        # so an item with zero resolvable teams fails closed.
        item = items_by_slug[slug]
        team_names: list[str] = item["required_teams"]
        # Resolve names → ids. NOTE: orgs/{org}/teams/search may not be
        # available — fall back to the list endpoint.
        team_ids: list[int] = []
        for tn in team_names:
            tid = client.resolve_team_id(args.owner, tn)
            if tid is None:
                # Try the list endpoint as a fallback.
                code, data = client._req(  # noqa: SLF001
                    "GET", f"/orgs/{args.owner}/teams"
                )
                if code == 200 and isinstance(data, list):
                    for t in data:
                        if t.get("name") == tn:
                            tid = t.get("id")
                            # Seed the client's cache so later calls skip
                            # both the search and this fallback.
                            client._team_id_cache[(args.owner, tn)] = tid  # noqa: SLF001
                            break
            if tid is not None:
                team_ids.append(tid)
            else:
                print(
                    f"::warning::could not resolve team-id for '{tn}' "
                    f"in org '{args.owner}' — item '{slug}' will fail closed",
                    file=sys.stderr,
                )
        approved: list[str] = []
        for u in users:
            for tid in team_ids:
                cache_key = (u, tid)
                if cache_key not in team_member_cache:
                    team_member_cache[cache_key] = client.is_team_member(tid, u)
                result = team_member_cache[cache_key]
                if result is True:
                    approved.append(u)
                    break
                if result is None:
                    print(
                        f"::warning::team-probe for {u} in team-id {tid} returned 403 "
                        "(token owner not in that team — fail-closed per RFC#324)",
                        file=sys.stderr,
                    )
                    # Treat as not-in-team for this user/team pair; loop
                    # may still find membership in another team.
        return approved

    ack_state = compute_ack_state(comments, author, items_by_slug, numeric_aliases, probe)
    body_state = {it["slug"]: section_marker_present(body, it["pr_section_marker"]) for it in items}

    # --- N/A gate state (RFC#324 §N/A follow-up) ---
    na_state: dict[str, dict[str, Any]] = {}
    if na_gates:
        na_state = compute_na_state(
            comments, author, na_gates, numeric_aliases,
            probe, client, args.owner,
        )
        # Post N/A declarations status (read by review-check.sh).
        na_satisfied = [g for g, s in na_state.items() if s["declared"]]
        na_missing = [g for g, s in na_state.items() if not s["declared"]]
        if na_satisfied:
            na_desc = f"N/A: {', '.join(na_satisfied)}"
            na_post_state = "success"
        elif na_missing:
            na_desc = f"awaiting /sop-n/a declaration for: {', '.join(na_missing)}"
            na_post_state = "pending"
        else:
            # Configured but no declarations yet.
            na_desc = "no /sop-n/a declarations yet"
            na_post_state = "pending"
        na_context = "sop-checklist / na-declarations (pull_request)"
        print(f"::notice::na-declarations status: {na_post_state} — {na_desc}")
        if not args.dry_run:
            client.post_status(
                args.owner, args.repo, head_sha,
                state=na_post_state, context=na_context,
                description=na_desc,
                target_url=target_url,
            )
            print(f"::notice::na-declarations status posted: {na_context} → {na_post_state}")
        # Log per-gate diagnostics.
        for gate in na_gates:
            s = na_state.get(gate, {})
            if s.get("declared"):
                print(f"::notice:: [PASS] gate={gate} — N/A declared by {','.join(s['declared'])}"
                      + (f" ({s['reason']})" if s.get("reason") else ""))
            else:
                extra = f" — rejected: {', '.join(s.get('rejected', []))}" if s.get("rejected") else ""
                print(f"::notice:: [WAIT] gate={gate} — no valid N/A declaration yet{extra}")

    state, description = render_status(items, ack_state, body_state)
    mode = get_tier_mode(pr, cfg)
    if mode == "soft":
        # tier:low: acks are informational only — post success so BP gate passes.
        # Description carries "[info tier:low]" prefix so reviewers know acks
        # were not required (vs a tier:medium+ PR that truly passed all acks).
        state = "success"
        description = f"[info tier:low] {description}"

    # Diagnostics to job log.
    print(f"::notice::PR #{args.pr} author={author} head={head_sha[:7]} mode={mode}")
    for it in items:
        slug = it["slug"]
        ackers = ack_state[slug]["ackers"]
        if ackers:
            print(f"::notice:: [PASS] {slug} — acked by {','.join(ackers)}")
        else:
            r = ack_state[slug]["rejected"]
            extras: list[str] = []
            if r["self_ack"]:
                extras.append(f"self-acks-rejected:{','.join(r['self_ack'])}")
            if r["not_in_team"]:
                extras.append(f"not-in-team:{','.join(r['not_in_team'])}")
            extra = " (" + "; ".join(extras) + ")" if extras else ""
            print(f"::notice:: [WAIT] {slug} — no valid peer-ack yet{extra}")

    print(f"::notice::posting status: state={state} desc={description!r}")

    if args.dry_run:
        print("::notice::--dry-run: not posting status")
        if args.exit_on_state:
            return 0 if state in ("success", "pending") else 1
        return 0

    client.post_status(
        args.owner, args.repo, head_sha,
        state=state, context=args.status_context,
        description=description, target_url=target_url,
    )
    print(f"::notice::status posted: {args.status_context} → {state}")
    # By default exit 0 — the POSTed status IS the gate, NOT the job
    # conclusion. If the job exits 1 BP will see TWO failure signals
    # (one from the job's auto-status, one from our POST), making the
    # description less actionable. --exit-on-state restores the old
    # behavior for local debugging.
    if args.exit_on_state:
        return 0 if state in ("success", "pending") else 1
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s integer return value as the process exit code.
    sys.exit(main())
|
||||
@ -96,27 +96,16 @@ API="https://${GITEA_HOST}/api/v1"
|
||||
AUTH="Authorization: token ${GITEA_TOKEN}"
|
||||
echo "::notice::tier-check start: repo=$OWNER/$NAME pr=$PR_NUMBER author=$PR_AUTHOR"
|
||||
|
||||
# Sanity: token resolves to a user.
|
||||
# Use || true on the jq pipeline so that set -euo pipefail (line 45) does not
|
||||
# cause the script to exit prematurely when the token is empty/invalid — the
|
||||
# if check below handles that case gracefully. Without || true, a 401 from an
|
||||
# empty/invalid token causes jq to exit 1, triggering set -e and exiting the
|
||||
# entire script before SOP_FAIL_OPEN can be evaluated (the check is in the jq-
|
||||
# install block; if jq is already on PATH, that block is skipped entirely).
|
||||
WHOAMI=$(curl -sS -H "$AUTH" "${API}/user" | jq -r '.login // ""') || true
|
||||
# Sanity: token resolves to a user
|
||||
WHOAMI=$(curl -sS -H "$AUTH" "${API}/user" | jq -r '.login // ""')
|
||||
if [ -z "$WHOAMI" ]; then
|
||||
echo "::error::GITEA_TOKEN cannot resolve a user via /api/v1/user — check the token scope and that the secret is wired correctly."
|
||||
if [ "${SOP_FAIL_OPEN:-}" = "1" ]; then
|
||||
echo "::warning::SOP_FAIL_OPEN=1 — exiting 0 so CI does not block."
|
||||
exit 0
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
echo "::notice::token resolves to user: $WHOAMI"
|
||||
|
||||
# 1. Read tier label. || true ensures set -euo pipefail does not abort the
|
||||
# script if curl or jq fails (e.g. 401 from empty token).
|
||||
LABELS=$(curl -sS -H "$AUTH" "${API}/repos/${OWNER}/${NAME}/issues/${PR_NUMBER}/labels" | jq -r '.[].name') || true
|
||||
# 1. Read tier label
|
||||
LABELS=$(curl -sS -H "$AUTH" "${API}/repos/${OWNER}/${NAME}/issues/${PR_NUMBER}/labels" | jq -r '.[].name')
|
||||
TIER=""
|
||||
for L in $LABELS; do
|
||||
case "$L" in
|
||||
@ -187,25 +176,17 @@ fi
|
||||
# 4. Resolve all team names → IDs
|
||||
# /orgs/{org}/teams/{slug}/... endpoints don't exist on Gitea 1.22;
|
||||
# we use /teams/{id}.
|
||||
# set +e prevents set -e from aborting the script if curl fails (e.g. empty token).
|
||||
ORG_TEAMS_FILE=$(mktemp)
|
||||
trap 'rm -f "$ORG_TEAMS_FILE"' EXIT
|
||||
set +e
|
||||
HTTP_CODE=$(curl -sS -o "$ORG_TEAMS_FILE" -w '%{http_code}' -H "$AUTH" \
|
||||
"${API}/orgs/${OWNER}/teams")
|
||||
_HTTP_EXIT=$?
|
||||
set -e
|
||||
debug "teams-list HTTP=$HTTP_CODE (curl exit=$_HTTP_EXIT) size=$(wc -c <"$ORG_TEAMS_FILE")"
|
||||
debug "teams-list HTTP=$HTTP_CODE size=$(wc -c <"$ORG_TEAMS_FILE")"
|
||||
if [ "${SOP_DEBUG:-}" = "1" ]; then
|
||||
echo " [debug] teams-list body (first 300 chars):" >&2
|
||||
head -c 300 "$ORG_TEAMS_FILE" >&2; echo >&2
|
||||
fi
|
||||
if [ "$_HTTP_EXIT" -ne 0 ] || [ "$HTTP_CODE" != "200" ]; then
|
||||
echo "::error::GET /orgs/${OWNER}/teams failed (curl exit=$_HTTP_EXIT HTTP=$HTTP_CODE) — token may lack read:org scope or be invalid."
|
||||
if [ "${SOP_FAIL_OPEN:-}" = "1" ]; then
|
||||
echo "::warning::SOP_FAIL_OPEN=1 — exiting 0 so CI does not block."
|
||||
exit 0
|
||||
fi
|
||||
if [ "$HTTP_CODE" != "200" ]; then
|
||||
echo "::error::GET /orgs/${OWNER}/teams returned HTTP $HTTP_CODE — token likely lacks read:org scope."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@ -250,22 +231,9 @@ for _t in $_all_teams; do
|
||||
debug "team-id: $_t → $_id"
|
||||
done
|
||||
|
||||
# 5. Read approving reviewers. set +e disables set -e temporarily so that curl
|
||||
# failures (e.g. empty/invalid token → HTTP 401) do not abort the script before
|
||||
# SOP_FAIL_OPEN is evaluated. set -e is restored immediately after.
|
||||
set +e
|
||||
# 5. Read approving reviewers
|
||||
REVIEWS=$(curl -sS -H "$AUTH" "${API}/repos/${OWNER}/${NAME}/pulls/${PR_NUMBER}/reviews")
|
||||
_REVIEWS_EXIT=$?
|
||||
set -e
|
||||
if [ $_REVIEWS_EXIT -ne 0 ] || [ -z "$REVIEWS" ]; then
|
||||
echo "::error::Failed to fetch reviews (curl exit=$_REVIEWS_EXIT) — token may be invalid or unreachable."
|
||||
if [ "${SOP_FAIL_OPEN:-}" = "1" ]; then
|
||||
echo "::warning::SOP_FAIL_OPEN=1 — exiting 0 so CI does not block."
|
||||
exit 0
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
APPROVERS=$(echo "$REVIEWS" | jq -r '[.[] | select(.state=="APPROVED") | .user.login] | unique | .[]') || true
|
||||
APPROVERS=$(echo "$REVIEWS" | jq -r '[.[] | select(.state=="APPROVED") | .user.login] | unique | .[]')
|
||||
if [ -z "$APPROVERS" ]; then
|
||||
echo "::error::No approving reviews on this PR. Set SOP_DEBUG=1 and re-run for diagnostics."
|
||||
exit 1
|
||||
|
||||
@ -1,796 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""status-reaper — Option B compensating-status POST for Gitea 1.22.6's
|
||||
hardcoded `(push)` suffix on default-branch commit statuses.
|
||||
|
||||
Tracking: this PR (workflow + script + tests + audit issue). Sibling
|
||||
bots: internal#327 (publish-runtime-bot), internal#328 (mc-drift-bot).
|
||||
Upstream RFC: internal#80. Persona provisioned by sub-agent aefaac1b
|
||||
(2026-05-11 21:39Z; Gitea uid 94, scope=write:repository).
|
||||
|
||||
What this script does, per `.gitea/workflows/status-reaper.yml` invocation:
|
||||
|
||||
1. Walk `.gitea/workflows/*.yml`. For each file, build the workflow_id
|
||||
using this resolution (per hongming-pc 22:08Z review):
|
||||
- If YAML has top-level `name:` → use that.
|
||||
- Else → use filename stem (basename minus `.yml`).
|
||||
Fail-LOUD on:
|
||||
- Two workflows resolving to the SAME identifier (collision).
|
||||
- Any identifier containing `/` (it would break context parsing
|
||||
downstream — Gitea uses ` / ` as the workflow/job separator).
|
||||
Classify each by whether `on:` contains a `push:` trigger.
|
||||
|
||||
2. List the last N (=30, rev3 — widened from 10) commits on
|
||||
WATCH_BRANCH via GET /repos/{o}/{r}/commits?sha={branch}&limit={N}.
|
||||
rev2 sweeps N commits per tick instead of HEAD only — schedule
|
||||
workflows post `failure` to whatever SHA was HEAD when they
|
||||
COMPLETED, so by the next */5 tick main has often moved forward
|
||||
and the red gets stranded on a stale commit. rev3 widens the
|
||||
window from 10 → 30 because schedule workflows post `failure`
|
||||
RETROACTIVELY (5-15 min after their merge); a 10-commit window
|
||||
is narrower than the merge-cadence during a burst, so reds land
|
||||
OUTSIDE the window before reaper sees them (Phase 1+2 evidence:
|
||||
rev2 run 17057 at 02:46Z saw 185/0 contexts on 10 SHAs; direct
|
||||
probe ~30min later showed ~25 fails on those same 10 SHAs).
|
||||
|
||||
3. For EACH SHA in the list:
|
||||
- GET combined commit status. Per-SHA error isolation
|
||||
(refinement #7): if this call raises ApiError or any 5xx,
|
||||
LOG `::warning::` + continue to the next SHA. Different from
|
||||
the single-HEAD pre-rev2 path where fail-loud was correct;
|
||||
the sweep is best-effort across historical commits, so one
|
||||
transient blip on a stale SHA must not strand reds on the
|
||||
OTHER stale SHAs.
|
||||
- If combined.state == "success": skip — cost optimization
|
||||
(refinement #2), common case (most commits are green).
|
||||
- Otherwise iterate per-context entries. For each entry where:
|
||||
state == "failure" AND context.endswith(" (push)")
|
||||
Parse context as `<workflow_name> / <job_name> (push)`.
|
||||
Look up workflow_name in the trigger map:
|
||||
- missing → log ::notice:: and skip (conservative).
|
||||
- has_push_trigger=True → preserve (real defect signal).
|
||||
- has_push_trigger=False → POST a compensating
|
||||
`state=success` status to /statuses/{sha} with the same
|
||||
context (Gitea de-dups by context) and a description
|
||||
documenting the workaround + this script's path.
|
||||
|
||||
4. Exit 0. Re-running is idempotent — Gitea's commit-status table
|
||||
stores the LATEST state-per-context, so the success POST sticks
|
||||
even if another tick happens before the runner finishes.
|
||||
|
||||
What it does NOT do:
|
||||
- Touch ` (pull_request)` contexts unless the exact same
|
||||
workflow/job has a successful ` (push)` context on the same
|
||||
default-branch SHA. That case is post-merge status pollution, not
|
||||
an unproven PR gate.
|
||||
- Compensate `error`/`pending` states. Only `failure` — the only one
|
||||
Gitea emits for the hardcoded-suffix bug.
|
||||
- Write to non-default branches. WATCH_BRANCH is sourced from
|
||||
`github.event.repository.default_branch` in the workflow.
|
||||
- Mutate workflows or runs. The Actions UI still shows the
|
||||
underlying schedule-triggered run as failed; this script edits
|
||||
the commit-status surface only.
|
||||
|
||||
Halt conditions (script-level — orchestrator-level halts are in the
|
||||
workflow comments):
|
||||
- PyYAML missing → fail-loud at import (no fallback parse).
|
||||
- Workflow `name:` collision → exit 1 with ::error:: message.
|
||||
- Workflow `name:` containing `/` → exit 1 with ::error:: message.
|
||||
- Ambiguous `on:` shape (e.g. neither str/list/dict) → treat as
|
||||
"has_push_trigger=True" and log ::notice:: (preserve, never
|
||||
compensate the unknown).
|
||||
- api() non-2xx → raise ApiError, fail the workflow run loudly so
|
||||
a subsequent tick retries (per
|
||||
`feedback_api_helper_must_raise_not_return_dict`).
|
||||
|
||||
Local dry-run (no network):
|
||||
GITEA_TOKEN=... GITEA_HOST=git.moleculesai.app REPO=owner/repo \\
|
||||
WATCH_BRANCH=main WORKFLOWS_DIR=.gitea/workflows \\
|
||||
python3 .gitea/scripts/status-reaper.py --dry-run
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml # PyYAML 6.0.2 — installed by the workflow before this runs.
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Environment
|
||||
# --------------------------------------------------------------------------
|
||||
def _env(key: str, *, default: str = "") -> str:
|
||||
"""Read an env var with a default. Module-import-safe — tests can
|
||||
import this script without setting the full env contract."""
|
||||
return os.environ.get(key, default)
|
||||
|
||||
|
||||
# Runtime configuration — every value is sourced from the environment
# via _env, so importing this module never requires the full contract
# (main() enforces it via _require_runtime_env).
GITEA_TOKEN = _env("GITEA_TOKEN")
GITEA_HOST = _env("GITEA_HOST")
REPO = _env("REPO")  # "owner/name"
WATCH_BRANCH = _env("WATCH_BRANCH", default="main")
WORKFLOWS_DIR = _env("WORKFLOWS_DIR", default=".gitea/workflows")

# Split "owner/name" defensively: a REPO without "/" yields NAME == "".
OWNER, NAME = (REPO.split("/", 1) + [""])[:2] if REPO else ("", "")
API = f"https://{GITEA_HOST}/api/v1" if GITEA_HOST else ""
# The trailing `or "…"` guards an explicitly-empty env var ("" is falsy),
# which would otherwise make int()/float() raise at import time.
API_TIMEOUT_SEC = int(_env("STATUS_REAPER_API_TIMEOUT_SEC", default="30") or "30")
API_RETRIES = int(_env("STATUS_REAPER_API_RETRIES", default="3") or "3")
API_RETRY_SLEEP_SEC = float(_env("STATUS_REAPER_API_RETRY_SLEEP_SEC", default="2") or "2")

# Compensating-status description prefix. Used as the marker so a human
# auditing commit statuses can tell at a glance that the green was
# synthetic, not a real CI pass. Kept stable; downstream tooling
# (e.g. main-red-watchdog visual diff) MAY key on it.
PUSH_COMPENSATION_DESCRIPTION = (
    "Compensated by status-reaper (workflow has no push: trigger; "
    "Gitea 1.22.6 hardcoded-suffix bug — see .gitea/scripts/status-reaper.py)"
)
# Backward-compatible alias for older tests/tooling that predate the split
# between push-suffix compensation and pull-request-shadow compensation.
COMPENSATION_DESCRIPTION = PUSH_COMPENSATION_DESCRIPTION
PR_SHADOW_COMPENSATION_DESCRIPTION = (
    "Compensated by status-reaper (default-branch pull_request status "
    "shadowed by successful push status on same SHA; see "
    ".gitea/scripts/status-reaper.py)"
)

# Context suffix the reaper acts on. Gitea hardcodes this for ALL
# default-branch workflow runs.
PUSH_SUFFIX = " (push)"
PULL_REQUEST_SUFFIX = " (pull_request)"
|
||||
|
||||
|
||||
def _require_runtime_env() -> None:
|
||||
"""Enforce env contract — called from `main()` only.
|
||||
|
||||
Tests import individual functions without setting the full env
|
||||
contract. Mirrors `main-red-watchdog.py`/`ci-required-drift.py`.
|
||||
"""
|
||||
for key in ("GITEA_TOKEN", "GITEA_HOST", "REPO", "WATCH_BRANCH", "WORKFLOWS_DIR"):
|
||||
if not os.environ.get(key):
|
||||
sys.stderr.write(f"::error::missing required env var: {key}\n")
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Tiny HTTP helper — raises on non-2xx + on JSON-decode-of-expected-JSON.
|
||||
# --------------------------------------------------------------------------
|
||||
class ApiError(RuntimeError):
    """Raised when a Gitea API call cannot be trusted to have succeeded.

    Covers non-2xx responses, exhausted network retries, and (by
    default) non-JSON bodies where JSON was expected.

    Per `feedback_api_helper_must_raise_not_return_dict`: soft-failure is
    opt-in via `expect_json=False`, never the default. A pre-fix
    implementation that returned `{}` on non-2xx would skip the
    compensating POST on a transient outage AND silently lose the
    failed-status enumeration, painting main green via omission.
    """
|
||||
|
||||
|
||||
def api(
    method: str,
    path: str,
    *,
    body: dict | None = None,
    query: dict[str, str] | None = None,
    expect_json: bool = True,
) -> tuple[int, Any]:
    """Tiny HTTP helper around urllib. Same contract as
    `main-red-watchdog.py` and `ci-required-drift.py` so behaviour
    is cross-checkable.

    Args:
        method: HTTP verb ("GET", "POST", ...).
        path: API path appended verbatim to the module-level `API` base.
        body: optional JSON payload; serialized and sent with a
            Content-Type header when present.
        query: optional query params, urlencoded onto the URL.
        expect_json: when True (default), a non-JSON body raises
            ApiError; when False, the raw text is returned under
            `{"_raw": ...}` instead.

    Returns:
        (http_status, decoded_json_or_None) — None when the body is empty.

    Raises:
        ApiError: after exhausting retries on transport errors, on any
            non-2xx status, or on a non-JSON body with expect_json=True.
    """
    url = f"{API}{path}"
    if query:
        url = f"{url}?{urllib.parse.urlencode(query)}"
    data = None
    headers = {
        "Authorization": f"token {GITEA_TOKEN}",
        "Accept": "application/json",
    }
    if body is not None:
        data = json.dumps(body).encode("utf-8")
        headers["Content-Type"] = "application/json"
    req = urllib.request.Request(url, method=method, data=data, headers=headers)
    attempts = max(API_RETRIES, 1)
    for attempt in range(1, attempts + 1):
        try:
            with urllib.request.urlopen(req, timeout=API_TIMEOUT_SEC) as resp:
                raw = resp.read()
                status = resp.status
            break
        except urllib.error.HTTPError as e:
            # An HTTP error status is a *delivered* response — break
            # immediately (no retry) and let the non-2xx check below
            # turn it into ApiError. Only transport-level failures retry.
            raw = e.read()
            status = e.code
            break
        except (TimeoutError, socket.timeout, urllib.error.URLError, OSError) as e:
            # Transient transport failure: retry up to `attempts` times,
            # then escalate to ApiError.
            if attempt >= attempts:
                raise ApiError(
                    f"{method} {path} failed after {attempts} attempts: {e}"
                ) from e
            print(
                f"::warning::{method} {path} transient API error "
                f"(attempt {attempt}/{attempts}): {e}; retrying"
            )
            time.sleep(API_RETRY_SLEEP_SEC)

    if not (200 <= status < 300):
        # Include a bounded body snippet for diagnosis without flooding logs.
        snippet = raw[:500].decode("utf-8", errors="replace") if raw else ""
        raise ApiError(f"{method} {path} -> HTTP {status}: {snippet}")

    if not raw:
        return status, None
    try:
        return status, json.loads(raw)
    except json.JSONDecodeError as e:
        if expect_json:
            raise ApiError(
                f"{method} {path} -> HTTP {status} but body is not JSON: {e}"
            ) from e
        # Opt-in soft path: caller asked for tolerance of non-JSON bodies.
        return status, {"_raw": raw.decode("utf-8", errors="replace")}
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Workflow scan + classification
|
||||
# --------------------------------------------------------------------------
|
||||
def _on_block(doc: dict) -> Any:
|
||||
"""Extract the `on:` block from a parsed YAML doc.
|
||||
|
||||
PyYAML parses bareword `on:` as Python `True` (YAML 1.1 boolean
|
||||
spec — `on/off/yes/no` are booleans). The actual key in the dict
|
||||
is therefore `True`, NOT the string `"on"`. We accept both for
|
||||
forward-compat with YAML 1.2 loaders (which keep it as `"on"`).
|
||||
"""
|
||||
if True in doc:
|
||||
return doc[True]
|
||||
return doc.get("on")
|
||||
|
||||
|
||||
def _has_push_trigger(on_block: Any, workflow_id: str) -> bool:
|
||||
"""Return True if `on:` block declares a `push` trigger.
|
||||
|
||||
Accepts the three common shapes:
|
||||
- str: `on: push` → True only if == "push"
|
||||
- list: `on: [push, pull_request]` → True if "push" in list
|
||||
- dict: `on: { push: {...}, schedule: ... }` → True if "push" key
|
||||
|
||||
Defensive: for anything else (including None/empty), return True
|
||||
so we preserve rather than over-compensate. Logged via ::notice::.
|
||||
"""
|
||||
if isinstance(on_block, str):
|
||||
return on_block == "push"
|
||||
if isinstance(on_block, list):
|
||||
return "push" in on_block
|
||||
if isinstance(on_block, dict):
|
||||
return "push" in on_block
|
||||
# None or unexpected shape — preserve, log.
|
||||
print(
|
||||
f"::notice::ambiguous on: for {workflow_id}; preserving "
|
||||
f"(value={on_block!r}, type={type(on_block).__name__})"
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
def scan_workflows(workflows_dir: str) -> dict[str, bool]:
    """Walk `workflows_dir` and return `{workflow_id: has_push_trigger}`.

    Workflow ID resolution (per hongming-pc 22:08Z review):
      - Top-level `name:` if present.
      - Else filename stem (basename minus `.yml`).

    Fail-LOUD (sys.exit(1)) on:
      - Two workflows resolving to the same ID (collision).
      - Any ID containing `/` (would break ` / `-separated context
        parsing on the downstream side).

    Soft-skip (::warning::, continue) on unparseable or non-mapping
    YAML files — the reaper still compensates the OTHER workflows.

    Returns a dict for O(1) lookup in the per-status loop.

    NOTE(review): only `*.yml` is globbed — a workflow saved as `.yaml`
    would be invisible here and hit the "unknown workflow; skip" path
    downstream. Presumably the repo convention is `.yml`-only; confirm.
    """
    path = Path(workflows_dir)
    if not path.is_dir():
        # Workflow dir missing → no workflows to classify. Empty map is
        # safe: per-status loop will hit "unknown workflow; skip" for
        # every entry, which is correct (we cannot tell if a push
        # trigger exists, so we preserve).
        print(f"::warning::workflows dir not found: {workflows_dir}")
        return {}

    out: dict[str, bool] = {}
    sources: dict[str, str] = {}  # workflow_id -> source file (for collision msg)

    for yml in sorted(path.glob("*.yml")):
        try:
            with yml.open() as f:
                doc = yaml.safe_load(f)
        except yaml.YAMLError as e:
            # A malformed YAML in the workflows dir is a real defect
            # (the workflow wouldn't load on Gitea either). Surface it
            # and keep going — the reaper's job is to compensate the
            # OTHER workflows even if one is broken.
            print(f"::warning::yaml parse failed for {yml.name}: {e}; skip")
            continue
        if not isinstance(doc, dict):
            print(f"::warning::workflow {yml.name} not a dict; skip")
            continue

        # Resolve workflow_id: `name:` wins, else the filename stem.
        name_field = doc.get("name")
        if isinstance(name_field, str) and name_field.strip():
            workflow_id = name_field.strip()
        else:
            workflow_id = yml.stem  # basename minus .yml

        # Halt-loud: `/` in workflow_id breaks ` / ` context parsing.
        if "/" in workflow_id:
            sys.stderr.write(
                f"::error::workflow name contains '/' which breaks "
                f"context parsing: {workflow_id} (file={yml.name})\n"
            )
            sys.exit(1)

        # Halt-loud: ID collision.
        if workflow_id in out:
            sys.stderr.write(
                f"::error::workflow name collision detected: {workflow_id} "
                f"(files: {sources[workflow_id]} + {yml.name})\n"
            )
            sys.exit(1)

        on_block = _on_block(doc)
        out[workflow_id] = _has_push_trigger(on_block, workflow_id)
        sources[workflow_id] = yml.name

    return out
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Gitea reads
|
||||
# --------------------------------------------------------------------------
|
||||
def get_head_sha(branch: str) -> str:
    """Return the HEAD commit SHA of `branch`.

    Raises:
        ApiError: propagated from `api()` on non-2xx, or raised here
            when the response shape does not yield a usable SHA.
    """
    _, payload = api("GET", f"/repos/{OWNER}/{NAME}/branches/{branch}")
    if not isinstance(payload, dict):
        raise ApiError(f"branch {branch} response not a JSON object")
    commit_obj = payload.get("commit")
    if not isinstance(commit_obj, dict):
        raise ApiError(f"branch {branch} response missing `commit` object")
    # Gitea spells the commit SHA as `id`; `sha` kept as a fallback key.
    head = commit_obj.get("id") or commit_obj.get("sha")
    if isinstance(head, str) and len(head) >= 7:
        return head
    raise ApiError(f"branch {branch} response has no usable commit SHA")
|
||||
|
||||
|
||||
def get_combined_status(sha: str) -> dict:
    """Fetch the combined commit status for `sha`. Gitea returns:
    {
        "state": "success" | "failure" | "pending" | "error",
        "statuses": [
            {"context": "...", "state": "...", "target_url": "...",
             "description": "..."},
            ...
        ],
        ...
    }

    NOTE: on Gitea 1.22.6 the per-entry items in `statuses[]` actually
    carry the key `status`, not `state` — see the schema-asymmetry
    handling in `reap()`. Only the top-level aggregate is `state`.

    Raises ApiError on non-2xx or when the body is not a JSON object.
    """
    _, body = api("GET", f"/repos/{OWNER}/{NAME}/commits/{sha}/status")
    if not isinstance(body, dict):
        raise ApiError(f"status for {sha} response not a JSON object")
    return body
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Context parsing
|
||||
# --------------------------------------------------------------------------
|
||||
def parse_suffixed_context(context: str, suffix: str) -> tuple[str, str] | None:
|
||||
"""Parse `<workflow_name> / <job_name> (<event>)` into
|
||||
(workflow_name, job_name).
|
||||
|
||||
Returns None if the context doesn't match the shape (caller skips).
|
||||
Strict: requires the trailing suffix and at least one ` / `
|
||||
separator. Anything else is left alone.
|
||||
"""
|
||||
if not context.endswith(suffix):
|
||||
return None
|
||||
head = context[: -len(suffix)]
|
||||
if " / " not in head:
|
||||
return None
|
||||
workflow_name, job_name = head.split(" / ", 1)
|
||||
return workflow_name, job_name
|
||||
|
||||
|
||||
def parse_push_context(context: str) -> tuple[str, str] | None:
    """Parse `<workflow_name> / <job_name> (push)` into
    (workflow_name, job_name).

    Thin wrapper over `parse_suffixed_context` with `PUSH_SUFFIX`
    hardcoded; returns None for any context not in that shape.
    """
    return parse_suffixed_context(context, PUSH_SUFFIX)
|
||||
|
||||
|
||||
def push_equivalent_context(context: str) -> str | None:
    """Map a `(pull_request)` context to its `(push)` twin.

    Returns None when `context` is not a parseable `(pull_request)`
    context (wrong suffix or no ` / ` separator).
    """
    parts = parse_suffixed_context(context, PULL_REQUEST_SUFFIX)
    if parts is None:
        return None
    return f"{parts[0]} / {parts[1]}{PUSH_SUFFIX}"
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Compensating POST
|
||||
# --------------------------------------------------------------------------
|
||||
def post_compensating_status(
    sha: str,
    context: str,
    target_url: str | None,
    *,
    description: str = PUSH_COMPENSATION_DESCRIPTION,
    dry_run: bool = False,
) -> None:
    """POST a `state=success` to /repos/{o}/{r}/statuses/{sha} with the
    given context. Gitea de-dups by context (latest write wins), so a
    single POST flips the stranded red.

    Args:
        sha: full commit SHA to attach the status to.
        context: exact status context string being overwritten.
        target_url: original status URL, echoed through when present.
        description: marker text; defaults to the push-suffix marker —
            the PR-shadow rule passes its own marker.
        dry_run: when True, print the intended action and skip the POST.

    Description references this script so the compensation is
    self-documenting on the commit's status view.
    """
    payload: dict[str, Any] = {
        "context": context,
        "state": "success",
        "description": description,
    }
    # Echo the original target_url when present so a human auditing
    # the (now-green) compensated status can still reach the run logs
    # that produced the original red.
    if target_url:
        payload["target_url"] = target_url

    if dry_run:
        print(
            f"::notice::[dry-run] would compensate {context!r} on {sha[:10]} "
            f"with state=success"
        )
        return

    api("POST", f"/repos/{OWNER}/{NAME}/statuses/{sha}", body=payload)
    print(f"::notice::compensated {context!r} on {sha[:10]} (state=success)")
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Main reap loop
|
||||
# --------------------------------------------------------------------------
|
||||
def reap(
    workflow_trigger_map: dict[str, bool],
    combined: dict,
    sha: str,
    *,
    dry_run: bool = False,
) -> dict[str, Any]:
    """Walk `combined.statuses[]` and compensate where appropriate.

    Per-SHA worker. The multi-SHA orchestrator (`reap_branch`) calls
    this once per stale main commit each tick.

    Args:
        workflow_trigger_map: {workflow_id: has_push_trigger} from
            `scan_workflows`.
        combined: combined-status payload from `get_combined_status`.
        sha: the commit the statuses belong to.
        dry_run: forwarded to `post_compensating_status`.

    Returns counters for observability:
      {compensated, preserved_real_push, preserved_unknown,
       preserved_non_failure, preserved_non_push_suffix,
       preserved_unparseable, compensated_pr_shadowed_by_push_success,
       preserved_pr_without_push_success,
       compensated_contexts: [<context>, ...]}

    `compensated_contexts` is rev2-added so `reap_branch` can build
    `compensated_per_sha` without re-deriving it from the POST stream.
    """
    counters: dict[str, Any] = {
        "compensated": 0,
        "preserved_real_push": 0,
        "preserved_unknown": 0,
        "preserved_non_failure": 0,
        "preserved_non_push_suffix": 0,
        "preserved_unparseable": 0,
        "compensated_pr_shadowed_by_push_success": 0,
        "preserved_pr_without_push_success": 0,
        "compensated_contexts": [],
    }

    statuses = combined.get("statuses") or []
    # Contexts already green on this SHA — used by the PR-shadow rule
    # below. Same status/state dual-key read as the per-entry loop.
    successful_contexts = {
        (s.get("context") or "")
        for s in statuses
        if isinstance(s, dict) and (s.get("status") or s.get("state") or "") == "success"
    }
    for s in statuses:
        if not isinstance(s, dict):
            continue
        context = s.get("context") or ""
        # Schema asymmetry: Gitea 1.22.6 returns the TOP-LEVEL combined
        # aggregate as `combined.state` but each per-context entry in
        # `combined.statuses[]` uses the key `status`, NOT `state`.
        # Prefer `status`; fall back to `state` so a future Gitea
        # version (or a test fixture written against the wrong key)
        # still flows through the compensation path. Verified empirically
        # via direct API probe 2026-05-12 03:42Z:
        #   /repos/.../commits/{sha}/status entries → key is "status".
        # Pre-rev4 code read "state" only → returned "" → bypassed the
        # `state != "failure"` guard → compensation path unreachable.
        # See `feedback_smoke_test_vendor_truth_not_shape_match`.
        state = s.get("status") or s.get("state") or ""

        # Only `failure` is the bug shape. `error`/`pending`/`success`
        # left alone — they have other meanings.
        if state != "failure":
            counters["preserved_non_failure"] += 1
            continue

        # Default-branch `pull_request` contexts can be stale shadows of
        # the exact same workflow/job already proven by the successful
        # `push` context on the same SHA. Compensate only that narrow
        # shape; a missing or failed push equivalent remains a real gate
        # signal and is preserved.
        push_equivalent = push_equivalent_context(context)
        if push_equivalent is not None:
            if push_equivalent in successful_contexts:
                post_compensating_status(
                    sha,
                    context,
                    s.get("target_url"),
                    description=PR_SHADOW_COMPENSATION_DESCRIPTION,
                    dry_run=dry_run,
                )
                counters["compensated"] += 1
                counters["compensated_pr_shadowed_by_push_success"] += 1
                counters["compensated_contexts"].append(context)
            else:
                counters["preserved_pr_without_push_success"] += 1
            continue

        # Only `(push)`-suffix contexts hit the hardcoded-suffix bug.
        # Other failed contexts are preserved unless handled by the
        # pull-request-shadow rule above.
        if not context.endswith(PUSH_SUFFIX):
            counters["preserved_non_push_suffix"] += 1
            continue

        parsed = parse_push_context(context)
        if parsed is None:
            # Has ` (push)` suffix but missing ` / ` separator — not
            # the bug shape. Preserve.
            counters["preserved_unparseable"] += 1
            continue
        workflow_name, _job_name = parsed

        if workflow_name not in workflow_trigger_map:
            # Real workflow but renamed/deleted/external — we can't
            # tell if it has push trigger. Conservative: preserve.
            print(f"::notice::unknown workflow {workflow_name!r}; skip")
            counters["preserved_unknown"] += 1
            continue

        if workflow_trigger_map[workflow_name]:
            # Real push trigger → real defect signal. Preserve.
            counters["preserved_real_push"] += 1
            continue

        # Class-O: schedule/dispatch/etc.-only workflow with a fake
        # (push) status from Gitea's hardcoded-suffix bug. Compensate.
        post_compensating_status(
            sha, context, s.get("target_url"), dry_run=dry_run
        )
        counters["compensated"] += 1
        counters["compensated_contexts"].append(context)

    return counters
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# rev2: multi-SHA sweep over the last N commits on WATCH_BRANCH
|
||||
# --------------------------------------------------------------------------
|
||||
# How many main commits to sweep per tick. Sized to cover a burst-merge
# window where multiple PRs land in the 5-min interval between reaper
# ticks. Older reds falling off the window is acceptable — they were
# already stale enough that the schedule-run that posted them has long
# since been overwritten by a real push trigger. See `reference_post_
# suspension_pipeline` for the merge-cadence baseline.
#
# rev3 (2026-05-12, hongming-pc2 GO 03:25Z): widened from 10 → 30.
# rev2 (limit=10) shipped 01:48Z and ran 6/6 ticks post-merge with
# `compensated:0` despite ~25 stranded reds visible on those same 10
# SHAs ~30min later. Root cause: schedule workflows post `failure`
# RETROACTIVELY 5-15 min after their merge, so by the time reaper's
# next */5 tick lands, the stranded red is on a SHA that has already
# fallen out of a 10-commit window during a burst-merge period.
# Trade-off (per hongming-pc2): window width is cheap, cron cadence is
# load — the `*/5` cron stays unchanged; only the window-N is widened.
# Overridable per-run via the `--limit` CLI flag.
DEFAULT_SWEEP_LIMIT = 30
|
||||
|
||||
|
||||
def list_recent_commit_shas(branch: str, limit: int) -> list[str]:
    """Return the most recent `limit` commit SHAs on `branch`, newest
    first.

    Wraps GET /repos/{o}/{r}/commits?sha={branch}&limit={limit}. Gitea
    1.22.6 returns a JSON list of commit objects each with a `sha` key
    (verified via vendor-truth probe 2026-05-11 against
    git.moleculesai.app — `feedback_smoke_test_vendor_truth_not_shape_match`).

    Raises:
        ApiError: on non-2xx, on a non-array body, or when the array
            yields no usable SHA. The branch-level caller soft-skips
            this tick because the next scheduled tick can safely retry
            the listing; per-SHA status/write errors stay separate and
            must not be mislabeled as commit-list outages.
    """
    _, body = api(
        "GET",
        f"/repos/{OWNER}/{NAME}/commits",
        query={"sha": branch, "limit": str(limit)},
    )
    if not isinstance(body, list):
        raise ApiError(
            f"commits listing for {branch} not a JSON array "
            f"(got {type(body).__name__})"
        )
    # Keep only well-formed entries carrying a plausible (>= 7 char) SHA.
    shas = [
        entry["sha"]
        for entry in body
        if isinstance(entry, dict)
        and isinstance(entry.get("sha"), str)
        and len(entry["sha"]) >= 7
    ]
    if not shas:
        raise ApiError(
            f"commits listing for {branch} returned no usable SHAs"
        )
    return shas
|
||||
|
||||
|
||||
def reap_branch(
    workflow_trigger_map: dict[str, bool],
    branch: str,
    *,
    limit: int = DEFAULT_SWEEP_LIMIT,
    dry_run: bool = False,
) -> dict[str, Any]:
    """Sweep the last `limit` commits on `branch`, applying `reap()`
    to each (with per-SHA error isolation).

    Returns aggregated counters PLUS rev2 observability fields:
      - scanned_shas: how many SHAs we actually iterated
      - compensated_per_sha: {<sha_full>: [<context>, ...]} — only
        SHAs that actually got at least one compensation are included
      - skipped / skip_reason: present only on the early-return path
        where the commit listing itself could not be read
    """
    try:
        shas = list_recent_commit_shas(branch, limit)
    except ApiError as e:
        # Listing failure → soft-skip the entire tick; the next */5
        # scheduled tick retries from scratch.
        print(
            "::warning::status-reaper skipped this tick because the "
            f"commit list could not be read after retries: {e}"
        )
        return {
            "scanned_shas": 0,
            "compensated": 0,
            "preserved_real_push": 0,
            "preserved_unknown": 0,
            "preserved_non_failure": 0,
            "preserved_non_push_suffix": 0,
            "preserved_unparseable": 0,
            "compensated_pr_shadowed_by_push_success": 0,
            "preserved_pr_without_push_success": 0,
            "compensated_per_sha": {},
            "skipped": True,
            "skip_reason": "commit-list-api-error",
        }

    aggregate: dict[str, Any] = {
        "scanned_shas": 0,
        "compensated": 0,
        "preserved_real_push": 0,
        "preserved_unknown": 0,
        "preserved_non_failure": 0,
        "preserved_non_push_suffix": 0,
        "preserved_unparseable": 0,
        "compensated_pr_shadowed_by_push_success": 0,
        "preserved_pr_without_push_success": 0,
        "compensated_per_sha": {},
    }

    for sha in shas:
        aggregate["scanned_shas"] += 1

        # Per-SHA error isolation (refinement #7). One transient blip
        # on a historical commit must NOT abort the whole tick — the
        # OTHER stale SHAs may still hold strandable reds.
        try:
            combined = get_combined_status(sha)
        except ApiError as e:
            print(
                f"::warning::get_combined_status({sha[:10]}) failed; "
                f"skipping this SHA: {e}"
            )
            continue

        # Cost optimization (refinement #2): the common case is a green
        # commit. Skip the per-context loop entirely when combined is
        # already success — saves a tight loop over ~20 statuses per SHA
        # on green commits, the dominant majority.
        if combined.get("state") == "success":
            continue

        per_sha = reap(
            workflow_trigger_map, combined, sha, dry_run=dry_run
        )

        # Aggregate scalar counters.
        for key in (
            "compensated",
            "preserved_real_push",
            "preserved_unknown",
            "preserved_non_failure",
            "preserved_non_push_suffix",
            "preserved_unparseable",
            "compensated_pr_shadowed_by_push_success",
            "preserved_pr_without_push_success",
        ):
            aggregate[key] += per_sha[key]

        # Record per-SHA compensated contexts (only when non-empty —
        # keep the summary readable when most SHAs are no-ops).
        contexts = per_sha.get("compensated_contexts") or []
        if contexts:
            aggregate["compensated_per_sha"][sha] = list(contexts)

    return aggregate
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: parse flags, enforce env, scan workflows, sweep.

    Always returns 0 — a tick that compensated nothing is still a
    successful tick; hard failures exit earlier via `_require_runtime_env`
    or an uncaught ApiError from `scan_workflows`-adjacent paths.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Skip the compensating POST; print what would be done.",
    )
    parser.add_argument(
        "--limit",
        type=int,
        default=DEFAULT_SWEEP_LIMIT,
        help=(
            "How many recent commits on WATCH_BRANCH to sweep per tick "
            f"(default: {DEFAULT_SWEEP_LIMIT})."
        ),
    )
    args = parser.parse_args()

    # Env contract is enforced here, not at import time (tests import
    # individual functions without the full env set).
    _require_runtime_env()

    workflow_trigger_map = scan_workflows(WORKFLOWS_DIR)
    print(
        f"::notice::scanned {len(workflow_trigger_map)} workflows; "
        f"push-triggered={sum(1 for v in workflow_trigger_map.values() if v)}, "
        f"class-O candidates={sum(1 for v in workflow_trigger_map.values() if not v)}"
    )

    counters = reap_branch(
        workflow_trigger_map,
        WATCH_BRANCH,
        limit=args.limit,
        dry_run=args.dry_run,
    )

    # Observability: print one JSON line summarising the tick. Loki
    # ingestion via the runner's stdout (`source="gitea-actions"`).
    print(
        "status-reaper summary: "
        + json.dumps(
            {
                "branch": WATCH_BRANCH,
                "dry_run": args.dry_run,
                "limit": args.limit,
                **counters,
            },
            sort_keys=True,
        )
    )
    return 0
|
||||
|
||||
|
||||
# Script entry point — main()'s return code becomes the process exit
# status, which the Actions step result keys on.
if __name__ == "__main__":
    sys.exit(main())
|
||||
@ -1,143 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Stub Gitea API for review-check.sh test scenarios.
|
||||
|
||||
Reads $FIXTURE_STATE_DIR/scenario to decide what to return for each
|
||||
endpoint the review-check.sh script calls.
|
||||
Reads $FIXTURE_STATE_DIR/token_owner_in_teams to decide whether
|
||||
the team membership probe returns 200/204 (member) or 403 (not in team).
|
||||
|
||||
Scenarios:
|
||||
T1_pr_open — open PR, author=alice, sha=deadbeef → continue
|
||||
T2_pr_closed — closed PR → script exits 0 (no-op)
|
||||
T3_reviews_approved_non_author — one APPROVED from non-author → candidates exist
|
||||
T4_reviews_empty — zero APPROVED non-author → exit 1 (no candidates)
|
||||
T5_reviews_only_author — only author reviews → exit 1 (no candidates)
|
||||
T6_reviews_dismissed — dismissed APPROVED → treated as no approval
|
||||
T7_team_member — team membership → 204 (member) → exit 0
|
||||
T8_team_not_member — team membership → 404 (not a member) → exit 1
|
||||
T9_team_403 — team membership → 403 (token not in team) → exit 1
|
||||
T14_non_default_base — open PR targeting staging → script exits 0 (no-op)
|
||||
|
||||
Usage:
|
||||
FIXTURE_STATE_DIR=/tmp/x python3 _review_check_fixture.py 8080
|
||||
"""
|
||||
|
||||
import http.server
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urllib.parse
|
||||
|
||||
|
||||
# Directory holding the mutable fixture-state files (scenario selector
# and friends); defaults to /tmp when the harness doesn't set it.
STATE_DIR = os.environ.get("FIXTURE_STATE_DIR", "/tmp")
|
||||
|
||||
|
||||
def scenario() -> str:
    """Return the active scenario name.

    Reads $FIXTURE_STATE_DIR/scenario; defaults to "T1_pr_open" when
    the selector file does not exist.
    """
    selector = os.path.join(STATE_DIR, "scenario")
    if not os.path.isfile(selector):
        return "T1_pr_open"
    with open(selector) as fh:
        return fh.read().strip()
|
||||
|
||||
|
||||
class Handler(http.server.BaseHTTPRequestHandler):
    """Stub Gitea API request handler.

    Routes the three endpoints review-check.sh calls (PR fetch, reviews
    list, team-membership probe) plus a /_ping health check, with the
    response shaped by the current `scenario()` selection.
    """

    def log_message(self, *args, **kwargs):
        # Suppress the default per-request stderr logging.
        pass  # keep stdout for explicit logs only

    def _json(self, code: int, body: dict) -> None:
        # Serialize and send `body` as a JSON response with status `code`.
        payload = json.dumps(body).encode()
        self.send_response(code)
        self.send_header("Content-Type", "application/json")
        self.send_header("Content-Length", str(len(payload)))
        self.end_headers()
        self.wfile.write(payload)

    def _empty(self, code: int) -> None:
        # Send a bodyless response — used for membership-probe statuses.
        self.send_response(code)
        self.send_header("Content-Length", "0")
        self.end_headers()

    def _text(self, code: int, body: str) -> None:
        # Plain-text response helper (kept for scenarios that need it).
        payload = body.encode()
        self.send_response(code)
        self.send_header("Content-Type", "text/plain")
        self.send_header("Content-Length", str(len(payload)))
        self.end_headers()
        self.wfile.write(payload)

    def do_GET(self):
        # Scenario is re-read per request so a test can flip it between
        # calls without restarting the server.
        u = urllib.parse.urlparse(self.path)
        path = u.path
        sc = scenario()

        if path == "/_ping":
            return self._json(200, {"ok": True})

        # GET /repos/{owner}/{name}/pulls/{pr_number}
        m = re.match(r"^/api/v1/repos/([^/]+)/([^/]+)/pulls/(\d+)$", path)
        if m:
            owner, name, pr_num = m.group(1), m.group(2), m.group(3)
            if sc == "T2_pr_closed":
                return self._json(200, {
                    "number": int(pr_num),
                    "state": "closed",
                    "head": {"sha": "deadbeef0000111122223333444455556666"},
                    "base": {"ref": "main"},
                    "user": {"login": "alice"},
                })
            # All other scenarios: open PR by alice; base ref flips to
            # "staging" only for the non-default-base scenario.
            return self._json(200, {
                "number": int(pr_num),
                "state": "open",
                "head": {"sha": "deadbeef0000111122223333444455556666"},
                "base": {"ref": "staging" if sc == "T14_non_default_base" else "main"},
                "user": {"login": "alice"},
            })

        # GET /repos/{owner}/{name}/pulls/{pr_number}/reviews
        m = re.match(r"^/api/v1/repos/([^/]+)/([^/]+)/pulls/(\d+)/reviews$", path)
        if m:
            # NOTE(review): T5 ("only author reviews") returns the same
            # empty list as T4 rather than an alice-authored review, so
            # it does not actually exercise the author filter — confirm
            # whether that was intentional.
            if sc in ("T4_reviews_empty", "T5_reviews_only_author"):
                return self._json(200, [])
            if sc == "T6_reviews_dismissed":
                return self._json(200, [{
                    "state": "APPROVED",
                    "dismissed": True,
                    "user": {"login": "core-devops"},
                    "commit_id": "abc1234",
                }])
            if sc == "T3_reviews_approved_non_author":
                return self._json(200, [
                    {"state": "CHANGES_REQUESTED", "dismissed": False, "user": {"login": "bob"}, "commit_id": "abc1234"},
                    {"state": "APPROVED", "dismissed": False, "user": {"login": "core-devops"}, "commit_id": "abc1234"},
                ])
            # Default: one non-author APPROVED
            return self._json(200, [
                {"state": "APPROVED", "dismissed": False, "user": {"login": "core-devops"}, "commit_id": "abc1234"},
            ])

        # GET /teams/{team_id}/members/{username}
        m = re.match(r"^/api/v1/teams/(\d+)/members/([^/]+)$", path)
        if m:
            team_id, login = m.group(1), m.group(2)
            if sc == "T8_team_not_member":
                return self._empty(404)
            if sc == "T9_team_403":
                return self._empty(403)
            # T7_team_member: member
            return self._empty(204)

        # Unrouted path: echo it back so the failing test is debuggable.
        return self._json(404, {"path": path, "msg": "fixture: no route"})

    def do_POST(self):
        # The script under test issues no POSTs; any POST is a bug.
        self._json(404, {"path": self.path, "msg": "fixture: no POST routes"})
|
||||
|
||||
|
||||
def main():
    """Serve the stub API on 127.0.0.1:<argv[1]> until the process is
    killed by the test harness (serve_forever never returns)."""
    listen_port = int(sys.argv[1])
    server = http.server.ThreadingHTTPServer(("127.0.0.1", listen_port), Handler)
    server.serve_forever()
|
||||
|
||||
|
||||
# Entry point — the harness launches this file directly with the port
# as the sole argument.
if __name__ == "__main__":
    main()
|
||||
@ -1,114 +0,0 @@
|
||||
import importlib.util
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
# Load the script under test by path: its filename contains dashes, so a
# plain `import` statement cannot reach it — importlib builds a real
# module object from the file instead.
SCRIPT = Path(__file__).resolve().parents[1] / "gitea-merge-queue.py"
spec = importlib.util.spec_from_file_location("gitea_merge_queue", SCRIPT)
mq = importlib.util.module_from_spec(spec)
# Register in sys.modules BEFORE exec_module (standard importlib recipe)
# so any in-module lookups of sys.modules[spec.name] resolve during exec.
sys.modules[spec.name] = mq
spec.loader.exec_module(mq)
|
||||
|
||||
|
||||
def test_latest_statuses_dedupes_by_context_newest_first():
    # Two entries share the CI context; the FIRST (newest) one must win
    # the dedupe, and mixed status/state keys must both survive.
    ci = "CI / all-required (pull_request)"
    sop = "sop-checklist / all-items-acked (pull_request)"
    deduped = mq.latest_statuses_by_context([
        {"context": ci, "status": "failure"},
        {"context": sop, "state": "success"},
        {"context": ci, "status": "success"},
    ])

    assert deduped[ci]["status"] == "failure"
    assert deduped[sop]["state"] == "success"
|
||||
|
||||
|
||||
def test_required_contexts_green_rejects_missing_and_pending():
    # A pending context and an entirely-absent one must both block, each
    # reported with its own `=pending` / `=missing` marker, in order.
    known = mq.latest_statuses_by_context([
        {"context": "CI / all-required (pull_request)", "status": "success"},
        {"context": "sop-checklist / all-items-acked (pull_request)", "status": "pending"},
    ])
    wanted = [
        "CI / all-required (pull_request)",
        "sop-checklist / all-items-acked (pull_request)",
        "qa-review / approved (pull_request)",
    ]

    ok, problems = mq.required_contexts_green(known, wanted)

    assert ok is False
    assert problems == [
        "sop-checklist / all-items-acked (pull_request)=pending",
        "qa-review / approved (pull_request)=missing",
    ]
|
||||
|
||||
|
||||
def test_choose_next_pr_sorts_by_queue_label_timestamp_then_number():
    """Oldest created_at among labelled PRs wins; non-PR issues are skipped."""
    def queued(number, created, updated=None, is_pr=True):
        # Build a minimal labelled issue; issue #7 below has neither a
        # pull_request marker nor an updated_at, matching the original data.
        issue = {
            "number": number,
            "labels": [{"name": "merge-queue"}],
            "created_at": created,
        }
        if is_pr:
            issue["pull_request"] = {}
        if updated is not None:
            issue["updated_at"] = updated
        return issue

    candidates = [
        queued(12, "2026-05-13T05:00:00Z", "2026-05-13T06:00:00Z"),
        queued(9, "2026-05-13T04:00:00Z", "2026-05-13T07:00:00Z"),
        queued(7, "2026-05-13T03:00:00Z", is_pr=False),
    ]

    selected = mq.choose_next_queued_issue(candidates, queue_label="merge-queue")

    # #7 is older but not a PR; #9 is the oldest labelled PR.
    assert selected["number"] == 9
|
||||
|
||||
|
||||
def test_pr_needs_update_when_base_sha_absent_from_commits():
    # A PR branch containing only "head" and "parent" does not contain the
    # base tip "mainsha", so the branch is stale; "parent" is present.
    branch_commits = [{"sha": sha} for sha in ("head", "parent")]

    assert mq.pr_contains_base_sha(branch_commits, "mainsha") is False
    assert mq.pr_contains_base_sha(branch_commits, "parent") is True
|
||||
|
||||
|
||||
def test_merge_decision_requires_main_green_pr_green_and_current_base():
    # All three preconditions hold: main is green, the PR's required
    # context is green, and the PR branch contains the current base.
    ci_ctx = "CI / all-required (pull_request)"
    green_main = {"state": "success", "statuses": []}
    green_pr = {
        "state": "success",
        "statuses": [{"context": ci_ctx, "status": "success"}],
    }

    decision = mq.evaluate_merge_readiness(
        main_status=green_main,
        pr_status=green_pr,
        required_contexts=[ci_ctx],
        pr_has_current_base=True,
    )

    assert decision.ready is True
    assert decision.action == "merge"
|
||||
|
||||
|
||||
def test_merge_decision_updates_stale_pr_before_merge():
    # Everything is green but the PR branch is behind main: the queue must
    # update/rebase the branch first instead of merging a stale head.
    ci_ctx = "CI / all-required (pull_request)"
    decision = mq.evaluate_merge_readiness(
        main_status={"state": "success", "statuses": []},
        pr_status={
            "state": "success",
            "statuses": [{"context": ci_ctx, "status": "success"}],
        },
        required_contexts=[ci_ctx],
        pr_has_current_base=False,
    )

    assert decision.ready is False
    assert decision.action == "update"
|
||||
@ -1,505 +0,0 @@
|
||||
"""Unit tests for .gitea/scripts/lint_pre_flip_continue_on_error.py.
|
||||
|
||||
These tests pin the pure-logic surface (flip detection + per-flip
|
||||
verdict aggregation) without making real HTTP calls. The end-to-end
|
||||
git ls-tree + Gitea API path is exercised by running the workflow
|
||||
against real PRs.
|
||||
|
||||
Run locally::
|
||||
|
||||
python3 -m unittest .gitea/scripts/tests/test_lint_pre_flip_continue_on_error.py -v
|
||||
|
||||
Mirrors the pattern in scripts/ops/test_check_migration_collisions.py
|
||||
+ scripts/test_build_runtime_package.py.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib.util
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
# Load the script as a module without invoking main(). Tests must NOT
|
||||
# depend on the full runtime env contract (GITEA_TOKEN etc.), so we
|
||||
# import individual functions and stub the network surface explicitly.
|
||||
SCRIPT_PATH = Path(__file__).resolve().parent.parent / "lint_pre_flip_continue_on_error.py"
|
||||
spec = importlib.util.spec_from_file_location("lpfc", SCRIPT_PATH)
|
||||
lpfc = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(lpfc)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
# Fixtures: minimal valid workflow YAML on each side of a "diff"
# --------------------------------------------------------------------------
# Base side: all three jobs run with continue-on-error: true.
# NOTE(review): indentation reconstructed to canonical 2-space workflow
# nesting (the scraped source collapsed whitespace) — confirm against the
# original file if byte-exactness matters.
CI_YML_BASE = """\
name: CI
on:
  push:
    branches: [main]
jobs:
  platform-build:
    name: Platform (Go)
    runs-on: ubuntu-latest
    continue-on-error: true
    steps:
      - run: echo platform
  canvas-build:
    name: Canvas (Next.js)
    runs-on: ubuntu-latest
    continue-on-error: true
    steps:
      - run: echo canvas
  all-required:
    runs-on: ubuntu-latest
    continue-on-error: true
    needs: [platform-build, canvas-build]
    steps:
      - run: echo ok
"""

# Head side: platform-build and canvas-build flipped to false; all-required
# stays true, so detect_flips should report exactly two flips.
CI_YML_HEAD_FLIPPED = """\
name: CI
on:
  push:
    branches: [main]
jobs:
  platform-build:
    name: Platform (Go)
    runs-on: ubuntu-latest
    continue-on-error: false
    steps:
      - run: echo platform
  canvas-build:
    name: Canvas (Next.js)
    runs-on: ubuntu-latest
    continue-on-error: false
    steps:
      - run: echo canvas
  all-required:
    runs-on: ubuntu-latest
    continue-on-error: true
    needs: [platform-build, canvas-build]
    steps:
      - run: echo ok
"""

# Control fixture: head identical to base, so no flip is detected.
CI_YML_HEAD_NO_DIFF = CI_YML_BASE  # identical to base, no flip
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# 1. CoE coercion (truthy/falsy/quoted/absent)
|
||||
# --------------------------------------------------------------------------
|
||||
class TestCoerceCoE(unittest.TestCase):
    """_coerce_coe maps raw YAML-ish values onto the effective CoE boolean."""

    def test_python_bool_true(self):
        result = lpfc._coerce_coe(True)
        self.assertTrue(result)

    def test_python_bool_false(self):
        result = lpfc._coerce_coe(False)
        self.assertFalse(result)

    def test_none_is_false(self):
        # An absent continue-on-error key defaults to false in Actions.
        result = lpfc._coerce_coe(None)
        self.assertFalse(result)

    def test_string_true_lowercase(self):
        # A quoted "true" survives YAML parsing as a string, not a bool.
        result = lpfc._coerce_coe("true")
        self.assertTrue(result)

    def test_string_True_titlecase(self):
        result = lpfc._coerce_coe("True")
        self.assertTrue(result)

    def test_string_yes(self):
        # YAML 1.1 spelling of truth.
        result = lpfc._coerce_coe("yes")
        self.assertTrue(result)

    def test_string_false(self):
        result = lpfc._coerce_coe("false")
        self.assertFalse(result)

    def test_string_random_falsy(self):
        # Unrecognized strings coerce to False so a typo cannot fabricate
        # a false-positive flip.
        result = lpfc._coerce_coe("maybe")
        self.assertFalse(result)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# 2. Diff detection — flips, not arbitrary changes
|
||||
# --------------------------------------------------------------------------
|
||||
class TestDetectFlips(unittest.TestCase):
    """detect_flips reports true→false CoE transitions — and nothing else."""

    # Path used for the shared ci.yml fixture on both sides of the diff.
    WF_PATH = ".gitea/workflows/ci.yml"

    def test_no_flip_in_diff_passes(self):
        # Acceptance test #1: a PR that never touches continue-on-error
        # yields zero flips.
        found = lpfc.detect_flips(
            {self.WF_PATH: CI_YML_BASE},
            {self.WF_PATH: CI_YML_HEAD_NO_DIFF},
        )
        self.assertEqual(found, [])

    def test_flip_detected_in_one_file(self):
        found = lpfc.detect_flips(
            {self.WF_PATH: CI_YML_BASE},
            {self.WF_PATH: CI_YML_HEAD_FLIPPED},
        )
        # platform-build and canvas-build went true→false; all-required is
        # still true on both sides and must not be reported.
        self.assertEqual(len(found), 2)
        self.assertEqual(
            sorted(entry["job_key"] for entry in found),
            ["canvas-build", "platform-build"],
        )

    def test_context_name_render(self):
        found = lpfc.detect_flips(
            {self.WF_PATH: CI_YML_BASE},
            {self.WF_PATH: CI_YML_HEAD_FLIPPED},
        )
        platform = next(f for f in found if f["job_key"] == "platform-build")
        self.assertEqual(platform["context"], "CI / Platform (Go) (push)")
        self.assertEqual(platform["workflow_name"], "CI")

    def test_context_falls_back_to_job_key_when_no_name(self):
        # A job without a `name:` renders its job key into the context.
        template = (
            "name: WF\njobs:\n  foo:\n    continue-on-error: {coe}\n"
            "    runs-on: x\n    steps: []\n"
        )
        found = lpfc.detect_flips(
            {"a.yml": template.format(coe="true")},
            {"a.yml": template.format(coe="false")},
        )
        self.assertEqual(len(found), 1)
        self.assertEqual(found[0]["context"], "WF / foo (push)")

    def test_no_flip_when_only_one_side_has_file(self):
        # Newly added workflow file: head has CoE:false but base has no
        # file at all — there is no masking history to protect.
        found = lpfc.detect_flips(
            {},
            {".gitea/workflows/new.yml": CI_YML_HEAD_FLIPPED},
        )
        self.assertEqual(found, [])

    def test_no_flip_when_job_removed(self):
        # The job exists on base but not on head — a removal, not a flip.
        head = """\
name: CI
jobs:
  canvas-build:
    name: Canvas (Next.js)
    continue-on-error: true
    runs-on: ubuntu-latest
    steps: []
"""
        found = lpfc.detect_flips(
            {self.WF_PATH: CI_YML_BASE},
            {self.WF_PATH: head},
        )
        self.assertEqual(found, [])

    def test_no_flip_when_job_added_with_false(self):
        # A job that exists only on head, already at CoE:false — no base
        # side, hence not a flip.
        inserted = (
            "  newjob:\n"
            "    name: New Job\n"
            "    continue-on-error: false\n"
            "    runs-on: x\n"
            "    steps: []\n"
            "  all-required:"
        )
        head_with_new = CI_YML_BASE.replace("  all-required:", inserted)
        found = lpfc.detect_flips(
            {self.WF_PATH: CI_YML_BASE},
            {self.WF_PATH: head_with_new},
        )
        self.assertEqual(found, [])

    def test_yaml_parse_error_warns_not_raises(self):
        # Malformed YAML on head: warn to stderr and skip the file —
        # detect_flips must never raise here.
        bad_head = "name: CI\njobs:\n :::\n"
        with mock.patch.object(sys, "stderr"):  # keep the warning quiet
            found = lpfc.detect_flips(
                {self.WF_PATH: CI_YML_BASE},
                {self.WF_PATH: bad_head},
            )
        self.assertEqual(found, [])
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# 3. grep_fail_markers — the regex / substring matcher
|
||||
# --------------------------------------------------------------------------
|
||||
class TestGrepFailMarkers(unittest.TestCase):
    """grep_fail_markers extracts failure-evidence lines from a job log."""

    def test_clean_log_returns_empty(self):
        clean = "===== test run starting =====\nPASS\nok example.com/foo 1.234s\n"
        self.assertEqual(lpfc.grep_fail_markers(clean), [])

    def test_go_minus_minus_minus_fail_caught(self):
        log_text = "ok example.com/foo 1.234s\n--- FAIL: TestBar (0.01s)\n bar_test.go:42:\n"
        hits = lpfc.grep_fail_markers(log_text)
        self.assertEqual(len(hits), 1)
        self.assertIn("FAIL: TestBar", hits[0])

    def test_go_package_fail_caught(self):
        log_text = "FAIL\texample.com/baz\t1.234s\n"
        hits = lpfc.grep_fail_markers(log_text)
        self.assertEqual(len(hits), 1)
        self.assertIn("FAIL", hits[0])

    def test_bash_error_directive_caught(self):
        # `lint-curl-status-capture` pattern: a python heredoc inside a bash
        # step prints `::error::` then sys.exit(1). With continue-on-error:
        # true the job still rolls up green despite this line — exactly the
        # masking this linter exists to catch.
        log_text = "Running scan...\n::error::Found 3 curl-status-capture pollution site(s):\n"
        hits = lpfc.grep_fail_markers(log_text)
        self.assertEqual(len(hits), 1)
        self.assertIn("::error::", hits[0])

    def test_caps_matches_at_max_5(self):
        # Evidence is capped at 5 sample lines even for very noisy logs.
        noisy = "\n".join("--- FAIL: T%d" % i for i in range(20))
        self.assertEqual(len(lpfc.grep_fail_markers(noisy)), 5)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# 4. verify_flip — single-flip verdict assembly (network surface stubbed)
|
||||
# --------------------------------------------------------------------------
|
||||
def _stub_status(context: str, state: str, target_url: str = "/owner/repo/actions/runs/1/jobs/0") -> dict:
|
||||
"""Build a single-context combined-status response."""
|
||||
return {
|
||||
"state": state,
|
||||
"statuses": [
|
||||
{"context": context, "status": state, "target_url": target_url, "description": ""}
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
# One canonical flip record, shaped like a detect_flips() result entry;
# reused by every TestVerifyFlip case below.
FLIP_FIXTURE = {
    "workflow_path": ".gitea/workflows/ci.yml",
    "workflow_name": "CI",
    "job_key": "platform-build",
    "job_name": "Platform (Go)",
    "context": "CI / Platform (Go) (push)",
}
|
||||
|
||||
|
||||
class TestVerifyFlip(unittest.TestCase):
    """Single-flip verdict assembly with the network surface stubbed out."""

    def test_flip_with_clean_history_passes(self):
        # Acceptance test #2: flip detected, last runs all clean → allow.
        commits = mock.patch.object(
            lpfc, "recent_commits_on_branch", return_value=["sha1", "sha2", "sha3"])
        status = mock.patch.object(
            lpfc, "combined_status",
            side_effect=[_stub_status(FLIP_FIXTURE["context"], "success") for _ in range(3)])
        log = mock.patch.object(
            lpfc, "fetch_log", return_value="ok example.com/foo 1s\nPASS\n")
        with commits, status, log:
            verdict = lpfc.verify_flip(FLIP_FIXTURE, "main", 5)
        self.assertEqual(verdict["fail_runs"], [])
        self.assertEqual(verdict["masked_runs"], [])
        self.assertEqual(verdict["checked_commits"], 3)
        self.assertEqual(verdict["warnings"], [])

    def test_flip_with_recent_fail_blocks(self):
        # Acceptance test #3: the status rolls up `success` while the log
        # shows `--- FAIL` (Quirk #10 mask) → the run lands in masked_runs.
        log_with_fail = "ok example.com/foo 1s\n--- FAIL: TestSqlmock (0.01s)\n sqlmock_test.go:42:\n"
        commits = mock.patch.object(
            lpfc, "recent_commits_on_branch", return_value=["sha1", "sha2", "sha3"])
        status = mock.patch.object(
            lpfc, "combined_status",
            side_effect=[_stub_status(FLIP_FIXTURE["context"], "success") for _ in range(3)])
        log = mock.patch.object(
            lpfc, "fetch_log", side_effect=[log_with_fail, "PASS\n", "PASS\n"])
        with commits, status, log:
            verdict = lpfc.verify_flip(FLIP_FIXTURE, "main", 5)
        self.assertEqual(len(verdict["masked_runs"]), 1)
        masked = verdict["masked_runs"][0]
        self.assertEqual(masked["sha"], "sha1")
        self.assertTrue(any("TestSqlmock" in sample for sample in masked["samples"]))
        self.assertEqual(verdict["fail_runs"], [])

    def test_red_status_alone_blocks(self):
        # A `failure` status is sufficient on its own — non-zero exit code,
        # no log evidence required (belt-and-braces).
        commits = mock.patch.object(lpfc, "recent_commits_on_branch", return_value=["sha1"])
        status = mock.patch.object(
            lpfc, "combined_status",
            return_value=_stub_status(FLIP_FIXTURE["context"], "failure"))
        log = mock.patch.object(lpfc, "fetch_log", return_value="some unrelated text\n")
        with commits, status, log:
            verdict = lpfc.verify_flip(FLIP_FIXTURE, "main", 5)
        self.assertEqual(len(verdict["fail_runs"]), 1)
        self.assertEqual(verdict["fail_runs"][0]["status"], "failure")

    def test_unreadable_log_warns_not_blocks(self):
        # Acceptance test #5: green status but the log 404s (None) — we
        # cannot tell either way, so warn and allow.
        commits = mock.patch.object(lpfc, "recent_commits_on_branch", return_value=["sha1"])
        status = mock.patch.object(
            lpfc, "combined_status",
            return_value=_stub_status(FLIP_FIXTURE["context"], "success"))
        log = mock.patch.object(lpfc, "fetch_log", return_value=None)
        with commits, status, log:
            verdict = lpfc.verify_flip(FLIP_FIXTURE, "main", 5)
        self.assertEqual(verdict["fail_runs"], [])
        self.assertEqual(verdict["masked_runs"], [])
        self.assertTrue(any("log unavailable" in w for w in verdict["warnings"]))

    def test_unreadable_log_with_failure_status_still_blocks(self):
        # Edge case: the log fetch fails AND the status is `failure` — the
        # status alone is enough signal to block.
        commits = mock.patch.object(lpfc, "recent_commits_on_branch", return_value=["sha1"])
        status = mock.patch.object(
            lpfc, "combined_status",
            return_value=_stub_status(FLIP_FIXTURE["context"], "failure"))
        log = mock.patch.object(lpfc, "fetch_log", return_value=None)
        with commits, status, log:
            verdict = lpfc.verify_flip(FLIP_FIXTURE, "main", 5)
        self.assertEqual(len(verdict["fail_runs"]), 1)
        self.assertIn("log unavailable", verdict["fail_runs"][0]["samples"][0])

    def test_zero_runs_history_warns_allows(self):
        # Commits exist but none ever ran this context (newly added
        # workflow) — allow with a warning.
        commits = mock.patch.object(
            lpfc, "recent_commits_on_branch", return_value=["sha1", "sha2"])
        status = mock.patch.object(
            lpfc, "combined_status",
            return_value={"state": "success", "statuses": []})  # no matching context
        with commits, status:
            verdict = lpfc.verify_flip(FLIP_FIXTURE, "main", 5)
        self.assertEqual(verdict["checked_commits"], 0)
        self.assertEqual(verdict["fail_runs"], [])
        self.assertEqual(verdict["masked_runs"], [])
        self.assertTrue(any("no runs of" in w for w in verdict["warnings"]))

    def test_zero_commits_warns_allows(self):
        # Empty branch (e.g. a freshly created repo) — allow with warning.
        with mock.patch.object(lpfc, "recent_commits_on_branch", return_value=[]):
            verdict = lpfc.verify_flip(FLIP_FIXTURE, "main", 5)
        self.assertEqual(verdict["checked_commits"], 0)
        self.assertEqual(verdict["fail_runs"], [])
        self.assertEqual(verdict["masked_runs"], [])
        self.assertTrue(any("no recent commits" in w for w in verdict["warnings"]))
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# 5. Multiple-flip aggregation in main()
|
||||
# --------------------------------------------------------------------------
|
||||
class TestMainAggregation(unittest.TestCase):
    """Tests that `main()` aggregates multiple flips and exits 1 when
    ANY one of them has a masked or red recent run. Acceptance test #4.

    We stub at the verify_flip + workflows_at_sha + _require_runtime_env
    boundary so we don't need real git or HTTP.
    """

    def setUp(self):
        # The actual env values are irrelevant — _require_runtime_env
        # is stubbed out — but the module reads OWNER/NAME at import
        # time. Patch the runtime env contract to a no-op for the
        # duration of each test.
        self._patches = [
            mock.patch.object(lpfc, "_require_runtime_env", return_value=None),
            mock.patch.object(lpfc, "BASE_REF", "main"),
            mock.patch.object(lpfc, "BASE_SHA", "deadbeefcafe"),
            mock.patch.object(lpfc, "HEAD_SHA", "feedfaceabad"),
            mock.patch.object(lpfc, "RECENT_COMMITS_N", 5),
        ]
        for p in self._patches:
            p.start()
        # Stop every patch after each test, even on failure. The lambda's
        # list comprehension exists purely for its stop() side effects.
        self.addCleanup(lambda: [p.stop() for p in self._patches])

    def test_multiple_flips_aggregated_one_bad_blocks(self):
        # PR flips 3 jobs; 1 has a recent fail → exit 1, naming that job.
        flips = [
            {"workflow_path": ".gitea/workflows/ci.yml", "workflow_name": "CI",
             "job_key": "platform-build", "job_name": "Platform (Go)",
             "context": "CI / Platform (Go) (push)"},
            {"workflow_path": ".gitea/workflows/ci.yml", "workflow_name": "CI",
             "job_key": "canvas-build", "job_name": "Canvas (Next.js)",
             "context": "CI / Canvas (Next.js) (push)"},
            {"workflow_path": ".gitea/workflows/ci.yml", "workflow_name": "CI",
             "job_key": "python-lint", "job_name": "Python Lint & Test",
             "context": "CI / Python Lint & Test (push)"},
        ]
        # Verdict fixtures returned by the stubbed verify_flip, in order.
        clean = {"flip": flips[0], "checked_commits": 5, "masked_runs": [],
                 "fail_runs": [], "warnings": []}
        bad = {"flip": flips[1], "checked_commits": 5,
               "masked_runs": [{"sha": "abc1234567", "status": "success",
                                "target_url": "/x/y/actions/runs/1/jobs/0",
                                "samples": ["--- FAIL: TestSqlmock"]}],
               "fail_runs": [], "warnings": []}
        also_clean = {"flip": flips[2], "checked_commits": 5, "masked_runs": [],
                      "fail_runs": [], "warnings": []}

        with mock.patch.object(lpfc, "workflows_at_sha", return_value={}):
            with mock.patch.object(lpfc, "detect_flips", return_value=flips):
                with mock.patch.object(lpfc, "verify_flip",
                                       side_effect=[clean, bad, also_clean]):
                    # Capture stdout to assert on naming.
                    captured = []
                    with mock.patch("builtins.print", side_effect=lambda *a, **k: captured.append(" ".join(str(x) for x in a))):
                        rc = lpfc.main([])
        self.assertEqual(rc, 1)
        # The blocking error message must name the failing job.
        joined = "\n".join(captured)
        self.assertIn("canvas-build", joined)
        # And it must mention the empirical class so a reviewer can
        # cross-link the right RFC.
        self.assertTrue("mc#664" in joined or "PR#656" in joined)

    def test_no_flips_in_diff_exits_zero(self):
        # Acceptance test #1 at main() level: empty flips → exit 0.
        with mock.patch.object(lpfc, "workflows_at_sha", return_value={}):
            with mock.patch.object(lpfc, "detect_flips", return_value=[]):
                rc = lpfc.main([])
        self.assertEqual(rc, 0)

    def test_all_flips_clean_exits_zero(self):
        # A flip with a spotless recent history must not block.
        flips = [{"workflow_path": ".gitea/workflows/ci.yml", "workflow_name": "CI",
                  "job_key": "platform-build", "job_name": "Platform (Go)",
                  "context": "CI / Platform (Go) (push)"}]
        clean = {"flip": flips[0], "checked_commits": 5, "masked_runs": [],
                 "fail_runs": [], "warnings": []}
        with mock.patch.object(lpfc, "workflows_at_sha", return_value={}):
            with mock.patch.object(lpfc, "detect_flips", return_value=flips):
                with mock.patch.object(lpfc, "verify_flip", return_value=clean):
                    rc = lpfc.main([])
        self.assertEqual(rc, 0)

    def test_dry_run_forces_exit_zero_even_with_bad_flip(self):
        # --dry-run never fails, even when verification finds masked runs.
        flips = [{"workflow_path": ".gitea/workflows/ci.yml", "workflow_name": "CI",
                  "job_key": "platform-build", "job_name": "Platform (Go)",
                  "context": "CI / Platform (Go) (push)"}]
        bad = {"flip": flips[0], "checked_commits": 5,
               "masked_runs": [{"sha": "abc1234567", "status": "success",
                                "target_url": "/x/y/actions/runs/1/jobs/0",
                                "samples": ["--- FAIL: TestSqlmock"]}],
               "fail_runs": [], "warnings": []}
        with mock.patch.object(lpfc, "workflows_at_sha", return_value={}):
            with mock.patch.object(lpfc, "detect_flips", return_value=flips):
                with mock.patch.object(lpfc, "verify_flip", return_value=bad):
                    rc = lpfc.main(["--dry-run"])
        self.assertEqual(rc, 0)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# 6. Context-name rendering (the format Gitea Actions actually emits)
|
||||
# --------------------------------------------------------------------------
|
||||
class TestContextName(unittest.TestCase):
    """context_name must match the exact format Gitea Actions emits."""

    def test_push_event(self):
        rendered = lpfc.context_name("CI", "Platform (Go)", "push")
        self.assertEqual(rendered, "CI / Platform (Go) (push)")

    def test_pull_request_event(self):
        rendered = lpfc.context_name("CI", "Platform (Go)", "pull_request")
        self.assertEqual(rendered, "CI / Platform (Go) (pull_request)")

    def test_workflow_name_falls_back_to_filename(self):
        # A document without a top-level `name:` falls back to the filename
        # (minus extension) supplied by the caller.
        doc = {"jobs": {"foo": {"continue-on-error": True}}}
        self.assertEqual(lpfc.workflow_name(doc, fallback="my-workflow"), "my-workflow")
|
||||
|
||||
|
||||
# Allow running this file directly as well as via `python3 -m unittest`.
if __name__ == "__main__":
    unittest.main()
|
||||
@ -1,120 +0,0 @@
|
||||
import importlib.util
import sys
from pathlib import Path


# Load prod-auto-deploy.py by file path (the hyphenated name cannot be
# imported with a plain `import` statement) and expose it as `prod`.
SCRIPT = Path(__file__).resolve().parents[1] / "prod-auto-deploy.py"
spec = importlib.util.spec_from_file_location("prod_auto_deploy", SCRIPT)
prod = importlib.util.module_from_spec(spec)
# Register before exec_module so self-references inside the script resolve
# to this same module object.
sys.modules[spec.name] = prod
spec.loader.exec_module(prod)
|
||||
|
||||
|
||||
def test_truthy_flag_accepts_operator_disable_values():
    # Operators may spell "disable me" many ways; all must read as truthy.
    truthy_inputs = ("1", "true", "TRUE", "yes", "on", "disabled", "disable")
    falsy_inputs = ("", "0", "false", "no", "off", None)

    for raw in truthy_inputs:
        assert prod.truthy_flag(raw) is True
    for raw in falsy_inputs:
        assert prod.truthy_flag(raw) is False
|
||||
|
||||
|
||||
def test_build_plan_defaults_to_staging_sha_target_and_prod_cp():
    # With only a SHA and an empty disable flag, the plan targets the
    # staging-<short sha> tag against the production control plane.
    env = {
        "GITHUB_SHA": "abcdef1234567890",
        "PROD_AUTO_DEPLOY_DISABLED": "",
    }

    plan = prod.build_plan(env)

    assert plan["enabled"] is True
    assert plan["sha"] == "abcdef1234567890"
    assert plan["target_tag"] == "staging-abcdef1"
    assert plan["cp_url"] == "https://api.moleculesai.app"
    expected_body = {
        "target_tag": "staging-abcdef1",
        "canary_slug": "hongming",
        "soak_seconds": 60,
        "batch_size": 3,
        "dry_run": False,
    }
    assert plan["body"] == expected_body
|
||||
|
||||
|
||||
def test_build_plan_rejects_non_prod_cp_without_explicit_override():
    # A non-production CP URL without the explicit override must raise,
    # and the error text must name the override knob.
    env = {
        "GITHUB_SHA": "abcdef1234567890",
        "CP_URL": "https://staging-api.moleculesai.app",
    }

    raised = None
    try:
        prod.build_plan(env)
    except ValueError as exc:
        raised = exc
    if raised is None:
        raise AssertionError("expected non-prod CP URL rejection")
    assert "PROD_ALLOW_NON_PROD_CP_URL=true" in str(raised)
|
||||
|
||||
|
||||
def test_build_plan_allows_non_prod_cp_only_with_override():
    # Same non-prod CP URL as above, but the override is set: accepted.
    env = {
        "GITHUB_SHA": "abcdef1234567890",
        "CP_URL": "https://staging-api.moleculesai.app",
        "PROD_ALLOW_NON_PROD_CP_URL": "true",
    }

    plan = prod.build_plan(env)

    assert plan["cp_url"] == "https://staging-api.moleculesai.app"
|
||||
|
||||
|
||||
def test_build_plan_disable_flag_short_circuits_before_credentials():
    # The kill switch wins before anything else (credentials, CP URL)
    # is validated.
    plan = prod.build_plan({
        "GITHUB_SHA": "abcdef1234567890",
        "PROD_AUTO_DEPLOY_DISABLED": "true",
    })

    assert plan["enabled"] is False
    assert plan["disabled_reason"] == "PROD_AUTO_DEPLOY_DISABLED=true"
|
||||
|
||||
|
||||
def test_latest_status_for_context_uses_first_matching_status():
    # The API lists statuses newest-first, so the FIRST entry for the
    # requested context is the latest; later duplicates are ignored.
    target = "CI / all-required (push)"
    statuses = [
        {"context": target, "status": "pending"},
        {"context": "CI / all-required (pull_request)", "status": "success"},
        {"context": target, "status": "success"},
    ]

    latest = prod.latest_status_for_context(statuses, target)

    assert latest == {"context": target, "status": "pending"}
|
||||
|
||||
|
||||
def test_ci_context_state_handles_missing_and_gitea_status_key():
    target = "CI / all-required (push)"

    # No statuses at all → "missing".
    assert prod.ci_context_state([], target) == "missing"
    # Gitea spells the field "status"...
    via_status_key = prod.ci_context_state(
        [{"context": target, "status": "success"}], target)
    assert via_status_key == "success"
    # ...while GitHub-shaped payloads use "state"; both must be read.
    via_state_key = prod.ci_context_state(
        [{"context": target, "state": "failure"}], target)
    assert via_state_key == "failure"
|
||||
|
||||
|
||||
def test_context_is_satisfied_accepts_only_success():
    # Exactly one satisfying state: "success". Everything else — including
    # skipped and cancelled — is unsatisfied.
    non_success = ("failure", "error", "cancelled", "canceled", "skipped", "pending", "missing")

    assert prod.context_is_satisfied("success") is True
    for state in non_success:
        assert prod.context_is_satisfied(state) is False
|
||||
|
||||
|
||||
def test_context_is_terminal_failure_rejects_cancelled_and_skipped():
    # Terminal failures include cancelled/skipped (both spellings);
    # pending/missing/success are not terminal failures.
    terminal = ("failure", "error", "cancelled", "canceled", "skipped")
    non_terminal = ("pending", "missing", "success")

    for state in terminal:
        assert prod.context_is_terminal_failure(state) is True
    for state in non_terminal:
        assert prod.context_is_terminal_failure(state) is False
|
||||
@ -1,343 +0,0 @@
|
||||
#!/usr/bin/env bash
# Regression tests for .gitea/scripts/review-check.sh (RFC#324 Step 1).
#
# Scenario matrix:
#   T1  — open PR: script fetches PR + reviews, continues to team probe
#   T2  — closed PR: script exits 0 (no-op)
#   T3  — APPROVED non-author review exists → candidates exist
#   T4  — no non-author APPROVED reviews → exit 1 (no candidates)
#   T5  — only author reviews (no non-author APPROVE) → exit 1
#   T6  — dismissed APPROVED review → treated as no approval
#   T7  — team membership probe → 204 (member) → script exits 0
#   T8  — team membership probe → 404 (not a member) → script exits 1
#   T9  — team membership probe → 403 (token not in team) → script exits 1 (fail closed)
#   T10 — CURL_AUTH_FILE created with mode 600 and correct header content
#   T11 — bash syntax check (bash -n passes)
#   T12 — jq filter: non-author APPROVED → in candidate list; dismissed → excluded
#   T13 — missing required env GITEA_TOKEN → exits 1 with error
#   T14 — non-default-base PR exits 0 without requiring review
#
# Hostile-self-review (per feedback_assert_exact_not_substring): this suite
# MUST FAIL when the script under test is absent — verified by running the
# suite before the file existed (covered in the PR body).

set -euo pipefail

# Resolve the script under test relative to this test file, so the suite
# works no matter which directory it is invoked from.
THIS_DIR="$(cd "$(dirname "$0")" && pwd)"
SCRIPT_DIR="$(cd "$THIS_DIR/.." && pwd)"
SCRIPT="$SCRIPT_DIR/review-check.sh"

# Global tallies shared by the assert_* helpers below.
PASS=0
FAIL=0
FAILED_TESTS=""
|
||||
|
||||
# assert_eq LABEL EXPECTED GOT — exact string equality; bumps PASS/FAIL tallies.
assert_eq() {
    local label="$1" expected="$2" got="$3"
    if [ "$expected" != "$got" ]; then
        echo " FAIL $label"
        echo "  expected: <$expected>"
        echo "  got: <$got>"
        FAIL=$((FAIL + 1))
        FAILED_TESTS="${FAILED_TESTS} ${label}"
        return
    fi
    echo " PASS $label"
    PASS=$((PASS + 1))
}
|
||||
|
||||
# assert_contains LABEL NEEDLE HAYSTACK — fixed-string substring check
# (grep -F, so the needle is never interpreted as a regex).
assert_contains() {
    local label="$1" needle="$2" haystack="$3"
    if printf '%s' "$haystack" | grep -qF "$needle"; then
        echo " PASS $label"
        PASS=$((PASS + 1))
    else
        echo " FAIL $label"
        echo "  needle: <$needle>"
        # Cap the haystack dump so one failure doesn't flood the log.
        echo "  haystack: <$(printf '%s' "$haystack" | head -c 200)>"
        FAIL=$((FAIL + 1))
        FAILED_TESTS="${FAILED_TESTS} ${label}"
    fi
}
|
||||
|
||||
# assert_file_mode LABEL PATH MODE — asserts the file's octal permission bits.
# Tries GNU stat first, then BSD/macOS stat; "000" as a last resort so a
# missing stat binary reads as a mode mismatch rather than a crash.
assert_file_mode() {
    local label="$1" path="$2" expected_mode="$3" got_mode
    if [ ! -f "$path" ]; then
        echo " FAIL $label (file not found: $path)"
        FAIL=$((FAIL + 1))
        FAILED_TESTS="${FAILED_TESTS} ${label}"
        return
    fi
    got_mode=$(stat -c '%a' "$path" 2>/dev/null || stat -f '%Lp' "$path" 2>/dev/null || echo "000")
    if [ "$expected_mode" != "$got_mode" ]; then
        echo " FAIL $label (expected mode=$expected_mode, got=$got_mode)"
        FAIL=$((FAIL + 1))
        FAILED_TESTS="${FAILED_TESTS} ${label}"
        return
    fi
    echo " PASS $label (mode=$got_mode)"
    PASS=$((PASS + 1))
}
|
||||
|
||||
# assert_file_contains LABEL PATH NEEDLE — fixed-string search inside a file;
# a missing file is its own distinct failure message.
assert_file_contains() {
    local label="$1" path="$2" needle="$3"
    if [ ! -f "$path" ]; then
        echo " FAIL $label (file not found: $path)"
        FAIL=$((FAIL + 1))
        FAILED_TESTS="${FAILED_TESTS} ${label}"
        return
    fi
    if grep -qF "$needle" "$path"; then
        echo " PASS $label"
        PASS=$((PASS + 1))
    else
        echo " FAIL $label (needle not found: <$needle>)"
        FAIL=$((FAIL + 1))
        FAILED_TESTS="${FAILED_TESTS} ${label}"
    fi
}
|
||||
|
||||
# Existence check (foundation): every later test shells out to $SCRIPT, so a
# missing script is a hard stop — this is the hostile-self-review guarantee
# that the suite fails loudly when the file under test does not exist.
echo
echo "== existence =="
if [ -f "$SCRIPT" ]; then
    echo " PASS script exists: $SCRIPT"
    PASS=$((PASS + 1))
else
    echo " FAIL script not found: $SCRIPT"
    FAIL=$((FAIL + 1))
    FAILED_TESTS="${FAILED_TESTS} script_exists"
    echo
    echo "------"
    echo "PASS=$PASS FAIL=$FAIL (existence)"
    echo "Cannot proceed without the script."
    exit 1
fi

# T11 — bash syntax check
echo
echo "== T11 bash syntax =="
if bash -n "$SCRIPT" 2>&1; then
    echo " PASS T11 bash -n passes"
    PASS=$((PASS + 1))
else
    echo " FAIL T11 bash -n failed"
    FAIL=$((FAIL + 1))
    FAILED_TESTS="${FAILED_TESTS} T11"
fi

# T13 — missing required env. set +e / || true because the script is
# EXPECTED to exit non-zero here; we assert on its error message instead.
echo
echo "== T13 missing GITEA_TOKEN =="
set +e
T13_OUT=$(PATH="/tmp:$PATH" GITEA_TOKEN= GITEA_HOST=git.example.com REPO=x/y PR_NUMBER=1 TEAM=qa TEAM_ID=1 bash "$SCRIPT" 2>&1 || true)
set -e
assert_contains "T13 exits non-zero when GITEA_TOKEN missing" "GITEA_TOKEN required" "$T13_OUT"

# Start fixture HTTP server that plays the Gitea API for the remaining tests.
echo
echo "== fixture setup =="
FIXTURE_DIR=$(mktemp -d)
# Clean up the temp dir and the fixture process on any exit path.
trap 'rm -rf "$FIXTURE_DIR"; [ -n "${FIX_PID:-}" ] && kill "$FIX_PID" 2>/dev/null || true' EXIT
FIXTURE_PY="$THIS_DIR/_review_check_fixture.py"
if [ ! -f "$FIXTURE_PY" ]; then
    echo "::error::fixture server $FIXTURE_PY missing"
    exit 1
fi

FIX_LOG="$FIXTURE_DIR/fixture.log"
FIX_STATE_DIR="$FIXTURE_DIR/state"
mkdir -p "$FIX_STATE_DIR"

# Find an unused port by letting the kernel pick one.
FIX_PORT=$(python3 -c 'import socket;s=socket.socket();s.bind(("127.0.0.1",0));print(s.getsockname()[1]);s.close()')

FIXTURE_STATE_DIR="$FIX_STATE_DIR" python3 "$FIXTURE_PY" "$FIX_PORT" \
    >"$FIX_LOG" 2>&1 &
FIX_PID=$!

# Wait up to ~5s (50 x 0.1s) for fixture readiness.
for _ in $(seq 1 50); do
    if curl -fsS "http://127.0.0.1:${FIX_PORT}/_ping" >/dev/null 2>&1; then
        break
    fi
    sleep 0.1
done
if ! curl -fsS "http://127.0.0.1:${FIX_PORT}/_ping" >/dev/null 2>&1; then
    echo "::error::fixture server failed to start. Log:"
    cat "$FIX_LOG"
    exit 1
fi
echo " fixture running on port $FIX_PORT"

# Install a curl shim that rewrites https://fixture.local/* -> http://127.0.0.1:$FIX_PORT/*
# NOTE: the heredoc delimiter is QUOTED (<<"CURL_SHIM"), so nothing expands
# while the shim is written — the literal placeholder FIXPORT lands in the
# file and is substituted with the real port by the perl one-liner below.
# (An earlier comment here claimed double-quoted-heredoc expansion; that was
# wrong — quoting the delimiter suppresses all expansion.)
mkdir -p "$FIXTURE_DIR/bin"
cat >"$FIXTURE_DIR/bin/curl" <<"CURL_SHIM"
#!/usr/bin/env bash
# Shim: rewrite https://fixture.local/* -> http://127.0.0.1:FIXPORT/*
# Written verbatim at test-run time; the FIXPORT placeholder is replaced
# with the real port after this file is created.
new_args=()
for a in "$@"; do
    if [[ "$a" == https://fixture.local/* ]]; then
        rest="${a#https://fixture.local}"
        a="http://127.0.0.1:FIXPORT${rest}"
    fi
    new_args+=("$a")
done
exec /usr/bin/curl "${new_args[@]}"
CURL_SHIM
# Substitute FIXPORT with the actual port number. perl -pi is used instead
# of sed -i because the -i flag differs between GNU sed and BSD/macOS sed.
perl -0pi -e "s/FIXPORT/${FIX_PORT}/g" "$FIXTURE_DIR/bin/curl"
chmod +x "$FIXTURE_DIR/bin/curl"
||||
# Helper: run the script with fixture environment
|
||||
# run_review_check SCENARIO — point the fixture at SCENARIO, run the script
# under test with the full fixture environment, and record both its combined
# output (last_run.log) and exit code (last_rc) for the caller to assert on.
# Echoes the captured output.
run_review_check() {
    local scenario="$1" out rc
    echo "$scenario" >"$FIX_STATE_DIR/scenario"
    # Temporarily drop -e: a non-zero exit is a legitimate outcome we
    # capture in $rc rather than a test-harness failure.
    set +e
    out=$(
        PATH="$FIXTURE_DIR/bin:/tmp:$PATH" \
        GITEA_TOKEN="fixture-token" \
        GITEA_HOST="fixture.local" \
        REPO="molecule-ai/molecule-core" \
        PR_NUMBER="999" \
        DEFAULT_BRANCH="main" \
        TEAM="qa" \
        TEAM_ID="20" \
        REVIEW_CHECK_DEBUG="0" \
        REVIEW_CHECK_STRICT="0" \
        bash "$SCRIPT" 2>&1
    )
    rc=$?
    set -e
    echo "$out" >"$FIX_STATE_DIR/last_run.log"
    echo "$rc" >"$FIX_STATE_DIR/last_rc"
    echo "$out"
}
|
||||
|
||||
# T1 — open PR: script fetches PR and reviews, then proceeds to the team probe.
echo
echo "== T1 open PR =="
T1_OUT=$(run_review_check "T1_pr_open")
T1_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T1 exit code 0 (approver exists + team member)" "0" "$T1_RC"
assert_contains "T1 qa-review APPROVED by core-devops" "APPROVED by core-devops" "$T1_OUT"

# T2 — closed PR: immediate no-op success.
echo
echo "== T2 closed PR =="
T2_OUT=$(run_review_check "T2_pr_closed")
T2_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T2 exit code 0 (closed PR no-op)" "0" "$T2_RC"

# T3 — a non-author APPROVED review produces candidates.
echo
echo "== T3 approved non-author reviews =="
T3_OUT=$(run_review_check "T3_reviews_approved_non_author")
T3_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T3 exit code 0 (candidates + team member)" "0" "$T3_RC"

# T4 — no non-author APPROVED reviews at all → fail.
echo
echo "== T4 no non-author APPROVED reviews =="
T4_OUT=$(run_review_check "T4_reviews_empty")
T4_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T4 exit code 1 (no candidates)" "1" "$T4_RC"
assert_contains "T4 awaiting non-author APPROVE" "awaiting non-author APPROVE" "$T4_OUT"

# T14 — a PR not targeting the default branch must not turn it red.
echo
echo "== T14 non-default base PR =="
T14_OUT=$(run_review_check "T14_non_default_base")
T14_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T14 exit code 0 (non-default base no-op)" "0" "$T14_RC"
assert_contains "T14 not applicable notice" "gate not applicable" "$T14_OUT"

# T5 — reviews exist but all come from the author → fail.
echo
echo "== T5 only author reviews =="
T5_OUT=$(run_review_check "T5_reviews_only_author")
T5_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T5 exit code 1 (only author reviews, no candidates)" "1" "$T5_RC"

# T6 — a dismissed APPROVED review counts as no approval.
echo
echo "== T6 dismissed APPROVED review =="
T6_OUT=$(run_review_check "T6_reviews_dismissed")
T6_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T6 exit code 1 (dismissed = no approval)" "1" "$T6_RC"

# T7 — 204 from the membership probe means "member" → success.
echo
echo "== T7 team membership 204 (member) =="
T7_OUT=$(run_review_check "T7_team_member")
T7_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T7 exit code 0 (member, APPROVED)" "0" "$T7_RC"
assert_contains "T7 APPROVED by core-devops (team member)" "APPROVED by core-devops" "$T7_OUT"

# T8 — 404 means "not a member" → fail closed.
echo
echo "== T8 team membership 404 (not a member) =="
T8_OUT=$(run_review_check "T8_team_not_member")
T8_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T8 exit code 1 (not in team)" "1" "$T8_RC"

# T9 — 403 (token itself lacks team access) must also fail closed.
echo
echo "== T9 team membership 403 (token not in team) =="
T9_OUT=$(run_review_check "T9_team_403")
T9_RC=$(cat "$FIX_STATE_DIR/last_rc")
assert_eq "T9 exit code 1 (403 token-not-in-team, fail closed)" "1" "$T9_RC"
assert_contains "T9 403 error in output" "403" "$T9_OUT"

# T10 — token file creation and permissions. Exercises the CURL_AUTH_FILE
# recipe directly: mktemp with the same pattern, chmod 600, printf header.
echo
echo "== T10 CURL_AUTH_FILE =="
T10_TOKEN="secret-test-token-abc123"
T10_AUTHFILE=$(mktemp "${TMPDIR:-/tmp}/curl-auth.test.XXXXXX")
chmod 600 "$T10_AUTHFILE"
printf 'header = "Authorization: token %s"\n' "$T10_TOKEN" > "$T10_AUTHFILE"
assert_file_mode "T10a mktemp authfile mode 600 (CURL_AUTH_FILE pattern)" "$T10_AUTHFILE" "600"
assert_file_contains "T10b printf header format (CURL_AUTH_FILE content)" "$T10_AUTHFILE" "Authorization: token secret-test-token-abc123"
assert_file_contains "T10c 'header =' curl-config syntax" "$T10_AUTHFILE" 'header = "Authorization: token '
rm -f "$T10_AUTHFILE"

# T12 — the candidate-selection jq expression, tested directly (T3/T6 cover
# it indirectly through the script).
echo
echo "== T12 jq filter =="
JQ_FILTER='.[]
| select(.state == "APPROVED")
| select(.dismissed != true)
| select(.user.login != "alice")
| .user.login'

T12_INPUT='[{"state":"APPROVED","dismissed":false,"user":{"login":"core-devops"}},{"state":"CHANGES_REQUESTED","dismissed":false,"user":{"login":"bob"}},{"state":"APPROVED","dismissed":false,"user":{"login":"alice"}},{"state":"APPROVED","dismissed":true,"user":{"login":"carol"}}]'

JQ_CMD=$(command -v jq 2>/dev/null || echo /tmp/jq)
T12_CANDIDATES=$(echo "$T12_INPUT" | "$JQ_CMD" -r "$JQ_FILTER" 2>/dev/null | sort -u)
assert_contains "T12 jq: core-devops (non-author APPROVED) in candidates" "core-devops" "$T12_CANDIDATES"
assert_eq "T12 jq: alice (author) NOT in candidates" "" "$(echo "$T12_CANDIDATES" | grep '^alice$' || true)"
assert_eq "T12 jq: carol (dismissed) NOT in candidates" "" "$(echo "$T12_CANDIDATES" | grep '^carol$' || true)"

# Summary: non-zero exit when anything failed.
echo
echo "------"
echo "PASS=$PASS FAIL=$FAIL"
if [ "$FAIL" -gt 0 ]; then
    echo "Failed:$FAILED_TESTS"
fi
[ "$FAIL" -eq 0 ]
|
||||
@ -1,553 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# Unit tests for sop-checklist.py
|
||||
#
|
||||
# Run: python3 .gitea/scripts/tests/test_sop_checklist.py
|
||||
# or: pytest .gitea/scripts/tests/test_sop_checklist.py
|
||||
#
|
||||
# RFC#351 Step 2 of 6 — implementation MVP. Tests cover:
|
||||
# - slug normalization (the 4 example variants in the script header)
|
||||
# - parse_directives (ack, revoke, with/without note, mid-comment, etc.)
|
||||
# - section_marker_present (empty answer rejected, filled answer ok)
|
||||
# - compute_ack_state (self-ack rejected, team probe applied, revoke
|
||||
# invalidates own prior ack, peer's ack survives unrevoked)
|
||||
# - render_status (state + description format)
|
||||
# - get_tier_mode (label-driven, default fallback)
|
||||
# - load_config (default config parses cleanly with both PyYAML and
|
||||
# the bundled minimal parser)
|
||||
#
|
||||
# All tests run WITHOUT touching the Gitea API — the team-probe
|
||||
# callable is dependency-injected.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
# Resolve sibling script regardless of where pytest is invoked from.
|
||||
HERE = os.path.dirname(os.path.abspath(__file__))
|
||||
PARENT = os.path.dirname(HERE) # .gitea/scripts
|
||||
sys.path.insert(0, PARENT)
|
||||
|
||||
import importlib.util # noqa: E402
|
||||
|
||||
_spec = importlib.util.spec_from_file_location(
|
||||
"sop_checklist", os.path.join(PARENT, "sop-checklist.py")
|
||||
)
|
||||
sop = importlib.util.module_from_spec(_spec)
|
||||
_spec.loader.exec_module(sop) # type: ignore[union-attr]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Test fixtures
# ---------------------------------------------------------------------------

# Checklist config shipped next to the scripts directory.
CONFIG_PATH = os.path.join(PARENT, "..", "sop-checklist-config.yaml")


def _items() -> list[dict]:
    """Load the default config and return its checklist items."""
    return sop.load_config(CONFIG_PATH)["items"]
|
||||
|
||||
|
||||
def _items_by_slug() -> dict[str, dict]:
    """Index the configured checklist items by their canonical slug."""
    return {item["slug"]: item for item in _items()}
|
||||
|
||||
|
||||
def _numeric_aliases() -> dict[int, str]:
    """Map each item's numeric shorthand (e.g. 1) to its slug; items
    without a numeric_alias are skipped."""
    table: dict[int, str] = {}
    for item in _items():
        if item.get("numeric_alias"):
            table[int(item["numeric_alias"])] = item["slug"]
    return table
|
||||
|
||||
|
||||
def _comment(user: str, body: str) -> dict:
|
||||
return {"user": {"login": user}, "body": body}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# normalize_slug
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestNormalizeSlug(unittest.TestCase):
    """normalize_slug canonicalizes free-form item names to kebab-case slugs,
    resolving numeric shorthands through an optional alias table."""

    def test_kebab_already(self):
        self.assertEqual(sop.normalize_slug("comprehensive-testing"), "comprehensive-testing")

    def test_underscore_to_dash(self):
        self.assertEqual(sop.normalize_slug("comprehensive_testing"), "comprehensive-testing")

    def test_space_to_dash(self):
        self.assertEqual(sop.normalize_slug("comprehensive testing"), "comprehensive-testing")

    def test_uppercase_to_lower(self):
        self.assertEqual(sop.normalize_slug("Comprehensive-Testing"), "comprehensive-testing")

    def test_mixed_separators(self):
        cases = [
            ("Comprehensive_Testing", "comprehensive-testing"),
            ("FIVE_axis review", "five-axis-review"),
        ]
        for raw, want in cases:
            self.assertEqual(sop.normalize_slug(raw), want)

    def test_collapse_repeated_dashes(self):
        for raw in ("comprehensive--testing", "comprehensive  testing"):
            self.assertEqual(sop.normalize_slug(raw), "comprehensive-testing")

    def test_strip_trailing_punctuation(self):
        for raw in ("comprehensive-testing.", "comprehensive-testing!"):
            self.assertEqual(sop.normalize_slug(raw), "comprehensive-testing")

    def test_numeric_shorthand_known(self):
        aliases = _numeric_aliases()
        expected = {
            "1": "comprehensive-testing",
            "3": "staging-smoke",
            "7": "memory-consulted",
        }
        for digit, slug in expected.items():
            self.assertEqual(sop.normalize_slug(digit, aliases), slug)

    def test_numeric_shorthand_unknown_returns_empty(self):
        # "8" is out of range → empty so the caller can flag it unparseable.
        self.assertEqual(sop.normalize_slug("8", _numeric_aliases()), "")

    def test_numeric_without_alias_table_keeps_digits(self):
        # Without an alias table the digits pass through untouched.
        self.assertEqual(sop.normalize_slug("1"), "1")

    def test_empty_input(self):
        for raw in ("", "   ", None):
            self.assertEqual(sop.normalize_slug(raw), "")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# parse_directives
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestParseDirectives(unittest.TestCase):
    """parse_directives extracts /sop-ack and /sop-revoke lines from a
    comment body, normalizing slugs and preserving free-text notes."""

    def setUp(self):
        self.aliases = _numeric_aliases()

    def parse_ack_revoke(self, body):
        # Convenience wrapper: these tests never use N/A directives, so the
        # second element of the pair must always come back empty.
        directives, na_directives = sop.parse_directives(body, self.aliases)
        self.assertEqual(na_directives, [])
        return directives

    def test_simple_ack(self):
        parsed = self.parse_ack_revoke("/sop-ack comprehensive-testing")
        self.assertEqual(parsed, [("sop-ack", "comprehensive-testing", "")])

    def test_simple_revoke(self):
        parsed = self.parse_ack_revoke("/sop-revoke staging-smoke")
        self.assertEqual(parsed, [("sop-revoke", "staging-smoke", "")])

    def test_ack_with_note(self):
        parsed = self.parse_ack_revoke(
            "/sop-ack comprehensive-testing LGTM the test covers all edge cases"
        )
        self.assertEqual(len(parsed), 1)
        verb, slug, note = parsed[0]
        self.assertEqual(verb, "sop-ack")
        self.assertEqual(slug, "comprehensive-testing")
        self.assertIn("LGTM", note)

    def test_numeric_shorthand(self):
        parsed = self.parse_ack_revoke("/sop-ack 1")
        self.assertEqual(parsed, [("sop-ack", "comprehensive-testing", "")])

    def test_revoke_with_reason(self):
        parsed = self.parse_ack_revoke(
            "/sop-revoke comprehensive-testing realized the e2e was mocking the DB"
        )
        verb, slug, note = parsed[0]
        self.assertEqual(verb, "sop-revoke")
        self.assertEqual(slug, "comprehensive-testing")
        self.assertIn("mocking", note)

    def test_directive_in_middle_of_comment(self):
        body = (
            "Reviewed the PR, looks good overall.\n"
            "/sop-ack comprehensive-testing\n"
            "Will follow up on the doc nit separately."
        )
        parsed = self.parse_ack_revoke(body)
        self.assertEqual(len(parsed), 1)
        self.assertEqual(parsed[0][1], "comprehensive-testing")

    def test_multiple_directives_in_one_comment(self):
        body = (
            "/sop-ack comprehensive-testing\n"
            "/sop-ack local-postgres-e2e\n"
        )
        parsed = self.parse_ack_revoke(body)
        self.assertEqual(len(parsed), 2)
        self.assertEqual(
            {slug for _, slug, _ in parsed},
            {"comprehensive-testing", "local-postgres-e2e"},
        )

    def test_must_be_at_line_start(self):
        # A directive embedded mid-line is not honored (prevents review
        # comments like "to /sop-ack you need..." from acting as acks).
        body = "If you want to /sop-ack comprehensive-testing reply in this thread"
        self.assertEqual(self.parse_ack_revoke(body), [])

    def test_leading_whitespace_allowed(self):
        parsed = self.parse_ack_revoke("   /sop-ack comprehensive-testing")
        self.assertEqual(len(parsed), 1)

    def test_empty_body(self):
        for body in ("", None):
            self.assertEqual(sop.parse_directives(body, self.aliases), ([], []))

    def test_normalization_applied(self):
        # /sop-ack Comprehensive_Testing → canonical comprehensive-testing
        parsed = self.parse_ack_revoke("/sop-ack Comprehensive_Testing")
        self.assertEqual(parsed[0][1], "comprehensive-testing")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# section_marker_present
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestSectionMarkerPresent(unittest.TestCase):
    """section_marker_present: a PR-body checklist marker only counts when
    an actual answer follows it (inline or on the next line)."""

    MARKER = "Comprehensive testing performed"

    def test_marker_with_inline_answer(self):
        body = "- [ ] **Comprehensive testing performed**: Added 12 new tests covering null/empty/giant inputs."
        self.assertTrue(sop.section_marker_present(body, self.MARKER))

    def test_marker_with_empty_answer(self):
        self.assertFalse(
            sop.section_marker_present("- [ ] **Comprehensive testing performed**:", self.MARKER)
        )

    def test_marker_with_only_whitespace_answer(self):
        self.assertFalse(
            sop.section_marker_present("- [ ] **Comprehensive testing performed**:   \n", self.MARKER)
        )

    def test_marker_with_next_line_answer(self):
        body = (
            "- [ ] **Comprehensive testing performed**:\n"
            "  Yes — see attached log + 12 new unit tests in foo_test.py.\n"
        )
        self.assertTrue(sop.section_marker_present(body, self.MARKER))

    def test_marker_missing(self):
        body = "- [ ] **Local-postgres E2E run**: N/A — pure-frontend\n"
        self.assertFalse(sop.section_marker_present(body, self.MARKER))

    def test_case_insensitive_marker_match(self):
        body = "- [ ] **comprehensive TESTING performed**: yes"
        self.assertTrue(sop.section_marker_present(body, self.MARKER))

    def test_empty_body(self):
        self.assertFalse(sop.section_marker_present("", "X"))
        self.assertFalse(sop.section_marker_present(None, "X"))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# compute_ack_state
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestComputeAckState(unittest.TestCase):
    """compute_ack_state folds a comment thread into per-item ack state:
    self-acks are rejected, team membership is checked via an injected
    probe, and a later revoke cancels the same user's earlier ack."""

    def setUp(self):
        self.items = _items_by_slug()
        self.aliases = _numeric_aliases()

    # Team-probe stand-ins (dependency-injected in place of the Gitea API).
    @staticmethod
    def _approve_all(slug, users):
        # Everyone is treated as a team member.
        return list(users)

    @staticmethod
    def _approve_none(slug, users):
        # Nobody is a team member.
        return []

    def _approve_only(self, allowed_users):
        return lambda slug, users: [u for u in users if u in allowed_users]

    def _state(self, comments, probe):
        # All tests use "alice" as the PR author.
        return sop.compute_ack_state(comments, "alice", self.items, self.aliases, probe)

    def test_peer_ack_passes(self):
        state = self._state(
            [_comment("bob", "/sop-ack comprehensive-testing")], self._approve_all
        )
        self.assertEqual(state["comprehensive-testing"]["ackers"], ["bob"])

    def test_self_ack_rejected(self):
        state = self._state(
            [_comment("alice", "/sop-ack comprehensive-testing")], self._approve_all
        )
        entry = state["comprehensive-testing"]
        self.assertEqual(entry["ackers"], [])
        self.assertEqual(entry["rejected"]["self_ack"], ["alice"])

    def test_not_in_team_rejected(self):
        state = self._state(
            [_comment("eve", "/sop-ack comprehensive-testing")], self._approve_none
        )
        entry = state["comprehensive-testing"]
        self.assertEqual(entry["ackers"], [])
        self.assertEqual(entry["rejected"]["not_in_team"], ["eve"])

    def test_revoke_invalidates_own_prior_ack(self):
        # Bob acks then later revokes — Bob no longer counts.
        thread = [
            _comment("bob", "/sop-ack comprehensive-testing"),
            _comment("bob", "/sop-revoke comprehensive-testing realized e2e was mocked"),
        ]
        state = self._state(thread, self._approve_all)
        self.assertEqual(state["comprehensive-testing"]["ackers"], [])

    def test_revoke_does_not_affect_others_acks(self):
        # Bob revokes his own ack; Carol's still counts.
        thread = [
            _comment("bob", "/sop-ack comprehensive-testing"),
            _comment("carol", "/sop-ack comprehensive-testing"),
            _comment("bob", "/sop-revoke comprehensive-testing"),
        ]
        state = self._state(thread, self._approve_all)
        self.assertEqual(state["comprehensive-testing"]["ackers"], ["carol"])

    def test_ack_after_revoke_restored(self):
        # Bob revokes then re-acks (e.g. after re-reviewing).
        thread = [
            _comment("bob", "/sop-ack comprehensive-testing"),
            _comment("bob", "/sop-revoke comprehensive-testing"),
            _comment("bob", "/sop-ack comprehensive-testing"),
        ]
        state = self._state(thread, self._approve_all)
        self.assertEqual(state["comprehensive-testing"]["ackers"], ["bob"])

    def test_numeric_shorthand_ack(self):
        # /sop-ack 1 → comprehensive-testing
        state = self._state([_comment("bob", "/sop-ack 1")], self._approve_all)
        self.assertEqual(state["comprehensive-testing"]["ackers"], ["bob"])

    def test_ack_for_unknown_slug_ignored(self):
        # A slug absent from config is silently dropped (never crashes).
        state = self._state([_comment("bob", "/sop-ack does-not-exist")], self._approve_all)
        for slug in self.items:
            self.assertEqual(state[slug]["ackers"], [])

    def test_multi_item_multi_user(self):
        thread = [
            _comment("bob", "/sop-ack comprehensive-testing\n/sop-ack staging-smoke"),
            _comment("carol", "/sop-ack five-axis-review"),
        ]
        state = self._state(thread, self._approve_all)
        self.assertEqual(state["comprehensive-testing"]["ackers"], ["bob"])
        self.assertEqual(state["staging-smoke"]["ackers"], ["bob"])
        self.assertEqual(state["five-axis-review"]["ackers"], ["carol"])
        self.assertEqual(state["root-cause"]["ackers"], [])
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# render_status
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRenderStatus(unittest.TestCase):
    """render_status turns (items, ack state, body-filled map) into a
    commit-status (state, description) pair with a 140-char budget."""

    def setUp(self):
        self.items = _items()
        self.items_by_slug = _items_by_slug()

    def _state_with(self, acked: list[str]) -> dict:
        # Synthetic ack state: each slug in `acked` has one peer acker.
        built = {}
        for item in self.items:
            slug = item["slug"]
            built[slug] = {
                "ackers": ["peer"] if slug in acked else [],
                "rejected": {"self_ack": [], "not_in_team": []},
            }
        return built

    def _all_filled(self) -> dict:
        return {item["slug"]: True for item in self.items}

    def test_all_acked_returns_success(self):
        every_slug = [item["slug"] for item in self.items]
        state, desc = sop.render_status(
            self.items, self._state_with(every_slug), {s: True for s in every_slug}
        )
        self.assertEqual(state, "success")
        self.assertIn("7/7", desc)

    def test_partial_acked_returns_failure(self):
        state, desc = sop.render_status(
            self.items,
            self._state_with(["comprehensive-testing", "staging-smoke"]),
            self._all_filled(),
        )
        self.assertEqual(state, "failure")
        self.assertIn("2/7", desc)
        self.assertIn("missing", desc)

    def test_description_truncates_long_missing_list(self):
        # Only one ack — the 6 missing items must be elided as "+N".
        state, desc = sop.render_status(
            self.items,
            self._state_with(["comprehensive-testing"]),
            self._all_filled(),
        )
        self.assertLessEqual(len(desc), 140)  # description length budget
        self.assertIn("+", desc)  # +N elision marker

    def test_body_unfilled_surfaced(self):
        every_slug = [item["slug"] for item in self.items]
        state, desc = sop.render_status(
            self.items,
            self._state_with(every_slug),
            {item["slug"]: False for item in self.items},
        )
        self.assertEqual(state, "failure")
        self.assertIn("body-unfilled", desc)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# get_tier_mode
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGetTierMode(unittest.TestCase):
    """get_tier_mode maps a PR's tier:* label to a failure mode; absent or
    unknown labels default to "hard" (never silently lower the bar — per
    feedback_fix_root_not_symptom)."""

    def setUp(self):
        self.cfg = sop.load_config(CONFIG_PATH)

    def _mode(self, label_names):
        pr = {"labels": [{"name": n} for n in label_names]}
        return sop.get_tier_mode(pr, self.cfg)

    def test_tier_high_is_hard(self):
        self.assertEqual(self._mode(["tier:high", "area:ci"]), "hard")

    def test_tier_medium_is_hard(self):
        self.assertEqual(self._mode(["tier:medium"]), "hard")

    def test_tier_low_is_soft(self):
        self.assertEqual(self._mode(["tier:low"]), "soft")

    def test_no_tier_label_defaults_to_hard(self):
        self.assertEqual(self._mode(["area:ci"]), "hard")

    def test_no_labels_defaults_to_hard(self):
        self.assertEqual(self._mode([]), "hard")
        self.assertEqual(sop.get_tier_mode({}, self.cfg), "hard")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# load_config
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestLoadConfig(unittest.TestCase):
|
||||
def test_default_config_parses(self):
|
||||
cfg = sop.load_config(CONFIG_PATH)
|
||||
self.assertIn("items", cfg)
|
||||
self.assertEqual(len(cfg["items"]), 7)
|
||||
slugs = {it["slug"] for it in cfg["items"]}
|
||||
self.assertEqual(
|
||||
slugs,
|
||||
{
|
||||
"comprehensive-testing",
|
||||
"local-postgres-e2e",
|
||||
"staging-smoke",
|
||||
"root-cause",
|
||||
"five-axis-review",
|
||||
"no-backwards-compat",
|
||||
"memory-consulted",
|
||||
},
|
||||
)
|
||||
|
||||
def test_default_config_tier_mode_shape(self):
|
||||
cfg = sop.load_config(CONFIG_PATH)
|
||||
self.assertEqual(cfg["tier_failure_mode"]["tier:high"], "hard")
|
||||
self.assertEqual(cfg["tier_failure_mode"]["tier:medium"], "hard")
|
||||
self.assertEqual(cfg["tier_failure_mode"]["tier:low"], "soft")
|
||||
self.assertEqual(cfg["default_mode"], "hard")
|
||||
|
||||
def test_each_item_has_required_fields(self):
|
||||
cfg = sop.load_config(CONFIG_PATH)
|
||||
for it in cfg["items"]:
|
||||
self.assertIn("slug", it)
|
||||
self.assertIn("numeric_alias", it)
|
||||
self.assertIn("pr_section_marker", it)
|
||||
self.assertIn("required_teams", it)
|
||||
self.assertIsInstance(it["required_teams"], list)
|
||||
self.assertGreater(len(it["required_teams"]), 0)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Edge case: full integration without team probe (dependency-injected)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestEndToEndAckFlow(unittest.TestCase):
|
||||
"""All-7-items happy path with synthetic comments. Verifies the
|
||||
full pipeline minus the Gitea API."""
|
||||
|
||||
def test_all_seven_acked_by_proper_teams(self):
|
||||
items = _items_by_slug()
|
||||
aliases = _numeric_aliases()
|
||||
comments = [
|
||||
_comment("qa-bot", "/sop-ack comprehensive-testing"),
|
||||
_comment("eng-bot", "/sop-ack local-postgres-e2e"),
|
||||
_comment("eng-bot", "/sop-ack staging-smoke"),
|
||||
_comment("mgr-bot", "/sop-ack root-cause"),
|
||||
_comment("eng-bot", "/sop-ack five-axis-review"),
|
||||
_comment("mgr-bot", "/sop-ack no-backwards-compat"),
|
||||
_comment("eng-bot", "/sop-ack memory-consulted"),
|
||||
]
|
||||
|
||||
def probe(slug, users):
|
||||
# Pretend every user is in every team.
|
||||
return list(users)
|
||||
|
||||
state = sop.compute_ack_state(comments, "alice-author", items, aliases, probe)
|
||||
body = {it["slug"]: True for it in items.values()}
|
||||
items_list = list(items.values())
|
||||
result_state, desc = sop.render_status(items_list, state, body)
|
||||
self.assertEqual(result_state, "success")
|
||||
self.assertIn("7/7", desc)
|
||||
|
||||
def test_all_acks_still_fail_when_body_section_unfilled(self):
|
||||
items = _items_by_slug()
|
||||
aliases = _numeric_aliases()
|
||||
comments = [
|
||||
_comment("qa-bot", "/sop-ack comprehensive-testing"),
|
||||
_comment("eng-bot", "/sop-ack local-postgres-e2e"),
|
||||
_comment("eng-bot", "/sop-ack staging-smoke"),
|
||||
_comment("mgr-bot", "/sop-ack root-cause"),
|
||||
_comment("eng-bot", "/sop-ack five-axis-review"),
|
||||
_comment("mgr-bot", "/sop-ack no-backwards-compat"),
|
||||
_comment("eng-bot", "/sop-ack memory-consulted"),
|
||||
]
|
||||
|
||||
def probe(slug, users):
|
||||
return list(users)
|
||||
|
||||
state = sop.compute_ack_state(comments, "alice-author", items, aliases, probe)
|
||||
body = {it["slug"]: True for it in items.values()}
|
||||
body["root-cause"] = False
|
||||
items_list = list(items.values())
|
||||
result_state, desc = sop.render_status(items_list, state, body)
|
||||
self.assertEqual(result_state, "failure")
|
||||
self.assertIn("7/7", desc)
|
||||
self.assertIn("body-unfilled: root-cause", desc)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main(verbosity=2)
|
||||
@ -32,7 +32,6 @@ THIS_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
SCRIPT_DIR="$(cd "$THIS_DIR/.." && pwd)"
|
||||
WORKFLOW_DIR="$(cd "$THIS_DIR/../../workflows" && pwd)"
|
||||
WORKFLOW="$WORKFLOW_DIR/sop-tier-refire.yml"
|
||||
DISPATCH_WORKFLOW="$WORKFLOW_DIR/review-refire-comments.yml"
|
||||
SCRIPT="$SCRIPT_DIR/sop-tier-refire.sh"
|
||||
|
||||
PASS=0
|
||||
@ -88,7 +87,6 @@ assert_file_exists() {
|
||||
echo
|
||||
echo "== existence =="
|
||||
assert_file_exists "workflow file exists" "$WORKFLOW"
|
||||
assert_file_exists "dispatcher workflow file exists" "$DISPATCH_WORKFLOW"
|
||||
assert_file_exists "script file exists" "$SCRIPT"
|
||||
if [ "$FAIL" -gt 0 ]; then
|
||||
echo
|
||||
@ -106,44 +104,30 @@ echo "== T6/T7 workflow yaml =="
|
||||
PARSE_OUT=$(python3 -c 'import sys,yaml;yaml.safe_load(open(sys.argv[1]).read());print("ok")' "$WORKFLOW" 2>&1 || true)
|
||||
assert_eq "T7 workflow parses as YAML" "ok" "$PARSE_OUT"
|
||||
|
||||
# The old per-workflow issue_comment listener caused queue storms because
|
||||
# Gitea queues jobs before evaluating job-level `if:`. The script remains,
|
||||
# but comment-triggered refires route through the single dispatcher.
|
||||
# Three required gates in the `if:` expression
|
||||
WORKFLOW_CONTENT=$(cat "$WORKFLOW")
|
||||
if printf '%s' "$WORKFLOW_CONTENT" | grep -q '^ issue_comment:'; then
|
||||
echo " FAIL T6a manual fallback workflow must not listen on issue_comment"
|
||||
FAIL=$((FAIL + 1))
|
||||
FAILED_TESTS="${FAILED_TESTS} T6a"
|
||||
else
|
||||
echo " PASS T6a manual fallback workflow does not listen on issue_comment"
|
||||
PASS=$((PASS + 1))
|
||||
fi
|
||||
assert_contains "T6b workflow exposes workflow_dispatch" \
|
||||
"workflow_dispatch" "$WORKFLOW_CONTENT"
|
||||
assert_contains "T6c workflow documents unsupported manual inputs" \
|
||||
"workflow_dispatch inputs" "$WORKFLOW_CONTENT"
|
||||
assert_contains "T6a workflow if: contains author_association gate" \
|
||||
"github.event.comment.author_association" "$WORKFLOW_CONTENT"
|
||||
assert_contains "T6b workflow if: gates on MEMBER/OWNER/COLLABORATOR" \
|
||||
'["MEMBER","OWNER","COLLABORATOR"]' "$WORKFLOW_CONTENT"
|
||||
assert_contains "T6c workflow if: contains slash-command trigger" \
|
||||
"/refire-tier-check" "$WORKFLOW_CONTENT"
|
||||
assert_contains "T6d workflow if: gates on PR-not-issue" \
|
||||
"github.event.issue.pull_request" "$WORKFLOW_CONTENT"
|
||||
assert_contains "T6e workflow listens on issue_comment" \
|
||||
"issue_comment" "$WORKFLOW_CONTENT"
|
||||
assert_contains "T6f workflow requests statuses:write permission" \
|
||||
"statuses: write" "$WORKFLOW_CONTENT"
|
||||
# Does NOT check out PR HEAD (security)
|
||||
if grep -q 'ref: \${{ github.event.pull_request.head' "$WORKFLOW"; then
|
||||
echo " FAIL T6d workflow MUST NOT check out PR head (security)"
|
||||
echo " FAIL T6g workflow MUST NOT check out PR head (security)"
|
||||
FAIL=$((FAIL + 1))
|
||||
FAILED_TESTS="${FAILED_TESTS} T6d"
|
||||
FAILED_TESTS="${FAILED_TESTS} T6g"
|
||||
else
|
||||
echo " PASS T6d workflow does not check out PR head"
|
||||
echo " PASS T6g workflow does not check out PR head"
|
||||
PASS=$((PASS + 1))
|
||||
fi
|
||||
|
||||
DISPATCH_PARSE_OUT=$(python3 -c 'import sys,yaml;yaml.safe_load(open(sys.argv[1]).read());print("ok")' "$DISPATCH_WORKFLOW" 2>&1 || true)
|
||||
assert_eq "T6e dispatcher workflow parses as YAML" "ok" "$DISPATCH_PARSE_OUT"
|
||||
DISPATCH_CONTENT=$(cat "$DISPATCH_WORKFLOW")
|
||||
assert_contains "T6f dispatcher listens on issue_comment" \
|
||||
"issue_comment" "$DISPATCH_CONTENT"
|
||||
assert_contains "T6g dispatcher handles /qa-recheck" \
|
||||
"/qa-recheck" "$DISPATCH_CONTENT"
|
||||
assert_contains "T6h dispatcher handles /security-recheck" \
|
||||
"/security-recheck" "$DISPATCH_CONTENT"
|
||||
assert_contains "T6i dispatcher handles /refire-tier-check" \
|
||||
"/refire-tier-check" "$DISPATCH_CONTENT"
|
||||
|
||||
# T1-T5 — script behavior against a local Gitea-fixture
|
||||
echo
|
||||
echo "== T1-T5 script behavior (vs local fixture) =="
|
||||
|
||||
@ -1,169 +0,0 @@
|
||||
import importlib.util
|
||||
import json
|
||||
import pathlib
|
||||
import urllib.error
|
||||
|
||||
|
||||
ROOT = pathlib.Path(__file__).resolve().parents[1]
|
||||
SCRIPT = ROOT / "status-reaper.py"
|
||||
|
||||
|
||||
def load_reaper():
|
||||
spec = importlib.util.spec_from_file_location("status_reaper", SCRIPT)
|
||||
mod = importlib.util.module_from_spec(spec)
|
||||
assert spec.loader is not None
|
||||
spec.loader.exec_module(mod)
|
||||
mod.API = "https://git.example.test/api/v1"
|
||||
mod.GITEA_TOKEN = "test-token"
|
||||
mod.API_TIMEOUT_SEC = 1
|
||||
mod.API_RETRIES = 3
|
||||
mod.API_RETRY_SLEEP_SEC = 0
|
||||
return mod
|
||||
|
||||
|
||||
class FakeResponse:
|
||||
status = 200
|
||||
|
||||
def __init__(self, payload):
|
||||
self.payload = payload
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc, tb):
|
||||
return False
|
||||
|
||||
def read(self):
|
||||
return json.dumps(self.payload).encode("utf-8")
|
||||
|
||||
|
||||
def test_api_retries_transient_timeout(monkeypatch):
|
||||
mod = load_reaper()
|
||||
calls = {"n": 0}
|
||||
|
||||
def fake_urlopen(req, timeout):
|
||||
calls["n"] += 1
|
||||
if calls["n"] == 1:
|
||||
raise TimeoutError("simulated slow Gitea API")
|
||||
return FakeResponse({"ok": True})
|
||||
|
||||
monkeypatch.setattr(mod.urllib.request, "urlopen", fake_urlopen)
|
||||
|
||||
status, body = mod.api("GET", "/repos/o/r/commits")
|
||||
|
||||
assert status == 200
|
||||
assert body == {"ok": True}
|
||||
assert calls["n"] == 2
|
||||
|
||||
|
||||
def test_api_raises_after_retry_budget(monkeypatch):
|
||||
mod = load_reaper()
|
||||
|
||||
def fake_urlopen(req, timeout):
|
||||
raise urllib.error.URLError("connection reset")
|
||||
|
||||
monkeypatch.setattr(mod.urllib.request, "urlopen", fake_urlopen)
|
||||
|
||||
try:
|
||||
mod.api("GET", "/repos/o/r/commits")
|
||||
except mod.ApiError as exc:
|
||||
assert "failed after 3 attempts" in str(exc)
|
||||
else:
|
||||
raise AssertionError("expected ApiError")
|
||||
|
||||
|
||||
def test_reap_compensates_failed_pr_context_when_push_equivalent_passed(monkeypatch):
|
||||
mod = load_reaper()
|
||||
posted = []
|
||||
|
||||
def fake_post(sha, context, target_url, *, description="", dry_run=False):
|
||||
posted.append((sha, context, target_url, description, dry_run))
|
||||
|
||||
monkeypatch.setattr(mod, "post_compensating_status", fake_post)
|
||||
|
||||
counters = mod.reap(
|
||||
{"CI": True, "Handlers Postgres Integration": True},
|
||||
{
|
||||
"statuses": [
|
||||
{
|
||||
"context": "CI / Platform (Go) (pull_request)",
|
||||
"status": "failure",
|
||||
"target_url": "https://git.example.test/ci-pr",
|
||||
},
|
||||
{
|
||||
"context": "CI / Platform (Go) (push)",
|
||||
"status": "success",
|
||||
},
|
||||
{
|
||||
"context": (
|
||||
"Handlers Postgres Integration / "
|
||||
"Handlers Postgres Integration (pull_request)"
|
||||
),
|
||||
"status": "failure",
|
||||
"target_url": "https://git.example.test/handlers-pr",
|
||||
},
|
||||
{
|
||||
"context": (
|
||||
"Handlers Postgres Integration / "
|
||||
"Handlers Postgres Integration (push)"
|
||||
),
|
||||
"status": "success",
|
||||
},
|
||||
],
|
||||
},
|
||||
"db3b7a93e31adc0cb072a6d177d92dd73275a191",
|
||||
)
|
||||
|
||||
assert counters["compensated_pr_shadowed_by_push_success"] == 2
|
||||
assert posted == [
|
||||
(
|
||||
"db3b7a93e31adc0cb072a6d177d92dd73275a191",
|
||||
"CI / Platform (Go) (pull_request)",
|
||||
"https://git.example.test/ci-pr",
|
||||
mod.PR_SHADOW_COMPENSATION_DESCRIPTION,
|
||||
False,
|
||||
),
|
||||
(
|
||||
"db3b7a93e31adc0cb072a6d177d92dd73275a191",
|
||||
"Handlers Postgres Integration / Handlers Postgres Integration (pull_request)",
|
||||
"https://git.example.test/handlers-pr",
|
||||
mod.PR_SHADOW_COMPENSATION_DESCRIPTION,
|
||||
False,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def test_reap_preserves_failed_pr_context_without_push_success(monkeypatch):
|
||||
mod = load_reaper()
|
||||
posted = []
|
||||
monkeypatch.setattr(
|
||||
mod,
|
||||
"post_compensating_status",
|
||||
lambda sha, context, target_url, *, description="", dry_run=False: posted.append(
|
||||
context
|
||||
),
|
||||
)
|
||||
|
||||
counters = mod.reap(
|
||||
{"CI": True},
|
||||
{
|
||||
"statuses": [
|
||||
{
|
||||
"context": "CI / Platform (Go) (pull_request)",
|
||||
"status": "failure",
|
||||
},
|
||||
{
|
||||
"context": "CI / Platform (Go) (push)",
|
||||
"status": "failure",
|
||||
},
|
||||
{
|
||||
"context": "CI / Shellcheck (pull_request)",
|
||||
"status": "failure",
|
||||
},
|
||||
],
|
||||
},
|
||||
"db3b7a93e31adc0cb072a6d177d92dd73275a191",
|
||||
)
|
||||
|
||||
assert counters["preserved_pr_without_push_success"] == 2
|
||||
assert posted == []
|
||||
@ -1,145 +0,0 @@
|
||||
# SOP-Checklist gate — per-item required reviewer teams.
|
||||
#
|
||||
# RFC#351 v1 starter set. Each item lists:
|
||||
# slug — canonical kebab-case form used in /sop-ack <slug>
|
||||
# pr_section_marker — substring matched in the PR body to detect that
|
||||
# the author filled in this item (case-insensitive)
|
||||
# required_teams — list of Gitea team names; an ack from ANY one of
|
||||
# these teams (logical OR) satisfies the item.
|
||||
# Membership is probed at gate-time via
|
||||
# GET /api/v1/teams/{id}/members/{login}.
|
||||
# Team-id resolution happens at script start via
|
||||
# GET /api/v1/orgs/{org}/teams (cheap, one call).
|
||||
# numeric_alias — 1..7; lets reviewers type `/sop-ack 3` as a
|
||||
# shortcut for `/sop-ack staging-smoke`.
|
||||
#
|
||||
# WHY THESE TEAM MAPPINGS:
|
||||
# The RFC table referenced persona-role names like `core-qa`,
|
||||
# `core-be`, `core-devops` — these are individual Gitea user logins,
|
||||
# not teams. The Gitea team-membership API is /teams/{id}/members/{u},
|
||||
# so we need actual teams. Orchestrator preflight 2026-05-12 verified
|
||||
# only these teams exist on molecule-ai: ceo(5), engineers(2),
|
||||
# managers(6), qa(20), security(21), Owners(1), and bot teams. We
|
||||
# map the RFC roles to the closest existing team and surface the
|
||||
# mapping explicitly so it's reviewable.
|
||||
#
|
||||
# HOW TO EDIT:
|
||||
# - Tightening: replace `engineers` with a smaller team after creating
|
||||
# it (e.g. a new `senior-engineers` team if needed).
|
||||
# - Loosening: add another team to required_teams (OR semantics).
|
||||
# - Add an item: append to items list and document the slug below.
|
||||
#
|
||||
# AUTHOR SELF-ACK IS FORBIDDEN regardless of which team contains them
|
||||
# — the gate script enforces commenter != PR author before checking
|
||||
# team membership.
|
||||
|
||||
version: 1
|
||||
|
||||
# Tier-aware failure mode (RFC#351 open question 2):
|
||||
# For tier:high — hard-fail (status `failure`, blocks merge via BP).
|
||||
# For tier:medium — hard-fail (same as high; medium is non-trivial).
|
||||
# For tier:low — soft-fail (status `pending` with `acked: N/M` in the
|
||||
# description). BP can choose to require the context
|
||||
# or not for low-tier PRs.
|
||||
# If no tier label is present, default to medium (hard-fail) — every PR
|
||||
# should have a tier label per sop-tier-check, and absence indicates
|
||||
# a missing-tier defect we should surface, not silently lower the bar.
|
||||
tier_failure_mode:
|
||||
"tier:high": hard
|
||||
"tier:medium": hard
|
||||
"tier:low": soft
|
||||
default_mode: hard # used when no tier:* label is present
|
||||
|
||||
items:
|
||||
- slug: comprehensive-testing
|
||||
numeric_alias: 1
|
||||
pr_section_marker: "Comprehensive testing performed"
|
||||
required_teams: [qa, engineers]
|
||||
description: >-
|
||||
What was tested, how, edge cases covered. Ack from any qa-team
|
||||
member (or engineers fallback while qa is small).
|
||||
|
||||
- slug: local-postgres-e2e
|
||||
numeric_alias: 2
|
||||
pr_section_marker: "Local-postgres E2E run"
|
||||
required_teams: [engineers]
|
||||
description: >-
|
||||
Link to local CI artifact, or "N/A: pure-frontend change". Ack
|
||||
from any engineer who can verify the local DB test actually ran.
|
||||
|
||||
- slug: staging-smoke
|
||||
numeric_alias: 3
|
||||
pr_section_marker: "Staging-smoke verified or pending"
|
||||
required_teams: [engineers]
|
||||
description: >-
|
||||
Link to canary run, or "scheduled post-merge". Ack from any
|
||||
engineer (core-devops/infra-sre are members of engineers team).
|
||||
|
||||
- slug: root-cause
|
||||
numeric_alias: 4
|
||||
pr_section_marker: "Root-cause not symptom"
|
||||
required_teams: [managers, ceo]
|
||||
description: >-
|
||||
One-sentence root-cause statement. Ack from managers tier
|
||||
(team-leads) or ceo. Senior judgment required to attest
|
||||
root-cause-versus-symptom.
|
||||
|
||||
- slug: five-axis-review
|
||||
numeric_alias: 5
|
||||
pr_section_marker: "Five-Axis review walked"
|
||||
required_teams: [engineers]
|
||||
description: >-
|
||||
Correctness / readability / architecture / security / performance.
|
||||
Ack from any non-author engineer.
|
||||
|
||||
- slug: no-backwards-compat
|
||||
numeric_alias: 6
|
||||
pr_section_marker: "No backwards-compat shim / dead code added"
|
||||
required_teams: [managers, ceo]
|
||||
description: >-
|
||||
Yes/no + justification if no. Senior ack required because
|
||||
backward-compat shims are how dead-code accretes.
|
||||
|
||||
- slug: memory-consulted
|
||||
numeric_alias: 7
|
||||
pr_section_marker: "Memory/saved-feedback consulted"
|
||||
required_teams: [engineers]
|
||||
description: >-
|
||||
List of feedback memories applicable to this change. Ack from
|
||||
any engineer who has the same memory access.
|
||||
|
||||
# N/A gate declarations (RFC#324 §N/A follow-up).
|
||||
# PRs where a gate genuinely does not apply (e.g., pure-infra with no
|
||||
# qa surface, or docs-only) can be declared N/A by a non-author peer
|
||||
# who is in one of the gate's required_teams. The sop-checklist
|
||||
# posts a `sop-checklist / na-declarations (pull_request)` status that
|
||||
# review-check.sh reads to skip the Gitea-APPROVE requirement.
|
||||
#
|
||||
# Usage: any PR commenter (peer) posts:
|
||||
# /sop-n/a qa-review <reason>
|
||||
# /sop-n/a security-review <reason>
|
||||
#
|
||||
# Slash commands:
|
||||
# /sop-n/a <gate> [reason] — declare gate N/A (most-recent per-user wins)
|
||||
# /sop-revoke <gate> — revoke prior N/A declaration for that gate
|
||||
#
|
||||
# Gate names must match the context strings used by review-check.sh:
|
||||
# qa-review → qa-review / approved (<event>) [TEAM_ID=20]
|
||||
# security-review → security-review / approved (<event>) [TEAM_ID=21]
|
||||
#
|
||||
# required_teams: OR semantics — any team member can declare N/A.
|
||||
# Authors cannot self-declare N/A (enforced by gate script).
|
||||
n/a_gates:
|
||||
qa-review:
|
||||
required_teams: [qa, security, engineers]
|
||||
description: >-
|
||||
QA review N/A when this change has no qa surface (pure-infra,
|
||||
tooling-only, revert, dependency-only). A qa/eng/security member
|
||||
must post /sop-n/a qa-review to activate.
|
||||
|
||||
security-review:
|
||||
required_teams: [security, managers, ceo]
|
||||
description: >-
|
||||
Security review N/A when this change has no security surface
|
||||
(docs-only, pure-frontend, dependency-only). A security/owners
|
||||
member must post /sop-n/a security-review to activate.
|
||||
@ -1,58 +1,88 @@
|
||||
# audit-force-merge — emit `incident.force_merge` to runner stdout when
|
||||
# a PR is merged with required-status-checks not green. Vector picks
|
||||
# audit-force-merge — emit `incident.force_merge` to the runner log when
|
||||
# a PR is merged with required-status checks NOT all green. Vector picks
|
||||
# the JSON line off docker_logs and ships to Loki on
|
||||
# molecule-canonical-obs (per `reference_obs_stack_phase1`); query as:
|
||||
#
|
||||
# {host="operator"} |= "event_type" |= "incident.force_merge" | json
|
||||
#
|
||||
# Closes the §SOP-6 audit gap (the doc says force-merges write to
|
||||
# `structure_events`, but that table lives in the platform DB, not
|
||||
# Gitea-side; Loki is the practical equivalent for Gitea Actions
|
||||
# events). When the credential / observability stack converges later,
|
||||
# this can sync into structure_events from Loki via a backfill job —
|
||||
# the structured JSON shape is forward-compatible.
|
||||
# Companion to `audit-force-merge.sh` (script-extract pattern, same as
|
||||
# sop-tier-check). The audit observes BOTH UI-merged and REST-merged PRs
|
||||
# uniformly per `feedback_gh_cli_merge_lies_use_rest`.
|
||||
#
|
||||
# Logic in `.gitea/scripts/audit-force-merge.sh` per the same script-
|
||||
# extract pattern as sop-tier-check.
|
||||
# Closes the §SOP-6 audit gap for the molecule-core repo. RFC:
|
||||
# internal#219 §6. Mirrors the same-named workflow in
|
||||
# molecule-controlplane; design rationale lives in the RFC, not here,
|
||||
# to keep the workflow file scannable.
|
||||
|
||||
name: audit-force-merge
|
||||
|
||||
# pull_request_target loads from the base branch — same security model
|
||||
# as sop-tier-check. Without this, an attacker could rewrite the
|
||||
# workflow on a PR and skip the audit emission for their own
|
||||
# force-merge. See `.gitea/workflows/sop-tier-check.yml` for the full
|
||||
# rationale.
|
||||
# as sop-tier-check. Without this, a PR author could rewrite the
|
||||
# workflow on their own PR and skip the audit emission for their own
|
||||
# force-merge. The base-branch checkout below ALSO uses
|
||||
# `base.sha`, not `base.ref`, so a fast-moving base can't slip a
|
||||
# different audit script in under us.
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [closed]
|
||||
|
||||
# `pull-requests: read` + `contents: read` covers everything the script
|
||||
# needs (fetch PR + commit statuses). `issues:` deliberately omitted —
|
||||
# audit fires-and-forgets to stdout, never opens issues.
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
|
||||
jobs:
|
||||
audit:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
# Skip when PR is closed without merge — saves a runner.
|
||||
if: github.event.pull_request.merged == true
|
||||
steps:
|
||||
- name: Check out base branch (for the script)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
# base.sha pinning, NOT base.ref — see header rationale.
|
||||
ref: ${{ github.event.pull_request.base.sha }}
|
||||
- name: Detect force-merge + emit audit event
|
||||
env:
|
||||
# Same org-level secret the sop-tier-check workflow uses.
|
||||
# Same org-level secret the sop-tier-check workflow uses;
|
||||
# falls back to the auto-injected GITHUB_TOKEN if the
|
||||
# org-level SOP_TIER_CHECK_TOKEN isn't set on a transitional
|
||||
# repo.
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
# Required-status-check contexts to evaluate at merge time.
|
||||
# Newline-separated. Mirror this against branch protection
|
||||
# (settings → branches → protected branch → required checks).
|
||||
# Newline-separated. MUST mirror branch protection's
|
||||
# status_check_contexts for protected branches
|
||||
# (currently `main`; `staging` protection forthcoming per
|
||||
# RFC internal#219 Phase 4).
|
||||
#
|
||||
# Initialized 2026-05-11 from the current molecule-core `main`
|
||||
# branch protection:
|
||||
#
|
||||
# GET /api/v1/repos/molecule-ai/molecule-core/
|
||||
# branch_protections/main
|
||||
# → status_check_contexts = [
|
||||
# "Secret scan / Scan diff for credential-shaped strings (pull_request)",
|
||||
# "sop-tier-check / tier-check (pull_request)"
|
||||
# ]
|
||||
#
|
||||
# Declared here rather than fetched from /branch_protections
|
||||
# because that endpoint requires admin write — sop-tier-bot is
|
||||
# read-only by design (least-privilege).
|
||||
# because that endpoint requires admin write — sop-tier-bot
|
||||
# is read-only by design (least-privilege per
|
||||
# `feedback_least_privilege_via_workflow_env` / internal#257).
|
||||
# Drift between this env and the real protection list is
|
||||
# auto-detected by `ci-required-drift.yml` (RFC §4 + §6),
|
||||
# which opens a `[ci-drift]` issue within one hour.
|
||||
#
|
||||
# When the protection set changes (e.g. Phase 4 adds the
|
||||
# `ci / all-required (pull_request)` sentinel), update BOTH
|
||||
# branch protection AND this env in the SAME PR; drift-detect
|
||||
# will otherwise file an issue for you.
|
||||
REQUIRED_CHECKS: |
|
||||
CI / all-required (pull_request)
|
||||
sop-checklist / all-items-acked (pull_request)
|
||||
Secret scan / Scan diff for credential-shaped strings (pull_request)
|
||||
sop-tier-check / tier-check (pull_request)
|
||||
run: bash .gitea/scripts/audit-force-merge.sh
|
||||
|
||||
@ -37,7 +37,6 @@ jobs:
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking
|
||||
# the PR. Follow-up PR flips this off after surfaced defects are
|
||||
# triaged.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
@ -43,13 +43,11 @@ permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
# bp-exempt: drift visibility gate; CI / all-required remains the required aggregate.
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking
|
||||
# the PR. Follow-up PR flips this off after surfaced defects are
|
||||
# triaged.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
@ -45,7 +45,6 @@ jobs:
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking
|
||||
# the PR. Follow-up PR flips this off after surfaced defects are
|
||||
# triaged.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
|
||||
@ -1,165 +0,0 @@
|
||||
name: MCP Stdio Transport Regression
|
||||
|
||||
# Regression test for molecule-ai-workspace-runtime#61:
|
||||
# asyncio.connect_read_pipe / connect_write_pipe fail with
|
||||
# ValueError: "Pipe transport is only for pipes, sockets and character devices"
|
||||
# when stdout is a regular file (openclaw capture, CI tee, debugging).
|
||||
#
|
||||
# This workflow reproduces the exact failure mode and verifies the
|
||||
# fallback to direct buffer I/O works. It runs on every PR that
|
||||
# touches the MCP server or this workflow, plus nightly cron.
|
||||
#
|
||||
# Why a separate workflow (not folded into ci.yml python-lint):
|
||||
# - The test needs to spawn the MCP server with stdout redirected
|
||||
# to a regular file (not a TTY/pipe), which conflicts with
|
||||
# pytest's own capture mechanism.
|
||||
# - It exercises the actual process spawn path (python a2a_mcp_server.py)
|
||||
# not just unit-test mocks — closer to the real openclaw integration.
|
||||
# - A dedicated workflow surfaces stdio-specific regressions without
|
||||
# coupling to the broader Python test suite's coverage gate.
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- 'workspace/a2a_mcp_server.py'
|
||||
- 'workspace/mcp_cli.py'
|
||||
- 'workspace/tests/test_a2a_mcp_server.py'
|
||||
- '.gitea/workflows/ci-mcp-stdio-transport.yml'
|
||||
push:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- 'workspace/a2a_mcp_server.py'
|
||||
- 'workspace/mcp_cli.py'
|
||||
- 'workspace/tests/test_a2a_mcp_server.py'
|
||||
- '.gitea/workflows/ci-mcp-stdio-transport.yml'
|
||||
schedule:
|
||||
# Nightly at 04:00 UTC — catches drift from dependency updates
|
||||
# (e.g. asyncio behavior changes in new Python patch releases).
|
||||
- cron: '0 4 * * *'
|
||||
|
||||
concurrency:
|
||||
group: mcp-stdio-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
jobs:
|
||||
# bp-exempt: regression canary for runtime#61; not a merge gate — informational only until promoted to required.
|
||||
# mc#774: continue-on-error mask — new workflow, flip to false once it's green on ≥3 consecutive main runs.
|
||||
mcp-stdio-regular-file:
|
||||
name: MCP stdio with regular-file stdout
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true # mc#774
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
WORKSPACE_ID: "00000000-0000-0000-0000-000000000001"
|
||||
defaults:
|
||||
run:
|
||||
working-directory: workspace
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: pip
|
||||
cache-dependency-path: workspace/requirements.txt
|
||||
- run: pip install -r requirements.txt pytest pytest-asyncio pytest-cov
|
||||
|
||||
- name: Reproduce runtime#61 — stdout as regular file
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "=== Reproducing molecule-ai-workspace-runtime#61 ==="
|
||||
echo ""
|
||||
echo "Before the fix, this command would fail with:"
|
||||
echo ' ValueError: Pipe transport is only for pipes, sockets and character devices'
|
||||
echo ""
|
||||
|
||||
# Spawn the MCP server with stdout redirected to a regular file.
|
||||
# This is exactly what openclaw does when capturing MCP output.
|
||||
OUTPUT=$(mktemp)
|
||||
trap 'rm -f "$OUTPUT"' EXIT
|
||||
|
||||
# Send initialize request, then tools/list, then exit
|
||||
{
|
||||
echo '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{}}'
|
||||
echo '{"jsonrpc":"2.0","id":2,"method":"tools/list"}'
|
||||
} | python a2a_mcp_server.py > "$OUTPUT" 2>&1 || {
|
||||
RC=$?
|
||||
echo "FAIL: MCP server exited with code $RC"
|
||||
echo "--- stdout+stderr ---"
|
||||
cat "$OUTPUT"
|
||||
exit 1
|
||||
}
|
||||
|
||||
echo "PASS: MCP server handled regular-file stdout without crashing"
|
||||
echo ""
|
||||
echo "--- Output (first 20 lines) ---"
|
||||
head -20 "$OUTPUT"
|
||||
echo ""
|
||||
|
||||
# Verify we got valid JSON-RPC responses
|
||||
if grep -q '"result"' "$OUTPUT"; then
|
||||
echo "PASS: JSON-RPC responses found in output"
|
||||
else
|
||||
echo "FAIL: No JSON-RPC responses in output"
|
||||
cat "$OUTPUT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Reproduce runtime#61 — stdin from regular file
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "=== stdin as regular file (CI tee / capture pattern) ==="
|
||||
|
||||
INPUT=$(mktemp)
|
||||
OUTPUT=$(mktemp)
|
||||
trap 'rm -f "$INPUT" "$OUTPUT"' EXIT
|
||||
|
||||
cat > "$INPUT" <<'EOF'
|
||||
{"jsonrpc":"2.0","id":1,"method":"initialize","params":{}}
|
||||
{"jsonrpc":"2.0","id":2,"method":"tools/list"}
|
||||
EOF
|
||||
|
||||
python a2a_mcp_server.py < "$INPUT" > "$OUTPUT" 2>&1 || {
|
||||
RC=$?
|
||||
echo "FAIL: MCP server exited with code $RC"
|
||||
cat "$OUTPUT"
|
||||
exit 1
|
||||
}
|
||||
|
||||
echo "PASS: MCP server handled regular-file stdin without crashing"
|
||||
|
||||
if grep -q '"result"' "$OUTPUT"; then
|
||||
echo "PASS: JSON-RPC responses found in output"
|
||||
else
|
||||
echo "FAIL: No JSON-RPC responses in output"
|
||||
cat "$OUTPUT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Verify warning is emitted for non-pipe stdio
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "=== Verify diagnostic warning ==="
|
||||
|
||||
OUTPUT=$(mktemp)
|
||||
trap 'rm -f "$OUTPUT"' EXIT
|
||||
|
||||
{
|
||||
echo '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{}}'
|
||||
} | python a2a_mcp_server.py > "$OUTPUT" 2>&1
|
||||
|
||||
# The warning should mention "not a pipe" for operator visibility
|
||||
if grep -qi "not a pipe" "$OUTPUT"; then
|
||||
echo "PASS: Diagnostic warning emitted for non-pipe stdio"
|
||||
else
|
||||
echo "NOTE: No warning in output (may be suppressed by log level)"
|
||||
fi
|
||||
|
||||
- name: Run unit tests for stdio transport
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "=== Running stdio transport unit tests ==="
|
||||
python -m pytest tests/test_a2a_mcp_server.py::TestStdioPipeAssertion -v --no-cov
|
||||
@ -23,11 +23,11 @@
|
||||
# `feedback_behavior_based_ast_gates` — NOT grep-by-name. That way
|
||||
# job renames or matrix-expansion-induced churn produce honest signal.
|
||||
#
|
||||
# NOTE on protection endpoint scope: `GET /repos/.../branch_protections/{branch}`
|
||||
# requires repo-admin role in Gitea 1.22.6. If DRIFT_BOT_TOKEN lacks it,
|
||||
# the script skips that branch with a clear ::error:: diagnostic and exits 0
|
||||
# (the issue IS the alarm, not a red workflow). See provisioning trail in
|
||||
# the run step's GITEA_TOKEN env comment.
|
||||
# IMPORTANT — TRANSITIONAL STATE: molecule-core's ci.yml does NOT yet
|
||||
# contain the `all-required` sentinel job (RFC §4 Phase 4 adds it).
|
||||
# Until Phase 4 lands the detector will hard-fail with exit 3 on the
|
||||
# missing sentinel. That's intentional: a red workflow on a 5-min cron
|
||||
# is louder than a silent issue and forces Phase 4 to land soon.
|
||||
|
||||
name: ci-required-drift
|
||||
|
||||
|
||||
@ -70,12 +70,10 @@ jobs:
|
||||
changes:
|
||||
name: Detect changes
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 4 (RFC #219 §1): all required jobs >=98% green on main.
|
||||
# Flip confirmed 2026-05-12 via combined-status check of latest main
|
||||
# commit (all CI jobs green). `all-required` sentinel hard-fails
|
||||
# when this job fails; no Phase 3 suppression needed.
|
||||
# revert: add `continue-on-error: true` back if regressions appear.
|
||||
continue-on-error: false
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking
|
||||
# the PR. Follow-up PR flips this off after the surfaced defects
|
||||
# (if any) are triaged.
|
||||
continue-on-error: true
|
||||
outputs:
|
||||
platform: ${{ steps.check.outputs.platform }}
|
||||
canvas: ${{ steps.check.outputs.canvas }}
|
||||
@ -107,25 +105,16 @@ jobs:
|
||||
echo "scripts=true" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
# Workflow-only edits are covered by the workflow lint family
|
||||
# and by this workflow's always-present required jobs. Do not fan
|
||||
# those edits out into Go/Canvas/Python/shellcheck work; the
|
||||
# downstream jobs still emit their required contexts via no-op
|
||||
# steps when their surface flag is false.
|
||||
#
|
||||
# If the diff itself cannot be trusted, fail open by running every
|
||||
# surface instead of silently under-testing the PR.
|
||||
if ! DIFF=$(git diff --name-only "$BASE" HEAD 2>/dev/null); then
|
||||
echo "platform=true" >> "$GITHUB_OUTPUT"
|
||||
echo "canvas=true" >> "$GITHUB_OUTPUT"
|
||||
echo "python=true" >> "$GITHUB_OUTPUT"
|
||||
echo "scripts=true" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
echo "platform=$(echo "$DIFF" | grep -qE '^workspace-server/' && echo true || echo false)" >> "$GITHUB_OUTPUT"
|
||||
echo "canvas=$(echo "$DIFF" | grep -qE '^canvas/' && echo true || echo false)" >> "$GITHUB_OUTPUT"
|
||||
echo "python=$(echo "$DIFF" | grep -qE '^workspace/' && echo true || echo false)" >> "$GITHUB_OUTPUT"
|
||||
echo "scripts=$(echo "$DIFF" | grep -qE '^tests/e2e/|^scripts/|^infra/scripts/' && echo true || echo false)" >> "$GITHUB_OUTPUT"
|
||||
# Both .github/workflows/ci.yml AND .gitea/workflows/ci.yml count
|
||||
# as "this workflow changed" — either edit should force-run every
|
||||
# downstream job. The Gitea port follows the same shape as the
|
||||
# GitHub original so behavior matches when triggered on either
|
||||
# platform.
|
||||
DIFF=$(git diff --name-only "$BASE" HEAD 2>/dev/null || echo ".gitea/workflows/ci.yml")
|
||||
echo "platform=$(echo "$DIFF" | grep -qE '^workspace-server/|^\.gitea/workflows/ci\.yml$|^\.github/workflows/ci\.yml$' && echo true || echo false)" >> "$GITHUB_OUTPUT"
|
||||
echo "canvas=$(echo "$DIFF" | grep -qE '^canvas/|^\.gitea/workflows/ci\.yml$|^\.github/workflows/ci\.yml$' && echo true || echo false)" >> "$GITHUB_OUTPUT"
|
||||
echo "python=$(echo "$DIFF" | grep -qE '^workspace/|^\.gitea/workflows/ci\.yml$|^\.github/workflows/ci\.yml$' && echo true || echo false)" >> "$GITHUB_OUTPUT"
|
||||
echo "scripts=$(echo "$DIFF" | grep -qE '^tests/e2e/|^scripts/|^infra/scripts/|^\.gitea/workflows/ci\.yml$|^\.github/workflows/ci\.yml$' && echo true || echo false)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Platform (Go) — Go build/vet/test/lint + coverage gates. The always-run
|
||||
# + per-step gating shape preserves the GitHub-side required-check name
|
||||
@ -135,17 +124,7 @@ jobs:
|
||||
name: Platform (Go)
|
||||
needs: changes
|
||||
runs-on: ubuntu-latest
|
||||
# mc#774 (closed 2026-05-14): Phase 4 flip of the platform-build job.
|
||||
# Phase 4 (#656) originally flipped this to continue-on-error: false based on
|
||||
# Phase-3-masked "green on main 2026-05-12". Two failure classes then surfaced:
|
||||
# (1) 4x delegation_test.go sqlmock gaps (PR #669 / #634 fix-forward, closed).
|
||||
# (2) TestMCPHandler_CommitMemory_GlobalScope_Blocked (mcp_test.go:433):
|
||||
# OFFSEC-001 hardening collided with test assertion; tracked in mc#762.
|
||||
# Fix-forward for (1) landed in PR #669. The mc#762 gap (2) is a separate
|
||||
# issue — it does NOT block this flip because the test is already wrapped in
|
||||
# the diagnostic step with its own continue-on-error: true (line 203).
|
||||
# Flip confirmed by CI / Platform (Go) status = success on main HEAD 363905d3.
|
||||
continue-on-error: false
|
||||
continue-on-error: true
|
||||
defaults:
|
||||
run:
|
||||
working-directory: workspace-server
|
||||
@ -165,29 +144,10 @@ jobs:
|
||||
run: go build ./cmd/server
|
||||
# CLI (molecli) moved to standalone repo: git.moleculesai.app/molecule-ai/molecule-cli
|
||||
- if: needs.changes.outputs.platform == 'true'
|
||||
run: go vet ./...
|
||||
- if: needs.changes.outputs.platform == 'true'
|
||||
name: Install golangci-lint
|
||||
run: go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.12.2
|
||||
run: go vet ./... || true
|
||||
- if: needs.changes.outputs.platform == 'true'
|
||||
name: Run golangci-lint
|
||||
run: $(go env GOPATH)/bin/golangci-lint run --timeout 3m ./...
|
||||
- if: needs.changes.outputs.platform == 'true'
|
||||
name: Diagnostic — per-package verbose 60s
|
||||
run: |
|
||||
set +e
|
||||
go test -race -v -timeout 60s ./internal/handlers/... 2>&1 | tee /tmp/test-handlers.log
|
||||
handlers_exit=$?
|
||||
go test -race -v -timeout 60s ./internal/pendinguploads/... 2>&1 | tee /tmp/test-pu.log
|
||||
pu_exit=$?
|
||||
echo "::group::handlers exit=$handlers_exit (last 100 lines)"
|
||||
tail -100 /tmp/test-handlers.log
|
||||
echo "::endgroup::"
|
||||
echo "::group::pendinguploads exit=$pu_exit (last 100 lines)"
|
||||
tail -100 /tmp/test-pu.log
|
||||
echo "::endgroup::"
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
run: golangci-lint run --timeout 3m ./... || true
|
||||
- if: needs.changes.outputs.platform == 'true'
|
||||
name: Run tests with race detection and coverage
|
||||
run: go test -race -coverprofile=coverage.out ./...
|
||||
@ -296,8 +256,7 @@ jobs:
|
||||
name: Canvas (Next.js)
|
||||
needs: changes
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 4 (RFC #219 §1): confirmed green on main 2026-05-12.
|
||||
continue-on-error: false
|
||||
continue-on-error: true
|
||||
defaults:
|
||||
run:
|
||||
working-directory: canvas
|
||||
@ -343,8 +302,7 @@ jobs:
|
||||
name: Shellcheck (E2E scripts)
|
||||
needs: changes
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 4 (RFC #219 §1): confirmed green on main 2026-05-12.
|
||||
continue-on-error: false
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- if: needs.changes.outputs.scripts != 'true'
|
||||
run: echo "No tests/e2e/ or infra/scripts/ changes — skipping real shellcheck; this job always runs to satisfy the required-check name on branch protection."
|
||||
@ -370,54 +328,22 @@ jobs:
|
||||
run: |
|
||||
bash tests/e2e/test_model_slug.sh
|
||||
|
||||
- if: needs.changes.outputs.scripts == 'true'
|
||||
name: Test ECR promote-tenant-image script (mock-driven, no live infra)
|
||||
# Covers scripts/promote-tenant-image.sh — the codified
|
||||
# :staging-latest → :latest ECR promote + tenant fleet redeploy
|
||||
# closing molecule-ai/molecule-core#660. 40 mock-driven cases
|
||||
# exercise every exit path (preflight, snapshot, promote, redeploy
|
||||
# 403→SSM-refresh, verify, rollback). No live AWS/CP/SSM calls.
|
||||
run: |
|
||||
bash scripts/test-promote-tenant-image.sh
|
||||
|
||||
- if: needs.changes.outputs.scripts == 'true'
|
||||
name: Shellcheck promote-tenant-image script
|
||||
# scripts/ is excluded from the bulk shellcheck pass above (legacy
|
||||
# SC3040/SC3043 cleanup pending). Run shellcheck explicitly on
|
||||
# the promote script + its test harness so regressions there are
|
||||
# caught by the required check.
|
||||
run: |
|
||||
shellcheck --severity=warning \
|
||||
scripts/promote-tenant-image.sh \
|
||||
scripts/test-promote-tenant-image.sh
|
||||
|
||||
canvas-deploy-reminder:
|
||||
name: Canvas Deploy Reminder
|
||||
runs-on: ubuntu-latest
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
needs: [changes, canvas-build]
|
||||
# Keep the job itself always runnable. Gitea 1.22.6 leaves job-level
|
||||
# event/ref `if:` gates as pending on PRs, which blocks the combined
|
||||
# status even though this reminder is intentionally non-required.
|
||||
# Only fires on direct pushes to main (i.e. after staging→main promotion).
|
||||
if: needs.changes.outputs.canvas == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
steps:
|
||||
- name: Write deploy reminder to step summary
|
||||
env:
|
||||
COMMIT_SHA: ${{ github.sha }}
|
||||
CANVAS_CHANGED: ${{ needs.changes.outputs.canvas }}
|
||||
EVENT_NAME: ${{ github.event_name }}
|
||||
REF_NAME: ${{ github.ref }}
|
||||
# github.server_url resolves via the workflow-level env override
|
||||
# to the Gitea instance, so the RUN_URL points at the Gitea run
|
||||
# page (not github.com). See feedback_act_runner_github_server_url.
|
||||
RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ "$CANVAS_CHANGED" != "true" ] || [ "$EVENT_NAME" != "push" ] || [ "$REF_NAME" != "refs/heads/main" ]; then
|
||||
echo "Canvas deploy reminder not applicable for event=$EVENT_NAME ref=$REF_NAME canvas_changed=$CANVAS_CHANGED."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Write body to a temp file — avoids backtick escaping in shell.
|
||||
cat > /tmp/deploy-reminder.md << 'BODY'
|
||||
## Canvas build passed — deploy required
|
||||
@ -451,8 +377,7 @@ jobs:
|
||||
name: Python Lint & Test
|
||||
needs: changes
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 4 (RFC #219 §1): confirmed green on main 2026-05-12.
|
||||
continue-on-error: false
|
||||
continue-on-error: true
|
||||
env:
|
||||
WORKSPACE_ID: test
|
||||
defaults:
|
||||
@ -562,22 +487,16 @@ jobs:
|
||||
# hourly if this list diverges from status_check_contexts or from
|
||||
# audit-force-merge.yml's REQUIRED_CHECKS env (RFC §4 + §6).
|
||||
#
|
||||
# canvas-deploy-reminder is intentionally excluded from all-required.needs:
|
||||
# it needs canvas-build, which is skipped on CI-only PRs (canvas=false).
|
||||
# Including it in all-required.needs causes all-required to hang on
|
||||
# every CI-only PR. Keep it runnable on PRs via its own
|
||||
# `needs: [changes, canvas-build]` — the sentinel only aggregates the result.
|
||||
# Excluded from `needs:`: `canvas-deploy-reminder` — gated by
|
||||
# `if: ... github.event_name == 'push' && github.ref == 'refs/heads/main'`,
|
||||
# so on PR events it's legitimately `skipped`. The drift detector
|
||||
# explicitly excludes `github.event_name`-gated jobs from F1 (see
|
||||
# `.gitea/scripts/ci-required-drift.py::ci_job_names`).
|
||||
#
|
||||
# Phase 3 (RFC #219 §1) safety: underlying build jobs carry
|
||||
# continue-on-error: true so their failures are masked to null (2026-05-12: re-enabled mc#774 interim)
|
||||
# (Gitea suppresses status reporting for CoE jobs). This sentinel
|
||||
# runs with continue-on-error: false so it always reports its
|
||||
# result to the API — without this, the required-status entry
|
||||
# (CI / all-required (pull_request)) is never created, which
|
||||
# blocks PR merges. When Phase 3 ends, flip underlying jobs to
|
||||
# continue-on-error: false; this sentinel can then be flipped to
|
||||
# continue-on-error: true if a Phase-4 regression requires it.
|
||||
continue-on-error: false
|
||||
# NOTE: `continue-on-error: true` is intentionally NOT set here — Phase 3
|
||||
# (parent PR for ci.yml port, RFC §1) sets it on the underlying build
|
||||
# jobs to surface defects without blocking. The sentinel itself must
|
||||
# hard-fail; that's the whole point.
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 1
|
||||
needs:
|
||||
@ -586,41 +505,23 @@ jobs:
|
||||
- canvas-build
|
||||
- shellcheck
|
||||
- python-lint
|
||||
if: ${{ always() }}
|
||||
if: always()
|
||||
steps:
|
||||
- name: Assert every required dependency succeeded
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# `needs.*.result` is one of: success | failure | cancelled | skipped | null.
|
||||
# `needs.*.result` is one of: success | failure | cancelled | skipped
|
||||
# We assert success per dep (not != failure) — see RFC §2 reasoning above.
|
||||
# Null results are skipped: they come from Phase 3 (continue-on-error: true
|
||||
# suppresses status) or from jobs still in-flight. The sentinel succeeds
|
||||
# rather than blocking PRs on Phase 3 noise.
|
||||
results='${{ toJSON(needs) }}'
|
||||
echo "$results"
|
||||
echo "$results" | python3 -c '
|
||||
import json, sys
|
||||
ns = json.load(sys.stdin)
|
||||
# Phase 3 masked: jobs with continue-on-error: true may report "failure"
|
||||
# Remove when mc#774 handler test failures are resolved.
|
||||
PHASE3_MASKED = {"platform-build"}
|
||||
# Exclude null (Phase 3 suppressed / in-flight) from the bad list.
|
||||
bad = [(k, v.get("result")) for k, v in ns.items()
|
||||
if v.get("result") not in ("success", None, "cancelled", "skipped") and k not in PHASE3_MASKED]
|
||||
bad = [(k, v.get("result")) for k, v in ns.items() if v.get("result") != "success"]
|
||||
if bad:
|
||||
print(f"FAIL: jobs not green:", file=sys.stderr)
|
||||
for k, r in bad:
|
||||
print(f" - {k}: {r}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
pending = [(k, v.get("result")) for k, v in ns.items()
|
||||
if v.get("result") is None]
|
||||
cancelled = [(k, v.get("result")) for k, v in ns.items()
|
||||
if v.get("result") == "cancelled"]
|
||||
if pending:
|
||||
print(f"WARN: {len(pending)} job(s) still in-flight (result=null): " +
|
||||
", ".join(k for k, _ in pending), file=sys.stderr)
|
||||
if cancelled:
|
||||
print(f"INFO: {len(cancelled)} job(s) masked by continue-on-error: " +
|
||||
", ".join(k for k, _ in cancelled), file=sys.stderr)
|
||||
print(f"OK: all {len(ns)} required jobs succeeded (or Phase-3 suppressed)")
|
||||
print(f"OK: all {len(ns)} required jobs succeeded")
|
||||
'
|
||||
|
||||
@ -90,7 +90,6 @@ jobs:
|
||||
name: Synthetic E2E against staging
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
# Bumped from 12 → 20 (2026-05-04). Tenant user-data install phase
|
||||
# (apt-get update + install docker.io/jq/awscli/caddy + snap install
|
||||
|
||||
@ -103,7 +103,6 @@ jobs:
|
||||
detect-changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
outputs:
|
||||
api: ${{ steps.decide.outputs.api }}
|
||||
@ -155,7 +154,6 @@ jobs:
|
||||
name: E2E API Smoke Test
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
@ -166,6 +164,7 @@ jobs:
|
||||
# we let Docker assign an ephemeral host port.
|
||||
PG_CONTAINER: pg-e2e-api-${{ github.run_id }}-${{ github.run_attempt }}
|
||||
REDIS_CONTAINER: redis-e2e-api-${{ github.run_id }}-${{ github.run_attempt }}
|
||||
PORT: "8080"
|
||||
steps:
|
||||
- name: No-op pass (paths filter excluded this commit)
|
||||
if: needs.detect-changes.outputs.api != 'true'
|
||||
@ -269,20 +268,6 @@ jobs:
|
||||
if: needs.detect-changes.outputs.api == 'true'
|
||||
working-directory: workspace-server
|
||||
run: go build -o platform-server ./cmd/server
|
||||
- name: Pick platform port
|
||||
if: needs.detect-changes.outputs.api == 'true'
|
||||
run: |
|
||||
PLATFORM_PORT=$(python3 - <<'PY'
|
||||
import socket
|
||||
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
||||
s.bind(("127.0.0.1", 0))
|
||||
print(s.getsockname()[1])
|
||||
PY
|
||||
)
|
||||
echo "PORT=${PLATFORM_PORT}" >> "$GITHUB_ENV"
|
||||
echo "BASE=http://127.0.0.1:${PLATFORM_PORT}" >> "$GITHUB_ENV"
|
||||
echo "Platform host port: ${PLATFORM_PORT}"
|
||||
- name: Start platform (background)
|
||||
if: needs.detect-changes.outputs.api == 'true'
|
||||
working-directory: workspace-server
|
||||
@ -295,7 +280,7 @@ jobs:
|
||||
if: needs.detect-changes.outputs.api == 'true'
|
||||
run: |
|
||||
for i in $(seq 1 30); do
|
||||
if curl -sf "$BASE/health" > /dev/null; then
|
||||
if curl -sf http://127.0.0.1:8080/health > /dev/null; then
|
||||
echo "Platform up after ${i}s"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
@ -70,7 +70,6 @@ jobs:
|
||||
detect-changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
outputs:
|
||||
canvas: ${{ steps.decide.outputs.canvas }}
|
||||
@ -119,7 +118,6 @@ jobs:
|
||||
name: Canvas tabs E2E
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 40
|
||||
|
||||
@ -168,7 +166,6 @@ jobs:
|
||||
|
||||
- name: Install Playwright browsers
|
||||
if: needs.detect-changes.outputs.canvas == 'true'
|
||||
timeout-minutes: 10
|
||||
run: npx playwright install --with-deps chromium
|
||||
|
||||
- name: Run staging canvas E2E
|
||||
|
||||
@ -84,7 +84,6 @@ jobs:
|
||||
name: E2E Staging External Runtime
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 25
|
||||
|
||||
|
||||
@ -88,20 +88,17 @@ jobs:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
fetch-depth: 1
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
|
||||
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: "3.11"
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
|
||||
- name: YAML validation (best-effort)
|
||||
run: |
|
||||
echo "e2e-staging-saas.yml — PR validation: workflow YAML is valid."
|
||||
echo "E2E step runs only when provisioning-critical files change."
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
|
||||
# Actual E2E: runs on trunk pushes (main + staging). NOT the PR-fire-only
|
||||
@ -112,7 +109,6 @@ jobs:
|
||||
# Only runs on trunk pushes. PR paths get pr-validate instead.
|
||||
if: github.event.pull_request.base.ref == ''
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 45
|
||||
permissions:
|
||||
|
||||
@ -37,7 +37,6 @@ jobs:
|
||||
name: Intentional-failure teardown sanity
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 20
|
||||
|
||||
|
||||
@ -32,22 +32,12 @@ on:
|
||||
# iterating all open PRs when PR_NUMBER is empty.
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
# read: contents — for checkout (base ref, not PR head for security)
|
||||
# read: pull-requests — for reading PR info via API
|
||||
# write: pull-requests — for posting/updating gate-check comments
|
||||
# Without this the token cannot POST/PATCH /issues/comments → 403.
|
||||
contents: read
|
||||
pull-requests: write
|
||||
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
jobs:
|
||||
# bp-exempt: PR advisory bot; merge blocking is enforced by CI status and branch protection.
|
||||
gate-check:
|
||||
runs-on: ubuntu-latest
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true # Never block on our own detector failing
|
||||
steps:
|
||||
- name: Check out BASE ref (never PR-head under pull_request_target)
|
||||
@ -64,7 +54,6 @@ jobs:
|
||||
if: github.event_name == 'pull_request_target' || github.event.inputs.pr_number != ''
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number || github.event.inputs.pr_number }}
|
||||
POST_COMMENT: ${{ github.event.inputs.post_comment || 'true' }}
|
||||
run: |
|
||||
@ -79,33 +68,21 @@ jobs:
|
||||
if: github.event_name == 'schedule'
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
REPO: ${{ github.repository }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Fetch all open PRs and run gate-check on each
|
||||
# socket.setdefaulttimeout(15): defence-in-depth for missing SOP_TIER_CHECK_TOKEN.
|
||||
# gate_check.py uses timeout=15 on every urlopen call; this catches the
|
||||
# inline Python polling loop too (issue #603).
|
||||
pr_numbers=$(python3 <<'PY'
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
import urllib.request
|
||||
|
||||
socket.setdefaulttimeout(15)
|
||||
token = os.environ["GITEA_TOKEN"]
|
||||
repo = os.environ["REPO"]
|
||||
req = urllib.request.Request(
|
||||
f"https://git.moleculesai.app/api/v1/repos/{repo}/pulls?state=open&limit=100",
|
||||
headers={"Authorization": f"token {token}", "Accept": "application/json"},
|
||||
)
|
||||
with urllib.request.urlopen(req) as r:
|
||||
prs = json.loads(r.read())
|
||||
for pr in prs:
|
||||
print(pr["number"])
|
||||
PY
|
||||
)
|
||||
pr_numbers=$(python3 -c "
|
||||
import urllib.request, json, os
|
||||
token = os.environ['GITEA_TOKEN']
|
||||
req = urllib.request.Request(
|
||||
'https://git.moleculesai.app/api/v1/repos/${{ github.repository }}/pulls?state=open&limit=100',
|
||||
headers={'Authorization': f'token {token}', 'Accept': 'application/json'}
|
||||
)
|
||||
with urllib.request.urlopen(req) as r:
|
||||
prs = json.loads(r.read())
|
||||
for pr in prs:
|
||||
print(pr['number'])
|
||||
")
|
||||
for pr in $pr_numbers; do
|
||||
echo "Checking PR #$pr..."
|
||||
python3 tools/gate-check-v3/gate_check.py \
|
||||
|
||||
@ -1,51 +0,0 @@
|
||||
name: gitea-merge-queue
|
||||
|
||||
# External serialized merge queue for Gitea 1.22.6.
|
||||
#
|
||||
# Gitea's `pull_auto_merge` table is not a real merge queue: it does not
|
||||
# serialize green PRs against a freshly-tested latest main. This workflow runs
|
||||
# the user-space queue bot, one PR per tick, using the non-bypass merge actor.
|
||||
#
|
||||
# Queue contract:
|
||||
# - add label `merge-queue` to an open same-repo PR
|
||||
# - bot updates stale PR heads with current main, then waits for CI
|
||||
# - bot merges only when current main is green and required PR contexts pass
|
||||
# - add `merge-queue-hold` to pause a queued PR without removing it
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '*/5 * * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: gitea-merge-queue-${{ github.repository }}
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
queue:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Check out queue script from main
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event.repository.default_branch }}
|
||||
|
||||
- name: Process one queued PR
|
||||
env:
|
||||
# AUTO_SYNC_TOKEN is the devops-engineer persona PAT. It is the
|
||||
# non-bypass merge actor allowed by branch protection.
|
||||
GITEA_TOKEN: ${{ secrets.AUTO_SYNC_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
WATCH_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
QUEUE_LABEL: merge-queue
|
||||
HOLD_LABEL: merge-queue-hold
|
||||
UPDATE_STYLE: merge
|
||||
REQUIRED_CONTEXTS: >-
|
||||
CI / all-required (pull_request),
|
||||
sop-checklist / all-items-acked (pull_request)
|
||||
run: python3 .gitea/scripts/gitea-merge-queue.py
|
||||
@ -78,8 +78,7 @@ jobs:
|
||||
detect-changes:
|
||||
name: detect-changes
|
||||
runs-on: ubuntu-latest
|
||||
# mc#774 Phase 3 (RFC §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
continue-on-error: true
|
||||
outputs:
|
||||
handlers: ${{ steps.filter.outputs.handlers }}
|
||||
@ -90,25 +89,18 @@ jobs:
|
||||
- id: filter
|
||||
# Inline replacement for dorny/paths-filter — see e2e-api.yml.
|
||||
run: |
|
||||
# Gitea Actions evaluates github.event.before to empty string in shell
|
||||
# scripts. Use GITHUB_EVENT_BEFORE shell env var instead (Gitea
|
||||
# correctly populates it for push events). PR case uses template var.
|
||||
BASE=""
|
||||
BASE="${GITHUB_BASE_REF:-${{ github.event.before }}}"
|
||||
if [ "${{ github.event_name }}" = "pull_request" ] && [ -n "${{ github.event.pull_request.base.sha }}" ]; then
|
||||
BASE="${{ github.event.pull_request.base.sha }}"
|
||||
elif [ -n "$GITHUB_EVENT_BEFORE" ]; then
|
||||
BASE="$GITHUB_EVENT_BEFORE"
|
||||
fi
|
||||
if [ -z "$BASE" ] || echo "$BASE" | grep -qE '^0+$'; then
|
||||
echo "handlers=true" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
# timeout 30 guards against the case where BASE points to a ref that
|
||||
# git can resolve but cat-file hangs (rare on corrupted objects).
|
||||
if ! timeout 30 git cat-file -e "$BASE" 2>/dev/null; then
|
||||
if ! git cat-file -e "$BASE" 2>/dev/null; then
|
||||
git fetch --depth=1 origin "$BASE" 2>/dev/null || true
|
||||
fi
|
||||
if ! timeout 30 git cat-file -e "$BASE" 2>/dev/null; then
|
||||
if ! git cat-file -e "$BASE" 2>/dev/null; then
|
||||
echo "handlers=true" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
@ -126,8 +118,7 @@ jobs:
|
||||
name: Handlers Postgres Integration
|
||||
needs: detect-changes
|
||||
runs-on: ubuntu-latest
|
||||
# mc#774 Phase 3 (RFC §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
continue-on-error: true
|
||||
env:
|
||||
# Unique name per run so concurrent jobs don't collide on the
|
||||
|
||||
@ -60,11 +60,9 @@ env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
jobs:
|
||||
# bp-exempt: change detector only; downstream Harness Replays is the meaningful gate.
|
||||
detect-changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
outputs:
|
||||
run: ${{ steps.decide.outputs.run }}
|
||||
@ -133,14 +131,7 @@ jobs:
|
||||
RESP=$(curl -sS --fail --max-time 30 \
|
||||
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||
-H "Accept: application/json" \
|
||||
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/compare/$BASE...$HEAD") || {
|
||||
# If Gitea's Compare API is slow/unavailable, choose the conservative
|
||||
# behavior: run the harness instead of failing the detector and polluting
|
||||
# main with a red non-gate context.
|
||||
echo "run=true" >> "$GITHUB_OUTPUT"
|
||||
echo "debug=compare-api-unavailable base=$BASE head=$HEAD" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
}
|
||||
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/compare/$BASE...$HEAD")
|
||||
DIFF_FILES=$(echo "$RESP" | bash .gitea/scripts/compare-api-diff-files.py 2>/dev/null || true)
|
||||
|
||||
echo "debug=diff-base=$BASE diff-files=$DIFF_FILES" >> "$GITHUB_OUTPUT"
|
||||
@ -158,13 +149,11 @@ jobs:
|
||||
# matches e2e-api.yml — see that workflow's comment for why a
|
||||
# job-level `if: false` would block branch protection via the
|
||||
# SKIPPED-in-set bug.
|
||||
# bp-exempt: path-filtered replay suite; CI / all-required is the branch-protection aggregate.
|
||||
harness-replays:
|
||||
needs: detect-changes
|
||||
name: Harness Replays
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
@ -231,14 +220,12 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -z "${MOLECULE_GITEA_TOKEN}" ]; then
|
||||
echo "::warning::AUTO_SYNC_TOKEN not set — using anonymous clone (repos are public per manifest.json OSS contract)"
|
||||
echo "::error::AUTO_SYNC_TOKEN secret is empty — register the devops-engineer persona PAT in repo Actions secrets"
|
||||
exit 1
|
||||
fi
|
||||
mkdir -p .tenant-bundle-deps
|
||||
# Strip JSON5 comments before jq parsing — Integration Tester appends
|
||||
# `// Triggered by ...` which breaks `jq` in clone-manifest.sh.
|
||||
sed '/^[[:space:]]*\/\//d' manifest.json > .manifest-stripped.json
|
||||
bash scripts/clone-manifest.sh \
|
||||
.manifest-stripped.json \
|
||||
manifest.json \
|
||||
.tenant-bundle-deps/workspace-configs-templates \
|
||||
.tenant-bundle-deps/org-templates \
|
||||
.tenant-bundle-deps/plugins
|
||||
|
||||
@ -1,120 +0,0 @@
|
||||
name: lint-bp-context-emit-match
|
||||
|
||||
# Tier 2f scheduled lint (per mc#774) — detects drift between
|
||||
# `branch_protections/<branch>.status_check_contexts` and the set of
|
||||
# contexts emitted by `.gitea/workflows/*.yml`.
|
||||
#
|
||||
# Rule
|
||||
# ----
|
||||
# For each protected branch context (Source A — BP), there must exist
|
||||
# at least one emitting workflow + job pair (Source B — workflow YAML
|
||||
# + on:-event mapping) whose runtime status-name maps to it. The
|
||||
# inverse direction (emitter without BP context) is informational
|
||||
# only — Tier 2g handles that at PR-time.
|
||||
#
|
||||
# Why this exists
|
||||
# ---------------
|
||||
# A BP-required context with no emitter blocks merges forever — Gitea
|
||||
# 1.22.6 treats absent-as-`pending`, NOT absent-as-`skipped`. The
|
||||
# phantom-required-check class previously surfaced as
|
||||
# `feedback_phantom_required_check_after_gitea_migration` (a port
|
||||
# kept the GitHub context name after rename to Gitea, but no
|
||||
# workflow emitted under the new name).
|
||||
#
|
||||
# This lint catches the same class structurally + a forward case:
|
||||
# workflow renamed/deleted while still in BP.
|
||||
#
|
||||
# Scope
|
||||
# -----
|
||||
# Scheduled daily. We DON'T run on `pull_request` because (a) the
|
||||
# emitter side moves with PR diffs (transitional state false-flags)
|
||||
# and (b) Tier 2g handles emitter-side drift at PR-time.
|
||||
#
|
||||
# Cross-repo
|
||||
# ----------
|
||||
# Today this runs only on molecule-core/main. Per internal#349
|
||||
# (cross-repo BP sweep) Class-D repos will get the same lint after
|
||||
# their BP rollouts.
|
||||
#
|
||||
# Auth
|
||||
# ----
|
||||
# `GET /repos/.../branch_protections/{branch}` requires repo-admin
|
||||
# role on Gitea 1.22.6. We use DRIFT_BOT_TOKEN (same persona as
|
||||
# ci-required-drift.yml — `internal#329` provisioning trail).
|
||||
# Graceful-degrade per Tier 2a contract: 403/404 → exit 0 with
|
||||
# ::error::.
|
||||
#
|
||||
# Idempotency
|
||||
# -----------
|
||||
# The drift issue is filed with title prefix
|
||||
# `[ci-bp-drift] {repo}/{branch}: BP→emitter mismatch`. The script
|
||||
# searches OPEN issues for an exact title-prefix match and PATCHes
|
||||
# the existing issue (if any) instead of POSTing a duplicate.
|
||||
# Mirrors `ci-required-drift.py`'s contract.
|
||||
#
|
||||
# Phase contract (RFC internal#219 §1 ladder)
|
||||
# -------------------------------------------
|
||||
# Lands at `continue-on-error: true` (Phase 3). After 7 days of clean
|
||||
# scheduled runs on `main`, flip to `false` so a scheduled failure
|
||||
# becomes a hard CI signal.
|
||||
#
|
||||
# Cross-links
|
||||
# -----------
|
||||
# - mc#774 (the RFC that specs this lint)
|
||||
# - internal#349 (cross-repo BP sweep)
|
||||
# - feedback_phantom_required_check_after_gitea_migration
|
||||
# - feedback_tier_label_ids_are_per_repo
|
||||
# - ci-required-drift.yml (F2 detector, narrower-scope sibling)
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Daily at 03:31 UTC — off-peak, prime-staggered from other
|
||||
# scheduled jobs (ci-required-drift :00 hourly, lint-coe-tracking
|
||||
# 13:11). At 03:31 the CI fleet is quietest in EMEA hours.
|
||||
- cron: '31 3 * * *'
|
||||
workflow_dispatch:
|
||||
# No `push` / `pull_request` here — Tier 2g owns PR-time drift.
|
||||
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write # needed to file/edit the drift issue
|
||||
|
||||
concurrency:
|
||||
group: lint-bp-context-emit-match-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: lint-bp-context-emit-match
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
# Phase 3 (RFC #219 §1): surface drift without blocking. After 7
|
||||
# clean scheduled runs on main, flip to false so a scheduled
|
||||
# failure is a hard CI signal.
|
||||
continue-on-error: true # mc#774 Phase 3 — flip to false after 7 clean main runs
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- name: Install PyYAML
|
||||
run: python -m pip install --quiet 'PyYAML==6.0.2'
|
||||
- name: Run lint-bp-context-emit-match
|
||||
env:
|
||||
# DRIFT_BOT_TOKEN — repo-admin on this repo (internal#329
|
||||
# provisioning trail). Required for branch_protections read.
|
||||
GITEA_TOKEN: ${{ secrets.DRIFT_BOT_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
BRANCH: main
|
||||
WORKFLOWS_DIR: .gitea/workflows
|
||||
DRIFT_LABEL: ci-bp-drift
|
||||
GITHUB_RUN_URL: https://git.moleculesai.app/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
run: python3 .gitea/scripts/lint_bp_context_emit_match.py
|
||||
- name: Run lint-bp-context-emit-match unit tests
|
||||
run: |
|
||||
python -m pip install --quiet pytest
|
||||
python3 -m pytest tests/test_lint_bp_context_emit_match.py -v
|
||||
@ -1,122 +0,0 @@
|
||||
name: lint-continue-on-error-tracking
|
||||
|
||||
# Tier 2e hard-gate lint (per mc#774) — every
|
||||
# `continue-on-error: true` in `.gitea/workflows/*.yml` must carry a
|
||||
# `# mc#NNNN` or `# internal#NNNN` tracker comment within 2 lines,
|
||||
# the referenced issue must be OPEN, and ≤14 days old.
|
||||
#
|
||||
# Why this exists
|
||||
# ---------------
|
||||
# `continue-on-error: true` on `platform-build` had been hiding
|
||||
# mc#774-class regressions for ~3 weeks before #656 surfaced them on
|
||||
# 2026-05-12. A 14-day cap on tracker age forces a review cycle and
|
||||
# surfaces mask-drift within at most 14 days of the original defect.
|
||||
# Each `continue-on-error: true` gets a paper trail — close or renew.
|
||||
#
|
||||
# How the gate works
|
||||
# ------------------
|
||||
# 1. Walk `.gitea/workflows/*.yml` via PyYAML's line-tracking loader
|
||||
# (per `feedback_behavior_based_ast_gates`) and find every job
|
||||
# whose `continue-on-error` evaluates truthy (`true` or string
|
||||
# `"true"` — Gitea's evaluator coerces strings).
|
||||
# 2. For each, scan ±2 lines of the directive's source line for a
|
||||
# `# mc#NNNN` or `# internal#NNNN` comment. Inline-trailing
|
||||
# comments on the directive line count.
|
||||
# 3. For each tracker reference, GET the issue from the Gitea API.
|
||||
# Validate: exists, `state == open`, `created_at` ≤ MAX_AGE_DAYS.
|
||||
# 4. Aggregate ALL violations (not short-circuit) and exit 1 if any.
|
||||
#
|
||||
# Triggers
|
||||
# --------
|
||||
# Runs on PR events (paths-filter on `.gitea/workflows/**`) AND on
|
||||
# a daily schedule. PR runs catch the violation at introduction time.
|
||||
# Schedule runs catch the AGE-EXPIRY class: a tracker that was ≤14d
|
||||
# old when the PR landed but is now 20d old, with the underlying
|
||||
# defect still unfixed. Per `feedback_chained_defects_in_never_tested_workflows`,
|
||||
# scheduled drift detection is the second half of the gate.
|
||||
#
|
||||
# Phase contract (RFC internal#219 §1 ladder)
|
||||
# -------------------------------------------
|
||||
# Lands at `continue-on-error: true` (Phase 3 — surface broken shapes
|
||||
# without blocking). The pre-existing `continue-on-error: true`
|
||||
# directives on `main` will all violate this lint at first
|
||||
# (intentional — they're the masked defects this lint exists to
|
||||
# surface). Each must be triaged: file a fresh tracker comment,
|
||||
# close-and-flip, or document the deliberate keep-mask in a fresh
|
||||
# 14-day-renewable tracker. After main is clean for 3 days,
|
||||
# follow-up PR flips this workflow's continue-on-error to false.
|
||||
# Tracking: mc#774.
|
||||
#
|
||||
# Cross-links
|
||||
# -----------
|
||||
# - mc#774 (the RFC that specs this lint)
|
||||
# - mc#774 (the empirical masked-3-weeks case)
|
||||
# - feedback_chained_defects_in_never_tested_workflows
|
||||
# - feedback_behavior_based_ast_gates
|
||||
# - feedback_strict_root_only_after_class_a
|
||||
#
|
||||
# Auth: DRIFT_BOT_TOKEN — same persona used by ci-required-drift.yml
|
||||
# (provisioned under internal#329). Auto-injected GITHUB_TOKEN is
|
||||
# insufficient because `internal#NNN` references cross repositories
|
||||
# (molecule-core → molecule-ai/internal).
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
paths:
|
||||
- '.gitea/workflows/**'
|
||||
- '.gitea/scripts/lint_continue_on_error_tracking.py'
|
||||
- 'tests/test_lint_continue_on_error_tracking.py'
|
||||
push:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- '.gitea/workflows/**'
|
||||
- '.gitea/scripts/lint_continue_on_error_tracking.py'
|
||||
schedule:
|
||||
# Daily at 13:11 UTC — off-peak, prime-staggered from the other
|
||||
# Tier-2 lint schedules (ci-required-drift runs hourly :00).
|
||||
- cron: '11 13 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: lint-coe-tracking-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# bp-exempt: meta-lint for masked jobs; tracked separately until masks are burned down.
|
||||
lint:
|
||||
name: lint-continue-on-error-tracking
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
# Phase 3 (RFC #219 §1): surface masked defects without blocking
|
||||
# PRs. Pre-existing continue-on-error: true directives on main
|
||||
# all violate this lint at first — intentional. Flip to false
|
||||
# follow-up after main is clean for 3 days. mc#774.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true # mc#774 Phase 3 mask — 14d forced-renewal cadence
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- name: Install PyYAML
|
||||
run: python -m pip install --quiet 'PyYAML==6.0.2'
|
||||
- name: Run lint-continue-on-error-tracking
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.DRIFT_BOT_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
INTERNAL_REPO: molecule-ai/internal
|
||||
WORKFLOWS_DIR: .gitea/workflows
|
||||
MAX_AGE_DAYS: '14'
|
||||
run: python3 .gitea/scripts/lint_continue_on_error_tracking.py
|
||||
- name: Run lint-continue-on-error-tracking unit tests
|
||||
run: |
|
||||
python -m pip install --quiet pytest
|
||||
python3 -m pytest tests/test_lint_continue_on_error_tracking.py -v
|
||||
@ -30,16 +30,10 @@ name: Lint curl status-code capture
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.gitea/workflows/**'
|
||||
- '.gitea/scripts/lint-curl-status-capture.py'
|
||||
- 'tests/test_lint_curl_status_capture.py'
|
||||
paths: ['.gitea/workflows/**']
|
||||
push:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- '.gitea/workflows/**'
|
||||
- '.gitea/scripts/lint-curl-status-capture.py'
|
||||
- 'tests/test_lint_curl_status_capture.py'
|
||||
paths: ['.gitea/workflows/**']
|
||||
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
@ -51,10 +45,60 @@ jobs:
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking
|
||||
# the PR. Follow-up PR flips this off after surfaced defects are
|
||||
# triaged.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
- name: Find curl ... -w '%{http_code}' ... || echo "000" subshells
|
||||
run: |
|
||||
python3 .gitea/scripts/lint-curl-status-capture.py
|
||||
set -uo pipefail
|
||||
# Multi-line aware: look for `$(curl ... -w '%{http_code}' ... || echo "000")`
|
||||
# subshell where the entire command-substitution wraps a curl that
|
||||
# ends with `|| echo "000"`. Must distinguish from the SAFE shape
|
||||
# `$(cat tempfile 2>/dev/null || echo "000")` — `cat` with a missing
|
||||
# tempfile produces empty stdout, no pollution.
|
||||
python3 <<'PY'
|
||||
import os, re, sys, glob
|
||||
|
||||
BAD_FILES = []
|
||||
|
||||
# Match the buggy substitution across newlines: $(curl ... -w '%{http_code}' ... || echo "000")
|
||||
# The `\\n` is the bash line-continuation that lets curl flags span lines.
|
||||
# We collapse continuation lines first, then look for the single-line bad pattern.
|
||||
PATTERN = re.compile(
|
||||
r'\$\(\s*curl\b[^)]*-w\s*[\'"]%\{http_code\}[\'"][^)]*\|\|\s*echo\s+"000"\s*\)',
|
||||
re.DOTALL,
|
||||
)
|
||||
|
||||
# Self-skip: this lint workflow contains the literal anti-pattern in
|
||||
# its own docstring — that's intentional, not a bug.
|
||||
SELF = ".gitea/workflows/lint-curl-status-capture.yml"
|
||||
|
||||
for f in sorted(glob.glob(".gitea/workflows/*.yml")):
|
||||
if f == SELF:
|
||||
continue
|
||||
with open(f) as fh:
|
||||
content = fh.read()
|
||||
# Collapse bash line-continuations (\\\n + leading whitespace)
|
||||
# into a single logical line so the regex can see the full
|
||||
# curl invocation as one chunk.
|
||||
flat = re.sub(r'\\\s*\n\s*', ' ', content)
|
||||
for m in PATTERN.finditer(flat):
|
||||
BAD_FILES.append((f, m.group(0)[:120]))
|
||||
|
||||
if not BAD_FILES:
|
||||
print("OK No curl-status-capture pollution patterns detected")
|
||||
sys.exit(0)
|
||||
|
||||
print(f"::error::Found {len(BAD_FILES)} curl-status-capture pollution site(s):")
|
||||
for f, snippet in BAD_FILES:
|
||||
print(f"::error file={f}::Curl status-capture pollution: '|| echo \"000\"' inside a $(curl ... -w '%{{http_code}}' ...) subshell. On non-2xx or connection failure, curl's -w writes a status, then exits non-zero, then the || echo appends another '000' — producing 'HTTP 000000' or '409000' that fails comparisons silently. Fix: route -w into a tempfile so the exit code can't pollute stdout. See memory feedback_curl_status_capture_pollution.md.")
|
||||
print(f" matched: {snippet}...")
|
||||
print()
|
||||
print("Fix template:")
|
||||
print(' set +e')
|
||||
print(' curl ... -w \'%{http_code}\' >code.txt 2>/dev/null')
|
||||
print(' set -e')
|
||||
print(' HTTP_CODE=$(cat code.txt 2>/dev/null)')
|
||||
print(' [ -z "$HTTP_CODE" ] && HTTP_CODE="000"')
|
||||
sys.exit(1)
|
||||
PY
|
||||
|
||||
@ -1,134 +0,0 @@
|
||||
name: lint-mask-pr-atomicity
|
||||
|
||||
# Tier 2d hard-gate lint (per mc#774) — blocks PRs that touch
|
||||
# `.gitea/workflows/ci.yml` and modify ONLY ONE of {continue-on-error,
|
||||
# all-required.sentinel.needs} without a `Paired: #NNN` reference in
|
||||
# the PR body or in a commit message.
|
||||
#
|
||||
# Why this exists
|
||||
# ---------------
|
||||
# PR#665 (interim `continue-on-error: true` on `platform-build`) and
|
||||
# PR#668 (sentinel-`needs` demotion of the same job) were designed as a
|
||||
# pair but merged solo — #665 landed at 04:47Z 2026-05-12, #668 was
|
||||
# still open at 05:07Z when the main-red watchdog (#674) fired. Result:
|
||||
# ~20 minutes of `main` red and a cascade of false-positives on
|
||||
# unrelated PRs. This lint structurally prevents that class.
|
||||
#
|
||||
# How the gate works
|
||||
# ------------------
|
||||
# 1. The workflow runs on every PR whose diff touches ci.yml (paths
|
||||
# filter). It is NOT a required check on `main` because the rule is
|
||||
# diff-based — running it on PRs that don't touch ci.yml would
|
||||
# produce a `pending` status forever (per
|
||||
# `feedback_path_filtered_workflow_cant_be_required`).
|
||||
# 2. The script reads `BASE_SHA:ci.yml` and `HEAD_SHA:ci.yml`, parses
|
||||
# both via PyYAML AST (per `feedback_behavior_based_ast_gates` — no
|
||||
# grep, no regex on the raw text — so a YAML-shape refactor still
|
||||
# detects).
|
||||
# 3. Walks `jobs.*.continue-on-error` on each side; flags any value
|
||||
# diff. Reads `jobs.all-required.needs` on each side; flags any
|
||||
# set diff (order-insensitive — `needs:` is engine-unordered).
|
||||
# 4. If both predicates fired → atomic, OK. If neither → no risk, OK.
|
||||
# If exactly one fired → require `Paired: #NNN` in PR body OR in
|
||||
# any commit message between base..head; else fail.
|
||||
#
|
||||
# Phase contract (RFC internal#219 §1 ladder)
|
||||
# -------------------------------------------
|
||||
# This workflow lands at `continue-on-error: true` (Phase 3 — surface
|
||||
# regressions without blocking PRs while the rule beds in).
|
||||
# Follow-up PR flips to `false` once we have ≥3 days of clean runs on
|
||||
# `main` and no false-positives. Tracking issue: mc#774.
|
||||
#
|
||||
# Cross-links
|
||||
# -----------
|
||||
# - mc#774 (the RFC that specs this lint)
|
||||
# - PR#665 / PR#668 (the empirical split-pair)
|
||||
# - mc#774 (the main-red incident the split caused)
|
||||
# - feedback_strict_root_only_after_class_a
|
||||
# - feedback_behavior_based_ast_gates
|
||||
#
|
||||
# Auth: only needs the auto-injected GITHUB_TOKEN (read-only, repo
|
||||
# scope). No DRIFT_BOT_TOKEN needed — Tier 2d does NOT call
|
||||
# branch_protections (Tier 2g/f do).
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, edited]
|
||||
# `edited` is included because the rule depends on PR_BODY: a user
|
||||
# may add `Paired: #NNN` after first push to satisfy the lint. The
|
||||
# rerun on `edited` lets the PR turn green without an empty
|
||||
# commit. Gitea 1.22.6 fires `edited` on body changes — verified
|
||||
# via gitea-source/models/issues/pull_list.go::triggerNewPRWebhook.
|
||||
paths:
|
||||
- '.gitea/workflows/ci.yml'
|
||||
- '.gitea/scripts/lint_mask_pr_atomicity.py'
|
||||
- '.gitea/workflows/lint-mask-pr-atomicity.yml'
|
||||
- 'tests/test_lint_mask_pr_atomicity.py'
|
||||
|
||||
env:
|
||||
# Belt-and-suspenders against the runner-default trap
|
||||
# (feedback_act_runner_github_server_url). Runners are configured
|
||||
# with this env via /opt/molecule/runners/config.yaml, but pinning
|
||||
# at the workflow level protects against a runner regenerated
|
||||
# without the config file.
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
|
||||
# Per-PR concurrency — re-pushes cancel previous runs to keep the
|
||||
# queue short. The lint is cheap (one git show + log + a YAML parse).
|
||||
concurrency:
|
||||
group: lint-mask-pr-atomicity-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# bp-exempt: meta-lint advisory during mask burn-down; CI / all-required gates merges.
|
||||
scan:
|
||||
name: lint-mask-pr-atomicity
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
# Phase 3 (RFC #219 §1): surface broken shapes without blocking
|
||||
# PRs. Follow-up PR flips this to `false` once recent runs on main
|
||||
# are confirmed clean (eat-our-own-dogfood discipline mirrors
|
||||
# PR#673's same-shape comment). Tracking: mc#774.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- name: Check out PR head with full history (need base SHA blobs)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
# `git show <base-sha>:<path>` needs the base SHA's blobs.
|
||||
# Shallow=1 would miss it. Same rationale as PR#673 and
|
||||
# check-migration-collisions.yml.
|
||||
fetch-depth: 0
|
||||
- name: Set up Python (PyYAML for AST parsing)
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- name: Install PyYAML
|
||||
# Same pin as ci-required-drift.yml + the rest of the Tier 2
|
||||
# lint family — keep runner-cache hits uniform.
|
||||
run: python -m pip install --quiet 'PyYAML==6.0.2'
|
||||
- name: Ensure base ref is reachable locally
|
||||
# fetch-depth=0 usually pulls the base too, but explicit-fetch
|
||||
# is cheap insurance against runner-version drift (matches the
|
||||
# comment in check-migration-collisions.yml and PR#673).
|
||||
run: |
|
||||
git fetch origin "${{ github.event.pull_request.base.ref }}" || true
|
||||
- name: Run lint-mask-pr-atomicity
|
||||
env:
|
||||
BASE_SHA: ${{ github.event.pull_request.base.sha }}
|
||||
HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||
# PR body — the script greps for `Paired: #NNN`.
|
||||
PR_BODY: ${{ github.event.pull_request.body }}
|
||||
CI_WORKFLOW_PATH: .gitea/workflows/ci.yml
|
||||
SENTINEL_JOB_KEY: all-required
|
||||
run: python3 .gitea/scripts/lint_mask_pr_atomicity.py
|
||||
- name: Run lint-mask-pr-atomicity unit tests
|
||||
# Run the test suite in-CI so the lint's own behaviour is
|
||||
# verified on every change. Matches lint-workflow-yaml.yml.
|
||||
run: |
|
||||
python -m pip install --quiet pytest
|
||||
python3 -m pytest tests/test_lint_mask_pr_atomicity.py -v
|
||||
@ -1,141 +0,0 @@
|
||||
name: Lint pre-flip continue-on-error
|
||||
|
||||
# Pre-merge gate: blocks PRs that flip `continue-on-error: true → false`
|
||||
# on any job in `.gitea/workflows/*.yml` WITHOUT proof that the affected
|
||||
# job's recent runs on the target branch (PR base) are actually green.
|
||||
#
|
||||
# Empirical class: PR #656 / mc#774. PR #656 (RFC internal#219 Phase 4)
|
||||
# flipped 5 platform-build-class jobs `continue-on-error: true → false`
|
||||
# on the basis of a "verified green on main via combined-status check".
|
||||
# But that "green" was the LIE the prior `continue-on-error: true`
|
||||
# produced: Gitea Quirk #10 (internal#342 + dup #287) — a failed step
|
||||
# inside a `continue-on-error: true` job rolls up to a `success`
|
||||
# job-level status. The precondition the PR claimed to verify was
|
||||
# structurally fooled by the bug being flipped.
|
||||
#
|
||||
# mc#774 captured the surfaced defects (2 mutually-masked regressions):
|
||||
# - Class 1: sqlmock helper drift since 2f36bb9a (24 days old)
|
||||
# - Class 2: OFFSEC-001 contract collision since 7d1a189f (1 day old)
|
||||
#
|
||||
# Codified 04:35Z as hongming-pc2 charter §SOP-N rule (e)
|
||||
# "run-log-grep-before-flip" — now structurally enforced here at PR
|
||||
# time, ahead of merge.
|
||||
#
|
||||
# How the gate works:
|
||||
# 1. Read every `.gitea/workflows/*.yml` at the PR base SHA AND at
|
||||
# the PR head SHA via `git show <sha>:<path>` (no checkout
|
||||
# needed).
|
||||
# 2. Parse both sides via PyYAML AST (NOT grep — per
|
||||
# `feedback_behavior_based_ast_gates`). Walk `jobs.<key>.
|
||||
# continue-on-error` on each side. A flip is base=true,
|
||||
# head=false.
|
||||
# 3. For each flipped job, render the commit-status context as
|
||||
# `"{workflow.name} / {job.name or job.key} (push)"` — that's
|
||||
# how Gitea Actions emits the per-context status on `main`/
|
||||
# `staging` runs.
|
||||
# 4. Pull last 5 commits on the PR base branch, fetch combined
|
||||
# commit-status per commit, scan for the target context. For
|
||||
# each match, fetch the run log via the web-UI route
|
||||
# `{server_url}/{repo}/actions/runs/{run_id}/jobs/{job_idx}/logs`
|
||||
# (per `reference_gitea_actions_log_fetch` —
|
||||
# Gitea 1.22.6 lacks REST `/actions/runs/*`; web-UI is the
|
||||
# only working path, see also
|
||||
# `reference_gitea_1_22_6_lacks_rest_rerun_endpoints`).
|
||||
# 5. Grep each log for `--- FAIL`, `FAIL\s`, `::error::`. If
|
||||
# the status is `success` but the log shows any of these,
|
||||
# the job was masked. Block the PR with `::error::`.
|
||||
#
|
||||
# Graceful-degrade contract (per task halt-conditions):
|
||||
# - Log fetch 404 (act_runner pruned the log, transient outage):
|
||||
# emit `::warning::` "log unavailable" — does NOT block.
|
||||
# - Zero recent runs of the flipped job's context on the base
|
||||
# branch (newly added workflow): emit `::warning::` "no run
|
||||
# history to verify" — allow the flip. Chicken-and-egg
|
||||
# exemption.
|
||||
# - YAML parse error in one of the workflow files: warn-only,
|
||||
# don't block — the YAML lint workflows catch this separately.
|
||||
#
|
||||
# Cross-links: PR#656, mc#774, PR#665 (interim re-mask),
|
||||
# Quirk #10 (internal#342 + dup #287), hongming-pc2 charter
|
||||
# §SOP-N rule (e), feedback_strict_root_only_after_class_a,
|
||||
# feedback_no_shared_persona_token_use.
|
||||
#
|
||||
# Phase contract (RFC internal#219 §1 ladder):
|
||||
# - This workflow lands at `continue-on-error: true` (Phase 3 —
|
||||
# surface defects without blocking). Follow-up PR flips it to
|
||||
# `false` ONLY after this workflow's own recent runs on `main`
|
||||
# are confirmed clean — exactly the discipline the workflow
|
||||
# itself enforces. Eat your own dogfood.
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
paths:
|
||||
- '.gitea/workflows/**'
|
||||
- '.gitea/scripts/lint_pre_flip_continue_on_error.py'
|
||||
- '.gitea/workflows/lint-pre-flip-continue-on-error.yml'
|
||||
|
||||
env:
|
||||
# Per `feedback_act_runner_github_server_url` — without this,
|
||||
# actions/checkout and friends default to github.com → break.
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
# Need read on the API to pull combined commit-status + commit list
|
||||
# for the base branch. The job-log fetch uses the same token via
|
||||
# the web-UI route (Gitea 1.22.6 accepts `Authorization: token ...`
|
||||
# there).
|
||||
pull-requests: read
|
||||
|
||||
concurrency:
|
||||
group: lint-pre-flip-coe-${{ github.event.pull_request.head.sha || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
scan:
|
||||
name: Verify continue-on-error flips have run-log proof
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 8
|
||||
# Phase 3 (RFC internal#219 §1): surface broken flips without blocking
|
||||
# the PR yet. Follow-up flips this to `false` once the workflow itself
|
||||
# has clean recent runs on main. mc#774 interim — remove when CoE→false.
|
||||
continue-on-error: true # mc#774
|
||||
steps:
|
||||
- name: Check out PR head (full history for base-SHA access)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
# `git show <base-sha>:<path>` needs the base SHA's blobs.
|
||||
# Shallow=1 would miss it. Same rationale as
|
||||
# check-migration-collisions.yml.
|
||||
fetch-depth: 0
|
||||
- name: Set up Python (PyYAML for AST parsing)
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- name: Install PyYAML
|
||||
# Same pin as ci-required-drift.yml — keep dependencies
|
||||
# uniform so a Gitea runner cache hits across both jobs.
|
||||
run: python -m pip install --quiet 'PyYAML==6.0.2'
|
||||
- name: Ensure base ref is reachable locally
|
||||
# `actions/checkout@v6 fetch-depth=0` usually pulls the base
|
||||
# too, but explicit-fetch is cheap insurance against the
|
||||
# form-of-ref differences across Gitea runner versions
|
||||
# (mirrors the comment in check-migration-collisions.yml).
|
||||
run: |
|
||||
git fetch origin "${{ github.event.pull_request.base.ref }}" || true
|
||||
- name: Run lint
|
||||
env:
|
||||
# Auto-injected by Gitea Actions; sufficient scope for
|
||||
# combined-status + commit-list + log fetch via web-UI
|
||||
# route. NO repo-admin needed (unlike the
|
||||
# branch_protections endpoint).
|
||||
GITEA_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
BASE_REF: ${{ github.event.pull_request.base.ref }}
|
||||
BASE_SHA: ${{ github.event.pull_request.base.sha }}
|
||||
HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||
# Last 5 commits on the base branch is the spec default.
|
||||
RECENT_COMMITS_N: '5'
|
||||
run: python3 .gitea/scripts/lint_pre_flip_continue_on_error.py
|
||||
@ -1,118 +0,0 @@
|
||||
name: lint-required-context-exists-in-bp
|
||||
|
||||
# Tier 2g hard-gate lint (per mc#774) — diff-based PR-time
|
||||
# check. When a PR adds a NEW commit-status emission (workflow YAML
|
||||
# `name:` + job `name:`-or-key + on:-event), the workflow file must
|
||||
# carry one of three directives adjacent to the new job:
|
||||
#
|
||||
# - `# bp-required: yes` — and BP must list the context
|
||||
# - `# bp-required: pending #NNN` — acknowledged asymmetry + tracker
|
||||
# - `# bp-exempt: <reason>` — informational job, not a gate
|
||||
#
|
||||
# Default (no directive on a new emitter) = FAIL.
|
||||
#
|
||||
# Why this exists
|
||||
# ---------------
|
||||
# PR#656 added `CI / all-required (pull_request)` as a sentinel
|
||||
# context that workflows emit, but BP did NOT list it. When
|
||||
# platform-build failed, all-required failed, but BP let the PR
|
||||
# merge anyway → cascade to mc#774. With this lint, PR#656 would
|
||||
# have been blocked until either the BP PATCH ran alongside OR
|
||||
# the author added a `bp-required: pending` directive.
|
||||
#
|
||||
# Tier 2g vs Tier 2f
|
||||
# ------------------
|
||||
# Tier 2g runs at PR-time (diff-based) and BLOCKS the merge.
|
||||
# Tier 2f runs daily (scheduled) and FILES a drift issue. They
|
||||
# share the workflow-context enumeration helpers
|
||||
# (`_event_map`, `workflow_contexts`, `_job_display`) but the
|
||||
# semantics are intentionally distinct so they're separate scripts.
|
||||
# Co-design is documented in mc#774.
|
||||
#
|
||||
# Directive comment lives in the workflow file (NOT PR body)
|
||||
# ----------------------------------------------------------
|
||||
# A PR-body claim of "BP exempt" evaporates on merge — the
|
||||
# asymmetry returns to undetected state and Tier 2f's daily
|
||||
# scheduled audit can't see it. The directive must live with the
|
||||
# emitter so both PR-time (Tier 2g) and post-merge (Tier 2f)
|
||||
# readers consume the same source.
|
||||
#
|
||||
# Phase contract (RFC internal#219 §1 ladder)
|
||||
# -------------------------------------------
|
||||
# Lands at `continue-on-error: true` (Phase 3 — surface the
|
||||
# pattern without blocking PRs while the directive convention
|
||||
# beds in). After 7 days of clean runs on `main` with no false
|
||||
# positives, follow-up flips to `false`. Tracking: mc#774.
|
||||
#
|
||||
# Cross-links
|
||||
# -----------
|
||||
# - mc#774 (the RFC that specs this lint)
|
||||
# - PR#656 (the empirical case)
|
||||
# - mc#774 (the surfaced cascade)
|
||||
# - feedback_phantom_required_check_after_gitea_migration (Tier 2f cousin)
|
||||
# - feedback_behavior_based_ast_gates
|
||||
#
|
||||
# Auth: DRIFT_BOT_TOKEN (repo-admin for branch_protections read).
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
paths:
|
||||
- '.gitea/workflows/**'
|
||||
- '.gitea/scripts/lint_required_context_exists_in_bp.py'
|
||||
- '.gitea/workflows/lint-required-context-exists-in-bp.yml'
|
||||
- 'tests/test_lint_required_context_exists_in_bp.py'
|
||||
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: lint-required-context-exists-in-bp-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# bp-exempt: this lint is a PR-time advisory and is not intended to
|
||||
# be a required gate on main. The directive eat-our-own-dogfood
|
||||
# confirms the convention works on the lint that defines it.
|
||||
lint:
|
||||
name: lint-required-context-exists-in-bp
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
# Phase 3 (RFC #219 §1): surface the pattern without blocking PRs
|
||||
# while the directive convention beds in. Follow-up flip to false
|
||||
# after 7 clean days on main. mc#774.
|
||||
continue-on-error: true # mc#774 Phase 3 — flip to false after 7 clean main runs
|
||||
steps:
|
||||
- name: Check out PR head with full history (need base SHA blobs)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
# `git show <base-sha>:<path>` needs the base SHA's blobs.
|
||||
# Same rationale as PR#673 and check-migration-collisions.yml.
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- name: Install PyYAML
|
||||
run: python -m pip install --quiet 'PyYAML==6.0.2'
|
||||
- name: Ensure base ref is reachable locally
|
||||
# Cheap insurance against runner-version drift.
|
||||
run: |
|
||||
git fetch origin "${{ github.event.pull_request.base.ref }}" || true
|
||||
- name: Run lint-required-context-exists-in-bp
|
||||
env:
|
||||
# DRIFT_BOT_TOKEN — repo-admin (needed for branch_protections).
|
||||
GITEA_TOKEN: ${{ secrets.DRIFT_BOT_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
BRANCH: main
|
||||
BASE_SHA: ${{ github.event.pull_request.base.sha }}
|
||||
HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||
WORKFLOWS_DIR: .gitea/workflows
|
||||
run: python3 .gitea/scripts/lint_required_context_exists_in_bp.py
|
||||
- name: Run lint-required-context-exists-in-bp unit tests
|
||||
run: |
|
||||
python -m pip install --quiet pytest
|
||||
python3 -m pytest tests/test_lint_required_context_exists_in_bp.py -v
|
||||
@ -1,97 +0,0 @@
|
||||
# lint-required-no-paths — structural enforcement of
|
||||
# `feedback_path_filtered_workflow_cant_be_required`.
|
||||
#
|
||||
# Fails the PR if ANY workflow whose status-check context appears in
|
||||
# `branch_protections/main.status_check_contexts` carries a
|
||||
# `paths:` or `paths-ignore:` filter in its `on:` block.
|
||||
#
|
||||
# Why this exists:
|
||||
# A required-check workflow with a paths filter silently degrades the
|
||||
# merge gate. If a PR's diff doesn't touch the filter, the workflow
|
||||
# never fires; Gitea (1.22.6) reports the required context as
|
||||
# `pending` (NOT `skipped == success`), so the PR cannot merge. For a
|
||||
# docs-only PR against `paths: ['**.go']`, the PR is wedged forever.
|
||||
#
|
||||
# Previously prevented only by reviewer vigilance + the saved memory
|
||||
# `feedback_path_filtered_workflow_cant_be_required`. This workflow
|
||||
# makes it a hard CI gate.
|
||||
#
|
||||
# Forward-compat scope:
|
||||
# Today (2026-05-11) molecule-core/main protects 3 contexts:
|
||||
# - "Secret scan / Scan diff for credential-shaped strings (pull_request)"
|
||||
# - "sop-tier-check / tier-check (pull_request)"
|
||||
# - "CI / all-required (pull_request)"
|
||||
# Per RFC#324 Step 2 the required-list expands to ~5 contexts
|
||||
# (qa-review, security-review added). Each new required context's
|
||||
# workflow must remain unconditional. This lint pins that contract.
|
||||
#
|
||||
# Meta-required-check:
|
||||
# This workflow ITSELF deliberately has NO `paths:` filter on its `on:`
|
||||
# block — otherwise a paths-non-matching PR could bypass the check.
|
||||
# Self-evident from this file: only `pull_request` types + no paths.
|
||||
#
|
||||
# Auth:
|
||||
# `GET /repos/.../branch_protections/{branch}` requires repo-admin
|
||||
# role in Gitea 1.22.6. The workflow-default `GITHUB_TOKEN` is
|
||||
# non-admin (read-only), so we re-use `DRIFT_BOT_TOKEN` (same persona
|
||||
# that powers `ci-required-drift.yml` — verified working there).
|
||||
# If `DRIFT_BOT_TOKEN` becomes unavailable, the script exits 0 with a
|
||||
# loud `::error::` rather than red-X every PR — token-scope issues
|
||||
# should be fixed at the token, not surfaced as a gate failure on
|
||||
# every unrelated PR.
|
||||
#
|
||||
# Behavior-based gate per `feedback_behavior_based_ast_gates`:
|
||||
# YAML AST walk (PyYAML), NOT grep. Workflow renames, formatting
|
||||
# changes (block-scalar vs flow-style), or moving `paths:` between
|
||||
# `pull_request:` and `pull_request_target:` all still detect.
|
||||
#
|
||||
# IMPORTANT — Gitea 1.22.6 parser quirk per
|
||||
# `feedback_gitea_workflow_dispatch_inputs_unsupported`: do NOT add an
|
||||
# `inputs:` block to `workflow_dispatch:` — Gitea 1.22.6 rejects the
|
||||
# entire workflow as "unknown on type" and it registers for ZERO events.
|
||||
|
||||
name: lint-required-no-paths
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
workflow_dispatch:
|
||||
|
||||
# Read protection + read local YAML. No writes.
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
# Only one in-flight run per PR — re-pushes cancel the previous run to
|
||||
# keep the queue short. Required-list reads are cheap (one GET); the
|
||||
# cancellation is just hygiene.
|
||||
concurrency:
|
||||
group: lint-required-no-paths-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# bp-exempt: meta-lint advisory; CI / all-required is the required aggregate.
|
||||
lint:
|
||||
name: lint-required-no-paths
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Check out repo (we read the workflow YAML files locally)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
- name: Set up Python (PyYAML for AST parsing)
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
- name: Install PyYAML
|
||||
run: python -m pip install --quiet 'PyYAML==6.0.2'
|
||||
- name: Run lint-required-no-paths
|
||||
env:
|
||||
# DRIFT_BOT_TOKEN is owned by mc-drift-bot, a least-privilege
|
||||
# Gitea persona with repo-admin role for branch_protections
|
||||
# read. Same secret used by ci-required-drift.yml — see that
|
||||
# workflow's header for provisioning trail (internal#329).
|
||||
GITEA_TOKEN: ${{ secrets.DRIFT_BOT_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
BRANCH: main
|
||||
WORKFLOWS_DIR: .gitea/workflows
|
||||
run: python3 .gitea/scripts/lint-required-no-paths.py
|
||||
@ -1,76 +0,0 @@
|
||||
name: Lint workflow YAML (Gitea-1.22.6-hostile shapes)
|
||||
|
||||
# Tier-2 hard-gate lint (RFC internal#219 §1, charter §SOP-N rule (m)).
|
||||
# Catches six Gitea-1.22.6-hostile workflow-YAML shapes BEFORE they reach
|
||||
# `main`. Each rule maps to a documented incident in saved memory:
|
||||
#
|
||||
# 1. workflow_dispatch.inputs — feedback_gitea_workflow_dispatch_inputs_unsupported
|
||||
# (2026-05-11 PyPI freeze 24h)
|
||||
# 2. on: workflow_run — task #81 (Gitea 1.22.6 lacks the event)
|
||||
# 3. name: containing "/" — breaks status-context tokenization
|
||||
# 4. cross-file name collision — status-reaper rev1 fail-loud class
|
||||
# 5. cross-repo uses: org/r/p@r — feedback_gitea_cross_repo_uses_blocked
|
||||
# (DEFAULT_ACTIONS_URL=github → 404)
|
||||
# 6. (WARN) api.github.com refs — feedback_act_runner_github_server_url
|
||||
# without workflow-level GITHUB_SERVER_URL
|
||||
#
|
||||
# Empirical history this hardens against:
|
||||
# - status-reaper rev1 caught rule-4 (name-collision) class
|
||||
# - sop-tier-refire DOA'd on rule-2 (workflow_run partial)
|
||||
# - #319 bootstrap-paradox (chained-defect class, related)
|
||||
# - internal#329 dispatcher race (adjacent)
|
||||
# - 2026-05-11 publish-runtime: rule-1, 24h PyPI freeze
|
||||
#
|
||||
# Triggers:
|
||||
# - pull_request: pre-merge gate — block hostile shapes before they land
|
||||
# - push: post-merge regression detection — catch direct-to-main edits
|
||||
#
|
||||
# Per RFC internal#219 §1 contract: continue-on-error: true during the
|
||||
# surface-broken-shapes phase. Follow-up PR flips off after surfaced
|
||||
# defects are triaged. The push-trigger ensures we catch regressions
|
||||
# even if the pull_request gate is bypassed by branch-protection drift.
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.gitea/workflows/**'
|
||||
- '.gitea/scripts/lint-workflow-yaml.py'
|
||||
- 'tests/test_lint_workflow_yaml.py'
|
||||
push:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- '.gitea/workflows/**'
|
||||
- '.gitea/scripts/lint-workflow-yaml.py'
|
||||
- 'tests/test_lint_workflow_yaml.py'
|
||||
|
||||
# Belt-and-suspenders against runner default
|
||||
# (feedback_act_runner_github_server_url).
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: Lint workflow YAML for Gitea-1.22.6-hostile shapes
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken shapes without blocking PRs.
|
||||
# Follow-up PR flips this off after the 4 existing-on-main rule-2
|
||||
# (workflow_run) violations are migrated to a supported trigger.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Install PyYAML
|
||||
run: pip install --quiet 'PyYAML>=6.0'
|
||||
|
||||
- name: Lint .gitea/workflows/*.yml
|
||||
run: python3 .gitea/scripts/lint-workflow-yaml.py
|
||||
|
||||
- name: Run lint-workflow-yaml unit tests
|
||||
run: |
|
||||
pip install --quiet pytest
|
||||
python3 -m pytest tests/test_lint_workflow_yaml.py -v
|
||||
@ -37,11 +37,6 @@ name: main-red-watchdog
|
||||
# "unknown on type" when `workflow_dispatch.inputs.X` is present. Revisit
|
||||
# when Gitea ≥ 1.23 is fleet-wide.
|
||||
on:
|
||||
# SCHEDULE RE-ENABLED 2026-05-12 rev3 — interim disable (mc#645) reverted alongside
|
||||
# status-reaper rev3 (widen-window). Job-level timeout-minutes raised 5 → 15 below
|
||||
# to absorb runner-saturation latency without spurious cancels (the original cascade
|
||||
# cause). If runner-saturation root persists, the dedicated-runner-label split
|
||||
# remains the structural next step (tracked separately).
|
||||
schedule:
|
||||
# Hourly at :05 — task spec calls for "off-zero" (`5 * * * *`),
|
||||
# offset from :17 (ci-required-drift) and :00 (peak cron load).
|
||||
@ -63,12 +58,7 @@ concurrency:
|
||||
jobs:
|
||||
watchdog:
|
||||
runs-on: ubuntu-latest
|
||||
# rev3 (2026-05-12, mc#645 revert): raised 5 → 15 to absorb runner-saturation
|
||||
# latency. Original 5min cap was producing 124-style cancels under load,
|
||||
# which fed the very `[main-red]` issues this workflow files (self-poisoning).
|
||||
# 15min is still well below Gitea-default 6h job ceiling; if a real hang
|
||||
# occurs the issue-file path is still the alarm surface.
|
||||
timeout-minutes: 15
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Check out repo (script lives at .gitea/scripts/)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
@ -9,12 +9,18 @@ name: publish-canvas-image
|
||||
# - Workflow-level env.GITHUB_SERVER_URL pinned per
|
||||
# feedback_act_runner_github_server_url.
|
||||
# - `continue-on-error: true` on each job (RFC §1 contract).
|
||||
# - Retargeted the image push from GHCR to ECR. GHCR was retired during
|
||||
# the 2026-05-06 Gitea migration, and Gitea's GITHUB_TOKEN cannot
|
||||
# authenticate to ghcr.io.
|
||||
# - **Open question for review**: this workflow pushes the canvas
|
||||
# image to `ghcr.io`. GHCR was retired during the 2026-05-06
|
||||
# Gitea migration in favor of ECR (per staging-verify.yml header
|
||||
# notes). The image may not be consumable post-migration. Two
|
||||
# options for follow-up: (a) retarget to
|
||||
# `153263036946.dkr.ecr.us-east-2.amazonaws.com/molecule-ai/canvas`,
|
||||
# or (b) retire this workflow entirely and route canvas deploys
|
||||
# via the operator-host build path. tier:low + continue-on-error
|
||||
# means failed pushes do not block PRs.
|
||||
#
|
||||
|
||||
# Builds and pushes the canvas Docker image to ECR whenever a commit lands
|
||||
# Builds and pushes the canvas Docker image to GHCR whenever a commit lands
|
||||
# on main that touches canvas code. Previously canvas changes were visible in
|
||||
# CI (npm run build passed) but the live container was never updated —
|
||||
# operators had to manually run `docker compose build canvas` each time.
|
||||
@ -39,61 +45,32 @@ on:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
packages: write # required to push to ghcr.io/${{ github.repository_owner }}/*
|
||||
|
||||
env:
|
||||
IMAGE_NAME: 153263036946.dkr.ecr.us-east-2.amazonaws.com/molecule-ai/canvas
|
||||
IMAGE_NAME: ghcr.io/molecule-ai/canvas
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
jobs:
|
||||
# bp-exempt: post-merge image publication side effect; CI / all-required gates source changes.
|
||||
build-and-push:
|
||||
name: Build & push canvas image
|
||||
# REVERTED (infra/revert-docker-runner-label): `runs-on: ubuntu-latest` restored.
|
||||
# The `docker` label is not registered on any act_runner. `runs-on: [ubuntu-latest, docker]`
|
||||
# causes jobs to queue indefinitely with zero eligible runners — strictly worse than the
|
||||
# pre-#599 coin-flip (50% success rate). Once the `docker` label is registered on
|
||||
# ≥2 runners, re-apply the fix from #599 (infra/docker-runner-label).
|
||||
# See issue #576 + infra-lead pulse ~00:30Z.
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Log in to ECR
|
||||
env:
|
||||
IMAGE_NAME: ${{ env.IMAGE_NAME }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: us-east-2
|
||||
run: |
|
||||
set -euo pipefail
|
||||
ECR_REGISTRY="${IMAGE_NAME%%/*}"
|
||||
aws ecr get-login-password --region us-east-2 | \
|
||||
docker login --username AWS --password-stdin "${ECR_REGISTRY}"
|
||||
- name: Log in to GHCR
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Ensure ECR repository exists
|
||||
env:
|
||||
IMAGE_NAME: ${{ env.IMAGE_NAME }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: us-east-2
|
||||
run: |
|
||||
set -euo pipefail
|
||||
repo_path="${IMAGE_NAME#*/}"
|
||||
if ! aws ecr describe-repositories --repository-names "${repo_path}" --region us-east-2 >/dev/null 2>&1; then
|
||||
aws ecr create-repository \
|
||||
--repository-name "${repo_path}" \
|
||||
--image-scanning-configuration scanOnPush=true \
|
||||
--region us-east-2 >/dev/null
|
||||
fi
|
||||
|
||||
# Health check: verify Docker daemon is accessible before attempting any
|
||||
# build steps. This fails loudly at step 1 when the runner's docker.sock
|
||||
# is inaccessible rather than silently continuing to the build step
|
||||
@ -102,15 +79,11 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "::group::Docker daemon health check"
|
||||
echo "Runner: ${HOSTNAME:-unknown}"
|
||||
docker_info="$(docker info 2>&1)" || {
|
||||
docker info 2>&1 | head -5 || {
|
||||
echo "::error::Docker daemon is not accessible at /var/run/docker.sock"
|
||||
echo "::error::Runner: ${HOSTNAME:-unknown}"
|
||||
printf '%s\n' "${docker_info}"
|
||||
echo "::error::Check: (1) daemon running, (2) runner user in docker group, (3) sock perms 660+"
|
||||
exit 1
|
||||
}
|
||||
printf '%s\n' "${docker_info}" | sed -n '1,5p'
|
||||
echo "Docker daemon OK"
|
||||
echo "::endgroup::"
|
||||
|
||||
@ -144,7 +117,7 @@ jobs:
|
||||
echo "platform_url=${PLATFORM_URL}" >> "$GITHUB_OUTPUT"
|
||||
echo "ws_url=${WS_URL}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build & push canvas image to ECR
|
||||
- name: Build & push canvas image to GHCR
|
||||
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
|
||||
with:
|
||||
context: ./canvas
|
||||
@ -157,10 +130,9 @@ jobs:
|
||||
tags: |
|
||||
${{ env.IMAGE_NAME }}:latest
|
||||
${{ env.IMAGE_NAME }}:sha-${{ steps.tags.outputs.sha }}
|
||||
# Gitea artifact-cache reachability is best-effort on the operator
|
||||
# runner network. Do not let cache export fail an image that already
|
||||
# built and pushed successfully.
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
labels: |
|
||||
org.opencontainers.image.source=https://git.moleculesai.app/${{ github.repository }}
|
||||
org.opencontainers.image.source=https://github.com/${{ github.repository }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
org.opencontainers.image.description=Molecule AI canvas (Next.js 15 + React Flow)
|
||||
|
||||
@ -36,10 +36,6 @@ on:
|
||||
- staging
|
||||
paths:
|
||||
- "workspace/**"
|
||||
# Manual dispatch — useful when Gitea Actions API (/actions/*) is
|
||||
# unreachable (e.g. act_runner 404 on Gitea 1.22.6) and we cannot
|
||||
# re-trigger via curl.
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: write # required to push tags back
|
||||
@ -53,10 +49,8 @@ jobs:
|
||||
# Operational failures (PyPI unreachable, missing DISPATCH_TOKEN) are
|
||||
# surfaced via continue-on-error: true rather than blocking the merge.
|
||||
# The actual bump work happens on the main/staging push after merge.
|
||||
# bp-exempt: advisory validation for runtime publication; not a branch-protection gate.
|
||||
pr-validate:
|
||||
runs-on: ubuntu-latest
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true # do not block PR merge on operational failures
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
@ -80,18 +74,11 @@ jobs:
|
||||
# Actual bump-and-tag: runs on main/staging pushes, posts real success/failure.
|
||||
# No continue-on-error — operational failures here trip the main-red
|
||||
# watchdog, which is the desired signal for infrastructure degradation.
|
||||
# bp-exempt: post-merge tag publication side effect; CI / all-required gates source changes.
|
||||
bump-and-tag:
|
||||
runs-on: ubuntu-latest
|
||||
# Only fire on push events (main/staging after PR merge). Pull_request
|
||||
# events are handled by pr-validate above; we do NOT bump on every
|
||||
# push-synchronize because that would race with the PR head.
|
||||
#
|
||||
# NOTE: the prior condition `github.event.pull_request.base.ref == ''`
|
||||
# was broken — on a PR-merge push in Gitea Actions, the pull_request
|
||||
# context is still attached (base.ref='main'), so the condition always
|
||||
# evaluated to false and bump-and-tag was permanently skipped.
|
||||
if: github.event_name == 'push'
|
||||
# This job only fires on main/staging pushes (not on PR events) because
|
||||
# the pull_request trigger above routes to pr-validate instead.
|
||||
if: github.event.pull_request.base.ref == ''
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
|
||||
@ -18,31 +18,29 @@ name: publish-workspace-server-image
|
||||
# :staging-<sha> — per-commit digest, stable for canary verify
|
||||
# :staging-latest — tracks most recent build on this branch
|
||||
#
|
||||
# Production auto-deploy:
|
||||
# After both platform and tenant images are pushed, deploy-production waits
|
||||
# for strict required push contexts on the same SHA to go green, then
|
||||
# calls the production CP redeploy-fleet endpoint with target_tag=
|
||||
# staging-<sha>. Set repo variable or secret PROD_AUTO_DEPLOY_DISABLED=true
|
||||
# to stop production rollout while keeping image publishing enabled.
|
||||
#
|
||||
# ECR target: 153263036946.dkr.ecr.us-east-2.amazonaws.com/molecule-ai/*
|
||||
# Required secrets: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AUTO_SYNC_TOKEN
|
||||
#
|
||||
# mc#711: Docker daemon not accessible on ubuntu-latest runner (molecule-canonical-1
|
||||
# shows client-only in `docker info` — daemon not running). DinD mount is present but
|
||||
# daemon doesn't respond. Fix: add diagnostic step showing socket info so ops can
|
||||
# identify which runners have a live daemon. If no daemon is available, the job
|
||||
# fails fast with actionable output rather than silent deep failure.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'workspace-server/**'
|
||||
- 'canvas/**'
|
||||
- 'manifest.json'
|
||||
- 'scripts/**'
|
||||
- '.gitea/workflows/publish-workspace-server-image.yml'
|
||||
workflow_dispatch:
|
||||
|
||||
# No `concurrency:` block here. Gitea 1.22.6 can cancel queued runs despite
|
||||
# `cancel-in-progress: false`; that is not acceptable for a workflow with a
|
||||
# production deploy job. Per-SHA image tags are immutable, and staging-latest is
|
||||
# best-effort last-writer-wins metadata.
|
||||
# Serialize per-branch so two rapid main pushes don't race the same
|
||||
# :staging-latest tag retag. Allow parallel runs as they produce
|
||||
# different :staging-<sha> tags and last-write-wins on :staging-latest.
|
||||
#
|
||||
# cancel-in-progress: false → in-flight builds finish; the next push's
|
||||
# build queues. This avoids a partially-pushed image.
|
||||
concurrency:
|
||||
group: publish-workspace-server-image-${{ github.ref }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@ -61,21 +59,20 @@ jobs:
|
||||
|
||||
# Health check: verify Docker daemon is accessible before attempting any
|
||||
# build steps. This fails loudly at step 1 when the runner's docker.sock
|
||||
# is inaccessible rather than silently continuing where `docker build`
|
||||
# fails deep in the process with a cryptic ECR auth error.
|
||||
# is inaccessible (e.g. permission change, daemon restart, or group-membership
|
||||
# drift) rather than silently continuing to step 2 where `docker build`
|
||||
# fails deep in the process with a cryptic ECR auth error that doesn't
|
||||
# surface the root cause. Also reports the daemon version so operator
|
||||
# can correlate with runner host logs.
|
||||
- name: Verify Docker daemon access
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "::group::Docker daemon health check"
|
||||
echo "Runner: ${HOSTNAME:-unknown}"
|
||||
docker_info="$(docker info 2>&1)" || {
|
||||
docker info 2>&1 | head -5 || {
|
||||
echo "::error::Docker daemon is not accessible at /var/run/docker.sock"
|
||||
echo "::error::Runner: ${HOSTNAME:-unknown}"
|
||||
printf '%s\n' "${docker_info}"
|
||||
echo "::error::Check: (1) daemon is running, (2) runner user is in docker group, (3) sock permissions are 660+"
|
||||
exit 1
|
||||
}
|
||||
printf '%s\n' "${docker_info}" | sed -n '1,5p'
|
||||
echo "Docker daemon OK"
|
||||
echo "::endgroup::"
|
||||
|
||||
@ -95,12 +92,13 @@ jobs:
|
||||
MOLECULE_GITEA_TOKEN: ${{ secrets.AUTO_SYNC_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -z "${MOLECULE_GITEA_TOKEN}" ]; then
|
||||
echo "::error::AUTO_SYNC_TOKEN secret is empty"
|
||||
exit 1
|
||||
fi
|
||||
mkdir -p .tenant-bundle-deps
|
||||
# Strip JSON5 comments before jq parsing — Integration Tester appends
|
||||
# `// Triggered by ...` which breaks `jq` in clone-manifest.sh.
|
||||
sed '/^[[:space:]]*\/\//d' manifest.json > .manifest-stripped.json
|
||||
bash scripts/clone-manifest.sh \
|
||||
.manifest-stripped.json \
|
||||
manifest.json \
|
||||
.tenant-bundle-deps/workspace-configs-templates \
|
||||
.tenant-bundle-deps/org-templates \
|
||||
.tenant-bundle-deps/plugins
|
||||
@ -175,173 +173,3 @@ jobs:
|
||||
--tag "${TENANT_IMAGE_NAME}:${TAG_SHA}" \
|
||||
--tag "${TENANT_IMAGE_NAME}:${TAG_LATEST}" \
|
||||
--push .
|
||||
|
||||
# bp-exempt: production deploy side-effect; merge is gated by CI / all-required and this job waits for push CI before acting.
|
||||
deploy-production:
|
||||
name: Production auto-deploy
|
||||
needs: build-and-push
|
||||
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 75
|
||||
env:
|
||||
CP_URL: ${{ vars.PROD_CP_URL || 'https://api.moleculesai.app' }}
|
||||
CP_ADMIN_API_TOKEN: ${{ secrets.CP_ADMIN_API_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
GITEA_TOKEN: ${{ secrets.PROD_AUTO_DEPLOY_CONTROL_TOKEN || secrets.AUTO_SYNC_TOKEN }}
|
||||
PROD_AUTO_DEPLOY_DISABLED: ${{ vars.PROD_AUTO_DEPLOY_DISABLED || secrets.PROD_AUTO_DEPLOY_DISABLED || '' }}
|
||||
PROD_AUTO_DEPLOY_CANARY_SLUG: ${{ vars.PROD_AUTO_DEPLOY_CANARY_SLUG || 'hongming' }}
|
||||
PROD_AUTO_DEPLOY_SOAK_SECONDS: ${{ vars.PROD_AUTO_DEPLOY_SOAK_SECONDS || '60' }}
|
||||
PROD_AUTO_DEPLOY_BATCH_SIZE: ${{ vars.PROD_AUTO_DEPLOY_BATCH_SIZE || '3' }}
|
||||
PROD_AUTO_DEPLOY_DRY_RUN: ${{ vars.PROD_AUTO_DEPLOY_DRY_RUN || '' }}
|
||||
PROD_ALLOW_NON_PROD_CP_URL: ${{ vars.PROD_ALLOW_NON_PROD_CP_URL || '' }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Build deploy plan
|
||||
id: plan
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 .gitea/scripts/prod-auto-deploy.py plan > "$RUNNER_TEMP/prod-auto-deploy-plan.json"
|
||||
jq . "$RUNNER_TEMP/prod-auto-deploy-plan.json"
|
||||
enabled="$(jq -r '.enabled' "$RUNNER_TEMP/prod-auto-deploy-plan.json")"
|
||||
echo "enabled=$enabled" >> "$GITHUB_OUTPUT"
|
||||
if [ "$enabled" != "true" ]; then
|
||||
reason="$(jq -r '.disabled_reason' "$RUNNER_TEMP/prod-auto-deploy-plan.json")"
|
||||
echo "::notice::Production auto-deploy disabled: $reason"
|
||||
{
|
||||
echo "## Production auto-deploy skipped"
|
||||
echo ""
|
||||
echo "Reason: \`$reason\`"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
exit 0
|
||||
fi
|
||||
if [ -z "${CP_ADMIN_API_TOKEN:-}" ]; then
|
||||
echo "::error::CP_ADMIN_API_TOKEN secret is required for production auto-deploy."
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "${GITEA_TOKEN:-}" ]; then
|
||||
echo "::error::AUTO_SYNC_TOKEN secret is required so production deploy can wait for green CI."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Self-test production deploy helper
|
||||
if: ${{ steps.plan.outputs.enabled == 'true' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 -m pip install --quiet 'pytest==9.0.2' 'PyYAML==6.0.2'
|
||||
python3 -m pytest .gitea/scripts/tests/test_prod_auto_deploy.py -q
|
||||
python3 .gitea/scripts/lint-workflow-yaml.py --workflow-dir .gitea/workflows
|
||||
|
||||
- name: Wait for green main CI on this SHA
|
||||
if: ${{ steps.plan.outputs.enabled == 'true' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 .gitea/scripts/prod-auto-deploy.py wait-ci
|
||||
|
||||
- name: Call production CP redeploy-fleet
|
||||
if: ${{ steps.plan.outputs.enabled == 'true' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 .gitea/scripts/prod-auto-deploy.py assert-enabled
|
||||
PLAN="$RUNNER_TEMP/prod-auto-deploy-plan.json"
|
||||
TARGET_TAG="$(jq -r '.target_tag' "$PLAN")"
|
||||
BODY="$(jq -c '.body' "$PLAN")"
|
||||
|
||||
echo "POST $CP_URL/cp/admin/tenants/redeploy-fleet"
|
||||
echo " target_tag: $TARGET_TAG"
|
||||
echo " body: $BODY"
|
||||
|
||||
HTTP_RESPONSE="$RUNNER_TEMP/prod-redeploy-response.json"
|
||||
HTTP_CODE_FILE="$RUNNER_TEMP/prod-redeploy-http-code.txt"
|
||||
set +e
|
||||
curl -sS -o "$HTTP_RESPONSE" -w '%{http_code}' \
|
||||
-m 1200 \
|
||||
-H "Authorization: Bearer $CP_ADMIN_API_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
-X POST "$CP_URL/cp/admin/tenants/redeploy-fleet" \
|
||||
-d "$BODY" > "$HTTP_CODE_FILE"
|
||||
set -e
|
||||
|
||||
HTTP_CODE="$(cat "$HTTP_CODE_FILE" 2>/dev/null || echo "000")"
|
||||
[ -z "$HTTP_CODE" ] && HTTP_CODE="000"
|
||||
echo "HTTP $HTTP_CODE"
|
||||
jq '{ok, result_count: (.results // [] | length)}' "$HTTP_RESPONSE" || true
|
||||
|
||||
{
|
||||
echo "## Production auto-deploy"
|
||||
echo ""
|
||||
echo "**Commit:** \`${GITHUB_SHA:0:7}\`"
|
||||
echo "**Target tag:** \`$TARGET_TAG\`"
|
||||
echo "**HTTP:** $HTTP_CODE"
|
||||
echo ""
|
||||
echo "### Per-tenant result"
|
||||
echo ""
|
||||
echo "| Slug | Phase | SSM Status | Exit | Healthz | Error present |"
|
||||
echo "|------|-------|------------|------|---------|---------------|"
|
||||
jq -r '.results[]? | "| \(.slug) | \(.phase) | \(.ssm_status // "-") | \(.ssm_exit_code) | \(.healthz_ok) | \((.error // "") != "") |"' "$HTTP_RESPONSE" || true
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
if [ "$HTTP_CODE" != "200" ]; then
|
||||
echo "::error::redeploy-fleet returned HTTP $HTTP_CODE"
|
||||
exit 1
|
||||
fi
|
||||
OK="$(jq -r '.ok' "$HTTP_RESPONSE")"
|
||||
if [ "$OK" != "true" ]; then
|
||||
echo "::error::redeploy-fleet reported ok=false; production rollout halted."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Verify reachable tenants report this SHA
|
||||
if: ${{ steps.plan.outputs.enabled == 'true' }}
|
||||
env:
|
||||
TENANT_DOMAIN: moleculesai.app
|
||||
run: |
|
||||
set -euo pipefail
|
||||
RESP="$RUNNER_TEMP/prod-redeploy-response.json"
|
||||
mapfile -t SLUGS < <(jq -r '.results[]? | .slug' "$RESP")
|
||||
if [ ${#SLUGS[@]} -eq 0 ]; then
|
||||
echo "::error::No tenants returned from redeploy-fleet; refusing to mark production deploy verified."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
STALE_COUNT=0
|
||||
UNREACHABLE_COUNT=0
|
||||
UNHEALTHY_COUNT=0
|
||||
for slug in "${SLUGS[@]}"; do
|
||||
healthz_ok="$(jq -r --arg slug "$slug" '.results[]? | select(.slug == $slug) | .healthz_ok' "$RESP" | tail -1)"
|
||||
if [ "$healthz_ok" != "true" ]; then
|
||||
echo "::error::$slug did not report healthz_ok=true in redeploy-fleet response."
|
||||
UNHEALTHY_COUNT=$((UNHEALTHY_COUNT + 1))
|
||||
continue
|
||||
fi
|
||||
url="https://${slug}.${TENANT_DOMAIN}/buildinfo"
|
||||
body="$(curl -sS --max-time 30 --retry 3 --retry-delay 5 --retry-connrefused "$url" || true)"
|
||||
actual="$(echo "$body" | jq -r '.git_sha // ""' 2>/dev/null || echo "")"
|
||||
if [ -z "$actual" ]; then
|
||||
echo "::error::$slug did not return /buildinfo after deploy."
|
||||
UNREACHABLE_COUNT=$((UNREACHABLE_COUNT + 1))
|
||||
continue
|
||||
fi
|
||||
if [ "$actual" != "$GITHUB_SHA" ]; then
|
||||
echo "::error::$slug is stale: actual=${actual:0:7}, expected=${GITHUB_SHA:0:7}"
|
||||
STALE_COUNT=$((STALE_COUNT + 1))
|
||||
else
|
||||
echo "$slug: ${actual:0:7}"
|
||||
fi
|
||||
done
|
||||
|
||||
{
|
||||
echo ""
|
||||
echo "### Buildinfo verification"
|
||||
echo ""
|
||||
echo "Expected SHA: \`${GITHUB_SHA:0:7}\`"
|
||||
echo "Verified tenants: ${#SLUGS[@]}"
|
||||
echo "Stale tenants: $STALE_COUNT"
|
||||
echo "Unhealthy tenants: $UNHEALTHY_COUNT"
|
||||
echo "Unreachable tenants: $UNREACHABLE_COUNT"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
if [ "$STALE_COUNT" -gt 0 ] || [ "$UNHEALTHY_COUNT" -gt 0 ] || [ "$UNREACHABLE_COUNT" -gt 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@ -9,10 +9,10 @@
|
||||
# Triggers on:
|
||||
# - `pull_request_target`: opened, synchronize, reopened
|
||||
# → initial status posts when PR opens / re-pushes
|
||||
# - comment refires are handled by `review-refire-comments.yml`
|
||||
# → a single issue_comment dispatcher prevents every SOP/review
|
||||
# comment from enqueueing separate qa/security/tier jobs on
|
||||
# Gitea 1.22.6 before job-level `if:` can skip them.
|
||||
# - `issue_comment`: /qa-recheck slash-command on the PR
|
||||
# → manual re-fire after a QA reviewer clicks APPROVE
|
||||
# (Gitea 1.22.6 doesn't re-fire on pull_request_review, per
|
||||
# go-gitea/gitea#33700 + feedback_pull_request_review_no_refire)
|
||||
# Workflow name = `qa-review` ; job name = `approved`.
|
||||
# The job's own pass/fail conclusion publishes the status context
|
||||
# `qa-review / approved (<event>)` — NO `POST /statuses` call → NO
|
||||
@ -85,20 +85,27 @@ name: qa-review
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened]
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
|
||||
jobs:
|
||||
# bp-exempt: PR review bot signal; required merge state is enforced by CI / all-required.
|
||||
approved:
|
||||
# Gate the job:
|
||||
# - On pull_request_target events: always run.
|
||||
# Comment-triggered refires live in review-refire-comments.yml. Keeping
|
||||
# this workflow PR-only avoids comment-triggered queue storms.
|
||||
# - On issue_comment events: only when it's a PR comment and the body
|
||||
# contains the slash-command. NO privilege gate at the step level
|
||||
# (RFC#324 v1.3 §A1.1): a non-collaborator's /qa-recheck is fine
|
||||
# because the eval is read-only and idempotent — re-running it
|
||||
# just re-confirms whether a real team-member APPROVE exists.
|
||||
if: |
|
||||
github.event_name == 'pull_request_target'
|
||||
github.event_name == 'pull_request_target' ||
|
||||
(github.event_name == 'issue_comment' &&
|
||||
github.event.issue.pull_request != null &&
|
||||
startsWith(github.event.comment.body, '/qa-recheck'))
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Privilege check (A1.1 — INFORMATIONAL log only, NOT a gate)
|
||||
@ -112,7 +119,7 @@ jobs:
|
||||
# no comment.user.login so the step is a no-op skip there.
|
||||
if: github.event_name == 'issue_comment'
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_TOKEN: ${{ secrets.RFC_324_TEAM_READ_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
login="${{ github.event.comment.user.login }}"
|
||||
@ -143,14 +150,13 @@ jobs:
|
||||
|
||||
- name: Evaluate qa-review
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_TOKEN: ${{ secrets.RFC_324_TEAM_READ_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
# PR number lives in different places per event:
|
||||
# pull_request_target → github.event.pull_request.number
|
||||
# issue_comment → github.event.issue.number
|
||||
PR_NUMBER: ${{ github.event.pull_request.number || github.event.issue.number }}
|
||||
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
TEAM: qa
|
||||
TEAM_ID: '20'
|
||||
REVIEW_CHECK_DEBUG: '0'
|
||||
|
||||
@ -51,7 +51,6 @@ jobs:
|
||||
name: Audit Railway env vars for drift-prone pins
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 10
|
||||
|
||||
|
||||
@ -9,17 +9,20 @@ name: redeploy-tenants-on-main
|
||||
# - Workflow-level env.GITHUB_SERVER_URL pinned per
|
||||
# feedback_act_runner_github_server_url.
|
||||
# - `continue-on-error: true` on each job (RFC §1 contract).
|
||||
# - Dropped unsupported `workflow_run` (task #81).
|
||||
# - Later changed to manual-only after publish-workspace-server-image.yml
|
||||
# gained an integrated ordered production deploy job.
|
||||
# - **Gitea workflow_run trigger limitation**: Gitea 1.22.6's support
|
||||
# for the `workflow_run` event is partial. If this never fires on a
|
||||
# real publish-workspace-server-image completion, the follow-up
|
||||
# triage PR should replace the trigger with a push-with-paths-filter
|
||||
# on .gitea/workflows/publish-workspace-server-image.yml. Until
|
||||
# then continue-on-error+dead-workflow doesn't break anything.
|
||||
#
|
||||
|
||||
# Manual production tenant redeploy/rollback helper.
|
||||
# Auto-refresh prod tenant EC2s after every main merge.
|
||||
#
|
||||
# Why this workflow is manual-only: publish-workspace-server-image now owns
|
||||
# the ordered build -> push -> production auto-deploy sequence in one workflow.
|
||||
# A separate push-triggered redeploy workflow races before the new ECR image
|
||||
# exists and can paint main red with a false deployment failure.
|
||||
# Why this workflow exists: publish-workspace-server-image builds and
|
||||
# pushes a new platform-tenant :<sha> to ECR on every merge to main,
|
||||
# but running tenants pulled their image once at boot and never re-pull.
|
||||
# Users see stale code indefinitely.
|
||||
#
|
||||
# This workflow closes the gap by calling the control-plane admin
|
||||
# endpoint that performs a canary-first, batched, health-gated rolling
|
||||
@ -32,58 +35,60 @@ name: redeploy-tenants-on-main
|
||||
# Gitea suspension migration. The staging-verify.yml promote step now
|
||||
# uses the same redeploy-fleet endpoint (fixes the silent-GHCR gap).
|
||||
#
|
||||
# Runtime ordering for automatic deploys now lives in
|
||||
# publish-workspace-server-image.yml:
|
||||
# 1. build-and-push creates new :staging-<sha> images in ECR.
|
||||
# 2. deploy-production waits for required push contexts on that SHA.
|
||||
# 3. deploy-production calls redeploy-fleet canary-first.
|
||||
# Runtime ordering:
|
||||
# 1. publish-workspace-server-image completes → new :staging-<sha> in ECR.
|
||||
# 2. This workflow fires via workflow_run, calls redeploy-fleet with
|
||||
# target_tag=staging-<sha>. No CDN propagation wait needed —
|
||||
# ECR image manifest is consistent immediately after push.
|
||||
# 3. Calls redeploy-fleet with canary_slug (if set) and a soak
|
||||
# period. Canary proves the image boots; batches follow.
|
||||
# 4. Any failure aborts the rollout and leaves older tenants on the
|
||||
# prior image — safer default than half-and-half state.
|
||||
#
|
||||
# Rollback path: set PROD_MANUAL_REDEPLOY_TARGET_TAG as a repo/org
|
||||
# variable or secret, run workflow_dispatch, then unset it after the
|
||||
# rollback. That calls redeploy-fleet with target_tag=<value>,
|
||||
# re-pulling the pinned image on every tenant.
|
||||
# Rollback path: re-run this workflow with a specific SHA pinned via
|
||||
# the workflow_dispatch input. That calls redeploy-fleet with
|
||||
# target_tag=<sha>, re-pulling the older image on every tenant.
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_run:
|
||||
workflows: ['publish-workspace-server-image']
|
||||
types: [completed]
|
||||
branches: [main]
|
||||
permissions:
|
||||
contents: read
|
||||
# No write scopes needed — the workflow hits an external CP endpoint,
|
||||
# not the GitHub API.
|
||||
|
||||
# Serialize manual redeploys so two operator-triggered rollbacks do not
|
||||
# overlap and cause confusing per-tenant SSM state.
|
||||
# Serialize redeploys so two rapid main pushes' redeploys don't overlap
|
||||
# and cause confusing per-tenant SSM state. Without this, GitHub's
|
||||
# implicit workflow_run queueing would *probably* serialize them, but
|
||||
# the explicit block makes the invariant defensible. Mirrors the
|
||||
# concurrency block on redeploy-tenants-on-staging.yml for shape parity.
|
||||
#
|
||||
# NOTE: cancel-in-progress: false removed (Rule 7 fix). Gitea 1.22.6
|
||||
# cancels queued runs regardless of this setting, so it provides no
|
||||
# actual protection. Each redeploy-fleet call is idempotent (canary-first
|
||||
# + batched + health-gated) so a cancelled predecessor is recovered
|
||||
# automatically by the next run.
|
||||
# cancel-in-progress: false → aborting a half-rolled-out fleet would
|
||||
# leave tenants stuck on whatever image they happened to be on when
|
||||
# cancelled. Better to finish the in-flight rollout before starting
|
||||
# the next one.
|
||||
concurrency:
|
||||
group: redeploy-tenants-on-main
|
||||
cancel-in-progress: false
|
||||
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
jobs:
|
||||
# bp-exempt: production redeploy is a side-effect workflow, not a merge gate.
|
||||
redeploy:
|
||||
if: ${{ github.event_name == 'workflow_dispatch' }}
|
||||
# Skip the auto-trigger if publish-workspace-server-image didn't
|
||||
# actually succeed. workflow_run fires on any completion state; we
|
||||
# don't want to redeploy against a half-built image.
|
||||
# NOTE (Gitea port): workflow_dispatch trigger dropped; only the
|
||||
# workflow_run path remains.
|
||||
if: ${{ github.event.workflow_run.conclusion == 'success' }}
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 25
|
||||
env:
|
||||
# Rule 9 fix: keep the same operational kill switch surface as the
|
||||
# integrated auto-deploy workflow.
|
||||
PROD_AUTO_DEPLOY_DISABLED: ${{ vars.PROD_AUTO_DEPLOY_DISABLED || secrets.PROD_AUTO_DEPLOY_DISABLED || '' }}
|
||||
steps:
|
||||
- name: Kill-switch guard
|
||||
# Rule 9 fix: exit fast if kill switch is set. No redeploy happens.
|
||||
if: env.PROD_AUTO_DEPLOY_DISABLED == 'true'
|
||||
run: |
|
||||
echo "::notice::Production auto-deploy disabled (PROD_AUTO_DEPLOY_DISABLED=true). Skipping redeploy."
|
||||
echo "To re-enable: unset the repo variable or set it to false."
|
||||
- name: Note on ECR propagation
|
||||
# ECR image manifests are consistent immediately after push — no
|
||||
# CDN cache to wait for. The old GHCR-based workflow had a 30s
|
||||
@ -97,16 +102,21 @@ jobs:
|
||||
# tag) → used verbatim. Lets ops pin `latest` for emergency
|
||||
# rollback to last canary-verified digest, or pin a specific
|
||||
# `staging-<sha>` to roll back to a known-good build.
|
||||
# 2. Default → `staging-<short_head_sha>` for manual reruns from
|
||||
# the current default-branch SHA.
|
||||
# 2. Default → `staging-<short_head_sha>`. The just-published
|
||||
# digest. Bypasses the `:latest` retag path that's currently
|
||||
# dead (staging-verify soft-skips without canary fleet, so
|
||||
# the only thing retagging `:latest` today is the manual
|
||||
# promote-latest.yml — last run 2026-04-28). Auto-trigger
|
||||
# from workflow_run uses workflow_run.head_sha; manual
|
||||
# dispatch with no input falls through to github.sha.
|
||||
env:
|
||||
PROD_MANUAL_REDEPLOY_TARGET_TAG: ${{ vars.PROD_MANUAL_REDEPLOY_TARGET_TAG || secrets.PROD_MANUAL_REDEPLOY_TARGET_TAG || '' }}
|
||||
HEAD_SHA: ${{ github.sha }}
|
||||
INPUT_TAG: ${{ inputs.target_tag }}
|
||||
HEAD_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -n "${PROD_MANUAL_REDEPLOY_TARGET_TAG:-}" ]; then
|
||||
echo "target_tag=$PROD_MANUAL_REDEPLOY_TARGET_TAG" >> "$GITHUB_OUTPUT"
|
||||
echo "Using operator-pinned tag from PROD_MANUAL_REDEPLOY_TARGET_TAG."
|
||||
if [ -n "${INPUT_TAG:-}" ]; then
|
||||
echo "target_tag=$INPUT_TAG" >> "$GITHUB_OUTPUT"
|
||||
echo "Using operator-pinned tag: $INPUT_TAG"
|
||||
else
|
||||
SHORT="${HEAD_SHA:0:7}"
|
||||
echo "target_tag=staging-$SHORT" >> "$GITHUB_OUTPUT"
|
||||
@ -122,26 +132,13 @@ jobs:
|
||||
CP_URL: ${{ vars.CP_URL || 'https://api.moleculesai.app' }}
|
||||
CP_ADMIN_API_TOKEN: ${{ secrets.CP_ADMIN_API_TOKEN }}
|
||||
TARGET_TAG: ${{ steps.tag.outputs.target_tag }}
|
||||
CANARY_SLUG: ${{ vars.PROD_REDEPLOY_CANARY_SLUG || secrets.PROD_REDEPLOY_CANARY_SLUG || '' }}
|
||||
SOAK_SECONDS: ${{ vars.PROD_REDEPLOY_SOAK_SECONDS || secrets.PROD_REDEPLOY_SOAK_SECONDS || '' }}
|
||||
BATCH_SIZE: ${{ vars.PROD_REDEPLOY_BATCH_SIZE || secrets.PROD_REDEPLOY_BATCH_SIZE || '' }}
|
||||
DRY_RUN: ${{ vars.PROD_REDEPLOY_DRY_RUN || secrets.PROD_REDEPLOY_DRY_RUN || '' }}
|
||||
PROD_AUTO_DEPLOY_DISABLED: ${{ vars.PROD_AUTO_DEPLOY_DISABLED || secrets.PROD_AUTO_DEPLOY_DISABLED || '' }}
|
||||
CANARY_SLUG: ${{ inputs.canary_slug || 'hongming' }}
|
||||
SOAK_SECONDS: ${{ inputs.soak_seconds || '60' }}
|
||||
BATCH_SIZE: ${{ inputs.batch_size || '3' }}
|
||||
DRY_RUN: ${{ inputs.dry_run || false }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
case "${PROD_AUTO_DEPLOY_DISABLED,,}" in
|
||||
1|true|yes|on)
|
||||
echo "::notice::PROD_AUTO_DEPLOY_DISABLED is set; skipping production redeploy."
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
|
||||
CANARY_SLUG="${CANARY_SLUG:-hongming}"
|
||||
SOAK_SECONDS="${SOAK_SECONDS:-60}"
|
||||
BATCH_SIZE="${BATCH_SIZE:-3}"
|
||||
DRY_RUN="${DRY_RUN:-false}"
|
||||
|
||||
if [ -z "${CP_ADMIN_API_TOKEN:-}" ]; then
|
||||
echo "::error::CP_ADMIN_API_TOKEN secret not set — skipping redeploy"
|
||||
echo "::notice::Set CP_ADMIN_API_TOKEN in repo secrets to enable auto-redeploy."
|
||||
@ -163,7 +160,7 @@ jobs:
|
||||
}')
|
||||
|
||||
echo "POST $CP_URL/cp/admin/tenants/redeploy-fleet"
|
||||
echo " target_tag=$TARGET_TAG canary=$CANARY_SLUG soak_seconds=$SOAK_SECONDS batch_size=$BATCH_SIZE dry_run=$DRY_RUN"
|
||||
echo " body: $BODY"
|
||||
|
||||
HTTP_RESPONSE=$(mktemp)
|
||||
HTTP_CODE_FILE=$(mktemp)
|
||||
@ -191,9 +188,7 @@ jobs:
|
||||
[ -z "$HTTP_CODE" ] && HTTP_CODE="000"
|
||||
|
||||
echo "HTTP $HTTP_CODE"
|
||||
# Rule 8 fix: redact raw CP response from CI logs. Print only
|
||||
# safe fields: ok boolean, result count, error presence (no content).
|
||||
jq '{ok, result_count: (.results | length), has_errors: (.results | any(.error != null))}' "$HTTP_RESPONSE" || echo "(jq parse failed)"
|
||||
cat "$HTTP_RESPONSE" | jq . || cat "$HTTP_RESPONSE"
|
||||
|
||||
# Pretty-print per-tenant results in the job summary so
|
||||
# ops can see which tenants were redeployed without drilling
|
||||
@ -209,11 +204,9 @@ jobs:
|
||||
echo ""
|
||||
echo "### Per-tenant result"
|
||||
echo ""
|
||||
echo '| Slug | Phase | SSM Status | Exit | Healthz | Errors |'
|
||||
echo '| Slug | Phase | SSM Status | Exit | Healthz | Error |'
|
||||
echo '|------|-------|------------|------|---------|-------|'
|
||||
# Rule 8 fix: .error field redacted from CI logs/summary. Print only
|
||||
# presence boolean so ops know whether to look deeper.
|
||||
jq -r '.results[]? | "| \(.slug) | \(.phase) | \(.ssm_status // "-") | \(.ssm_exit_code) | \(.healthz_ok) | \(.error != null) |"' "$HTTP_RESPONSE" || true
|
||||
jq -r '.results[]? | "| \(.slug) | \(.phase) | \(.ssm_status // "-") | \(.ssm_exit_code) | \(.healthz_ok) | \(.error // "-") |"' "$HTTP_RESPONSE" || true
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
if [ "$HTTP_CODE" != "200" ]; then
|
||||
@ -252,11 +245,13 @@ jobs:
|
||||
# fail the workflow, which is what `ok=true` should have
|
||||
# guaranteed all along.
|
||||
#
|
||||
# When the redeploy is triggered manually with a specific tag
|
||||
# (target_tag != "latest"), the expected SHA may not equal
|
||||
# ${{ github.sha }}.
|
||||
# When the redeploy was triggered by workflow_dispatch with a
|
||||
# specific tag (target_tag != "latest"), the expected SHA may
|
||||
# not equal ${{ github.sha }} — in that case we resolve via
|
||||
# GHCR's manifest. For workflow_run (default :latest) the
|
||||
# workflow_run.head_sha is the SHA that just published.
|
||||
env:
|
||||
EXPECTED_SHA: ${{ github.sha }}
|
||||
EXPECTED_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
TARGET_TAG: ${{ steps.tag.outputs.target_tag }}
|
||||
# Tenant subdomain template — slugs from the response are
|
||||
# appended. Production CP issues `<slug>.moleculesai.app`;
|
||||
@ -270,10 +265,10 @@ jobs:
|
||||
if [ "$TARGET_TAG" != "latest" ] \
|
||||
&& [ "$TARGET_TAG" != "$EXPECTED_SHA" ] \
|
||||
&& [ "$TARGET_TAG" != "staging-$EXPECTED_SHORT" ]; then
|
||||
# Manual redeploy with a pinned tag that isn't the head
|
||||
# workflow_dispatch with a pinned tag that isn't the head
|
||||
# SHA — operator is rolling back / pinning. Skip the
|
||||
# verification because we don't have the expected SHA in
|
||||
# this context (would need to inspect the ECR
|
||||
# this context (would need to crane-inspect the GHCR
|
||||
# manifest, which is a follow-up). Failing-open here is
|
||||
# safe: the operator chose the tag deliberately.
|
||||
#
|
||||
|
||||
@ -9,13 +9,12 @@ name: redeploy-tenants-on-staging
|
||||
# - Workflow-level env.GITHUB_SERVER_URL pinned per
|
||||
# feedback_act_runner_github_server_url.
|
||||
# - `continue-on-error: true` on each job (RFC §1 contract).
|
||||
# - ~~**Gitea workflow_run trigger limitation**~~ FIXED: replaced with
|
||||
# push+paths filter per this PR. Gitea 1.22.6 does not support
|
||||
# `workflow_run` (task #81). The push trigger fires on every
|
||||
# commit to publish-workspace-server-image.yml which is the
|
||||
# same signal (only successful runs commit to main). Removed
|
||||
# `workflow_run.conclusion==success` job if since push implies
|
||||
# the workflow completed and committed.
|
||||
# - **Gitea workflow_run trigger limitation**: Gitea 1.22.6's support
|
||||
# for the `workflow_run` event is partial. If this never fires on a
|
||||
# real publish-workspace-server-image completion, the follow-up
|
||||
# triage PR should replace the trigger with a push-with-paths-filter
|
||||
# on .gitea/workflows/publish-workspace-server-image.yml. Until
|
||||
# then continue-on-error+dead-workflow doesn't break anything.
|
||||
#
|
||||
|
||||
# Auto-refresh staging tenant EC2s after every staging-branch merge.
|
||||
@ -51,11 +50,10 @@ name: redeploy-tenants-on-staging
|
||||
# of a known-good build.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [staging]
|
||||
paths:
|
||||
- '.gitea/workflows/publish-workspace-server-image.yml'
|
||||
workflow_dispatch:
|
||||
workflow_run:
|
||||
workflows: ['publish-workspace-server-image']
|
||||
types: [completed]
|
||||
branches: [main]
|
||||
permissions:
|
||||
contents: read
|
||||
# No write scopes needed — the workflow hits an external CP endpoint,
|
||||
@ -73,11 +71,15 @@ env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
jobs:
|
||||
# bp-exempt: post-merge staging redeploy side effect; CI / all-required gates source changes.
|
||||
redeploy:
|
||||
# Skip the auto-trigger if publish-workspace-server-image didn't
|
||||
# actually succeed. workflow_run fires on any completion state; we
|
||||
# don't want to redeploy against a half-built image.
|
||||
# NOTE (Gitea port): workflow_dispatch trigger dropped; only the
|
||||
# workflow_run path remains.
|
||||
if: ${{ github.event.workflow_run.conclusion == 'success' }}
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 25
|
||||
steps:
|
||||
|
||||
@ -1,72 +0,0 @@
|
||||
name: review-check-tests
|
||||
|
||||
# Runs review-check.sh regression tests on every PR + push that touches
|
||||
# the evaluator script or its test fixtures.
|
||||
#
|
||||
# Follows RFC#324 follow-up (issue #540):
|
||||
# .gitea/scripts/review-check.sh is load-bearing for PR merge gates.
|
||||
# It has ZERO production CI coverage. This workflow closes that gap.
|
||||
#
|
||||
# Design choices:
|
||||
# - Bash test harness (not bats). The existing test_review_check.sh
|
||||
# uses a custom assert_eq/assert_contains framework that is already
|
||||
# working and covers all 13 acceptance criteria (issue #540 §Acceptance).
|
||||
# Converting to bats would be refactoring, not closing the gap.
|
||||
# - No bats dependency: the runner-base image needs no extra tooling.
|
||||
# - continue-on-error: false — these tests must pass; a failure means
|
||||
# the review-gate evaluator is broken and must not be merged.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- '.gitea/scripts/review-check.sh'
|
||||
- '.gitea/scripts/tests/test_review_check.sh'
|
||||
- '.gitea/scripts/tests/_review_check_fixture.py'
|
||||
- '.gitea/workflows/review-check-tests.yml'
|
||||
pull_request:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- '.gitea/scripts/review-check.sh'
|
||||
- '.gitea/scripts/tests/test_review_check.sh'
|
||||
- '.gitea/scripts/tests/_review_check_fixture.py'
|
||||
- '.gitea/workflows/review-check-tests.yml'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# bp-exempt: review tooling regression suite; CI / all-required is the required aggregate.
|
||||
test:
|
||||
name: review-check.sh regression tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Install jq
|
||||
# Required for T12 jq-filter test case. Gitea Actions runners (ubuntu-latest
|
||||
# label) do not bundle jq. Install via apt-get first (reliable for Ubuntu
|
||||
# runners with internet access to package mirrors). Falls back to GitHub
|
||||
# binary download. GitHub releases may be blocked on some runner networks
|
||||
# (infra#241 follow-up).
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
run: |
|
||||
if apt-get update -qq && apt-get install -y -qq jq; then
|
||||
echo "::notice::jq installed via apt-get: $(jq --version)"
|
||||
elif timeout 120 curl -sSL \
|
||||
"https://github.com/jqlang/jq/releases/download/jq-1.7.1/jq-linux-amd64" \
|
||||
-o /usr/local/bin/jq && chmod +x /usr/local/bin/jq; then
|
||||
echo "::notice::jq binary downloaded: $(/usr/local/bin/jq --version)"
|
||||
else
|
||||
echo "::warning::jq install failed — apt-get and GitHub download both failed."
|
||||
fi
|
||||
jq --version 2>/dev/null || echo "::notice::jq not yet available — continuing"
|
||||
|
||||
- name: Run review-check.sh regression suite
|
||||
run: bash .gitea/scripts/tests/test_review_check.sh
|
||||
@ -1,109 +0,0 @@
|
||||
# Consolidated comment dispatcher for manual review/tier refires.
|
||||
#
|
||||
# Gitea 1.22 queues one run per workflow subscribed to `issue_comment` before
|
||||
# evaluating job-level `if:`. SOP-heavy PRs therefore created queue storms when
|
||||
# qa-review, security-review, sop-checklist, and sop-tier-refire all
|
||||
# listened to comments. This workflow is the single non-SOP comment subscriber:
|
||||
# ordinary comments no-op quickly; slash commands post the required status
|
||||
# contexts to the PR head SHA.
|
||||
|
||||
name: review-refire-comments
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
statuses: write
|
||||
|
||||
jobs:
|
||||
dispatch:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Classify comment
|
||||
id: classify
|
||||
env:
|
||||
COMMENT_BODY: ${{ github.event.comment.body }}
|
||||
IS_PR: ${{ github.event.issue.pull_request != null }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
{
|
||||
echo "run_qa=false"
|
||||
echo "run_security=false"
|
||||
echo "run_tier=false"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
if [ "$IS_PR" != "true" ]; then
|
||||
echo "::notice::not a PR comment; no-op"
|
||||
exit 0
|
||||
fi
|
||||
first_line=$(printf '%s\n' "$COMMENT_BODY" | sed -n '1p')
|
||||
case "$first_line" in
|
||||
/qa-recheck*)
|
||||
echo "run_qa=true" >> "$GITHUB_OUTPUT"
|
||||
;;
|
||||
/security-recheck*)
|
||||
echo "run_security=true" >> "$GITHUB_OUTPUT"
|
||||
;;
|
||||
/refire-tier-check*)
|
||||
echo "run_tier=true" >> "$GITHUB_OUTPUT"
|
||||
;;
|
||||
*)
|
||||
echo "::notice::no supported review refire slash command; no-op"
|
||||
;;
|
||||
esac
|
||||
|
||||
- name: Check out BASE ref for trusted scripts
|
||||
if: |
|
||||
steps.classify.outputs.run_qa == 'true' ||
|
||||
steps.classify.outputs.run_security == 'true' ||
|
||||
steps.classify.outputs.run_tier == 'true'
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event.repository.default_branch }}
|
||||
|
||||
- name: Refire qa-review status
|
||||
if: steps.classify.outputs.run_qa == 'true'
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.RFC_324_TEAM_READ_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
PR_NUMBER: ${{ github.event.issue.number }}
|
||||
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
TEAM: qa
|
||||
TEAM_ID: '20'
|
||||
REVIEW_CHECK_DEBUG: '0'
|
||||
REVIEW_CHECK_STRICT: '0'
|
||||
COMMENT_AUTHOR: ${{ github.event.comment.user.login }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
.gitea/scripts/review-refire-status.sh
|
||||
|
||||
- name: Refire security-review status
|
||||
if: steps.classify.outputs.run_security == 'true'
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.RFC_324_TEAM_READ_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
PR_NUMBER: ${{ github.event.issue.number }}
|
||||
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
TEAM: security
|
||||
TEAM_ID: '21'
|
||||
REVIEW_CHECK_DEBUG: '0'
|
||||
REVIEW_CHECK_STRICT: '0'
|
||||
COMMENT_AUTHOR: ${{ github.event.comment.user.login }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
.gitea/scripts/review-refire-status.sh
|
||||
|
||||
- name: Refire sop-tier-check status
|
||||
if: steps.classify.outputs.run_tier == 'true'
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
PR_NUMBER: ${{ github.event.issue.number }}
|
||||
COMMENT_AUTHOR: ${{ github.event.comment.user.login }}
|
||||
SOP_DEBUG: '0'
|
||||
run: bash .gitea/scripts/sop-tier-refire.sh
|
||||
@ -67,7 +67,6 @@ jobs:
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking
|
||||
# the PR. Follow-up PR flips this off after surfaced defects are
|
||||
# triaged.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
@ -52,7 +52,6 @@ jobs:
|
||||
detect-changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
outputs:
|
||||
wheel: ${{ steps.decide.outputs.wheel }}
|
||||
@ -66,28 +65,19 @@ jobs:
|
||||
# PR#372's ci.yml port used. Diffs against the PR base or the
|
||||
# previous push SHA, then matches against the wheel-relevant
|
||||
# path set.
|
||||
#
|
||||
# NOTE: Gitea Actions does not expose github.event.before as a
|
||||
# shell environment variable. The ${{ github.event.before }} template
|
||||
# expression works inside YAML run: blocks but is evaluated to an
|
||||
# empty string for push events, making the ${VAR:-fallback} always
|
||||
# use the fallback. Use GITHUB_EVENT_BEFORE instead — it IS set in
|
||||
# the runner's shell environment for push events.
|
||||
BASE=""
|
||||
if [ "${{ github.event_name }}" = "pull_request" ]; then
|
||||
BASE="${GITHUB_BASE_REF:-${{ github.event.before }}}"
|
||||
if [ "${{ github.event_name }}" = "pull_request" ] && [ -n "${{ github.event.pull_request.base.sha }}" ]; then
|
||||
BASE="${{ github.event.pull_request.base.sha }}"
|
||||
elif [ -n "$GITHUB_EVENT_BEFORE" ]; then
|
||||
BASE="$GITHUB_EVENT_BEFORE"
|
||||
fi
|
||||
if [ -z "$BASE" ] || echo "$BASE" | grep -qE '^0+$'; then
|
||||
# New branch or no previous SHA: treat as wheel-relevant.
|
||||
echo "wheel=true" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
if ! timeout 30 git cat-file -e "$BASE" 2>/dev/null; then
|
||||
if ! git cat-file -e "$BASE" 2>/dev/null; then
|
||||
git fetch --depth=1 origin "$BASE" 2>/dev/null || true
|
||||
fi
|
||||
if ! timeout 30 git cat-file -e "$BASE" 2>/dev/null; then
|
||||
if ! git cat-file -e "$BASE" 2>/dev/null; then
|
||||
echo "wheel=true" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
@ -106,7 +96,6 @@ jobs:
|
||||
name: PR-built wheel + import smoke
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- name: No-op pass (paths filter excluded this commit)
|
||||
|
||||
@ -57,7 +57,6 @@ jobs:
|
||||
name: Detect SECRET_PATTERNS drift
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
|
||||
@ -12,18 +12,22 @@ name: security-review
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened]
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
|
||||
jobs:
|
||||
# bp-exempt: PR security review bot signal; required merge state is enforced by CI / all-required.
|
||||
approved:
|
||||
# Comment-triggered refires live in review-refire-comments.yml. Keeping
|
||||
# this workflow PR-only avoids comment-triggered queue storms.
|
||||
# See qa-review.yml header for full A1-α / A1.1 (v1.3 — informational
|
||||
# log only, NOT a gate) / A4 / A5 design rationale.
|
||||
if: |
|
||||
github.event_name == 'pull_request_target'
|
||||
github.event_name == 'pull_request_target' ||
|
||||
(github.event_name == 'issue_comment' &&
|
||||
github.event.issue.pull_request != null &&
|
||||
startsWith(github.event.comment.body, '/security-recheck'))
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Privilege check (A1.1 — INFORMATIONAL log only, NOT a gate)
|
||||
@ -32,7 +36,7 @@ jobs:
|
||||
# so re-running on a non-collaborator comment is harmless.
|
||||
if: github.event_name == 'issue_comment'
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_TOKEN: ${{ secrets.RFC_324_TEAM_READ_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
login="${{ github.event.comment.user.login }}"
|
||||
@ -57,11 +61,10 @@ jobs:
|
||||
|
||||
- name: Evaluate security-review
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_TOKEN: ${{ secrets.RFC_324_TEAM_READ_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number || github.event.issue.number }}
|
||||
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
TEAM: security
|
||||
TEAM_ID: '21'
|
||||
REVIEW_CHECK_DEBUG: '0'
|
||||
|
||||
@ -1,130 +0,0 @@
|
||||
# sop-checklist — peer-ack merge gate for SOP-checklist items.
|
||||
#
|
||||
# RFC#351 Step 2 of 6 (implementation MVP).
|
||||
#
|
||||
# === DESIGN ===
|
||||
#
|
||||
# Goal: each PR must answer 7 SOP-checklist questions in its body,
|
||||
# and each item must have at least one /sop-ack <slug> comment from
|
||||
# a non-author peer in the required team. BP requires the
|
||||
# `sop-checklist / all-items-acked (pull_request)` status to merge.
|
||||
#
|
||||
# Triggers:
|
||||
# - `pull_request_target`: opened, edited, synchronize, reopened
|
||||
# → fires when PR opens, body is edited (refire — RFC#351 §4),
|
||||
# or new code is pushed (head.sha changes → stale status would
|
||||
# be auto-discarded by BP via dismiss_stale_reviews, but the
|
||||
# status itself is per-SHA so we re-post on the new head).
|
||||
# - `issue_comment`: created, edited, deleted
|
||||
# → fires on any new comment so /sop-ack / /sop-revoke take
|
||||
# effect immediately (Gitea 1.22.6 doesn't refire on
|
||||
# pull_request_review per feedback_pull_request_review_no_refire,
|
||||
# so issue_comment is the canonical refire channel).
|
||||
#
|
||||
# Trust boundary (mirrors RFC#324 §A4 + sop-tier-check security note):
|
||||
# `pull_request_target` (not `pull_request`) — workflow def is loaded
|
||||
# from BASE branch, so a PR cannot rewrite this workflow to exfiltrate
|
||||
# the token. The `actions/checkout` step pins `ref: base.sha` so the
|
||||
# script ALSO comes from BASE. PR-HEAD code is never executed in the
|
||||
# runner.
|
||||
#
|
||||
# Token scope:
|
||||
# - read:repository, read:organization for PR + comments + team probes
|
||||
# - write:repository for POST /statuses/{sha}
|
||||
# - The token owner MUST be a member of every team referenced by the
|
||||
# config's required_teams (else /teams/{id}/members/{login} returns
|
||||
# 403 — see review-check.sh same-gotcha doc). For the MVP we use
|
||||
# the dev-lead token (a member of engineers, managers, qa, security)
|
||||
# via a repo secret `SOP_CHECKLIST_GATE_TOKEN`. Provisioning of that
|
||||
# secret is a follow-up authorization step (separate from this PR).
|
||||
#
|
||||
# Failure mode: tier-aware (RFC#351 open question 2):
|
||||
# - tier:high → state=failure (hard-fail; BP blocks merge)
|
||||
# - tier:medium → state=failure (hard-fail; same)
|
||||
# - tier:low → state=pending (soft-fail; BP can choose to require
|
||||
# this context or skip for low-tier PRs)
|
||||
# - missing/no-tier → state=failure (default-mode: hard — never lower
|
||||
# the bar per feedback_fix_root_not_symptom)
|
||||
#
|
||||
# Slash-command contract (RFC#351 v1 + §A1.1-style notes from RFC#324):
|
||||
#
|
||||
# /sop-ack <slug-or-numeric-alias> [optional note]
|
||||
# — register a peer-ack for one checklist item.
|
||||
# — slug accepts kebab-case, snake_case, or natural-spaces
|
||||
# (all normalize to canonical kebab-case).
|
||||
# — numeric 1..7 maps via config.items[*].numeric_alias.
|
||||
# — most-recent (user, slug) directive wins.
|
||||
#
|
||||
# /sop-revoke <slug-or-numeric-alias> [reason]
|
||||
# — invalidate the commenter's own prior /sop-ack for this slug.
|
||||
# — does NOT affect other peers' acks on the same slug.
|
||||
# — most-recent (user, slug) directive wins, so a later /sop-ack
|
||||
# re-restores the ack.
|
||||
#
|
||||
# The eval is read-only + idempotent (read PR + comments + team
|
||||
# membership, compute, post status). Re-running on any event is safe —
|
||||
# the new status overwrites the previous one for the same context.
|
||||
|
||||
name: sop-checklist
|
||||
|
||||
# Cancel any in-progress runs for the same PR to prevent
|
||||
# stale runs from overwriting newer status contexts.
|
||||
concurrency:
|
||||
group: ${{ github.repository }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
# bp-required: yes ← emits sop-checklist / all-items-acked (pull_request)
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, edited, synchronize, reopened, labeled, unlabeled]
|
||||
issue_comment:
|
||||
types: [created, edited, deleted]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
# NOTE: `statuses: write` is the GitHub-Actions name for POST /statuses.
|
||||
# Gitea 1.22.6 may not gate on this permission key (it just checks the
|
||||
# token), but listing it explicitly documents intent for the next
|
||||
# platform-version upgrade.
|
||||
statuses: write
|
||||
|
||||
jobs:
|
||||
all-items-acked:
|
||||
# Run on pull_request_target events always. On issue_comment events,
|
||||
# only when the comment is on a PR (issue_comment fires for issues
|
||||
# too) and the body contains one of the slash-commands.
|
||||
if: |
|
||||
github.event_name == 'pull_request_target' ||
|
||||
(github.event_name == 'issue_comment' &&
|
||||
github.event.issue.pull_request != null &&
|
||||
(contains(github.event.comment.body, '/sop-ack') ||
|
||||
contains(github.event.comment.body, '/sop-revoke') ||
|
||||
contains(github.event.comment.body, '/sop-n/a')))
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out BASE ref (trust boundary — never PR-head)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
# For pull_request_target, the default branch is the trust
|
||||
# anchor. For issue_comment the PR base may differ from the
|
||||
# default branch (PR targeting `staging`), so we use the
|
||||
# default-branch ref explicitly — same approach as
|
||||
# qa-review.yml so the script source is always trusted.
|
||||
ref: ${{ github.event.repository.default_branch }}
|
||||
|
||||
- name: Run sop-checklist
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_CHECKLIST_GATE_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number || github.event.issue.number }}
|
||||
OWNER: ${{ github.repository_owner }}
|
||||
REPO_NAME: ${{ github.event.repository.name }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 .gitea/scripts/sop-checklist.py \
|
||||
--owner "$OWNER" \
|
||||
--repo "$REPO_NAME" \
|
||||
--pr "$PR_NUMBER" \
|
||||
--config .gitea/sop-checklist-config.yaml \
|
||||
--gitea-host git.moleculesai.app
|
||||
@ -28,16 +28,15 @@
|
||||
#
|
||||
# Environment variables:
|
||||
# SOP_DEBUG=1 — per-API-call diagnostic lines. Default: off.
|
||||
# SOP_LEGACY_CHECK=1 — revert to OR-gate for this run. Intended for
|
||||
# emergency use only; burn-in window closed
|
||||
# 2026-05-17 (internal#189 Phase 1).
|
||||
# SOP_LEGACY_CHECK=1 — revert to OR-gate for this run. Grace window
|
||||
# for PRs in-flight when AND-composition deployed.
|
||||
# Burn-in: remove after 2026-05-17 (7-day window).
|
||||
#
|
||||
# BURN-IN CLOSED 2026-05-17 (internal#189 Phase 1): The 7-day burn-in
|
||||
# window closed. continue-on-error: true has been removed from the
|
||||
# tier-check job; AND-composition is now fully enforced. If you need
|
||||
# to temporarily re-introduce a mask, file a tracker and follow the
|
||||
# mc#774 protocol (Tier 2e lint requires a current tracker within
|
||||
# 2 lines of any continue-on-error: true).
|
||||
# BURN-IN NOTE (internal#189 Phase 1): continue-on-error: true is set on
|
||||
# the tier-check job below. This prevents AND-composition from blocking
|
||||
# PRs during the 7-day burn-in. After 2026-05-17:
|
||||
# 1. Remove `continue-on-error: true` from this job block.
|
||||
# 2. Update this BURN-IN NOTE comment to mark the window closed.
|
||||
|
||||
name: sop-tier-check
|
||||
|
||||
@ -64,6 +63,9 @@ on:
|
||||
jobs:
|
||||
tier-check:
|
||||
runs-on: ubuntu-latest
|
||||
# BURN-IN: continue-on-error prevents AND-composition from blocking
|
||||
# PRs during the 7-day window. Remove after 2026-05-17 (internal#189).
|
||||
continue-on-error: true
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
@ -87,7 +89,6 @@ jobs:
|
||||
# runners). The sop-tier-check script has its own fallback as a
|
||||
# third line of defense. continue-on-error: true ensures this step
|
||||
# failing does not block the job.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
run: |
|
||||
# apt-get is the primary method — Ubuntu package mirrors are reliably
|
||||
@ -108,7 +109,6 @@ jobs:
|
||||
# continue-on-error: true at step level — job-level is ignored by Gitea
|
||||
# Actions (quirk #10, internal runbooks). Belt-and-suspenders with
|
||||
# SOP_FAIL_OPEN=1 + || true below.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
# sop-tier-refire — manual fallback for sop-tier-check refire.
|
||||
# sop-tier-refire — issue_comment-triggered refire of sop-tier-check.
|
||||
#
|
||||
# Closes internal#292. Gitea 1.22.6 doesn't refire workflows on the
|
||||
# `pull_request_review` event (go-gitea/gitea#33700); the `sop-tier-check`
|
||||
@ -8,12 +8,12 @@
|
||||
# to merge is the admin force-merge path (audited via `audit-force-merge`
|
||||
# but the audit trail keeps growing; see `feedback_never_admin_merge_bypass`).
|
||||
#
|
||||
# Comment-triggered refires now live in `review-refire-comments.yml`. Gitea
|
||||
# queues issue_comment workflows before evaluating job-level `if:`, so having
|
||||
# qa-review, security-review, sop-checklist, and sop-tier-refire all subscribe
|
||||
# to every comment caused queue storms on SOP-heavy PRs. This workflow is a
|
||||
# non-automatic breadcrumb only; Gitea 1.22.6 does not support
|
||||
# workflow_dispatch inputs, so real refires must use `/refire-tier-check`.
|
||||
# Workaround pattern from `feedback_pull_request_review_no_refire`:
|
||||
# `issue_comment` events DO fire reliably on 1.22.6. When a repo
|
||||
# MEMBER/OWNER/COLLABORATOR comments `/refire-tier-check` on a PR, this
|
||||
# workflow re-runs the sop-tier-check logic and POSTs the resulting
|
||||
# status to the PR head SHA directly. No empty commit, no git history
|
||||
# bloat, no cascade re-fire of every other workflow on the PR.
|
||||
#
|
||||
# SECURITY MODEL:
|
||||
#
|
||||
@ -37,16 +37,43 @@
|
||||
# Rate-limit: a 1s pre-sleep + a "skip if status posted in last 30s"
|
||||
# guard prevents comment-spam from thrashing the status. See the script.
|
||||
|
||||
name: sop-tier-check refire (manual)
|
||||
name: sop-tier-check refire (issue_comment)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
jobs:
|
||||
refire:
|
||||
# Three gates, all required:
|
||||
# - comment is on a PR (not a plain issue)
|
||||
# - commenter is MEMBER, OWNER, or COLLABORATOR
|
||||
# - comment body contains the slash-command trigger
|
||||
if: |
|
||||
github.event.issue.pull_request != null &&
|
||||
contains(fromJson('["MEMBER","OWNER","COLLABORATOR"]'), github.event.comment.author_association) &&
|
||||
contains(github.event.comment.body, '/refire-tier-check')
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
statuses: write
|
||||
steps:
|
||||
- name: Explain supported refire path
|
||||
run: |
|
||||
echo "::error::Gitea 1.22.6 does not support workflow_dispatch inputs here; comment /refire-tier-check on the PR instead."
|
||||
exit 1
|
||||
- name: Check out base branch (for the script)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
# Load the script from the default branch (main), matching the
|
||||
# sop-tier-check.yml security model.
|
||||
ref: ${{ github.event.repository.default_branch }}
|
||||
- name: Re-evaluate sop-tier-check and POST status
|
||||
env:
|
||||
# Same org-level secret sop-tier-check.yml + audit-force-merge.yml use.
|
||||
# Fallback to GITHUB_TOKEN with a clear error if missing.
|
||||
GITEA_TOKEN: ${{ secrets.SOP_TIER_CHECK_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
PR_NUMBER: ${{ github.event.issue.number }}
|
||||
COMMENT_AUTHOR: ${{ github.event.comment.user.login }}
|
||||
# Set to '1' for diagnostic per-API-call output. Off by default.
|
||||
SOP_DEBUG: '0'
|
||||
run: bash .gitea/scripts/sop-tier-refire.sh
|
||||
|
||||
@ -11,14 +11,11 @@ name: Staging verify
|
||||
# - Workflow-level env.GITHUB_SERVER_URL pinned per
|
||||
# feedback_act_runner_github_server_url.
|
||||
# - `continue-on-error: true` on each job (RFC §1 contract).
|
||||
# - ~~**Gitea workflow_run trigger limitation**~~ FIXED: replaced with
|
||||
# push+paths filter per this PR. Gitea 1.22.6 does not support
|
||||
# `workflow_run` (task #81). The push trigger fires on every
|
||||
# commit to publish-workspace-server-image.yml. Removed the
|
||||
# `workflow_run.conclusion==success` job if since the push trigger
|
||||
# doesn't carry completion state — the smoke test is the safety net
|
||||
# (it will detect and abort on a bad image regardless). Added
|
||||
# workflow_dispatch for manual runs.
|
||||
# - **Gitea workflow_run trigger limitation**: Gitea 1.22.6's support
|
||||
# for the `workflow_run` event is partial. If this never fires on a
|
||||
# real publish-workspace-server-image completion, the follow-up
|
||||
# triage PR should replace the trigger with a push-with-paths-filter
|
||||
# on the same publish workflow's path (i.e. `.gitea/workflows/publish-workspace-server-image.yml`).
|
||||
#
|
||||
|
||||
# Runs the canary smoke suite against the staging canary tenant fleet
|
||||
@ -62,11 +59,9 @@ name: Staging verify
|
||||
# are populated.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [staging]
|
||||
paths:
|
||||
- '.gitea/workflows/publish-workspace-server-image.yml'
|
||||
workflow_dispatch:
|
||||
workflow_run:
|
||||
workflows: ["publish-workspace-server-image"]
|
||||
types: [completed]
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
@ -82,11 +77,13 @@ env:
|
||||
GITHUB_SERVER_URL: https://git.moleculesai.app
|
||||
|
||||
jobs:
|
||||
# bp-exempt: post-merge staging verification side effect; CI / all-required gates merges.
|
||||
staging-smoke:
|
||||
# Skip when the upstream workflow failed — no image to test against.
|
||||
# workflow_dispatch trigger dropped in this Gitea port; only the
|
||||
# workflow_run path remains.
|
||||
if: ${{ github.event.workflow_run.conclusion == 'success' }}
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
outputs:
|
||||
sha: ${{ steps.compute.outputs.sha }}
|
||||
@ -191,7 +188,6 @@ jobs:
|
||||
echo "assertions in the staging-smoke step log above."
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
# bp-exempt: post-merge image promotion side effect; staging-smoke controls promotion.
|
||||
promote-to-latest:
|
||||
# On green, calls the CP redeploy-fleet endpoint with target_tag=
|
||||
# staging-<sha> to promote the verified ECR image. This is the same
|
||||
@ -208,7 +204,6 @@ jobs:
|
||||
if: ${{ needs.staging-smoke.result == 'success' && needs.staging-smoke.outputs.smoke_ran == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
env:
|
||||
SHA: ${{ needs.staging-smoke.outputs.sha }}
|
||||
|
||||
@ -1,124 +0,0 @@
|
||||
# status-reaper — Option B (compensating-status POST) for Gitea 1.22.6's
|
||||
# hardcoded `(push)` suffix on default-branch commit statuses.
|
||||
#
|
||||
# Tracking: molecule-core#? (this PR), internal#327 (sibling publish-runtime-bot),
|
||||
# internal#328 (sibling mc-drift-bot), internal#80 (upstream RFC). Sister
|
||||
# bots already deployed under the same per-persona-identity contract
|
||||
# (`feedback_per_agent_gitea_identity_default`).
|
||||
#
|
||||
# Root cause:
|
||||
# Gitea 1.22.6 emits commit-status context as
|
||||
# `<workflow_name> / <job_name> (push)`
|
||||
# for ANY workflow run on the default branch's HEAD commit, REGARDLESS
|
||||
# of the trigger event. Schedule- and workflow_dispatch-triggered runs
|
||||
# on `main` therefore appear as `(push)` failures on the latest main
|
||||
# commit, painting main red via a fake-push status. Verified on runs
|
||||
# 14525 + 14526 via Phase 1 evidence (3 sub-agents). No upstream fix
|
||||
# in 1.23-1.26.1 (sibling a6f20db1 research).
|
||||
#
|
||||
# Why a cron-driven reaper, not workflow_run:
|
||||
# Gitea 1.22.6 does NOT support `on: workflow_run` (verified via
|
||||
# modules/actions/workflows.go enumeration; sister a6f20db1). The
|
||||
# only event-shaped option that fires is cron. 5min is chosen to
|
||||
# sit BETWEEN ci-required-drift (`:17` hourly) and main-red-watchdog
|
||||
# (`:05` hourly) so the reaper sweeps red before the watchdog files
|
||||
# a `[main-red]` issue (would-be false-positive).
|
||||
#
|
||||
# What the reaper does each tick:
|
||||
# 1. Parse `.gitea/workflows/*.yml`, classify each by whether `on:`
|
||||
# contains a `push:` trigger (see script for workflow_id resolution
|
||||
# including `name:` collision and `/`-in-name fail-loud lints).
|
||||
# 2. GET combined status for main HEAD.
|
||||
# 3. For each `failure` status whose context ends ` (push)`:
|
||||
# - if workflow has push trigger: PRESERVE (real defect signal).
|
||||
# - if workflow has no push trigger: POST a compensating
|
||||
# `state=success` with the same context and a description that
|
||||
# documents the workaround.
|
||||
#
|
||||
# What it does NOT do:
|
||||
# - Mutate non-`(push)`-suffix statuses (e.g. `(pull_request)` from
|
||||
# branch_protections required-checks — verified safe 2026-05-11).
|
||||
# - Auto-revert. Same reasoning as main-red-watchdog.
|
||||
# - Cancel runs. The runs themselves stay visible in Actions UI; the
|
||||
# fix is at the commit-status surface only.
|
||||
#
|
||||
# Removal path: drop this workflow when Gitea ≥ 1.24 ships with a
|
||||
# real fix for the hardcoded-suffix bug. Audit issue (filed post-merge)
|
||||
# tracks the deletion as a follow-up sweep.
|
||||
|
||||
name: status-reaper
|
||||
|
||||
# IMPORTANT — Gitea 1.22.6 parser quirk per
|
||||
# `feedback_gitea_workflow_dispatch_inputs_unsupported`: do NOT add an
|
||||
# `inputs:` block here. Gitea 1.22.6 rejects the whole workflow as
|
||||
# "unknown on type" when `workflow_dispatch.inputs.X` is present.
|
||||
on:
|
||||
# SCHEDULE RE-ENABLED 2026-05-12 rev3 — interim disable (mc#645) reverted now that
|
||||
# rev3 widens DEFAULT_SWEEP_LIMIT 10 → 30 (covers retroactive-failure timing window).
|
||||
# Sibling watchdog re-enabled in the same PR with timeout-minutes raised 5 → 15.
|
||||
schedule:
|
||||
# Every 5 minutes. Off-zero alignment with sibling cron workflows:
|
||||
# ci-required-drift (`:17`), main-red-watchdog (`:05`),
|
||||
# railway-pin-audit (`:23`). 5-min cadence gives a tight enough
|
||||
# close on schedule-triggered false-reds that main-red-watchdog
|
||||
# (hourly :05) almost never files an issue on the false case.
|
||||
# rev3 keeps `*/5` unchanged per hongming-pc2 03:25Z review:
|
||||
# "trades window-width-cheap for cadence-loady" — N=30 widens
|
||||
# the lookback cheaply without doubling runner load via `*/2`.
|
||||
- cron: '*/5 * * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
# Compensating-status POST needs write on repo statuses; no other
|
||||
# write surface is touched. checkout still needs `contents: read`.
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
# NOTE: NO `concurrency:` block is intentional.
|
||||
# Gitea 1.22.6 doesn't honor `cancel-in-progress: false`: queued ticks
|
||||
# of the same group get cancelled-with-started=0 instead of waiting
|
||||
# (DB-verified 2026-05-12, runs 16053/16085 of status-reaper.yml).
|
||||
# The reaper's POST /statuses/{sha} is idempotent — Gitea de-dups by
|
||||
# context — so concurrent ticks are safe; accept them rather than
|
||||
# serialise via the broken mechanism.
|
||||
|
||||
jobs:
|
||||
reap:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 8
|
||||
steps:
|
||||
- name: Check out repo at default-branch HEAD
|
||||
# BASE checkout per `feedback_pull_request_target_workflow_from_base`.
|
||||
# The script reads .gitea/workflows/*.yml from the working tree to
|
||||
# classify trigger sets; we must read main's CURRENT state, not
|
||||
# the SHA a stale schedule fired against.
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ github.event.repository.default_branch }}
|
||||
|
||||
- name: Set up Python (PyYAML for workflow `on:` parse)
|
||||
# Pinned to 3.12 to match sibling watchdog / ci-required-drift.
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install PyYAML
|
||||
# PyYAML is needed because shell-grep on `on:` misses list/string
|
||||
# forms and nested `push: { paths: ... }`. Same install pattern
|
||||
# as ci-required-drift.yml (sub-2s install, no wheel cache).
|
||||
run: python -m pip install --quiet 'PyYAML==6.0.2'
|
||||
|
||||
- name: Compensate operational push-suffix failures on main
|
||||
env:
|
||||
# claude-status-reaper persona token; provisioned by sibling
|
||||
# aefaac1b 2026-05-11. Owns write:repository scope to POST
|
||||
# /statuses/{sha} but NOTHING ELSE
|
||||
# (`feedback_per_agent_gitea_identity_default`).
|
||||
GITEA_TOKEN: ${{ secrets.STATUS_REAPER_TOKEN }}
|
||||
GITEA_HOST: git.moleculesai.app
|
||||
REPO: ${{ github.repository }}
|
||||
WATCH_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
WORKFLOWS_DIR: .gitea/workflows
|
||||
STATUS_REAPER_API_RETRIES: "4"
|
||||
STATUS_REAPER_API_TIMEOUT_SEC: "20"
|
||||
STATUS_REAPER_API_RETRY_SLEEP_SEC: "2"
|
||||
run: python3 .gitea/scripts/status-reaper.py
|
||||
@ -29,26 +29,26 @@ name: Sweep stale AWS Secrets Manager secrets
|
||||
# reconciler enumerator) is filed as a separate controlplane
|
||||
# issue. This sweeper is the immediate cost-relief stopgap.
|
||||
#
|
||||
# AWS credentials: use the dedicated Secrets Manager janitor principal.
|
||||
# Do not fall back to the molecule-cp application principal: it does
|
||||
# not need account-wide ListSecrets, and a 2026-05-12 CI failure proved
|
||||
# that using it here turns a least-privilege production credential into
|
||||
# a red scheduled janitor.
|
||||
# AWS credentials: the confirmed Gitea secrets are AWS_ACCESS_KEY_ID /
|
||||
# AWS_SECRET_ACCESS_KEY (the molecule-cp IAM user). These are the same
|
||||
# credentials used by the rest of the platform. The dedicated
|
||||
# AWS_JANITOR_* naming (which the original GitHub workflow used) was
|
||||
# never populated in Gitea — the existing secrets are AWS_ACCESS_KEY_ID /
|
||||
# AWS_SECRET_ACCESS_KEY (per issue #425 §425 audit). These DO have
|
||||
# secretsmanager:ListSecrets (the production molecule-cp principal);
|
||||
# if ListSecrets is revoked in future, a dedicated janitor principal
|
||||
# would need to be created and the Gitea secret names updated here.
|
||||
#
|
||||
# Safety: the script's MAX_DELETE_PCT gate (default 50%, mirroring
|
||||
# sweep-cf-orphans.yml — tenant secrets are durable by design, unlike
|
||||
# the mostly-orphan tunnels) refuses to nuke past the threshold.
|
||||
|
||||
on:
|
||||
# Disabled as an hourly schedule until the dedicated
|
||||
# AWS_SECRETS_JANITOR_* key exists in the key-management SSOT and is
|
||||
# mirrored into Gitea. Falling back to the molecule-cp app principal is
|
||||
# intentionally not allowed: it lacks account-wide ListSecrets, and
|
||||
# granting that to an application credential would weaken least privilege.
|
||||
#
|
||||
# Keep the manual trigger so operators can validate the workflow immediately
|
||||
# after provisioning the janitor key, then restore the hourly :30 schedule.
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# Hourly at :30 — offsets from sweep-cf-orphans (:15) and
|
||||
# sweep-cf-tunnels (:45) so the three janitors don't burst the
|
||||
# CP admin endpoints at the same minute.
|
||||
- cron: '30 * * * *'
|
||||
# Don't let two sweeps race the same AWS account.
|
||||
concurrency:
|
||||
group: sweep-aws-secrets
|
||||
@ -65,7 +65,6 @@ jobs:
|
||||
name: Sweep AWS Secrets Manager
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
# 30 min cap, mirroring the other janitors. AWS DeleteSecret is
|
||||
# fast (~0.3s/call) so even a 100+ backlog drains in seconds
|
||||
@ -74,8 +73,8 @@ jobs:
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
AWS_REGION: ${{ secrets.AWS_REGION || 'us-east-1' }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_SECRETS_JANITOR_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRETS_JANITOR_SECRET_ACCESS_KEY }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
CP_ADMIN_API_TOKEN: ${{ secrets.CP_ADMIN_API_TOKEN }}
|
||||
CP_STAGING_ADMIN_API_TOKEN: ${{ secrets.CP_STAGING_ADMIN_API_TOKEN }}
|
||||
MAX_DELETE_PCT: ${{ github.event.inputs.max_delete_pct || '50' }}
|
||||
|
||||
@ -71,7 +71,6 @@ jobs:
|
||||
name: Sweep CF orphans
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
# 3 min surfaces hangs (CF API stall, AWS describe-instances stuck)
|
||||
# within one cron interval instead of burning a full tick. Realistic
|
||||
|
||||
@ -55,7 +55,6 @@ jobs:
|
||||
name: Sweep CF tunnels
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
# 30 min cap. Was 5 min on the theory that the only thing that
|
||||
# could take >5min is a CF-API hang — but on 2026-05-02 a backlog
|
||||
|
||||
@ -11,9 +11,8 @@ name: Ops Scripts Tests
|
||||
# - `continue-on-error: true` on the job (RFC §1 contract).
|
||||
#
|
||||
# Runs the unittest suite for scripts/ on every PR + push that touches
|
||||
# anything under scripts/ or .gitea/scripts/. Kept separate from the main CI
|
||||
# so a script-only change doesn't trigger the heavier Go/Canvas/Python
|
||||
# pipelines.
|
||||
# anything under scripts/. Kept separate from the main CI so a script-only
|
||||
# change doesn't trigger the heavier Go/Canvas/Python pipelines.
|
||||
#
|
||||
# Discovery layout: tests sit alongside the code they test (see
|
||||
# scripts/ops/test_sweep_cf_decide.py for the pattern; scripts/
|
||||
@ -28,13 +27,11 @@ on:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- 'scripts/**'
|
||||
- '.gitea/scripts/**'
|
||||
- '.gitea/workflows/test-ops-scripts.yml'
|
||||
pull_request:
|
||||
branches: [main, staging]
|
||||
paths:
|
||||
- 'scripts/**'
|
||||
- '.gitea/scripts/**'
|
||||
- '.gitea/workflows/test-ops-scripts.yml'
|
||||
|
||||
env:
|
||||
@ -49,15 +46,12 @@ jobs:
|
||||
name: Ops scripts (unittest)
|
||||
runs-on: ubuntu-latest
|
||||
# Phase 3 (RFC #219 §1): surface broken workflows without blocking.
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install .gitea script test dependencies
|
||||
run: python -m pip install --quiet 'pytest==9.0.2' 'PyYAML==6.0.2'
|
||||
- name: Run scripts/ unittests (build_runtime_package, ...)
|
||||
# Top-level scripts/ tests live alongside their target file
|
||||
# (e.g. scripts/test_build_runtime_package.py exercises
|
||||
@ -69,5 +63,3 @@ jobs:
|
||||
- name: Run scripts/ops/ unittests (sweep_cf_decide, ...)
|
||||
working-directory: scripts/ops
|
||||
run: python -m unittest discover -p 'test_*.py' -v
|
||||
- name: Run .gitea/scripts pytest suite
|
||||
run: python -m pytest .gitea/scripts/tests -q
|
||||
|
||||
@ -1,121 +0,0 @@
|
||||
name: Weekly Platform-Go Surface
|
||||
|
||||
# Surface latent vet/test errors on main by running the full Platform-Go
|
||||
# suite on a weekly cron regardless of whether the last push touched
|
||||
# workspace-server/.
|
||||
#
|
||||
# Background: ci.yml's `platform-build` job gates real work on
|
||||
# `if: needs.changes.outputs.platform == 'true'`. When no push touches
|
||||
# workspace-server/, the skip fires and the suite never executes on main.
|
||||
# Latent vet errors and test flakes can sit for weeks undetected.
|
||||
#
|
||||
# This workflow runs the full suite (build, vet, golangci-lint, tests with
|
||||
# coverage) every Monday at 04:17 UTC. Results are posted as commit statuses
|
||||
# but continue-on-error: true means they never block anything — they're
|
||||
# purely a noise-reduction signal for when the next workspace-server push
|
||||
# lands and would otherwise trigger the first real suite run.
|
||||
#
|
||||
# Why 04:17 UTC on Monday: off-peak, before the weekly sprint cycle starts.
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '17 4 * * 1' # Mondays at 04:17 UTC
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
statuses: write
|
||||
|
||||
jobs:
|
||||
weekly-platform-go:
|
||||
name: Weekly Platform-Go Surface
|
||||
runs-on: ubuntu-latest
|
||||
# continue-on-error: surface only, never block
|
||||
# mc#774: pre-existing continue-on-error mask; root-fix and remove, do not renew silently.
|
||||
continue-on-error: true
|
||||
defaults:
|
||||
run:
|
||||
working-directory: workspace-server
|
||||
steps:
|
||||
- name: Checkout main
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: main
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@40f1582b2485089dde7abd97c1529aa768e1baff # v5
|
||||
with:
|
||||
go-version: stable
|
||||
|
||||
- name: Go mod download
|
||||
run: go mod download
|
||||
|
||||
- name: Build
|
||||
run: go build ./cmd/server
|
||||
|
||||
# `go vet` is NOT `|| true`-guarded: surfacing latent vet errors on main is
|
||||
# the whole point of this workflow (issue #567 — the motivating case was a
|
||||
# `go vet` error in org_external.go that sat undetected on main for weeks).
|
||||
# A vet error here fails the step → fails the job → shows red on the weekly
|
||||
# commit. Per Gitea quirk #10 (job-level continue-on-error is ignored), that
|
||||
# red surfaces on main — which is the intended signal, not a regression.
|
||||
- name: go vet
|
||||
run: go vet ./...
|
||||
|
||||
# golangci-lint stays `|| true`-guarded: lint is noisier (more false-
|
||||
# positives than vet) and golangci-lint may not be pre-installed on every
|
||||
# runner image — a `|| true` here keeps a missing-binary or lint-noise case
|
||||
# from masking the vet/test signal above. Tighten to match ci.yml's lint
|
||||
# gate if/when ci.yml's lint step becomes hard-failing.
|
||||
- name: golangci-lint
|
||||
run: golangci-lint run --timeout 3m ./... || true
|
||||
|
||||
- name: Tests with race detection + coverage
|
||||
run: go test -race -coverprofile=coverage.out ./...
|
||||
|
||||
- name: Check coverage thresholds
|
||||
run: |
|
||||
set -e
|
||||
TOTAL_FLOOR=25
|
||||
CRITICAL_PATHS=(
|
||||
"internal/handlers/tokens"
|
||||
"internal/handlers/workspace_provision"
|
||||
"internal/handlers/a2a_proxy"
|
||||
"internal/handlers/registry"
|
||||
"internal/handlers/secrets"
|
||||
"internal/middleware/wsauth"
|
||||
"internal/crypto"
|
||||
)
|
||||
|
||||
TOTAL=$(go tool cover -func=coverage.out | grep '^total:' | awk '{print $3}' | sed 's/%//')
|
||||
echo "Total coverage: ${TOTAL}%"
|
||||
if awk "BEGIN{exit !(\$TOTAL < \$TOTAL_FLOOR)}"; then
|
||||
echo "::error::Total coverage \${TOTAL}% is below the \${TOTAL_FLOOR}% floor."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ALLOWLIST=""
|
||||
if [ -f ../.coverage-allowlist.txt ]; then
|
||||
ALLOWLIST=$(grep -vE '^(#|[[:space:]]*$)' ../.coverage-allowlist.txt || true)
|
||||
fi
|
||||
|
||||
FAILED=0
|
||||
for path in "\${CRITICAL_PATHS[@]}"; do
|
||||
while read -r file pct; do
|
||||
[[ "$file" == *_test.go ]] && continue
|
||||
[[ "$file" == *"$path"* ]] || continue
|
||||
awk "BEGIN{exit !(\$pct < 10)}" || continue
|
||||
rel=$(echo "$file" | sed 's|^github.com/molecule-ai/molecule-monorepo/platform/workspace-server/||; s|^github.com/molecule-ai/molecule-monorepo/platform/||')
|
||||
if echo "$ALLOWLIST" | grep -qxF "$rel"; then
|
||||
continue
|
||||
fi
|
||||
echo "::error::Low coverage \${pct}% on \${rel} (below 10% in critical path \${path})"
|
||||
FAILED=$((FAILED + 1))
|
||||
done < <(go tool cover -func=coverage.out | grep -v '^total:' | awk '{file=$1; sub(/:[0-9][0-9.]*:.*/, "", file); pct=$NF; gsub(/%/,"",pct); s[file]+=pct; c[file]++} END {for (f in s) printf "%s %.1f\n", f, s[f]/c[f]}' | sort)
|
||||
done
|
||||
if [ "$FAILED" -gt 0 ]; then
|
||||
echo "::error::\${FAILED} critical paths below 10% coverage — see above."
|
||||
exit 1
|
||||
fi
|
||||
echo "Coverage thresholds: OK"
|
||||
1
.github/workflows/e2e-staging-canvas.yml
vendored
1
.github/workflows/e2e-staging-canvas.yml
vendored
@ -131,7 +131,6 @@ jobs:
|
||||
|
||||
- name: Install Playwright browsers
|
||||
if: needs.detect-changes.outputs.canvas == 'true'
|
||||
timeout-minutes: 10
|
||||
run: npx playwright install --with-deps chromium
|
||||
|
||||
- name: Run staging canvas E2E
|
||||
|
||||
@ -156,16 +156,6 @@ and run CI manually.
|
||||
| python-lint | pytest with coverage |
|
||||
| e2e-api | Full API test suite (62 tests) |
|
||||
| shellcheck | Shell script linting |
|
||||
| review-check-tests | `review-check.sh` evaluator regression suite (13 scenarios) |
|
||||
| ops-scripts | Python unittest suite for `scripts/*.py` |
|
||||
|
||||
## Local Testing
|
||||
|
||||
### review-check.sh
|
||||
```bash
|
||||
bash .gitea/scripts/tests/test_review_check.sh
|
||||
```
|
||||
Runs the full regression suite against a fixture HTTP server. No network access required.
|
||||
|
||||
## Code Style
|
||||
|
||||
|
||||
@ -327,7 +327,7 @@ function OrgCTA({ org }: { org: Org }) {
|
||||
return (
|
||||
<a
|
||||
href={href}
|
||||
className="rounded bg-emerald-700 px-4 py-2 text-sm font-medium text-white hover:bg-emerald-600"
|
||||
className="rounded bg-emerald-600 px-4 py-2 text-sm font-medium text-white hover:bg-emerald-500"
|
||||
>
|
||||
Open
|
||||
</a>
|
||||
@ -337,7 +337,7 @@ function OrgCTA({ org }: { org: Org }) {
|
||||
return (
|
||||
<a
|
||||
href={`/pricing?org=${encodeURIComponent(org.slug)}`}
|
||||
className="rounded bg-amber-800 px-4 py-2 text-sm font-medium text-white hover:bg-amber-700"
|
||||
className="rounded bg-amber-600 px-4 py-2 text-sm font-medium text-white hover:bg-amber-500"
|
||||
>
|
||||
Complete payment
|
||||
</a>
|
||||
|
||||
@ -16,8 +16,6 @@ interface PendingApproval {
|
||||
|
||||
export function ApprovalBanner() {
|
||||
const [approvals, setApprovals] = useState<PendingApproval[]>([]);
|
||||
// Guards double-click / double-keypress during in-flight POST.
|
||||
const [pendingApprovalId, setPendingApprovalId] = useState<string | null>(null);
|
||||
|
||||
// Single endpoint — no N+1 per-workspace polling
|
||||
const pollApprovals = useCallback(async () => {
|
||||
@ -37,8 +35,6 @@ export function ApprovalBanner() {
|
||||
}, [pollApprovals]);
|
||||
|
||||
const handleDecide = async (approval: PendingApproval, decision: "approved" | "denied") => {
|
||||
if (pendingApprovalId !== null) return; // guard double-submit
|
||||
setPendingApprovalId(approval.id);
|
||||
try {
|
||||
await api.post(`/workspaces/${approval.workspace_id}/approvals/${approval.id}/decide`, {
|
||||
decision,
|
||||
@ -48,8 +44,6 @@ export function ApprovalBanner() {
|
||||
setApprovals((prev) => prev.filter((a) => a.id !== approval.id));
|
||||
} catch {
|
||||
showToast("Failed to submit decision", "error");
|
||||
} finally {
|
||||
setPendingApprovalId(null);
|
||||
}
|
||||
};
|
||||
|
||||
@ -78,25 +72,22 @@ export function ApprovalBanner() {
|
||||
<div className="flex gap-2 mt-3">
|
||||
<button
|
||||
type="button"
|
||||
disabled={pendingApprovalId !== null}
|
||||
onClick={() => handleDecide(approval, "approved")}
|
||||
aria-disabled={pendingApprovalId !== null}
|
||||
// Hover goes DARKER — emerald-600 on white text is 3.3:1 (WCAG AA FAIL).
|
||||
// emerald-700 is 4.6:1 (WCAG AA PASS). Hover darkens to emerald-600.
|
||||
className="px-3 py-1.5 bg-emerald-700 hover:bg-emerald-600 disabled:opacity-40 disabled:cursor-not-allowed text-xs rounded-lg text-white font-medium transition-colors focus:outline-none focus-visible:ring-2 focus-visible:ring-offset-2 focus-visible:ring-offset-amber-950 focus-visible:ring-emerald-400/70"
|
||||
// Hover DARKER not lighter — emerald-500 on white text
|
||||
// drops contrast vs emerald-700.
|
||||
className="px-3 py-1.5 bg-emerald-600 hover:bg-emerald-700 text-xs rounded-lg text-white font-medium transition-colors focus:outline-none focus-visible:ring-2 focus-visible:ring-offset-2 focus-visible:ring-offset-amber-950 focus-visible:ring-emerald-400/70"
|
||||
>
|
||||
{pendingApprovalId === approval.id ? "…" : "Approve"}
|
||||
Approve
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
disabled={pendingApprovalId !== null}
|
||||
onClick={() => handleDecide(approval, "denied")}
|
||||
aria-disabled={pendingApprovalId !== null}
|
||||
// `text-ink` (not text-ink-mid) for WCAG AA contrast on bg-surface-card.
|
||||
// text-ink-mid on zinc-800 fails AA at ~3:1; text-ink passes at ~7:1.
|
||||
className="px-3 py-1.5 bg-surface-card hover:bg-surface-elevated hover:text-ink text-ink disabled:opacity-40 disabled:cursor-not-allowed text-xs rounded-lg font-medium transition-colors focus:outline-none focus-visible:ring-2 focus-visible:ring-offset-2 focus-visible:ring-offset-amber-950 focus-visible:ring-amber-400/70"
|
||||
// Was a no-op hover (`bg-surface-card hover:bg-surface-card`).
|
||||
// Lift to surface-elevated on hover so the button visibly
|
||||
// responds before a destructive deny.
|
||||
className="px-3 py-1.5 bg-surface-card hover:bg-surface-elevated hover:text-ink text-xs rounded-lg text-ink-mid transition-colors focus:outline-none focus-visible:ring-2 focus-visible:ring-offset-2 focus-visible:ring-offset-amber-950 focus-visible:ring-amber-400/70"
|
||||
>
|
||||
{pendingApprovalId === approval.id ? "…" : "Deny"}
|
||||
Deny
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@ -8,17 +8,11 @@ import type { AuditEntry, AuditResponse } from "@/types/audit";
|
||||
|
||||
type EventFilter = "all" | AuditEntry["event_type"];
|
||||
|
||||
// Contrast note: text is rendered on near-black bg (bg-*-950/40). Every text
|
||||
// color below is chosen to pass WCAG 2.1 AA 4.5:1 on that background:
|
||||
// blue-300 ( delegation ) ≈ 8.8:1
|
||||
// violet-300 ( decision ) ≈ 9.5:1
|
||||
// yellow-200 ( gate ) ≈ 11.5:1
|
||||
// orange-300 ( hitl ) ≈ 9.1:1
|
||||
const BADGE_COLORS: Record<AuditEntry["event_type"], { text: string; bg: string; border: string }> = {
|
||||
delegation: { text: "text-blue-300", bg: "bg-blue-950/40", border: "border-blue-800/40" },
|
||||
decision: { text: "text-violet-300", bg: "bg-violet-950/40", border: "border-violet-800/40" },
|
||||
gate: { text: "text-yellow-200", bg: "bg-yellow-950/40", border: "border-yellow-800/40" },
|
||||
hitl: { text: "text-orange-300", bg: "bg-orange-950/40", border: "border-orange-800/40" },
|
||||
delegation: { text: "text-accent", bg: "bg-blue-950/40", border: "border-blue-800/40" },
|
||||
decision: { text: "text-violet-400", bg: "bg-violet-950/40", border: "border-violet-800/40" },
|
||||
gate: { text: "text-yellow-400", bg: "bg-yellow-950/40", border: "border-yellow-800/40" },
|
||||
hitl: { text: "text-orange-400", bg: "bg-orange-950/40", border: "border-orange-800/40" },
|
||||
};
|
||||
|
||||
const FILTERS: { id: EventFilter; label: string }[] = [
|
||||
@ -170,10 +164,7 @@ export function AuditTrailPanel({ workspaceId }: Props) {
|
||||
|
||||
{/* Error banner */}
|
||||
{error && (
|
||||
<div
|
||||
role="alert"
|
||||
className="mx-4 mt-3 px-3 py-2 bg-red-950/30 border border-red-800/40 rounded text-xs text-bad shrink-0"
|
||||
>
|
||||
<div className="mx-4 mt-3 px-3 py-2 bg-red-950/30 border border-red-800/40 rounded text-xs text-bad shrink-0">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
@ -251,6 +242,7 @@ export function AuditEntryRow({ entry, now }: AuditEntryRowProps) {
|
||||
{/* Event-type badge */}
|
||||
<span
|
||||
className={`shrink-0 text-[9px] font-semibold uppercase tracking-wider px-1.5 py-0.5 rounded border ${badge.text} ${badge.bg} ${badge.border}`}
|
||||
aria-label={`Event type: ${entry.event_type}`}
|
||||
>
|
||||
{entry.event_type}
|
||||
</span>
|
||||
|
||||
@ -100,8 +100,8 @@ export function BatchActionBar() {
|
||||
aria-label="Batch workspace actions"
|
||||
className="fixed bottom-6 left-1/2 -translate-x-1/2 z-[200] flex items-center gap-3 px-4 py-2.5 rounded-2xl bg-surface-sunken/95 border border-line/70 shadow-2xl shadow-black/50 backdrop-blur-md"
|
||||
>
|
||||
{/* Selection count badge — bg-zinc-700 passes 7.2:1 on white text */}
|
||||
<span className="text-[12px] font-semibold text-white bg-zinc-700 px-2.5 py-0.5 rounded-full tabular-nums">
|
||||
{/* Selection count badge */}
|
||||
<span className="text-[12px] font-semibold text-white bg-accent-strong/80 px-2.5 py-0.5 rounded-full tabular-nums">
|
||||
{count} selected
|
||||
</span>
|
||||
|
||||
@ -112,7 +112,7 @@ export function BatchActionBar() {
|
||||
type="button"
|
||||
disabled={busy}
|
||||
onClick={() => setPending("restart")}
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-[12px] font-medium text-white bg-sky-900/30 hover:bg-sky-800/50 border border-sky-700/30 hover:border-sky-600/50 transition-colors disabled:opacity-50 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-sky-500/70"
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-[12px] font-medium text-sky-300 bg-sky-900/30 hover:bg-sky-800/50 border border-sky-700/30 hover:border-sky-600/50 transition-colors disabled:opacity-50 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-sky-500/70"
|
||||
>
|
||||
<span aria-hidden="true">↻</span>
|
||||
Restart All
|
||||
@ -122,7 +122,7 @@ export function BatchActionBar() {
|
||||
type="button"
|
||||
disabled={busy}
|
||||
onClick={() => setPending("pause")}
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-[12px] font-medium text-white bg-amber-900/30 hover:bg-amber-800/50 border border-amber-700/30 hover:border-amber-600/50 transition-colors disabled:opacity-50 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-amber-500/70"
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-[12px] font-medium text-warm bg-amber-900/30 hover:bg-amber-800/50 border border-amber-700/30 hover:border-amber-600/50 transition-colors disabled:opacity-50 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-amber-500/70"
|
||||
>
|
||||
<span aria-hidden="true">⏸</span>
|
||||
Pause All
|
||||
@ -132,7 +132,7 @@ export function BatchActionBar() {
|
||||
type="button"
|
||||
disabled={busy}
|
||||
onClick={() => setPending("delete")}
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-[12px] font-medium text-white bg-red-900/30 hover:bg-red-800/50 border border-red-700/30 hover:border-red-600/50 transition-colors disabled:opacity-50 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-red-500/70"
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-[12px] font-medium text-bad bg-red-900/30 hover:bg-red-800/50 border border-red-700/30 hover:border-red-600/50 transition-colors disabled:opacity-50 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-red-500/70"
|
||||
>
|
||||
<span aria-hidden="true">✕</span>
|
||||
Delete All
|
||||
|
||||
@ -96,9 +96,9 @@ export function ConfirmDialog({
|
||||
// readable in both light and dark themes.
|
||||
const confirmColors =
|
||||
confirmVariant === "danger"
|
||||
? "bg-red-700 hover:bg-red-600 text-white"
|
||||
? "bg-red-600 hover:bg-red-700 text-white"
|
||||
: confirmVariant === "warning"
|
||||
? "bg-amber-800 hover:bg-amber-700 text-white"
|
||||
? "bg-amber-600 hover:bg-amber-700 text-white"
|
||||
: "bg-accent hover:bg-accent-strong text-white";
|
||||
|
||||
// Render via Portal so the fixed-position dialog escapes any containing block
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { useCanvasStore, type WorkspaceNodeData } from "@/store/canvas";
|
||||
import { api } from "@/lib/api";
|
||||
import { showToast } from "./Toaster";
|
||||
@ -23,17 +23,9 @@ export function ContextMenu() {
|
||||
const setPanelTab = useCanvasStore((s) => s.setPanelTab);
|
||||
const nestNode = useCanvasStore((s) => s.nestNode);
|
||||
const contextNodeId = contextMenu?.nodeId ?? null;
|
||||
// Select the full nodes array (stable reference across unrelated store
|
||||
// updates) and derive children via useMemo. Filtering inside the
|
||||
// selector returned a new array every call, which Zustand's
|
||||
// useSyncExternalStore saw as "snapshot changed" → schedule
|
||||
// re-render → loop → React error #185. See canvas-store-snapshots.
|
||||
const nodes = useCanvasStore((s) => s.nodes);
|
||||
const children = useMemo(
|
||||
() => (contextNodeId ? nodes.filter((n) => n.data.parentId === contextNodeId) : []),
|
||||
[nodes, contextNodeId],
|
||||
const hasChildren = useCanvasStore((s) =>
|
||||
contextNodeId ? s.nodes.some((n) => n.data.parentId === contextNodeId) : false
|
||||
);
|
||||
const hasChildren = children.length > 0;
|
||||
const setPendingDelete = useCanvasStore((s) => s.setPendingDelete);
|
||||
const ref = useRef<HTMLDivElement>(null);
|
||||
const [actionLoading, setActionLoading] = useState(false);
|
||||
@ -197,9 +189,10 @@ export function ContextMenu() {
|
||||
// it survives ContextMenu unmount. Closing the menu here avoids the
|
||||
// prior race where the portal dialog's Confirm click was treated as
|
||||
// "outside" by the menu's outside-click handler.
|
||||
setPendingDelete({ id: contextMenu.nodeId, name: contextMenu.nodeData.name, hasChildren, children: children.map(c => ({ id: c.id, name: c.data.name })) });
|
||||
const childNodes = useCanvasStore.getState().nodes.filter((n) => n.data.parentId === contextMenu.nodeId);
|
||||
setPendingDelete({ id: contextMenu.nodeId, name: contextMenu.nodeData.name, hasChildren, children: childNodes.map(c => ({ id: c.id, name: c.data.name })) });
|
||||
closeContextMenu();
|
||||
}, [contextMenu, setPendingDelete, closeContextMenu, children, hasChildren]);
|
||||
}, [contextMenu, setPendingDelete, closeContextMenu]);
|
||||
|
||||
const handleViewDetails = useCallback(() => {
|
||||
if (!contextMenu) return;
|
||||
@ -318,7 +311,7 @@ export function ContextMenu() {
|
||||
aria-hidden="true"
|
||||
className={`w-1.5 h-1.5 rounded-full ${statusDotClass(contextMenu.nodeData.status)}`}
|
||||
/>
|
||||
<span className="text-[10px] text-ink">{contextMenu.nodeData.status}</span>
|
||||
<span className="text-[10px] text-ink-mid">{contextMenu.nodeData.status}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@ -31,25 +31,17 @@ export function extractMessageText(body: Record<string, unknown> | null): string
|
||||
if (text) return text;
|
||||
|
||||
// Response: result.parts[].text or result.parts[].root.text
|
||||
// Use the first part that has a direct text field; within that part,
|
||||
// prefer direct text over root.text. Subsequent parts' root.text fields
|
||||
// are ignored when a direct text exists in an earlier part.
|
||||
const result = body.result as Record<string, unknown> | undefined;
|
||||
const rParts = (result?.parts || []) as Array<Record<string, unknown>>;
|
||||
const firstPartWithText = rParts.find(
|
||||
(p) => typeof p.text === "string" && (p.text as string) !== ""
|
||||
);
|
||||
if (firstPartWithText) {
|
||||
return firstPartWithText.text as string;
|
||||
}
|
||||
// No direct text found; use root.text from the first part (if present).
|
||||
const firstPart = rParts[0];
|
||||
if (firstPart) {
|
||||
const root = firstPart.root as Record<string, unknown> | undefined;
|
||||
if (typeof root?.text === "string" && root.text !== "") {
|
||||
return root.text as string;
|
||||
}
|
||||
}
|
||||
const rText = rParts
|
||||
.map((p) => {
|
||||
if (p.text) return p.text as string;
|
||||
const root = p.root as Record<string, unknown> | undefined;
|
||||
return (root?.text as string) || "";
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join("\n");
|
||||
if (rText) return rText;
|
||||
|
||||
if (typeof body.result === "string") return body.result;
|
||||
} catch { /* ignore */ }
|
||||
@ -187,7 +179,7 @@ export function ConversationTraceModal({ open, workspaceId: _workspaceId, onClos
|
||||
isError
|
||||
? "bg-red-950/50 text-bad"
|
||||
: isSend
|
||||
? "bg-cyan-950 text-cyan-300"
|
||||
? "bg-cyan-950/50 text-cyan-400"
|
||||
: isReceive
|
||||
? "bg-blue-950/50 text-accent"
|
||||
: "bg-surface-card text-ink-mid"
|
||||
@ -251,7 +243,7 @@ export function ConversationTraceModal({ open, workspaceId: _workspaceId, onClos
|
||||
|
||||
{/* Error */}
|
||||
{isError && entry.error_detail && (
|
||||
<div className="text-[10px] text-bad mt-1 truncate">
|
||||
<div className="text-[10px] text-bad/80 mt-1 truncate">
|
||||
{entry.error_detail.slice(0, 200)}
|
||||
</div>
|
||||
)}
|
||||
@ -272,7 +264,7 @@ export function ConversationTraceModal({ open, workspaceId: _workspaceId, onClos
|
||||
)}
|
||||
{responseText && (
|
||||
<div className="mt-1 bg-surface/60 border border-emerald-900/30 rounded-lg px-3 py-2 max-h-32 overflow-y-auto">
|
||||
<div className="text-[8px] text-good uppercase mb-1">Response</div>
|
||||
<div className="text-[8px] text-good/60 uppercase mb-1">Response</div>
|
||||
<div className="text-[10px] text-ink-mid whitespace-pre-wrap break-words leading-relaxed">
|
||||
{responseText.slice(0, 2000)}
|
||||
{responseText.length > 2000 && (
|
||||
|
||||
@ -80,7 +80,6 @@ export function CreateWorkspaceButton() {
|
||||
// isExternal is true the template / model / hermes-provider fields are
|
||||
// hidden (they're meaningless for BYO-compute agents).
|
||||
const [isExternal, setIsExternal] = useState(false);
|
||||
const [externalRuntime, setExternalRuntime] = useState("external");
|
||||
const [externalConnection, setExternalConnection] =
|
||||
useState<ExternalConnectionInfo | null>(null);
|
||||
|
||||
@ -224,7 +223,6 @@ export function CreateWorkspaceButton() {
|
||||
setBudgetLimit("");
|
||||
setError(null);
|
||||
setHermesProvider("anthropic");
|
||||
setExternalRuntime("external");
|
||||
setHermesApiKey("");
|
||||
setHermesModel("");
|
||||
api
|
||||
@ -284,7 +282,7 @@ export function CreateWorkspaceButton() {
|
||||
// Runtime=external flips the backend into awaiting-agent mode:
|
||||
// no container provisioning, token minted, connection payload
|
||||
// returned in the response for the modal below.
|
||||
...(isExternal ? { runtime: externalRuntime } : {}),
|
||||
...(isExternal ? { runtime: "external" } : {}),
|
||||
...(!isExternal && isHermes && provider
|
||||
? {
|
||||
secrets: { [provider.envVar]: hermesApiKey.trim() },
|
||||
@ -384,23 +382,6 @@ export function CreateWorkspaceButton() {
|
||||
</div>
|
||||
</label>
|
||||
|
||||
{isExternal && (
|
||||
<div>
|
||||
<label className="text-[11px] text-ink-mid block mb-1">
|
||||
External Runtime
|
||||
</label>
|
||||
<select
|
||||
value={externalRuntime}
|
||||
onChange={(e) => setExternalRuntime(e.target.value)}
|
||||
className="w-full bg-surface-card/60 border border-line/50 rounded-lg px-3 py-2 text-sm text-ink focus:outline-none focus:border-accent/60 focus:ring-1 focus:ring-accent/20 transition-colors"
|
||||
>
|
||||
<option value="external">Generic External</option>
|
||||
<option value="kimi">Kimi CLI</option>
|
||||
<option value="kimi-cli">Kimi CLI (alt)</option>
|
||||
</select>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!isExternal && (
|
||||
<InputField
|
||||
label="Template"
|
||||
|
||||
@ -126,8 +126,8 @@ export function DeleteCascadeConfirmDialog({
|
||||
|
||||
{/* Cascade warning */}
|
||||
<div className="rounded border border-red-900/40 bg-red-950/20 px-3 py-2.5 mb-4">
|
||||
<p className="text-[12px] text-red-300 leading-relaxed">
|
||||
Deleting will cascade — <strong className="text-red-100">all child workspaces and their data will be permanently removed.</strong> This cannot be undone.
|
||||
<p className="text-[12px] text-bad/80 leading-relaxed">
|
||||
Deleting will cascade — <strong className="text-red-200">all child workspaces and their data will be permanently removed.</strong> This cannot be undone.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@ -164,13 +164,13 @@ export function DeleteCascadeConfirmDialog({
|
||||
type="button"
|
||||
onClick={onConfirm}
|
||||
disabled={!checked}
|
||||
// Hover goes DARKER, not lighter — bg-red-600 on white text
|
||||
// drops contrast below AA. Same trap fixed in ConfirmDialog.
|
||||
// focus-visible ring matches the canvas chrome.
|
||||
// Hover goes DARKER, not lighter — bg-red-500 on white text
|
||||
// drops contrast below AA vs bg-red-700. Same trap fixed in
|
||||
// ConfirmDialog and ApprovalBanner. focus-visible ring matches.
|
||||
className={`px-3.5 py-1.5 text-[13px] rounded-lg transition-colors focus:outline-none focus-visible:ring-2 focus-visible:ring-red-500/60 focus-visible:ring-offset-2 focus-visible:ring-offset-surface-sunken
|
||||
${checked
|
||||
? "bg-red-700 hover:bg-red-600 text-white cursor-pointer"
|
||||
: "bg-red-900/30 text-red-400 cursor-not-allowed"
|
||||
? "bg-red-600 hover:bg-red-700 text-white cursor-pointer"
|
||||
: "bg-red-900/30 text-bad/40 cursor-not-allowed"
|
||||
}`}
|
||||
>
|
||||
Delete All
|
||||
|
||||
@ -51,7 +51,7 @@ export class ErrorBoundary extends React.Component<
|
||||
render() {
|
||||
if (this.state.hasError) {
|
||||
return (
|
||||
<div role="alert" aria-live="assertive" className="fixed inset-0 flex items-center justify-center bg-surface z-50">
|
||||
<div className="fixed inset-0 flex items-center justify-center bg-surface z-50">
|
||||
<div className="max-w-md rounded-2xl border border-red-500/30 bg-surface-sunken/90 px-8 py-8 text-center shadow-2xl shadow-black/40">
|
||||
<div className="mx-auto mb-4 flex h-14 w-14 items-center justify-center rounded-full bg-red-500/10 border border-red-500/30">
|
||||
<svg
|
||||
@ -76,7 +76,7 @@ export class ErrorBoundary extends React.Component<
|
||||
<p className="text-sm text-ink-mid mb-1">
|
||||
An unexpected error occurred while rendering the application.
|
||||
</p>
|
||||
<p className="text-xs text-bad mb-6 font-mono break-all">
|
||||
<p className="text-xs text-bad/80 mb-6 font-mono break-all">
|
||||
{this.state.error?.message ?? "Unknown error"}
|
||||
</p>
|
||||
<div className="flex items-center justify-center gap-3">
|
||||
|
||||
@ -18,7 +18,7 @@
|
||||
import { useCallback, useState } from "react";
|
||||
import * as Dialog from "@radix-ui/react-dialog";
|
||||
|
||||
type Tab = "python" | "curl" | "claude" | "mcp" | "hermes" | "codex" | "openclaw" | "kimi" | "fields";
|
||||
type Tab = "python" | "curl" | "claude" | "mcp" | "hermes" | "codex" | "openclaw" | "fields";
|
||||
|
||||
export interface ExternalConnectionInfo {
|
||||
workspace_id: string;
|
||||
@ -58,10 +58,6 @@ export interface ExternalConnectionInfo {
|
||||
// openclaw gateway on loopback. Outbound-tools-only today; push
|
||||
// parity on an external openclaw needs a sessions.steer bridge.
|
||||
openclaw_snippet?: string;
|
||||
// Kimi CLI setup snippet — self-contained Python heartbeat script
|
||||
// that keeps a Kimi workspace online in poll mode. Optional for
|
||||
// backward compat with platforms that haven't shipped the Kimi tab.
|
||||
kimi_snippet?: string;
|
||||
}
|
||||
|
||||
interface Props {
|
||||
@ -154,11 +150,6 @@ export function ExternalConnectModal({ info, onClose }: Props) {
|
||||
'WORKSPACE_TOKEN="<paste from create response>"',
|
||||
`WORKSPACE_TOKEN="${info.auth_token}"`,
|
||||
);
|
||||
// Kimi snippet carries the placeholder inside the shell heredoc.
|
||||
const filledKimi = info.kimi_snippet?.replace(
|
||||
'MOLECULE_WORKSPACE_TOKEN=<paste from create response>',
|
||||
`MOLECULE_WORKSPACE_TOKEN=${info.auth_token}`,
|
||||
);
|
||||
|
||||
return (
|
||||
<Dialog.Root open onOpenChange={(o) => !o && onClose()}>
|
||||
@ -198,7 +189,6 @@ export function ExternalConnectModal({ info, onClose }: Props) {
|
||||
if (filledHermes) tabs.push("hermes");
|
||||
if (filledCodex) tabs.push("codex");
|
||||
if (filledOpenClaw) tabs.push("openclaw");
|
||||
if (filledKimi) tabs.push("kimi");
|
||||
tabs.push("curl", "fields");
|
||||
return tabs;
|
||||
})().map((t) => (
|
||||
@ -222,8 +212,6 @@ export function ExternalConnectModal({ info, onClose }: Props) {
|
||||
? "Codex"
|
||||
: t === "openclaw"
|
||||
? "OpenClaw"
|
||||
: t === "kimi"
|
||||
? "Kimi"
|
||||
: t === "python"
|
||||
? "Python SDK"
|
||||
: t === "mcp"
|
||||
@ -300,15 +288,6 @@ export function ExternalConnectModal({ info, onClose }: Props) {
|
||||
onCopy={() => copy(filledOpenClaw, "openclaw")}
|
||||
/>
|
||||
)}
|
||||
{tab === "kimi" && filledKimi && (
|
||||
<SnippetBlock
|
||||
value={filledKimi}
|
||||
label="Kimi CLI — self-contained Python bridge. Registers, heartbeats, polls for canvas messages, and echoes replies back. NAT-safe (no public URL). Run in a background terminal or via launchd."
|
||||
copyKey="kimi"
|
||||
copied={copiedKey === "kimi"}
|
||||
onCopy={() => copy(filledKimi, "kimi")}
|
||||
/>
|
||||
)}
|
||||
{tab === "fields" && (
|
||||
<div className="space-y-2">
|
||||
<Field label="workspace_id" value={info.workspace_id} onCopy={() => copy(info.workspace_id, "wsid")} copied={copiedKey === "wsid"} />
|
||||
@ -360,7 +339,7 @@ function SnippetBlock({
|
||||
<button
|
||||
type="button"
|
||||
onClick={onCopy}
|
||||
className="text-xs px-2 py-1 rounded bg-accent text-white hover:bg-accent-strong transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-accent focus-visible:ring-offset-1"
|
||||
className="text-xs px-2 py-1 rounded bg-accent-strong/80 hover:bg-accent text-white focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-accent focus-visible:ring-offset-1"
|
||||
>
|
||||
{copied ? "Copied!" : "Copy"}
|
||||
</button>
|
||||
|
||||
@ -451,7 +451,7 @@ function ProviderPickerModal({
|
||||
<button
|
||||
onClick={() => handleSaveKey(index)}
|
||||
disabled={!entry.value.trim() || entry.saving}
|
||||
className="px-3 py-1.5 bg-accent-strong hover:bg-accent text-[11px] rounded text-white disabled:opacity-30 transition-colors shrink-0 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-accent focus-visible:ring-offset-1"
|
||||
className="px-3 py-1.5 bg-accent-strong hover:bg-accent text-[11px] rounded text-white disabled:opacity-30 transition-colors shrink-0"
|
||||
>
|
||||
{entry.saving ? "..." : "Save"}
|
||||
</button>
|
||||
@ -492,7 +492,7 @@ function ProviderPickerModal({
|
||||
!selectorValue.providerId ||
|
||||
(showModelInput && model.trim() === "")
|
||||
}
|
||||
className="px-3.5 py-1.5 text-[12px] bg-accent-strong hover:bg-accent text-white rounded-lg transition-colors disabled:opacity-40 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-accent focus-visible:ring-offset-1"
|
||||
className="px-3.5 py-1.5 text-[12px] bg-accent-strong hover:bg-accent text-white rounded-lg transition-colors disabled:opacity-40"
|
||||
>
|
||||
{allSaved ? "Deploy" : entries.length > 1 ? "Add Keys" : "Add Key"}
|
||||
</button>
|
||||
|
||||
@ -308,7 +308,7 @@ export function OrgImportPreflightModal({
|
||||
type="button"
|
||||
onClick={onProceed}
|
||||
disabled={!canProceed}
|
||||
className="px-4 py-1.5 text-[11px] font-semibold rounded bg-accent hover:bg-accent-strong text-white disabled:bg-surface-card disabled:text-ink-soft disabled:cursor-not-allowed focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-accent focus-visible:ring-offset-1"
|
||||
className="px-4 py-1.5 text-[11px] font-semibold rounded bg-accent hover:bg-accent-strong text-white disabled:bg-surface-card disabled:text-white-soft disabled:cursor-not-allowed focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-accent focus-visible:ring-offset-1"
|
||||
>
|
||||
Import
|
||||
</button>
|
||||
|
||||
@ -117,7 +117,7 @@ function PlanCard({
|
||||
<ul className="mt-6 flex-1 space-y-2 text-sm text-ink-mid">
|
||||
{plan.features.map((f) => (
|
||||
<li key={f} className="flex items-start">
|
||||
<span className="mr-2 text-accent" aria-hidden="true">
|
||||
<span className="mr-2 text-accent" aria-hidden>
|
||||
✓
|
||||
</span>
|
||||
{f}
|
||||
|
||||
@ -420,7 +420,7 @@ export function ProviderModelSelector({
|
||||
spellCheck={false}
|
||||
autoComplete="off"
|
||||
data-testid="model-input"
|
||||
className="w-full bg-surface-sunken border border-line rounded px-2 py-1.5 text-[11px] text-ink font-mono focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-accent focus-visible:ring-offset-1 focus-visible:border-accent transition-colors disabled:opacity-50"
|
||||
className="w-full bg-surface-sunken border border-line rounded px-2 py-1.5 text-[11px] text-ink font-mono focus:outline-none focus:border-accent focus:ring-1 focus:ring-accent/20 transition-colors disabled:opacity-50"
|
||||
/>
|
||||
<p className="text-[9px] text-ink-mid mt-1 leading-relaxed">
|
||||
{selected?.wildcard
|
||||
|
||||
@ -341,7 +341,7 @@ export function ProvisioningTimeout({
|
||||
type="button"
|
||||
onClick={() => handleRetry(entry.workspaceId)}
|
||||
disabled={isRetrying || isCancelling || retryCooldown.has(entry.workspaceId)}
|
||||
className="px-3 py-1.5 bg-amber-800 hover:bg-amber-700 text-[11px] font-medium rounded-lg text-white disabled:opacity-40 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-amber-400 focus-visible:ring-offset-1 focus-visible:ring-offset-amber-950"
|
||||
className="px-3 py-1.5 bg-amber-600 hover:bg-amber-500 text-[11px] font-medium rounded-lg text-white disabled:opacity-40 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-amber-400 focus-visible:ring-offset-1 focus-visible:ring-offset-amber-950"
|
||||
>
|
||||
{isRetrying ? "Retrying..." : retryCooldown.has(entry.workspaceId) ? "Wait..." : "Retry"}
|
||||
</button>
|
||||
@ -389,7 +389,7 @@ export function ProvisioningTimeout({
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleCancelConfirm}
|
||||
className="px-3.5 py-1.5 text-[12px] bg-red-800 hover:bg-red-700 text-white rounded-lg transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-red-400 focus-visible:ring-offset-1"
|
||||
className="px-3.5 py-1.5 text-[12px] bg-red-600 hover:bg-red-500 text-white rounded-lg transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-red-400 focus-visible:ring-offset-1"
|
||||
>
|
||||
Remove Workspace
|
||||
</button>
|
||||
|
||||
@ -87,21 +87,20 @@ export function TermsGate({ children }: { children: React.ReactNode }) {
|
||||
<>
|
||||
{children}
|
||||
{status === "pending" && (
|
||||
// Backdrop is purely decorative (blur overlay). Separated from the
|
||||
// dialog so aria-hidden on the backdrop does NOT hide the dialog from
|
||||
// assistive tech. Backdrop click does nothing — this is a hard gate.
|
||||
<>
|
||||
<div aria-hidden="true" className="fixed inset-0 z-50 bg-surface/80 backdrop-blur-sm" />
|
||||
// Backdrop is decorative — does NOT carry aria-hidden anymore.
|
||||
// The earlier version put aria-hidden="true" on this wrapper,
|
||||
// which hid the dialog AND its descendants from screen readers,
|
||||
// making the entire terms-acceptance flow invisible to AT users.
|
||||
// Backdrop click intentionally does nothing — this is a hard
|
||||
// gate.
|
||||
<div className="fixed inset-0 z-50 flex items-center justify-center bg-surface/80 backdrop-blur-sm">
|
||||
<div
|
||||
role="dialog"
|
||||
aria-modal="true"
|
||||
aria-labelledby="terms-dialog-title"
|
||||
aria-describedby="terms-dialog-body"
|
||||
className="fixed inset-0 z-50 flex items-center justify-center"
|
||||
className="mx-4 max-w-lg rounded-lg border border-line bg-surface-sunken p-6 shadow-xl"
|
||||
>
|
||||
<div
|
||||
className="mx-4 max-w-lg rounded-lg border border-line bg-surface-sunken p-6 shadow-xl"
|
||||
>
|
||||
<h2 id="terms-dialog-title" className="text-lg font-semibold text-ink">Terms & conditions</h2>
|
||||
<div id="terms-dialog-body">
|
||||
<p className="mt-3 text-sm text-ink-mid">
|
||||
@ -136,17 +135,16 @@ export function TermsGate({ children }: { children: React.ReactNode }) {
|
||||
ref={agreeButtonRef}
|
||||
onClick={accept}
|
||||
disabled={submitting}
|
||||
aria-disabled={submitting}
|
||||
// Hover goes DARKER — emerald-600 on white text is 3.3:1 (WCAG AA FAIL).
|
||||
// emerald-700 is 4.6:1 (WCAG AA PASS). Hover darkens to emerald-600.
|
||||
className="rounded bg-emerald-700 hover:bg-emerald-600 px-4 py-2 text-sm font-medium text-white disabled:opacity-50 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-emerald-400 focus-visible:ring-offset-2 focus-visible:ring-offset-surface-sunken"
|
||||
// Hover goes DARKER, not lighter — emerald-500 on white
|
||||
// text drops contrast below AA vs emerald-700. Same trap
|
||||
// I fixed in ApprovalBanner + ConfirmDialog.
|
||||
className="rounded bg-emerald-600 hover:bg-emerald-700 px-4 py-2 text-sm font-medium text-white disabled:opacity-50 transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-emerald-400 focus-visible:ring-offset-2 focus-visible:ring-offset-surface-sunken"
|
||||
>
|
||||
{submitting ? "…" : "I agree"}
|
||||
{submitting ? "Saving…" : "I agree"}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
</div>
|
||||
)}
|
||||
{status === "error" && (
|
||||
<div role="alert" className="fixed bottom-4 left-4 right-4 mx-auto max-w-md rounded border border-red-800 bg-red-950 p-3 text-sm text-red-200">
|
||||
|
||||
@ -61,12 +61,8 @@ export function ThemeToggle({ className = "" }: { className?: string }) {
|
||||
return;
|
||||
}
|
||||
setTheme(OPTIONS[next].value);
|
||||
// Move focus to the new button so arrow-key navigation is continuous.
|
||||
// Use direct-child query to scope strictly to this radiogroup's buttons
|
||||
// and avoid accidentally focusing unrelated [role=radio] elements
|
||||
// elsewhere in the DOM (e.g. React Flow canvas nodes).
|
||||
const radiogroup = e.currentTarget.closest("[role=radiogroup]") as HTMLElement | null;
|
||||
const btns = radiogroup?.querySelectorAll<HTMLButtonElement>("> [role=radio]");
|
||||
// Move focus to the new button so arrow-key navigation is continuous
|
||||
const btns = (e.currentTarget.closest("[role=radiogroup]") as HTMLElement)?.querySelectorAll<HTMLButtonElement>("[role=radio]");
|
||||
btns?.[next]?.focus();
|
||||
},
|
||||
[]
|
||||
|
||||
@ -314,7 +314,7 @@ export function Toolbar() {
|
||||
<div ref={helpRef} className="relative">
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setHelpOpen(true)}
|
||||
onClick={() => setHelpOpen((open) => !open)}
|
||||
className="flex items-center justify-center w-7 h-7 bg-surface-card hover:bg-surface-card/70 border border-line rounded-lg transition-colors text-ink-mid hover:text-ink focus:outline-none focus-visible:ring-2 focus-visible:ring-accent/40"
|
||||
aria-expanded={helpOpen}
|
||||
aria-label="Open shortcuts and tips"
|
||||
|
||||
@ -9,7 +9,6 @@ import { Tooltip } from "@/components/Tooltip";
|
||||
import { STATUS_CONFIG, TIER_CONFIG } from "@/lib/design-tokens";
|
||||
import { useOrgDeployState } from "@/components/canvas/useOrgDeployState";
|
||||
import { OrgCancelButton } from "@/components/canvas/OrgCancelButton";
|
||||
import { isExternalLikeRuntime } from "@/lib/externalRuntimes";
|
||||
|
||||
/** Descendant count for the "N sub" badge — children are first-class nodes
|
||||
* rendered as full cards inside this one via React Flow's native parentId,
|
||||
@ -249,9 +248,9 @@ export function WorkspaceNode({ id, data }: NodeProps<Node<WorkspaceNodeData>>)
|
||||
if (!runtime) return null;
|
||||
return (
|
||||
<div className="mb-1 flex items-center gap-1">
|
||||
{isExternalLikeRuntime(runtime) ? (
|
||||
{runtime === "external" ? (
|
||||
<span
|
||||
className="text-[7px] font-mono px-1.5 py-0.5 rounded-md text-white bg-violet-800 border border-violet-900"
|
||||
className="text-[7px] font-mono px-1.5 py-0.5 rounded-md text-white bg-violet-600 border border-violet-700"
|
||||
title="Phase 30 remote agent — runs outside this platform's Docker network. Lifecycle managed via heartbeat-based polling, not Docker exec."
|
||||
>
|
||||
★ REMOTE
|
||||
|
||||
@ -238,98 +238,6 @@ describe("ApprovalBanner — decisions", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("ApprovalBanner — disabled state while submitting", () => {
|
||||
// Deferred so we can control when the mock POST resolves.
|
||||
let resolvePost: (value: unknown) => void;
|
||||
let postPromise: Promise<unknown>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers();
|
||||
mockApiGet.mockReset().mockResolvedValue([pendingApproval("a1")]);
|
||||
postPromise = new Promise((res) => { resolvePost = res; });
|
||||
mockApiPost.mockReset().mockImplementation(() => postPromise as Promise<unknown>);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
vi.useRealTimers();
|
||||
vi.restoreAllMocks();
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
it("disables both buttons while POST is in flight", async () => {
|
||||
render(<ApprovalBanner />);
|
||||
await act(async () => { await vi.runOnlyPendingTimersAsync(); });
|
||||
const approveBtn = screen.getAllByRole("button", { name: /approve/i })[0];
|
||||
const denyBtn = screen.getAllByRole("button", { name: /deny/i })[0];
|
||||
|
||||
fireEvent.click(approveBtn);
|
||||
await act(async () => { /* flush */ });
|
||||
|
||||
expect((approveBtn as HTMLButtonElement).disabled).toBe(true);
|
||||
expect((denyBtn as HTMLButtonElement).disabled).toBe(true);
|
||||
});
|
||||
|
||||
it("re-enables buttons after POST resolves", async () => {
|
||||
render(<ApprovalBanner />);
|
||||
await act(async () => { await vi.runOnlyPendingTimersAsync(); });
|
||||
const approveBtn = screen.getAllByRole("button", { name: /approve/i })[0];
|
||||
const denyBtn = screen.getAllByRole("button", { name: /deny/i })[0];
|
||||
|
||||
fireEvent.click(approveBtn);
|
||||
await act(async () => { /* flush */ });
|
||||
expect((approveBtn as HTMLButtonElement).disabled).toBe(true);
|
||||
expect((denyBtn as HTMLButtonElement).disabled).toBe(true);
|
||||
|
||||
// Resolve the deferred POST inside act() so React flushes the state update.
|
||||
await act(async () => {
|
||||
resolvePost!({});
|
||||
});
|
||||
expect(screen.queryByRole("alert")).toBeNull();
|
||||
});
|
||||
|
||||
it("re-enables buttons after POST fails", async () => {
|
||||
mockApiPost.mockImplementation(() => Promise.reject(new Error("Network error")));
|
||||
render(<ApprovalBanner />);
|
||||
await act(async () => { await vi.runOnlyPendingTimersAsync(); });
|
||||
const approveBtn = screen.getAllByRole("button", { name: /approve/i })[0];
|
||||
|
||||
fireEvent.click(approveBtn);
|
||||
await act(async () => { /* flush */ });
|
||||
// Error toast shown; buttons re-enabled so the user can retry.
|
||||
expect((approveBtn as HTMLButtonElement).disabled).toBe(false);
|
||||
});
|
||||
|
||||
it("shows ellipsis text on the clicked button while submitting", async () => {
|
||||
render(<ApprovalBanner />);
|
||||
await act(async () => { await vi.runOnlyPendingTimersAsync(); });
|
||||
fireEvent.click(screen.getAllByRole("button", { name: /approve/i })[0]);
|
||||
await act(async () => { /* flush */ });
|
||||
// The clicked button now shows "…" instead of "Approve"
|
||||
expect(screen.queryByRole("button", { name: /approve/i })).toBeNull();
|
||||
expect(screen.getAllByRole("button", { name: /^…$/ }).length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("disables ALL buttons globally while any submission is in flight", async () => {
|
||||
// Guard is per-banner (pendingApprovalId), not per-approval. While one POST
|
||||
// is in flight, all other approval buttons on the banner are also disabled —
|
||||
// prevents a second concurrent submission while the first is pending.
|
||||
mockApiGet.mockReset().mockResolvedValue([
|
||||
pendingApproval("a1"),
|
||||
pendingApproval("a2", "ws-2"),
|
||||
]);
|
||||
render(<ApprovalBanner />);
|
||||
await act(async () => { await vi.runOnlyPendingTimersAsync(); });
|
||||
const card1Approve = screen.getAllByRole("button", { name: /approve/i })[0];
|
||||
const card2Approve = screen.getAllByRole("button", { name: /approve/i })[1];
|
||||
fireEvent.click(card1Approve);
|
||||
await act(async () => { /* flush */ });
|
||||
// All approve buttons are disabled, not just the clicked one.
|
||||
expect((card1Approve as HTMLButtonElement).disabled).toBe(true);
|
||||
expect((card2Approve as HTMLButtonElement).disabled).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("ApprovalBanner — handles empty list from server", () => {
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
@ -1,63 +0,0 @@
|
||||
// @vitest-environment jsdom
|
||||
/**
|
||||
* Unit tests for formatAuditRelativeTime — pure date formatter from AuditTrailPanel.
|
||||
*/
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { formatAuditRelativeTime } from "../AuditTrailPanel";
|
||||
|
||||
describe("formatAuditRelativeTime", () => {
|
||||
it('returns "just now" for timestamps within the last minute', () => {
|
||||
const now = 1_700_000_000_000;
|
||||
const thirtySecAgo = new Date(now - 30_000).toISOString();
|
||||
expect(formatAuditRelativeTime(thirtySecAgo, now)).toBe("just now");
|
||||
});
|
||||
|
||||
it('returns "Xm ago" for timestamps within the last hour', () => {
|
||||
const now = 1_700_000_000_000;
|
||||
const fiveMinAgo = new Date(now - 5 * 60_000).toISOString();
|
||||
expect(formatAuditRelativeTime(fiveMinAgo, now)).toBe("5m ago");
|
||||
});
|
||||
|
||||
it('returns "Xh ago" for timestamps within the last day', () => {
|
||||
const now = 1_700_000_000_000;
|
||||
const threeHoursAgo = new Date(now - 3 * 3_600_000).toISOString();
|
||||
expect(formatAuditRelativeTime(threeHoursAgo, now)).toBe("3h ago");
|
||||
});
|
||||
|
||||
it("returns locale date string for timestamps older than 24h", () => {
|
||||
const now = 1_700_000_000_000;
|
||||
const twoDaysAgo = new Date(now - 2 * 86_400_000).toISOString();
|
||||
const result = formatAuditRelativeTime(twoDaysAgo, now);
|
||||
// Should be a date string (not "Xh ago" or "Xm ago")
|
||||
expect(result).not.toMatch(/m ago|h ago|just now/);
|
||||
expect(result).toBe(new Date(twoDaysAgo).toLocaleDateString());
|
||||
});
|
||||
|
||||
it("handles the boundary between minute and hour correctly", () => {
|
||||
const now = 1_700_000_000_000;
|
||||
const exactlyOneHourAgo = new Date(now - 3_600_000).toISOString();
|
||||
expect(formatAuditRelativeTime(exactlyOneHourAgo, now)).toBe("1h ago");
|
||||
});
|
||||
|
||||
it("handles the boundary between hour and day correctly", () => {
|
||||
const now = 1_700_000_000_000;
|
||||
// 23h ago is < 24h so it shows "23h ago"; exactly 24h falls through to date string
|
||||
const twentyThreeHoursAgo = new Date(now - 23 * 3_600_000).toISOString();
|
||||
expect(formatAuditRelativeTime(twentyThreeHoursAgo, now)).toBe("23h ago");
|
||||
});
|
||||
|
||||
it("returns locale date string for exactly 24h ago (boundary)", () => {
|
||||
const now = 1_700_000_000_000;
|
||||
const exactlyOneDayAgo = new Date(now - 86_400_000).toISOString();
|
||||
const result = formatAuditRelativeTime(exactlyOneDayAgo, now);
|
||||
// diff is exactly 86_400_000, which is NOT < 86_400_000, so it falls through
|
||||
expect(result).toBe(new Date(exactlyOneDayAgo).toLocaleDateString());
|
||||
});
|
||||
|
||||
it("future timestamps return 'just now' (negative diff < 60_000)", () => {
|
||||
const now = 1_700_000_000_000;
|
||||
const future = new Date(now + 60_000).toISOString();
|
||||
// Negative diff passes diff < 60_000, returning "just now"
|
||||
expect(formatAuditRelativeTime(future, now)).toBe("just now");
|
||||
});
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user