forked from molecule-ai/molecule-core
Merge pull request #1023 from Molecule-AI/feat/productivity-boost-event-crons-autopush
feat: event-driven crons + auto-push hook for agent productivity
This commit is contained in:
commit
14c36e1bbd
21
.github/workflows/canary-verify.yml
vendored
21
.github/workflows/canary-verify.yml
vendored
@ -34,7 +34,9 @@ jobs:
|
||||
canary-smoke:
|
||||
# Skip when the upstream workflow failed — no image to test against.
|
||||
if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }}
|
||||
runs-on: ubuntu-latest
|
||||
# Self-hosted mac mini — GitHub-hosted minutes are quota-blocked on
|
||||
# this org (same reason publish/promote-latest moved earlier).
|
||||
runs-on: [self-hosted, macos, arm64]
|
||||
outputs:
|
||||
sha: ${{ steps.compute.outputs.sha }}
|
||||
steps:
|
||||
@ -77,12 +79,21 @@ jobs:
|
||||
# the runner) that can retag remotely with a single API call each.
|
||||
needs: canary-smoke
|
||||
if: ${{ needs.canary-smoke.result == 'success' }}
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: [self-hosted, macos, arm64]
|
||||
steps:
|
||||
- name: Install crane
|
||||
- name: Ensure crane installed
|
||||
# Matches the install pattern in promote-latest.yml — brew
|
||||
# cleanup exits non-zero on the shared runner's /opt/homebrew
|
||||
# symlinks, so skip it.
|
||||
env:
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: "1"
|
||||
HOMEBREW_NO_AUTO_UPDATE: "1"
|
||||
HOMEBREW_NO_ENV_HINTS: "1"
|
||||
run: |
|
||||
curl -fsSL https://github.com/google/go-containerregistry/releases/download/v0.20.2/go-containerregistry_Linux_x86_64.tar.gz | \
|
||||
tar xz -C /usr/local/bin crane
|
||||
if ! command -v crane >/dev/null 2>&1; then
|
||||
brew install crane
|
||||
fi
|
||||
crane version
|
||||
|
||||
- name: GHCR login
|
||||
run: |
|
||||
|
||||
124
.github/workflows/codeql.yml
vendored
Normal file
124
.github/workflows/codeql.yml
vendored
Normal file
@ -0,0 +1,124 @@
|
||||
name: CodeQL
|
||||
|
||||
# Controls CodeQL scan triggers for this repo.
|
||||
#
|
||||
# GitHub's "Code quality" default setup (the UI-configured one) is
|
||||
# hardcoded to only scan the default branch — on this repo that's
|
||||
# `staging`, so PRs promoting staging→main would otherwise never be
|
||||
# scanned. This workflow fills that gap by explicitly scanning both
|
||||
# branches on push and PR.
|
||||
#
|
||||
# Runs on the self-hosted mac mini (matches the org-wide Code Quality
|
||||
# runner-label config). GHAS is NOT enabled on this repo, so results
|
||||
# are not uploaded to the Security tab — the scan fails the PR check
|
||||
# on findings, and the SARIF is kept as a workflow artifact for
|
||||
# triage.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, staging]
|
||||
pull_request:
|
||||
branches: [main, staging]
|
||||
schedule:
|
||||
# Weekly run picks up findings in code that hasn't been touched.
|
||||
- cron: '30 1 * * 0'
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
# No security-events: write — we don't call the upload API.
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze (${{ matrix.language }})
|
||||
runs-on: [self-hosted, macos, arm64]
|
||||
timeout-minutes: 45
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [go, javascript-typescript, python]
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Checkout sibling plugin repo
|
||||
# Same reasoning as publish-workspace-server-image.yml — the Go
|
||||
# module's replace directive needs the plugin source so
|
||||
# CodeQL's "go build" phase can resolve.
|
||||
if: matrix.language == 'go'
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: Molecule-AI/molecule-ai-plugin-github-app-auth
|
||||
path: molecule-ai-plugin-github-app-auth
|
||||
token: ${{ secrets.PLUGIN_REPO_PAT || secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Ensure jq installed
|
||||
# Follows the crane-install pattern in promote-latest.yml.
|
||||
# HOMEBREW_NO_* flags skip the cleanup that fails on the shared
|
||||
# runner's /opt/homebrew symlinks.
|
||||
env:
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: "1"
|
||||
HOMEBREW_NO_AUTO_UPDATE: "1"
|
||||
HOMEBREW_NO_ENV_HINTS: "1"
|
||||
run: command -v jq >/dev/null || brew install jq
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# security-extended widens past the default to include the
|
||||
# full security-query set for a public SaaS surface.
|
||||
queries: security-extended
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
id: analyze
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{ matrix.language }}"
|
||||
# upload: never — GHAS isn't enabled on this repo, so the
|
||||
# upload API 403s. Write SARIF locally instead.
|
||||
upload: never
|
||||
output: sarif-results/${{ matrix.language }}
|
||||
|
||||
- name: Parse SARIF + fail on findings
|
||||
# The analyze step writes <database>.sarif into the output
|
||||
# directory — database name is the short CodeQL lang id, not
|
||||
# the matrix value (e.g. "javascript-typescript" →
|
||||
# javascript.sarif), so glob rather than hardcode.
|
||||
# Filter to error/warning severity: security-extended emits
|
||||
# "note" rows for informational findings we don't want to fail
|
||||
# the build over.
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
dir="sarif-results/${{ matrix.language }}"
|
||||
sarif=$(ls "$dir"/*.sarif 2>/dev/null | head -1 || true)
|
||||
if [ -z "$sarif" ] || [ ! -f "$sarif" ]; then
|
||||
echo "::error::No SARIF file found under $dir"
|
||||
ls -la "$dir" 2>/dev/null || true
|
||||
exit 1
|
||||
fi
|
||||
echo "Parsing $sarif"
|
||||
count=$(jq '[.runs[].results[] | select(.level == "error" or .level == "warning")] | length' "$sarif")
|
||||
echo "CodeQL findings (error+warning) for ${{ matrix.language }}: $count"
|
||||
if [ "$count" -gt 0 ]; then
|
||||
echo "::error::CodeQL found $count issues. Details below; full SARIF in the artifact."
|
||||
jq -r '.runs[].results[] | select(.level == "error" or .level == "warning") | " - [\(.level)] \(.ruleId // "?"): \(.message.text // "(no message)") @ \(.locations[0].physicalLocation.artifactLocation.uri // "?"):\(.locations[0].physicalLocation.region.startLine // "?")"' "$sarif"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Upload SARIF artifact
|
||||
# Keep SARIF around on success + failure so triagers can diff.
|
||||
# 14-day retention — longer than default 3, short enough not
|
||||
# to bloat quota.
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: codeql-sarif-${{ matrix.language }}
|
||||
path: sarif-results/${{ matrix.language }}/
|
||||
retention-days: 14
|
||||
@ -20,6 +20,8 @@
|
||||
import { useEffect, useState } from "react";
|
||||
import { fetchSession, redirectToLogin, type Session } from "@/lib/auth";
|
||||
import { PLATFORM_URL } from "@/lib/api";
|
||||
import { formatCredits, pillTone, bannerKind } from "@/lib/credits";
|
||||
import { TermsGate } from "@/components/TermsGate";
|
||||
|
||||
type OrgStatus = "awaiting_payment" | "provisioning" | "running" | "failed" | string;
|
||||
|
||||
@ -31,6 +33,13 @@ interface Org {
|
||||
status: OrgStatus;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
// Credit system fields. Present whenever the control plane's models
|
||||
// serializer runs — tests + older snapshot JSONs may not have them,
|
||||
// so treat as optional in TS and fall back to 0 at render time.
|
||||
credits_balance?: number;
|
||||
plan_monthly_credits?: number;
|
||||
overage_used_credits?: number;
|
||||
overage_cap_credits?: number;
|
||||
}
|
||||
|
||||
export default function OrgsPage() {
|
||||
@ -154,17 +163,38 @@ function CheckoutBanner() {
|
||||
function Shell({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<main className="min-h-screen bg-zinc-950 text-zinc-100">
|
||||
<div className="mx-auto max-w-2xl px-6 pt-20 pb-12">
|
||||
<h1 className="text-3xl font-bold text-white">Your organizations</h1>
|
||||
<p className="mt-2 text-zinc-400">
|
||||
Each org is an isolated Molecule workspace.
|
||||
</p>
|
||||
<div className="mt-8">{children}</div>
|
||||
</div>
|
||||
<TermsGate>
|
||||
<div className="mx-auto max-w-2xl px-6 pt-20 pb-12">
|
||||
<h1 className="text-3xl font-bold text-white">Your organizations</h1>
|
||||
<p className="mt-2 text-zinc-400">
|
||||
Each org is an isolated Molecule workspace.
|
||||
</p>
|
||||
<DataResidencyNotice />
|
||||
<div className="mt-8">{children}</div>
|
||||
</div>
|
||||
</TermsGate>
|
||||
</main>
|
||||
);
|
||||
}
|
||||
|
||||
// DataResidencyNotice surfaces where workspace data lives so EU-based
|
||||
// signups can make an informed choice (GDPR Art. 13 disclosure
|
||||
// requirement). Plain text, no icon — the goal is clarity, not
|
||||
// decoration. A future EU region selector can replace this with a
|
||||
// region dropdown.
|
||||
function DataResidencyNotice() {
|
||||
return (
|
||||
<p className="mt-3 rounded border border-zinc-800 bg-zinc-900/60 px-3 py-2 text-xs text-zinc-400">
|
||||
Workspaces run in AWS us-east-2 (Ohio, United States). EU region support is on the roadmap — reach out to
|
||||
{" "}
|
||||
<a href="mailto:support@moleculesai.app" className="underline">
|
||||
support@moleculesai.app
|
||||
</a>
|
||||
{" "}if you need data residency in another region today.
|
||||
</p>
|
||||
);
|
||||
}
|
||||
|
||||
function OrgRow({ org }: { org: Org }) {
|
||||
return (
|
||||
<li className="rounded-lg border border-zinc-800 bg-zinc-900 p-4">
|
||||
@ -174,6 +204,10 @@ function OrgRow({ org }: { org: Org }) {
|
||||
<div className="text-sm text-zinc-400">
|
||||
{org.slug} · <StatusLabel status={org.status} /> · {org.plan || "free"}
|
||||
</div>
|
||||
<div className="mt-2 flex items-center gap-2">
|
||||
<CreditsPill org={org} />
|
||||
<LowCreditsBanner org={org} />
|
||||
</div>
|
||||
</div>
|
||||
<OrgCTA org={org} />
|
||||
</div>
|
||||
@ -181,6 +215,48 @@ function OrgRow({ org }: { org: Org }) {
|
||||
);
|
||||
}
|
||||
|
||||
// CreditsPill renders the balance with a tone that matches the banner
|
||||
// severity. Format + color logic lives in @/lib/credits so it can be
|
||||
// tested without mounting React.
|
||||
function CreditsPill({ org }: { org: Org }) {
|
||||
const balance = org.credits_balance ?? 0;
|
||||
return (
|
||||
<span className={`rounded border px-2 py-0.5 text-xs ${pillTone(org)}`} title="Credit balance">
|
||||
{formatCredits(balance)} credits
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
// LowCreditsBanner is a one-liner that only renders when the balance
|
||||
// is low AND the org is running. bannerKind() picks which message to
|
||||
// show; render just dispatches on it.
|
||||
function LowCreditsBanner({ org }: { org: Org }) {
|
||||
if (org.status !== "running") return null;
|
||||
const kind = bannerKind(org);
|
||||
if (kind === "none") return null;
|
||||
if (kind === "overage") {
|
||||
const used = (org.overage_used_credits ?? 0).toLocaleString();
|
||||
return (
|
||||
<span className="text-xs text-amber-300">
|
||||
overage active · {used} used
|
||||
</span>
|
||||
);
|
||||
}
|
||||
if (kind === "out-of-credits") {
|
||||
return (
|
||||
<a href={`/pricing?org=${encodeURIComponent(org.slug)}`} className="text-xs text-red-300 underline">
|
||||
out of credits — upgrade to keep running
|
||||
</a>
|
||||
);
|
||||
}
|
||||
// trial-tail
|
||||
return (
|
||||
<a href={`/pricing?org=${encodeURIComponent(org.slug)}`} className="text-xs text-amber-300 underline">
|
||||
trial almost out
|
||||
</a>
|
||||
);
|
||||
}
|
||||
|
||||
function StatusLabel({ status }: { status: OrgStatus }) {
|
||||
const cls =
|
||||
status === "running"
|
||||
|
||||
117
canvas/src/components/TermsGate.tsx
Normal file
117
canvas/src/components/TermsGate.tsx
Normal file
@ -0,0 +1,117 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
import { PLATFORM_URL } from "@/lib/api";
|
||||
|
||||
// TermsGate blocks the page it wraps until the user has accepted the
|
||||
// current terms version. Fetches /cp/auth/terms-status on mount; if
|
||||
// the server says accepted=false it renders a modal over the children
|
||||
// instead of hiding them entirely — that way the /orgs list is still
|
||||
// visible behind the gate so the user understands what they're
|
||||
// agreeing to touch.
|
||||
//
|
||||
// The server is the source of truth; this component is a UX
|
||||
// convenience. Org-mutating endpoints should (and do) also enforce
|
||||
// ToS via their own DB check so a power-user calling curl can't
|
||||
// bypass the gate.
|
||||
export function TermsGate({ children }: { children: React.ReactNode }) {
|
||||
const [status, setStatus] = useState<"loading" | "accepted" | "pending" | "error">("loading");
|
||||
const [submitting, setSubmitting] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
let cancelled = false;
|
||||
(async () => {
|
||||
try {
|
||||
const res = await fetch(`${PLATFORM_URL}/cp/auth/terms-status`, {
|
||||
credentials: "include",
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
});
|
||||
if (cancelled) return;
|
||||
if (res.status === 401) {
|
||||
// Not signed in — the page this wraps handles redirect to login.
|
||||
// Fall through to "accepted" so we don't double-gate anonymous.
|
||||
setStatus("accepted");
|
||||
return;
|
||||
}
|
||||
if (!res.ok) {
|
||||
setStatus("error");
|
||||
setError(`terms-status: ${res.status}`);
|
||||
return;
|
||||
}
|
||||
const body = (await res.json()) as { accepted?: boolean };
|
||||
setStatus(body.accepted ? "accepted" : "pending");
|
||||
} catch (err) {
|
||||
if (!cancelled) {
|
||||
setStatus("error");
|
||||
setError(err instanceof Error ? err.message : String(err));
|
||||
}
|
||||
}
|
||||
})();
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, []);
|
||||
|
||||
const accept = async () => {
|
||||
setSubmitting(true);
|
||||
setError(null);
|
||||
try {
|
||||
const res = await fetch(`${PLATFORM_URL}/cp/auth/accept-terms`, {
|
||||
method: "POST",
|
||||
credentials: "include",
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const text = await res.text();
|
||||
throw new Error(`${res.status}: ${text}`);
|
||||
}
|
||||
setStatus("accepted");
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : String(err));
|
||||
setSubmitting(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
{children}
|
||||
{status === "pending" && (
|
||||
<div className="fixed inset-0 z-50 flex items-center justify-center bg-zinc-950/80 backdrop-blur-sm">
|
||||
<div className="mx-4 max-w-lg rounded-lg border border-zinc-700 bg-zinc-900 p-6 shadow-xl">
|
||||
<h2 className="text-lg font-semibold text-white">Terms & conditions</h2>
|
||||
<p className="mt-3 text-sm text-zinc-300">
|
||||
Before you create an organization, please review our{" "}
|
||||
<a href="/legal/terms" className="text-sky-400 underline" target="_blank" rel="noreferrer">
|
||||
Terms of Service
|
||||
</a>{" "}
|
||||
and{" "}
|
||||
<a href="/legal/privacy" className="text-sky-400 underline" target="_blank" rel="noreferrer">
|
||||
Privacy Policy
|
||||
</a>
|
||||
. Click agree to continue.
|
||||
</p>
|
||||
<p className="mt-3 text-xs text-zinc-500">
|
||||
By agreeing you acknowledge that workspace data is stored in AWS us-east-2 (Ohio, United States).
|
||||
</p>
|
||||
{error && <p className="mt-3 text-sm text-red-400">{error}</p>}
|
||||
<div className="mt-5 flex justify-end gap-2">
|
||||
<button
|
||||
onClick={accept}
|
||||
disabled={submitting}
|
||||
className="rounded bg-emerald-600 px-4 py-2 text-sm font-medium text-white hover:bg-emerald-500 disabled:opacity-50"
|
||||
>
|
||||
{submitting ? "Saving…" : "I agree"}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
{status === "error" && (
|
||||
<div className="fixed bottom-4 left-4 right-4 mx-auto max-w-md rounded border border-red-800 bg-red-950 p-3 text-sm text-red-200">
|
||||
Couldn't check terms status: {error ?? "unknown error"}
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
53
canvas/src/lib/__tests__/credits.test.ts
Normal file
53
canvas/src/lib/__tests__/credits.test.ts
Normal file
@ -0,0 +1,53 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { formatCredits, pillTone, bannerKind } from "@/lib/credits";
|
||||
|
||||
describe("formatCredits", () => {
|
||||
it("renders raw numbers under 10k", () => {
|
||||
expect(formatCredits(0)).toBe("0");
|
||||
expect(formatCredits(42)).toBe("42");
|
||||
expect(formatCredits(9999)).toBe("9999");
|
||||
});
|
||||
it("compacts 10k+ with one decimal", () => {
|
||||
expect(formatCredits(12345)).toBe("12.3k");
|
||||
expect(formatCredits(30000)).toBe("30.0k");
|
||||
});
|
||||
});
|
||||
|
||||
describe("pillTone", () => {
|
||||
it("zinc for healthy balance", () => {
|
||||
expect(pillTone({ credits_balance: 5000, plan_monthly_credits: 9000 })).toContain("zinc");
|
||||
});
|
||||
it("amber when under 10% of monthly", () => {
|
||||
expect(pillTone({ credits_balance: 500, plan_monthly_credits: 9000 })).toContain("amber");
|
||||
});
|
||||
it("red at zero or negative", () => {
|
||||
expect(pillTone({ credits_balance: 0, plan_monthly_credits: 9000 })).toContain("red");
|
||||
expect(pillTone({ credits_balance: -1, plan_monthly_credits: 9000 })).toContain("red");
|
||||
});
|
||||
it("trial (monthly=0) is healthy until balance hits zero", () => {
|
||||
// No paid plan → no ratio reference; only "0" means empty.
|
||||
expect(pillTone({ credits_balance: 50, plan_monthly_credits: 0 })).toContain("zinc");
|
||||
expect(pillTone({ credits_balance: 0, plan_monthly_credits: 0 })).toContain("red");
|
||||
});
|
||||
});
|
||||
|
||||
describe("bannerKind", () => {
|
||||
it("overage wins when overage_used > 0", () => {
|
||||
// Even a healthy balance gets "overage" so the banner reminds the
|
||||
// paying customer that extra charges are accruing.
|
||||
expect(bannerKind({ credits_balance: 3000, plan_monthly_credits: 9000, overage_used_credits: 500 }))
|
||||
.toBe("overage");
|
||||
});
|
||||
it("out-of-credits when balance <= 0 and no overage", () => {
|
||||
expect(bannerKind({ credits_balance: 0, plan_monthly_credits: 9000 })).toBe("out-of-credits");
|
||||
});
|
||||
it("trial-tail when plan is free and balance is low", () => {
|
||||
expect(bannerKind({ credits_balance: 50, plan_monthly_credits: 0 })).toBe("trial-tail");
|
||||
});
|
||||
it("none for healthy paid balance", () => {
|
||||
expect(bannerKind({ credits_balance: 8000, plan_monthly_credits: 9000 })).toBe("none");
|
||||
});
|
||||
it("none for a trial that still has plenty of credits", () => {
|
||||
expect(bannerKind({ credits_balance: 400, plan_monthly_credits: 0 })).toBe("none");
|
||||
});
|
||||
});
|
||||
53
canvas/src/lib/credits.ts
Normal file
53
canvas/src/lib/credits.ts
Normal file
@ -0,0 +1,53 @@
|
||||
// credits.ts — small pure helpers for rendering credit state on /orgs.
|
||||
// Kept out of page.tsx so unit tests can exercise the formatting +
|
||||
// banner-kind logic in node (no jsdom) without needing to mount React.
|
||||
|
||||
export type CreditsBannerKind =
|
||||
| "none"
|
||||
| "overage" // paid plan has started burning overage this period
|
||||
| "out-of-credits" // balance 0, not on a paid plan (trial ran out)
|
||||
| "trial-tail"; // balance low but not zero, no paid plan yet
|
||||
|
||||
export interface CreditsFields {
|
||||
credits_balance?: number;
|
||||
plan_monthly_credits?: number;
|
||||
overage_used_credits?: number;
|
||||
}
|
||||
|
||||
// formatCredits renders an int as a compact string. 9999 → "9999",
|
||||
// 12345 → "12.3k". Keeps the balance pill narrow enough to fit on one
|
||||
// line next to the org slug even for the Scale plan's 30k grant.
|
||||
export function formatCredits(n: number): string {
|
||||
if (n < 10_000) return String(n);
|
||||
return `${(n / 1000).toFixed(1)}k`;
|
||||
}
|
||||
|
||||
// pillTone returns the tailwind classnames that color the balance pill.
|
||||
// Empty / exhausted → red; within 10% of zero → amber; else zinc. The
|
||||
// 10% threshold matches the banner trigger — one consistent "low"
|
||||
// signal so the pill and banner agree.
|
||||
export function pillTone(fields: CreditsFields): string {
|
||||
const balance = fields.credits_balance ?? 0;
|
||||
const monthly = fields.plan_monthly_credits ?? 0;
|
||||
if (balance <= 0) return "bg-red-950 text-red-200 border-red-800";
|
||||
const ratio = monthly > 0 ? balance / monthly : 1;
|
||||
if (ratio < 0.1) return "bg-amber-950 text-amber-200 border-amber-800";
|
||||
return "bg-zinc-800 text-zinc-200 border-zinc-700";
|
||||
}
|
||||
|
||||
// bannerKind picks which (if any) banner to show under the balance
|
||||
// pill. Precedence:
|
||||
// 1. overage_used > 0 → "overage" (even if balance is refreshed)
|
||||
// 2. balance <= 0 → "out-of-credits"
|
||||
// 3. trial + low tail → "trial-tail"
|
||||
// 4. otherwise → "none"
|
||||
export function bannerKind(fields: CreditsFields): CreditsBannerKind {
|
||||
const balance = fields.credits_balance ?? 0;
|
||||
const monthly = fields.plan_monthly_credits ?? 0;
|
||||
const overageUsed = fields.overage_used_credits ?? 0;
|
||||
|
||||
if (overageUsed > 0) return "overage";
|
||||
if (balance <= 0) return "out-of-credits";
|
||||
if (monthly === 0 && balance < 100) return "trial-tail";
|
||||
return "none";
|
||||
}
|
||||
@ -7,10 +7,12 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/Molecule-AI/molecule-monorepo/platform/internal/db"
|
||||
"github.com/Molecule-AI/molecule-monorepo/platform/internal/events"
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
@ -56,6 +58,16 @@ func (h *WebhookHandler) GitHub(c *gin.Context) {
|
||||
}
|
||||
|
||||
eventType := c.GetHeader("X-GitHub-Event")
|
||||
|
||||
// Event-driven cron triggers: certain GitHub events fire matching
|
||||
// schedules immediately instead of forwarding to a specific workspace.
|
||||
if triggered, triggerErr := h.handleCronTriggerEvent(c, eventType, rawBody); triggered {
|
||||
if triggerErr != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": triggerErr.Error()})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
deliveryID := c.GetHeader("X-GitHub-Delivery")
|
||||
payloadWorkspaceID, a2aPayload, buildErr := buildGitHubA2APayload(eventType, deliveryID, rawBody)
|
||||
if buildErr != nil {
|
||||
@ -295,3 +307,131 @@ func newGitHubMessagePayload(text string, metadata map[string]interface{}) map[s
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Event-driven cron triggers
|
||||
//
|
||||
// Some GitHub events don't target a specific workspace — instead they should
|
||||
// wake up all engineer work crons immediately so the team reacts to new issues
|
||||
// or PR reviews without waiting for the next 30-minute timer tick.
|
||||
//
|
||||
// Supported events:
|
||||
// - issues (action=opened) → fires schedules with "pick-up-work" in name
|
||||
// - pull_request_review (action=submitted) → fires schedules with "PR review"
|
||||
// or "security review" in name
|
||||
//
|
||||
// Mechanism: UPDATE next_run_at = NOW() on matching enabled schedules. The
|
||||
// scheduler's 30-second poll loop picks them up on the next tick.
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// githubIssuesEvent is the minimal subset of the GitHub "issues" webhook payload.
|
||||
type githubIssuesEvent struct {
|
||||
Action string `json:"action"`
|
||||
Repository githubRepository `json:"repository"`
|
||||
Sender githubSender `json:"sender"`
|
||||
Issue struct {
|
||||
Number int `json:"number"`
|
||||
Title string `json:"title"`
|
||||
HTMLURL string `json:"html_url"`
|
||||
} `json:"issue"`
|
||||
}
|
||||
|
||||
// githubPullRequestReviewEvent is the minimal subset of the GitHub
|
||||
// "pull_request_review" webhook payload.
|
||||
type githubPullRequestReviewEvent struct {
|
||||
Action string `json:"action"`
|
||||
Repository githubRepository `json:"repository"`
|
||||
Sender githubSender `json:"sender"`
|
||||
Review struct {
|
||||
State string `json:"state"` // approved, changes_requested, commented
|
||||
HTMLURL string `json:"html_url"`
|
||||
} `json:"review"`
|
||||
PullRequest struct {
|
||||
Number int `json:"number"`
|
||||
Title string `json:"title"`
|
||||
HTMLURL string `json:"html_url"`
|
||||
} `json:"pull_request"`
|
||||
}
|
||||
|
||||
// handleCronTriggerEvent checks if the GitHub event is one that should trigger
|
||||
// schedules immediately. Returns (true, nil) if it handled the event and wrote
|
||||
// the HTTP response, (true, err) if it handled but errored, or (false, nil) if
|
||||
// the event is not a cron-trigger type and should fall through to A2A forwarding.
|
||||
func (h *WebhookHandler) handleCronTriggerEvent(c *gin.Context, eventType string, rawBody []byte) (bool, error) {
|
||||
ctx := c.Request.Context()
|
||||
|
||||
switch eventType {
|
||||
case "issues":
|
||||
var payload githubIssuesEvent
|
||||
if err := json.Unmarshal(rawBody, &payload); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid issues payload"})
|
||||
return true, nil
|
||||
}
|
||||
if payload.Action != "opened" {
|
||||
c.JSON(http.StatusAccepted, gin.H{"status": "ignored", "reason": "only issues action=opened triggers crons"})
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// Fire all enabled schedules whose name contains "pick-up-work" (case-insensitive).
|
||||
result, err := db.DB.ExecContext(ctx, `
|
||||
UPDATE workspace_schedules
|
||||
SET next_run_at = now(), updated_at = now()
|
||||
WHERE enabled = true
|
||||
AND next_run_at IS NOT NULL
|
||||
AND LOWER(name) LIKE '%pick-up-work%'
|
||||
`)
|
||||
if err != nil {
|
||||
log.Printf("Webhook: cron trigger (issues/opened) DB error: %v", err)
|
||||
return true, fmt.Errorf("failed to trigger schedules: %w", err)
|
||||
}
|
||||
affected, _ := result.RowsAffected()
|
||||
log.Printf("Webhook: issues/opened in %s #%d by %s — triggered %d pick-up-work schedule(s)",
|
||||
payload.Repository.FullName, payload.Issue.Number, payload.Sender.Login, affected)
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"status": "triggered",
|
||||
"event": "issues",
|
||||
"action": "opened",
|
||||
"schedules_affected": affected,
|
||||
})
|
||||
return true, nil
|
||||
|
||||
case "pull_request_review":
|
||||
var payload githubPullRequestReviewEvent
|
||||
if err := json.Unmarshal(rawBody, &payload); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid pull_request_review payload"})
|
||||
return true, nil
|
||||
}
|
||||
if payload.Action != "submitted" {
|
||||
c.JSON(http.StatusAccepted, gin.H{"status": "ignored", "reason": "only pull_request_review action=submitted triggers crons"})
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// Fire all enabled schedules whose name contains "PR review" or "security review" (case-insensitive).
|
||||
result, err := db.DB.ExecContext(ctx, `
|
||||
UPDATE workspace_schedules
|
||||
SET next_run_at = now(), updated_at = now()
|
||||
WHERE enabled = true
|
||||
AND next_run_at IS NOT NULL
|
||||
AND (LOWER(name) LIKE '%pr review%' OR LOWER(name) LIKE '%security review%')
|
||||
`)
|
||||
if err != nil {
|
||||
log.Printf("Webhook: cron trigger (pull_request_review/submitted) DB error: %v", err)
|
||||
return true, fmt.Errorf("failed to trigger schedules: %w", err)
|
||||
}
|
||||
affected, _ := result.RowsAffected()
|
||||
log.Printf("Webhook: pull_request_review/submitted in %s PR #%d by %s (state=%s) — triggered %d review schedule(s)",
|
||||
payload.Repository.FullName, payload.PullRequest.Number, payload.Sender.Login, payload.Review.State, affected)
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"status": "triggered",
|
||||
"event": "pull_request_review",
|
||||
"action": "submitted",
|
||||
"schedules_affected": affected,
|
||||
})
|
||||
return true, nil
|
||||
|
||||
default:
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
@ -8,6 +8,7 @@ import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@ -206,3 +207,147 @@ func TestGitHubWebhook_ValidPRReviewComment_Forwards(t *testing.T) {
|
||||
t.Fatalf("unmet sqlmock expectations: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Event-driven cron trigger tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func TestGitHubWebhook_IssuesOpened_TriggersCrons(t *testing.T) {
|
||||
mock := setupTestDB(t)
|
||||
setupTestRedis(t)
|
||||
broadcaster := newTestBroadcaster()
|
||||
handler := NewWebhookHandler(broadcaster)
|
||||
|
||||
secret := "test-secret"
|
||||
t.Setenv("GITHUB_WEBHOOK_SECRET", secret)
|
||||
|
||||
body := []byte(`{
|
||||
"action": "opened",
|
||||
"repository": {"full_name": "Molecule-AI/molecule-core"},
|
||||
"sender": {"login": "alice"},
|
||||
"issue": {"number": 42, "title": "New feature request", "html_url": "https://github.com/Molecule-AI/molecule-core/issues/42"}
|
||||
}`)
|
||||
|
||||
// Expect the UPDATE that sets next_run_at = now() on pick-up-work schedules.
|
||||
mock.ExpectExec("UPDATE workspace_schedules").
|
||||
WillReturnResult(sqlmock.NewResult(0, 3))
|
||||
|
||||
w, c := newWebhookTestContext(t, "", body)
|
||||
c.Request.Header.Set("X-GitHub-Event", "issues")
|
||||
c.Request.Header.Set("X-Hub-Signature-256", githubSignature(secret, body))
|
||||
|
||||
handler.GitHub(c)
|
||||
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected status 200, got %d: %s", w.Code, w.Body.String())
|
||||
}
|
||||
|
||||
// Verify response includes trigger metadata.
|
||||
respBody := w.Body.String()
|
||||
if !strings.Contains(respBody, `"triggered"`) {
|
||||
t.Fatalf("expected 'triggered' in response, got: %s", respBody)
|
||||
}
|
||||
if !strings.Contains(respBody, `"schedules_affected"`) {
|
||||
t.Fatalf("expected 'schedules_affected' in response, got: %s", respBody)
|
||||
}
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Fatalf("unmet sqlmock expectations: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGitHubWebhook_IssuesClosed_Ignored(t *testing.T) {
|
||||
setupTestDB(t)
|
||||
setupTestRedis(t)
|
||||
broadcaster := newTestBroadcaster()
|
||||
handler := NewWebhookHandler(broadcaster)
|
||||
|
||||
secret := "test-secret"
|
||||
t.Setenv("GITHUB_WEBHOOK_SECRET", secret)
|
||||
|
||||
body := []byte(`{
|
||||
"action": "closed",
|
||||
"repository": {"full_name": "Molecule-AI/molecule-core"},
|
||||
"sender": {"login": "alice"},
|
||||
"issue": {"number": 42, "title": "Old issue", "html_url": "https://github.com/Molecule-AI/molecule-core/issues/42"}
|
||||
}`)
|
||||
|
||||
w, c := newWebhookTestContext(t, "", body)
|
||||
c.Request.Header.Set("X-GitHub-Event", "issues")
|
||||
c.Request.Header.Set("X-Hub-Signature-256", githubSignature(secret, body))
|
||||
|
||||
handler.GitHub(c)
|
||||
|
||||
if w.Code != http.StatusAccepted {
|
||||
t.Fatalf("expected status 202, got %d: %s", w.Code, w.Body.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestGitHubWebhook_PRReviewSubmitted_TriggersCrons(t *testing.T) {
|
||||
mock := setupTestDB(t)
|
||||
setupTestRedis(t)
|
||||
broadcaster := newTestBroadcaster()
|
||||
handler := NewWebhookHandler(broadcaster)
|
||||
|
||||
secret := "test-secret"
|
||||
t.Setenv("GITHUB_WEBHOOK_SECRET", secret)
|
||||
|
||||
body := []byte(`{
|
||||
"action": "submitted",
|
||||
"repository": {"full_name": "Molecule-AI/molecule-core"},
|
||||
"sender": {"login": "bob"},
|
||||
"review": {"state": "changes_requested", "html_url": "https://github.com/Molecule-AI/molecule-core/pull/7#pullrequestreview-1"},
|
||||
"pull_request": {"number": 7, "title": "Fix scheduler bug", "html_url": "https://github.com/Molecule-AI/molecule-core/pull/7"}
|
||||
}`)
|
||||
|
||||
// Expect the UPDATE that sets next_run_at = now() on review schedules.
|
||||
mock.ExpectExec("UPDATE workspace_schedules").
|
||||
WillReturnResult(sqlmock.NewResult(0, 2))
|
||||
|
||||
w, c := newWebhookTestContext(t, "", body)
|
||||
c.Request.Header.Set("X-GitHub-Event", "pull_request_review")
|
||||
c.Request.Header.Set("X-Hub-Signature-256", githubSignature(secret, body))
|
||||
|
||||
handler.GitHub(c)
|
||||
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected status 200, got %d: %s", w.Code, w.Body.String())
|
||||
}
|
||||
|
||||
respBody := w.Body.String()
|
||||
if !strings.Contains(respBody, `"triggered"`) {
|
||||
t.Fatalf("expected 'triggered' in response, got: %s", respBody)
|
||||
}
|
||||
|
||||
if err := mock.ExpectationsWereMet(); err != nil {
|
||||
t.Fatalf("unmet sqlmock expectations: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGitHubWebhook_PRReviewDismissed_Ignored(t *testing.T) {
|
||||
setupTestDB(t)
|
||||
setupTestRedis(t)
|
||||
broadcaster := newTestBroadcaster()
|
||||
handler := NewWebhookHandler(broadcaster)
|
||||
|
||||
secret := "test-secret"
|
||||
t.Setenv("GITHUB_WEBHOOK_SECRET", secret)
|
||||
|
||||
body := []byte(`{
|
||||
"action": "dismissed",
|
||||
"repository": {"full_name": "Molecule-AI/molecule-core"},
|
||||
"sender": {"login": "bob"},
|
||||
"review": {"state": "dismissed", "html_url": "https://github.com/Molecule-AI/molecule-core/pull/7#pullrequestreview-1"},
|
||||
"pull_request": {"number": 7, "title": "Fix scheduler bug", "html_url": "https://github.com/Molecule-AI/molecule-core/pull/7"}
|
||||
}`)
|
||||
|
||||
w, c := newWebhookTestContext(t, "", body)
|
||||
c.Request.Header.Set("X-GitHub-Event", "pull_request_review")
|
||||
c.Request.Header.Set("X-Hub-Signature-256", githubSignature(secret, body))
|
||||
|
||||
handler.GitHub(c)
|
||||
|
||||
if w.Code != http.StatusAccepted {
|
||||
t.Fatalf("expected status 202, got %d: %s", w.Code, w.Body.String())
|
||||
}
|
||||
}
|
||||
|
||||
@ -45,6 +45,7 @@ from executor_helpers import (
|
||||
CONFIG_MOUNT,
|
||||
MEMORY_CONTENT_MAX_CHARS,
|
||||
WORKSPACE_MOUNT,
|
||||
auto_push_hook,
|
||||
brief_summary,
|
||||
commit_memory,
|
||||
extract_message_text,
|
||||
@ -473,6 +474,8 @@ class ClaudeSDKExecutor(AgentExecutor):
|
||||
await commit_memory(
|
||||
f"Conversation: {original_input[:MEMORY_CONTENT_MAX_CHARS]}"
|
||||
)
|
||||
# Auto-push unpushed commits and open PR (non-blocking, best-effort).
|
||||
await auto_push_hook()
|
||||
|
||||
return response_text or _NO_RESPONSE_MSG
|
||||
|
||||
|
||||
@ -14,10 +14,12 @@ Provides:
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
@ -390,3 +392,156 @@ def sanitize_agent_error(
|
||||
else:
|
||||
tag = "unknown"
|
||||
return f"Agent error ({tag}) — see workspace logs for details."
|
||||
|
||||
|
||||
# ========================================================================
|
||||
# Auto-push hook — push unpushed commits and open PR after task completion
|
||||
# ========================================================================
|
||||
|
||||
# Git/gh wrappers at /usr/local/bin have GH_TOKEN baked in.
_GIT = "/usr/local/bin/git"  # authenticated git wrapper used by the hook
_GH = "/usr/local/bin/gh"  # authenticated GitHub CLI wrapper used by the hook
# Branches the auto-push hook must never push to directly; work on these is
# skipped entirely (see _auto_push_and_pr_sync guard).
_PROTECTED_BRANCHES = frozenset({"staging", "main", "master"})
|
||||
|
||||
|
||||
def _run_git(args: list[str], cwd: str, timeout: int = 30) -> subprocess.CompletedProcess:
|
||||
"""Run a git/gh command with bounded timeout. Never raises on failure."""
|
||||
return subprocess.run(
|
||||
args,
|
||||
cwd=cwd,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
|
||||
def _auto_push_and_pr_sync(cwd: str) -> None:
    """Synchronous implementation of the auto-push hook.

    1. Check if we're in a git repo with unpushed commits on a feature branch.
    2. Push the branch.
    3. Open a PR against staging if one doesn't already exist.

    Designed to be called from a background thread — never raises, logs all
    errors. Uses the git/gh wrappers at /usr/local/bin/ which have GH_TOKEN
    baked in.
    """
    try:
        # --- Guard: is this a git repo? ---
        probe = _run_git([_GIT, "rev-parse", "--is-inside-work-tree"], cwd)
        if probe.returncode != 0:
            return

        # --- Guard: get current branch ---
        branch_result = _run_git(
            [_GIT, "rev-parse", "--abbrev-ref", "HEAD"], cwd
        )
        if branch_result.returncode != 0:
            return
        branch = branch_result.stdout.strip()
        # "HEAD" means detached HEAD; protected branches never get auto-pushed.
        if not branch or branch in _PROTECTED_BRANCHES or branch == "HEAD":
            return

        # --- Guard: any unpushed commits? ---
        log_result = _run_git(
            [_GIT, "log", "origin/staging..HEAD", "--oneline"], cwd
        )
        if log_result.returncode != 0 or not log_result.stdout.strip():
            # No unpushed commits (or origin/staging doesn't exist).
            return

        unpushed_lines = log_result.stdout.strip().splitlines()
        logger.info(
            "auto-push: %d unpushed commit(s) on branch '%s', pushing...",
            len(unpushed_lines),
            branch,
        )

        # --- Push ---
        # Longer timeout than the default: network transfer may be slow.
        push_result = _run_git(
            [_GIT, "push", "origin", branch], cwd, timeout=60
        )
        if push_result.returncode != 0:
            logger.warning(
                "auto-push: git push failed (exit %d): %s",
                push_result.returncode,
                (push_result.stderr or push_result.stdout)[:500],
            )
            return

        logger.info("auto-push: pushed branch '%s' successfully", branch)

        # --- Check if PR already exists ---
        pr_list = _run_git(
            [_GH, "pr", "list", "--head", branch, "--json", "number"], cwd
        )
        if pr_list.returncode != 0:
            logger.warning(
                "auto-push: gh pr list failed (exit %d): %s",
                pr_list.returncode,
                (pr_list.stderr or pr_list.stdout)[:500],
            )
            return

        # `gh ... --json number` emits a JSON array; empty stdout → no PRs.
        existing_prs = json.loads(pr_list.stdout.strip() or "[]")
        if existing_prs:
            logger.info(
                "auto-push: PR already exists for branch '%s' (#%s), skipping create",
                branch,
                existing_prs[0].get("number", "?"),
            )
            return

        # --- Get first (oldest) commit message for PR title ---
        # BUGFIX: `git log --reverse -1` does NOT yield the oldest commit —
        # commit limiting (-1) is applied *before* --reverse reorders the
        # list, so it returned the newest subject. List all subjects
        # oldest-first and take the first line instead.
        subjects = _run_git(
            [_GIT, "log", "origin/staging..HEAD", "--reverse", "--format=%s"],
            cwd,
        )
        if subjects.returncode == 0 and subjects.stdout.strip():
            pr_title = subjects.stdout.strip().splitlines()[0]
        else:
            pr_title = branch
        # Truncate to 256 chars (GitHub limit)
        if len(pr_title) > 256:
            pr_title = pr_title[:253] + "..."

        # --- Create PR ---
        pr_create = _run_git(
            [
                _GH, "pr", "create",
                "--base", "staging",
                "--title", pr_title,
                "--body", "Auto-created by workspace agent",
            ],
            cwd,
            timeout=60,
        )
        if pr_create.returncode != 0:
            logger.warning(
                "auto-push: gh pr create failed (exit %d): %s",
                pr_create.returncode,
                (pr_create.stderr or pr_create.stdout)[:500],
            )
        else:
            # `gh pr create` prints the new PR's URL on stdout.
            pr_url = pr_create.stdout.strip()
            logger.info("auto-push: created PR %s", pr_url)

    except subprocess.TimeoutExpired:
        logger.warning("auto-push: command timed out, skipping")
    except Exception:
        logger.exception("auto-push: unexpected error (non-fatal)")
|
||||
|
||||
|
||||
async def auto_push_hook(cwd: str | None = None) -> None:
    """Post-execution hook: push unpushed commits and open a PR.

    Offloads the blocking git/gh subprocess work to a worker thread via
    ``asyncio.to_thread`` so the agent's event loop is never blocked.
    Swallows every exception — this hook is best-effort and must never
    crash the agent.

    Args:
        cwd: Repository directory to operate in; defaults to
            ``WORKSPACE_MOUNT``. If it is not an existing directory the
            hook is a silent no-op.
    """
    target = WORKSPACE_MOUNT if cwd is None else cwd
    if not os.path.isdir(target):
        return
    try:
        await asyncio.to_thread(_auto_push_and_pr_sync, target)
    except Exception:
        logger.exception("auto_push_hook: failed (non-fatal)")
|
||||
|
||||
Loading…
Reference in New Issue
Block a user