chore: promote 5 staging-only feature PRs to main (Phase 3 of internal#81)
Some checks failed
Retarget main PRs to staging / Retarget to staging (pull_request) Has been skipped
CodeQL / Analyze (${{ matrix.language }}) (go) (pull_request) Successful in 3s
CodeQL / Analyze (${{ matrix.language }}) (javascript-typescript) (pull_request) Successful in 6s
CodeQL / Analyze (${{ matrix.language }}) (python) (pull_request) Successful in 7s
Block internal-flavored paths / Block forbidden paths (pull_request) Successful in 9s
pr-guards / disable-auto-merge-on-push (pull_request) Successful in 4s
CI / Detect changes (pull_request) Successful in 11s
E2E API Smoke Test / detect-changes (pull_request) Successful in 13s
E2E Staging Canvas (Playwright) / detect-changes (pull_request) Successful in 13s
Handlers Postgres Integration / detect-changes (pull_request) Successful in 14s
Harness Replays / detect-changes (pull_request) Successful in 13s
Runtime PR-Built Compatibility / detect-changes (pull_request) Successful in 11s
Secret scan / Scan diff for credential-shaped strings (pull_request) Successful in 11s
CI / Shellcheck (E2E scripts) (pull_request) Successful in 5s
CI / Canvas (Next.js) (pull_request) Successful in 9s
CI / Python Lint & Test (pull_request) Successful in 7s
Runtime PR-Built Compatibility / PR-built wheel + import smoke (pull_request) Successful in 5s
CI / Canvas Deploy Reminder (pull_request) Has been skipped
E2E Staging Canvas (Playwright) / Canvas tabs E2E (pull_request) Successful in 8s
Harness Replays / Harness Replays (pull_request) Failing after 57s
E2E API Smoke Test / E2E API Smoke Test (pull_request) Successful in 3m8s
Handlers Postgres Integration / Handlers Postgres Integration (pull_request) Successful in 3m10s
CI / Platform (Go) (pull_request) Successful in 4m36s

This was supposed to fast-forward when each PR merged on staging,
but auto-promote-staging.yml has not been firing reliably on Gitea
since the GitHub suspension. Result: main is missing 5 substantive
feature PRs that landed on staging between 2026-04-29 and 2026-05-07:

  - #102: test(org-include) symlink-based subtree composition contract
  - #103: test(local-e2e) dev-department extraction end-to-end
  - #104: fix(provisioner)+test EvalSymlinks templatePath; stage-2 e2e
  - #105: feat(org-import) !external cross-repo subtree resolver (#222)
  - #106: test(org-external) integration + e2e for !external resolver

Each PR was independently reviewed and CI-green at staging-merge time;
this commit promotes the merged state atomically. Use git log on main
after the merge to see the original PR-merge commits preserved.

Sister work: Phase 3 of internal#81 (trunk-based migration). Workflow
trigger updates land in a follow-up PR; staging-branch deletion happens
after a no-op verification deploy.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
claude-ceo-assistant 2026-05-08 13:07:22 +00:00
commit 2597511d7b
7 changed files with 1681 additions and 0 deletions

View File

@ -0,0 +1,375 @@
package handlers
import (
"archive/tar"
"bytes"
"net"
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
"time"
"gopkg.in/yaml.v3"
)
// Local E2E for the dev-department extraction (RFC internal#77).
//
// Pre-conditions: both repos cloned as siblings under
// /tmp/local-e2e-deploy/{molecule-dev, molecule-dev-department}.
// (Set up by the orchestrator before running this test.)
//
// What this proves end-to-end through real platform code:
// 1. resolveYAMLIncludes follows the dev-lead symlink at the parent's
// template root and pulls in the dev-department subtree.
// 2. Recursive !include's inside the symlinked subtree resolve
// correctly via the chain dev-lead/workspace.yaml →
// ./core-lead/workspace.yaml → ./core-be/workspace.yaml etc.
// 3. The resolved YAML unmarshals into a complete OrgTemplate with the
// expected count of workspaces (parent's PM+Marketing+Research +
// dev-department's atomized 28 workspaces).
//
// Skipped if the local-e2e-deploy fixture isn't present — won't block
// CI on hosts that haven't set it up.
func TestLocalE2E_DevDepartmentExtraction(t *testing.T) {
parent := "/tmp/local-e2e-deploy/molecule-dev"
if _, err := os.Stat(filepath.Join(parent, "org.yaml")); err != nil {
t.Skipf("local-e2e fixture not present at %s: %v", parent, err)
}
orgYAML, err := os.ReadFile(filepath.Join(parent, "org.yaml"))
if err != nil {
t.Fatalf("read org.yaml: %v", err)
}
expanded, err := resolveYAMLIncludes(orgYAML, parent)
if err != nil {
t.Fatalf("resolveYAMLIncludes failed: %v", err)
}
var tmpl OrgTemplate
if err := yaml.Unmarshal(expanded, &tmpl); err != nil {
t.Fatalf("unmarshal expanded OrgTemplate: %v", err)
}
// Walk the full workspace tree, collect names.
names := []string{}
var walk func([]OrgWorkspace)
walk = func(ws []OrgWorkspace) {
for _, w := range ws {
names = append(names, w.Name)
walk(w.Children)
}
}
walk(tmpl.Workspaces)
t.Logf("org name: %q", tmpl.Name)
t.Logf("total workspaces (recursive): %d", len(names))
for _, n := range names {
t.Logf(" - %q", n)
}
// Expected: PM + Marketing Lead + Dev Lead at top level, plus the
// full sub-trees under each. After atomization, we expect:
// - PM tree: PM + Research Lead + 3 research roles = 5
// - Marketing tree: Marketing Lead + 5 marketing roles = 6
// - Dev Lead tree: Dev Lead + (5 sub-team leads × ~6 each) +
// 3 floaters + Triage Operator = ~32
// Roughly ~43 total. Be liberal; just assert a floor.
if len(names) < 30 {
t.Errorf("workspace count too low (%d) — expected ~40+ (PM+Marketing+Dev tree)", len(names))
}
// Specific sentinel names we expect to find:
expected := []string{
"PM",
"Marketing Lead",
"Dev Lead",
"Core Platform Lead",
"Controlplane Lead",
"App & Docs Lead",
"Infra Lead",
"SDK Lead",
"Documentation Specialist", // Q1 — should be under app-lead
"Triage Operator", // Q2 — should be under dev-lead
}
found := map[string]bool{}
for _, n := range names {
found[n] = true
}
for _, want := range expected {
if !found[want] {
t.Errorf("missing expected workspace %q", want)
}
}
}
// Stage-2 of the local e2e: prove every resolved workspace's `files_dir`
// path actually consumes correctly through the rest of the import chain.
// resolveYAMLIncludes returning a populated OrgTemplate is necessary but
// not sufficient — `POST /org/import` then does:
//
// 1. resolveInsideRoot(orgBaseDir, ws.FilesDir) → must return a path
// that exists and stat-resolves to a directory (org_import.go:313-317).
// 2. CopyTemplateToContainer(ctx, containerID, templatePath) → walks
// the dir with filepath.Walk and tars its contents into the
// workspace's /configs/ mount (provisioner.go:766-820).
//
// This stage-2 test exercises both #1 and #2 against every workspace in
// the resolved tree, mimicking what the platform does post-include-
// resolution. Catches: files_dir paths that don't resolve through the
// symlink, paths that exist but are empty (silently produces empty
// /configs/), or filepath.Walk failing to descend through cross-repo
// symlink boundaries.
func TestLocalE2E_FilesDirConsumption(t *testing.T) {
parent := "/tmp/local-e2e-deploy/molecule-dev"
if _, err := os.Stat(filepath.Join(parent, "org.yaml")); err != nil {
t.Skipf("local-e2e fixture not present at %s: %v", parent, err)
}
orgYAML, err := os.ReadFile(filepath.Join(parent, "org.yaml"))
if err != nil {
t.Fatalf("read org.yaml: %v", err)
}
expanded, err := resolveYAMLIncludes(orgYAML, parent)
if err != nil {
t.Fatalf("resolveYAMLIncludes: %v", err)
}
var tmpl OrgTemplate
if err := yaml.Unmarshal(expanded, &tmpl); err != nil {
t.Fatalf("unmarshal: %v", err)
}
// Flatten every workspace — including children, grandchildren, etc.
flat := []OrgWorkspace{}
var walk func([]OrgWorkspace)
walk = func(ws []OrgWorkspace) {
for _, w := range ws {
flat = append(flat, w)
walk(w.Children)
}
}
walk(tmpl.Workspaces)
checked := 0
for _, w := range flat {
if w.FilesDir == "" {
continue // workspace declared inline (no files_dir) — skip
}
checked++
t.Run(w.Name+"/"+w.FilesDir, func(t *testing.T) {
// Step 1: resolveInsideRoot returns a path that's-inside-root.
abs, err := resolveInsideRoot(parent, w.FilesDir)
if err != nil {
t.Fatalf("resolveInsideRoot(%q, %q): %v", parent, w.FilesDir, err)
}
info, err := os.Stat(abs)
if err != nil {
t.Fatalf("stat %q (resolved from files_dir %q): %v", abs, w.FilesDir, err)
}
if !info.IsDir() {
t.Fatalf("files_dir %q resolved to %q which is not a directory", w.FilesDir, abs)
}
// Step 2: walk the dir like CopyTemplateToContainer does.
// Mirror the platform's symlink-resolution at the root —
// filepath.Walk doesn't descend into a symlink leaf, so
// CopyTemplateToContainer (provisioner.go) calls
// EvalSymlinks on templatePath first. Replicate exactly.
if resolved, err := filepath.EvalSymlinks(abs); err == nil {
abs = resolved
}
var buf bytes.Buffer
tw := tar.NewWriter(&buf)
fileCount := 0
fileNames := []string{}
err = filepath.Walk(abs, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
rel, err := filepath.Rel(abs, path)
if err != nil {
return err
}
if rel == "." {
return nil
}
header, _ := tar.FileInfoHeader(info, "")
header.Name = rel
if err := tw.WriteHeader(header); err != nil {
return err
}
if !info.IsDir() {
fileCount++
fileNames = append(fileNames, rel)
data, err := os.ReadFile(path)
if err != nil {
return err
}
header.Size = int64(len(data))
tw.Write(data)
}
return nil
})
if err != nil {
t.Fatalf("filepath.Walk %q (mimics CopyTemplateToContainer): %v", abs, err)
}
tw.Close()
if fileCount == 0 {
t.Errorf("files_dir %q at %q is empty — CopyTemplateToContainer would produce empty /configs/",
w.FilesDir, abs)
}
// Sanity: every workspace folder should have AT LEAST one of
// {workspace.yaml, system-prompt.md, initial-prompt.md} —
// these are the markers a workspace folder is recognizable
// as a workspace (mirrors validator's WORKSPACE_FOLDER_MARKERS).
markers := []string{"workspace.yaml", "system-prompt.md", "initial-prompt.md"}
hasMarker := false
for _, name := range fileNames {
for _, m := range markers {
if name == m || strings.HasSuffix(name, "/"+m) {
hasMarker = true
break
}
}
if hasMarker {
break
}
}
if !hasMarker {
t.Errorf("files_dir %q at %q has %d files but none of the workspace markers %v — found: %v",
w.FilesDir, abs, fileCount, markers, fileNames)
}
})
}
t.Logf("checked %d workspaces with files_dir", checked)
if checked < 25 {
t.Errorf("expected ~28 workspaces with files_dir (post-atomization); only saw %d", checked)
}
}
// PR-C from the Phase 3a phasing (task #234): real-Gitea e2e for the
// !external resolver against the LIVE molecule-ai/molecule-dev-department
// repo. Verifies the production gitFetcher fetches the dev tree and the
// resolver grafts it correctly into a parent template that has NO
// symlink — composition is purely platform-side.
//
// Skipped if Gitea isn't reachable (offline / firewall / CI without
// network). Requires `git` binary on PATH.
func TestLocalE2E_ExternalDevDepartment(t *testing.T) {
if _, err := exec.LookPath("git"); err != nil {
t.Skipf("git binary not found: %v", err)
}
// Skip if Gitea host isn't reachable (TCP probe). Avoids network-
// dependent tests failing on offline runners.
conn, err := net.DialTimeout("tcp", "git.moleculesai.app:443", 3*time.Second)
if err != nil {
t.Skipf("git.moleculesai.app:443 unreachable: %v", err)
}
conn.Close()
// Build a minimal parent template inline — no need for the
// /tmp/local-e2e-deploy/ symlinked fixture. The whole point of
// !external is that the parent template is self-contained;
// composition resolves over the network at import time.
parent := t.TempDir()
orgYAML := []byte(`name: External-Only Test Parent
description: Parent template that pulls the entire dev tree via !external.
defaults:
runtime: claude-code
tier: 2
workspaces:
- !external
repo: molecule-ai/molecule-dev-department
ref: main
path: dev-lead/workspace.yaml
`)
if err := os.WriteFile(filepath.Join(parent, "org.yaml"), orgYAML, 0o644); err != nil {
t.Fatalf("write org.yaml: %v", err)
}
out, err := resolveYAMLIncludes(orgYAML, parent)
if err != nil {
t.Fatalf("resolveYAMLIncludes (!external against live Gitea): %v", err)
}
var tmpl OrgTemplate
if err := yaml.Unmarshal(out, &tmpl); err != nil {
t.Fatalf("unmarshal: %v", err)
}
// Walk the workspace tree, collect names + check files_dir paths.
flat := []OrgWorkspace{}
var walk func([]OrgWorkspace)
walk = func(ws []OrgWorkspace) {
for _, w := range ws {
flat = append(flat, w)
walk(w.Children)
}
}
walk(tmpl.Workspaces)
t.Logf("workspaces resolved through !external: %d", len(flat))
if len(flat) < 25 {
t.Errorf("expected ~28 dev-tree workspaces via !external; got %d", len(flat))
}
// Sentinel checks — same as TestLocalE2E_DevDepartmentExtraction
// (Q1+Q2 placements verified).
expected := []string{
"Dev Lead",
"Core Platform Lead",
"Controlplane Lead",
"App & Docs Lead",
"Documentation Specialist", // Q1
"Triage Operator", // Q2
}
found := map[string]bool{}
for _, w := range flat {
found[w.Name] = true
}
for _, want := range expected {
if !found[want] {
t.Errorf("missing expected workspace %q", want)
}
}
// Every workspace's files_dir must be cache-prefixed (proves the
// path-rewrite ran end-to-end).
cachePrefix := ".external-cache"
for _, w := range flat {
if w.FilesDir == "" {
continue
}
if !strings.HasPrefix(w.FilesDir, cachePrefix) {
t.Errorf("workspace %q files_dir %q missing cache prefix %q", w.Name, w.FilesDir, cachePrefix)
}
}
// Verify the fetched cache exists and resolveInsideRoot accepts
// every workspace's files_dir (would cause provisioning to fail
// if not).
for _, w := range flat {
if w.FilesDir == "" {
continue
}
abs, err := resolveInsideRoot(parent, w.FilesDir)
if err != nil {
t.Errorf("workspace %q files_dir %q: resolveInsideRoot: %v", w.Name, w.FilesDir, err)
continue
}
info, err := os.Stat(abs)
if err != nil {
t.Errorf("workspace %q: stat %q: %v", w.Name, abs, err)
continue
}
if !info.IsDir() {
t.Errorf("workspace %q files_dir %q is not a directory", w.Name, w.FilesDir)
}
}
}

View File

@ -0,0 +1,439 @@
package handlers
import (
"context"
"fmt"
"net/url"
"os"
"os/exec"
"path/filepath"
"regexp"
"strings"
"time"
"gopkg.in/yaml.v3"
)
// External-ref resolver — gitops-style cross-repo subtree composition.
// Internal#77 RFC, Phase 3a (task #222). Prior art: Helm subcharts +
// dependency cache, Kustomize remote bases, Terraform module sources.
//
// Schema (a `!external`-tagged mapping anywhere a workspace entry is
// allowed — workspaces:, roots:, children:):
//
// - !external
// repo: molecule-ai/molecule-dev-department
// ref: main
// path: dev-lead/workspace.yaml
//
// At resolve time, the platform fetches the repo at ref into a content-
// addressable cache under <rootDir>/.external-cache/<repo>/<sha>/, loads
// the yaml at <cacheDir>/<path>, rewrites every files_dir + relative
// !include path to be cache-prefixed, then grafts the result in place of
// the !external node. Downstream pipeline (resolveInsideRoot, plugin
// merge, CopyTemplateToContainer) sees ordinary in-tree paths.
// ExternalRef is the deserialized form of an `!external`-tagged mapping.
type ExternalRef struct {
Repo string `yaml:"repo"`
Ref string `yaml:"ref"`
Path string `yaml:"path"`
// URL overrides the default Gitea host. Optional; defaults to
// MOLECULE_EXTERNAL_GITEA_URL env or git.moleculesai.app.
URL string `yaml:"url,omitempty"`
}
const (
// maxExternalDepth caps recursion through nested `!external`s. Lower
// than maxIncludeDepth (16) because each level may issue a network
// fetch. Composition that genuinely needs >4 layers is a smell.
maxExternalDepth = 4
// externalCacheDirName is the per-template cache subdir under rootDir.
// Content-addressable: keyed by (repo, sha). Operators add this to
// .gitignore — cache is platform-mutated, not source-tracked.
externalCacheDirName = ".external-cache"
// gitFetchTimeout caps a single clone operation. Conservative —
// org template fetches are typically <100KB.
gitFetchTimeout = 60 * time.Second
)
// safeRefPattern restricts `ref` values to characters git itself accepts
// for branch / tag / SHA. Belt-and-braces over git's own validation.
var safeRefPattern = regexp.MustCompile(`^[a-zA-Z0-9_./-]+$`)
// allowlistedHostPath returns true if `<host>/<repo>` matches the
// configured allowlist. Default allowlist: git.moleculesai.app/molecule-ai/.
// Override via MOLECULE_EXTERNAL_REPO_ALLOWLIST env var (comma-separated
// patterns). Patterns are matched as prefixes (with trailing-slash
// semantics) or as exact matches. Trailing /* is treated as "any
// descendants of this prefix".
//
// Examples:
// - "git.moleculesai.app/molecule-ai/" → matches molecule-ai/* (any repo)
// - "git.moleculesai.app/molecule-ai/*" → same; trailing /* normalized to /
// - "git.moleculesai.app/molecule-ai/molecule-dev-department" → exact
// - "git.moleculesai.app/" → matches everything on that host
func allowlistedHostPath(host, repoPath string) bool {
allow := os.Getenv("MOLECULE_EXTERNAL_REPO_ALLOWLIST")
if allow == "" {
allow = "git.moleculesai.app/molecule-ai/"
}
hp := host + "/" + repoPath
for _, pat := range strings.Split(allow, ",") {
pat = strings.TrimSpace(pat)
if pat == "" {
continue
}
// Normalize trailing /* → /
pat = strings.TrimSuffix(pat, "*")
if pat == hp {
return true
}
if strings.HasSuffix(pat, "/") && strings.HasPrefix(hp+"/", pat) {
return true
}
}
return false
}
// externalFetcher abstracts the git-clone-into-cache step. Production
// uses gitFetcher (shells out to git); tests inject a fake that
// pre-stages content in a temp dir.
type externalFetcher interface {
// Fetch ensures rootDir/.external-cache/<safe-repo>/<sha>/ contains
// the repo content at the given ref. Returns the absolute cache
// dir + the resolved SHA. Cache hit = no network. Cache miss =
// clone.
Fetch(ctx context.Context, rootDir, host, repoPath, ref string) (cacheDir, sha string, err error)
}
// defaultExternalFetcher is the package-level fetcher injection point.
// Production code uses the git-shell fetcher; tests override via
// SetExternalFetcherForTest.
var defaultExternalFetcher externalFetcher = &gitFetcher{}
// SetExternalFetcherForTest swaps the fetcher for testing. Returns a
// cleanup func that restores the previous fetcher.
func SetExternalFetcherForTest(f externalFetcher) func() {
prev := defaultExternalFetcher
defaultExternalFetcher = f
return func() { defaultExternalFetcher = prev }
}
// resolveExternalMapping replaces an `!external`-tagged mapping node
// with the loaded + path-rewritten yaml content from the fetched repo.
//
// `currentDir` and `rootDir` are inherited from expandNode's resolve
// frame. `visited` tracks (repo, sha, path) tuples for cycle detection
// across nested externals.
func resolveExternalMapping(n *yaml.Node, currentDir, rootDir string, visited map[string]bool, depth int) error {
if depth > maxExternalDepth {
return fmt.Errorf("!external: max depth %d exceeded (possible cycle)", maxExternalDepth)
}
if rootDir == "" {
return fmt.Errorf("!external at line %d requires a dir-based org template (no rootDir in inline-template mode)", n.Line)
}
var ref ExternalRef
if err := n.Decode(&ref); err != nil {
return fmt.Errorf("!external at line %d: decode: %w", n.Line, err)
}
if ref.Repo == "" || ref.Ref == "" || ref.Path == "" {
return fmt.Errorf("!external at line %d: repo, ref, path are all required (got %+v)", n.Line, ref)
}
if !safeRefPattern.MatchString(ref.Ref) {
return fmt.Errorf("!external at line %d: ref %q contains disallowed characters", n.Line, ref.Ref)
}
// Defense-in-depth: even though git itself rejects refs containing
// `..`, the regex above currently allows them. Reject explicitly.
if strings.Contains(ref.Ref, "..") {
return fmt.Errorf("!external at line %d: ref %q must not contain '..'", n.Line, ref.Ref)
}
if strings.Contains(ref.Path, "..") || strings.HasPrefix(ref.Path, "/") {
return fmt.Errorf("!external at line %d: path %q must be relative-and-down-only", n.Line, ref.Path)
}
host := ref.URL
if host == "" {
host = os.Getenv("MOLECULE_EXTERNAL_GITEA_URL")
}
if host == "" {
host = "git.moleculesai.app"
}
host = strings.TrimPrefix(strings.TrimPrefix(host, "https://"), "http://")
host = strings.TrimSuffix(host, "/")
if !allowlistedHostPath(host, ref.Repo) {
return fmt.Errorf("!external at line %d: %s/%s not in MOLECULE_EXTERNAL_REPO_ALLOWLIST", n.Line, host, ref.Repo)
}
ctx, cancel := context.WithTimeout(context.Background(), gitFetchTimeout)
defer cancel()
cacheDir, sha, err := defaultExternalFetcher.Fetch(ctx, rootDir, host, ref.Repo, ref.Ref)
if err != nil {
return fmt.Errorf("!external at line %d: fetch %s/%s@%s: %w", n.Line, host, ref.Repo, ref.Ref, err)
}
// Cycle key: (repo, sha, path) — same external content reachable
// via two paths is fine, but a self-referential cycle isn't.
cycleKey := fmt.Sprintf("%s/%s@%s/%s", host, ref.Repo, sha, ref.Path)
if visited[cycleKey] {
return fmt.Errorf("!external cycle detected at %q (line %d)", cycleKey, n.Line)
}
// Validate path resolves inside the cache dir (anti-traversal).
yamlPathAbs, err := resolveInsideRoot(cacheDir, ref.Path)
if err != nil {
return fmt.Errorf("!external at line %d: path %q: %w", n.Line, ref.Path, err)
}
if _, err := os.Stat(yamlPathAbs); err != nil {
return fmt.Errorf("!external at line %d: %s/%s@%s does not contain %q: %w", n.Line, host, ref.Repo, sha, ref.Path, err)
}
data, err := os.ReadFile(yamlPathAbs)
if err != nil {
return fmt.Errorf("!external at line %d: read %q: %w", n.Line, yamlPathAbs, err)
}
var sub yaml.Node
if err := yaml.Unmarshal(data, &sub); err != nil {
return fmt.Errorf("!external at line %d: parse %q: %w", n.Line, yamlPathAbs, err)
}
root := &sub
if root.Kind == yaml.DocumentNode && len(root.Content) == 1 {
root = root.Content[0]
}
// Recurse FIRST: load all nested !include / !external content into
// the tree. Then rewrite ALL files_dir scalars in the fully-resolved
// tree (top + nested) with the cache prefix in one pass. Doing
// rewrite-before-recurse would leave nested-loaded files_dir paths
// unprefixed.
visited[cycleKey] = true
defer delete(visited, cycleKey)
subDir := filepath.Dir(yamlPathAbs)
if err := expandNode(root, subDir, rootDir, visited, depth+1); err != nil {
return err
}
// Path rewrite: prefix every files_dir scalar in the fully-resolved
// content with the cache-relative-from-rootDir prefix. After this
// pass, fetched workspaces look like ordinary in-tree workspaces.
cachePrefix, err := filepath.Rel(rootDir, cacheDir)
if err != nil {
return fmt.Errorf("!external at line %d: cannot compute cache prefix: %w", n.Line, err)
}
rewriteFilesDir(root, cachePrefix)
// Replace the !external mapping with the resolved content in-place.
*n = *root
if n.Tag == "!external" {
n.Tag = ""
}
return nil
}
// rewriteFilesDir walks the yaml node tree and prepends cachePrefix to
// every files_dir scalar value. Idempotent: if a files_dir value already
// starts with the prefix, no-op.
//
// !include paths are intentionally NOT rewritten. They resolve relative
// to their containing file's directory (subDir in expandNode), and after
// fetch that directory IS inside the cache, so relative !include paths
// Just Work without any rewrite. Rewriting them would double-prefix on
// recursive resolution.
//
// files_dir DOES need rewriting because it's consumed at workspace-
// provisioning time relative to orgBaseDir (the parent template's root),
// not relative to the workspace.yaml's containing dir.
func rewriteFilesDir(n *yaml.Node, cachePrefix string) {
if n == nil {
return
}
if n.Kind == yaml.MappingNode {
for i := 0; i+1 < len(n.Content); i += 2 {
key, value := n.Content[i], n.Content[i+1]
if key.Kind == yaml.ScalarNode && key.Value == "files_dir" && value.Kind == yaml.ScalarNode {
if !strings.HasPrefix(value.Value, cachePrefix+string(filepath.Separator)) && value.Value != cachePrefix {
value.Value = filepath.Join(cachePrefix, value.Value)
}
}
}
}
for _, child := range n.Content {
rewriteFilesDir(child, cachePrefix)
}
}
// safeRepoCacheDir converts a repo path like "molecule-ai/foo" into a
// filesystem-safe segment "molecule-ai__foo". Avoids nesting cache dirs
// (which would complicate cleanup).
func safeRepoCacheDir(host, repoPath string) string {
hp := host + "/" + repoPath
hp = strings.ReplaceAll(hp, "/", "__")
hp = strings.ReplaceAll(hp, ":", "_")
return hp
}
// gitFetcher is the production externalFetcher: shells out to `git` to
// clone the repo at ref into the cache dir. Cache key includes the
// resolved SHA, so different SHAs of the same ref get different cache
// dirs (no overwrite).
//
// Token handling — important for security. The auth token never enters
// the clone URL (and therefore never lands in the cloned repo's
// .git/config) and never appears in returned errors. We use git's
// `http.extraHeader` config option (passed via `-c`), which sends an
// Authorization header per-request without persisting it. The token is
// briefly visible in the `git` process's argv (so other local users
// with the same uid could see it via `ps`), which is the same exposure
// it has via the env var that supplied it.
//
// Cache validity uses a `.complete` marker written after a successful
// clone+rename. Cache-hit checks for the marker, not just the dir
// existence — a partially-written cache (clone failed mid-way, or a
// concurrent caller wrote a half-baked cache dir) is treated as cache
// miss and re-fetched cleanly.
type gitFetcher struct{}
// cacheCompleteMarker is the filename written after a successful clone.
// Cache-hit requires this marker; without it, the cache dir is treated
// as partially-written and re-fetched.
const cacheCompleteMarker = ".complete"
// Fetch resolves ref → SHA via `git ls-remote`, then `git clone --depth=1`
// if the cache dir is missing or incomplete. Auth via MOLECULE_GITEA_TOKEN
// injected via http.extraHeader (never via URL).
func (g *gitFetcher) Fetch(ctx context.Context, rootDir, host, repoPath, ref string) (string, string, error) {
cacheRoot := filepath.Join(rootDir, externalCacheDirName, safeRepoCacheDir(host, repoPath))
if err := os.MkdirAll(cacheRoot, 0o755); err != nil {
return "", "", fmt.Errorf("mkdir cache root: %w", err)
}
cloneURL := buildExternalCloneURL(host, repoPath)
gitArgs := func(extra ...string) []string {
args := authConfigArgs()
return append(args, extra...)
}
// 1. Resolve ref → SHA (so cache dir is content-addressable).
sha, err := g.resolveRefToSHA(ctx, cloneURL, ref, gitArgs)
if err != nil {
return "", "", fmt.Errorf("ls-remote: %s", redactToken(err.Error()))
}
cacheDir := filepath.Join(cacheRoot, sha)
// Cache-hit requires the .complete marker AND the .git dir.
// Without the marker, cache is partially-written → treat as miss.
if isCacheComplete(cacheDir) {
return cacheDir, sha, nil
}
// Cache miss or partially-written — clean any stale cacheDir before
// cloning (a previous broken attempt would otherwise block rename).
os.RemoveAll(cacheDir)
// 2. Clone into a sibling tmp dir; atomic rename on success.
tmpDir, err := os.MkdirTemp(cacheRoot, sha+".tmp.")
if err != nil {
return "", "", fmt.Errorf("mkdir tmp: %w", err)
}
// MkdirTemp creates the dir; git clone refuses to clone into a
// non-empty dir. Remove + recreate empty.
os.RemoveAll(tmpDir)
cloneAndConfig := append(gitArgs("clone", "--quiet", "--depth=1", "-b", ref, cloneURL, tmpDir))
cmd := exec.CommandContext(ctx, "git", cloneAndConfig...)
cmd.Env = append(os.Environ(), "GIT_TERMINAL_PROMPT=0")
if out, err := cmd.CombinedOutput(); err != nil {
os.RemoveAll(tmpDir)
return "", "", fmt.Errorf("git clone: %w: %s", err, redactToken(strings.TrimSpace(string(out))))
}
// Write the .complete marker BEFORE the rename. If rename succeeds,
// the marker is in place. If rename loses the race (concurrent
// fetcher won), our tmp gets cleaned up and we trust the winner.
if err := os.WriteFile(filepath.Join(tmpDir, cacheCompleteMarker), []byte(time.Now().UTC().Format(time.RFC3339)), 0o644); err != nil {
os.RemoveAll(tmpDir)
return "", "", fmt.Errorf("write complete marker: %w", err)
}
if err := os.Rename(tmpDir, cacheDir); err != nil {
// Race: another import beat us. Validate THEIR cache, accept it.
os.RemoveAll(tmpDir)
if isCacheComplete(cacheDir) {
return cacheDir, sha, nil
}
return "", "", fmt.Errorf("rename clone to cache (and winner's cache is incomplete): %w", err)
}
return cacheDir, sha, nil
}
// isCacheComplete reports whether cacheDir contains both the cloned
// repo (.git) and the .complete marker. Treats partial state as miss.
func isCacheComplete(cacheDir string) bool {
if _, err := os.Stat(filepath.Join(cacheDir, ".git")); err != nil {
return false
}
if _, err := os.Stat(filepath.Join(cacheDir, cacheCompleteMarker)); err != nil {
return false
}
return true
}
func (g *gitFetcher) resolveRefToSHA(ctx context.Context, cloneURL, ref string, gitArgs func(...string) []string) (string, error) {
args := gitArgs("ls-remote", cloneURL, ref)
cmd := exec.CommandContext(ctx, "git", args...)
cmd.Env = append(os.Environ(), "GIT_TERMINAL_PROMPT=0")
out, err := cmd.Output()
if err != nil {
return "", err
}
line := strings.TrimSpace(string(out))
if line == "" {
return "", fmt.Errorf("ref %q not found", ref)
}
// First whitespace-separated field is the SHA.
for i, ch := range line {
if ch == ' ' || ch == '\t' {
return line[:i], nil
}
}
return line, nil
}
// buildExternalCloneURL constructs the clone URL WITHOUT auth in userinfo.
// Auth is layered on via authConfigArgs's http.extraHeader.
func buildExternalCloneURL(host, repoPath string) string {
u := url.URL{Scheme: "https", Host: host, Path: "/" + repoPath + ".git"}
return u.String()
}
// authConfigArgs returns the `-c http.extraHeader=Authorization: token X`
// args to pass to git, OR an empty slice if no token is set. The token
// goes into the request headers (not the URL or .git/config), so it
// doesn't persist on disk and doesn't appear in clone error output.
func authConfigArgs() []string {
token := os.Getenv("MOLECULE_GITEA_TOKEN")
if token == "" {
return nil
}
return []string{"-c", "http.extraHeader=Authorization: token " + token}
}
// redactToken scrubs the auth token from a string before it's logged
// or returned in an error. Belt-and-braces: with the http.extraHeader
// approach the token shouldn't appear in git's output, but if some
// future git version or libcurl debug mode emits it, this catches it.
func redactToken(s string) string {
token := os.Getenv("MOLECULE_GITEA_TOKEN")
if token == "" || len(token) < 8 {
return s
}
return strings.ReplaceAll(s, token, "<redacted-token>")
}

View File

@ -0,0 +1,379 @@
package handlers
import (
"context"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"testing"
"gopkg.in/yaml.v3"
)
// PR-B integration test: exercises the REAL gitFetcher (no fakeFetcher
// injection) against a local bare-git repo. Uses git's `insteadOf`
// config to rewrite the configured Gitea URL to the local bare path
// at clone time, so the fetcher's URL-building, ls-remote, clone,
// atomic-rename, and cache-hit paths all run against real git
// without requiring network or modifying production code.
//
// Internal#77 task #233 (PR-B from the design's phasing).
// TestGitFetcher_RealClone_LocalRedirect proves the production
// gitFetcher round-trips correctly against a real git repository.
// Steps:
// 1. Set up a local bare-git repo with workspace content.
// 2. Configure git's `insteadOf` to rewrite the gitea URL → local path
// via GIT_CONFIG_COUNT/KEY/VALUE env vars (process-scoped).
// 3. Run resolveYAMLIncludes with !external pointing at the gitea URL.
// 4. Assert: cache dir populated; content materialized; path rewrite
// applied; second invocation hits cache (no second clone).
func TestGitFetcher_RealClone_LocalRedirect(t *testing.T) {
if _, err := exec.LookPath("git"); err != nil {
t.Skipf("git binary not found: %v", err)
}
if runtime.GOOS == "windows" {
t.Skip("path-based git URLs behave differently on Windows; skipping")
}
// Step 1: create a local bare-git repo at <fixtures>/test-dev-dept.git
// with workspace content. Use a working clone to add content, then
// push to the bare.
fixtures := t.TempDir()
barePath := filepath.Join(fixtures, "test-dev-dept.git")
workPath := filepath.Join(fixtures, "work")
mustGit(t, "", "init", "--bare", "-b", "main", barePath)
mustGit(t, "", "clone", barePath, workPath)
mustGit(t, workPath, "config", "user.email", "test@example.com")
mustGit(t, workPath, "config", "user.name", "Integration Test")
mustWriteFile(t, filepath.Join(workPath, "dev-lead/workspace.yaml"), `name: Dev Lead
files_dir: dev-lead
children:
- !include ./core-be/workspace.yaml
`)
mustWriteFile(t, filepath.Join(workPath, "dev-lead/system-prompt.md"), "Dev Lead persona body.\n")
mustWriteFile(t, filepath.Join(workPath, "dev-lead/core-be/workspace.yaml"), `name: Core BE
files_dir: dev-lead/core-be
`)
mustWriteFile(t, filepath.Join(workPath, "dev-lead/core-be/system-prompt.md"), "Core BE persona body.\n")
mustGit(t, workPath, "add", ".")
mustGit(t, workPath, "commit", "-m", "seed dev tree")
mustGit(t, workPath, "push", "origin", "main")
// Step 2: configure git's insteadOf rewrite. The fetcher will try
// to clone https://git.moleculesai.app/molecule-ai/test-dev-dept.git;
// git rewrites to file://<barePath>.
//
// GIT_CONFIG_COUNT/KEY/VALUE injects config without touching
// ~/.gitconfig — process-scoped, no test pollution.
geesUrl := "https://git.moleculesai.app/molecule-ai/test-dev-dept.git"
t.Setenv("GIT_CONFIG_COUNT", "1")
t.Setenv("GIT_CONFIG_KEY_0", "url."+barePath+".insteadOf")
t.Setenv("GIT_CONFIG_VALUE_0", geesUrl)
// Step 3: run resolveYAMLIncludes with !external pointing at the
// gitea URL. Allowlist is the default (molecule-ai/* on Gitea host).
rootDir := t.TempDir()
src := []byte(`workspaces:
- !external
repo: molecule-ai/test-dev-dept
ref: main
path: dev-lead/workspace.yaml
`)
out, err := resolveYAMLIncludes(src, rootDir)
if err != nil {
t.Fatalf("resolveYAMLIncludes: %v", err)
}
var tmpl OrgTemplate
if err := yaml.Unmarshal(out, &tmpl); err != nil {
t.Fatalf("unmarshal: %v", err)
}
if len(tmpl.Workspaces) != 1 {
t.Fatalf("workspaces: %+v", tmpl.Workspaces)
}
dev := tmpl.Workspaces[0]
if dev.Name != "Dev Lead" {
t.Errorf("dev.Name = %q; want Dev Lead", dev.Name)
}
if !strings.Contains(dev.FilesDir, ".external-cache") {
t.Errorf("dev.FilesDir = %q; want cache prefix", dev.FilesDir)
}
if !strings.HasSuffix(dev.FilesDir, "dev-lead") {
t.Errorf("dev.FilesDir = %q; want suffix dev-lead", dev.FilesDir)
}
if len(dev.Children) != 1 {
t.Fatalf("expected nested core-be child; got %+v", dev.Children)
}
core := dev.Children[0]
if core.Name != "Core BE" {
t.Errorf("core.Name = %q; want Core BE", core.Name)
}
if !strings.HasSuffix(core.FilesDir, filepath.Join("dev-lead", "core-be")) {
t.Errorf("core.FilesDir = %q; want suffix dev-lead/core-be", core.FilesDir)
}
// Step 4: verify the cache dir actually exists and contains the
// materialized files (CopyTemplateToContainer would tar these).
cacheRoot := filepath.Join(rootDir, ".external-cache")
entries, err := os.ReadDir(cacheRoot)
if err != nil {
t.Fatalf("read cache root: %v", err)
}
if len(entries) != 1 {
t.Fatalf("expected 1 cached repo, got %d: %v", len(entries), entries)
}
repoDir := filepath.Join(cacheRoot, entries[0].Name())
shaDirs, _ := os.ReadDir(repoDir)
if len(shaDirs) != 1 {
t.Fatalf("expected 1 SHA cache dir, got %d", len(shaDirs))
}
cacheDir := filepath.Join(repoDir, shaDirs[0].Name())
if _, err := os.Stat(filepath.Join(cacheDir, "dev-lead/system-prompt.md")); err != nil {
t.Errorf("expected dev-lead/system-prompt.md in cache: %v", err)
}
if _, err := os.Stat(filepath.Join(cacheDir, "dev-lead/core-be/system-prompt.md")); err != nil {
t.Errorf("expected dev-lead/core-be/system-prompt.md in cache: %v", err)
}
// Step 5: re-run; verify cache hit (no second clone). Set a
// "marker" file in the cache that a second clone would clobber.
marker := filepath.Join(cacheDir, ".cache-hit-marker")
if err := os.WriteFile(marker, []byte("hit"), 0o644); err != nil {
t.Fatal(err)
}
out2, err := resolveYAMLIncludes(src, rootDir)
if err != nil {
t.Fatalf("resolveYAMLIncludes second call: %v", err)
}
if string(out) != string(out2) {
t.Errorf("cached output differs from initial — non-deterministic resolve")
}
if _, err := os.Stat(marker); err != nil {
t.Errorf("cache hit not honored — marker file disappeared: %v", err)
}
}
// TestGitFetcher_RealClone_BadRefFails: pointing at a ref that doesn't
// exist in the bare-repo surfaces git's error cleanly.
func TestGitFetcher_RealClone_BadRefFails(t *testing.T) {
if _, err := exec.LookPath("git"); err != nil {
t.Skipf("git binary not found: %v", err)
}
if runtime.GOOS == "windows" {
t.Skip("skipping on windows")
}
fixtures := t.TempDir()
barePath := filepath.Join(fixtures, "empty-repo.git")
workPath := filepath.Join(fixtures, "work")
mustGit(t, "", "init", "--bare", "-b", "main", barePath)
mustGit(t, "", "clone", barePath, workPath)
mustGit(t, workPath, "config", "user.email", "test@example.com")
mustGit(t, workPath, "config", "user.name", "Test")
mustWriteFile(t, filepath.Join(workPath, "README.md"), "x")
mustGit(t, workPath, "add", ".")
mustGit(t, workPath, "commit", "-m", "seed")
mustGit(t, workPath, "push", "origin", "main")
t.Setenv("GIT_CONFIG_COUNT", "1")
t.Setenv("GIT_CONFIG_KEY_0", "url."+barePath+".insteadOf")
t.Setenv("GIT_CONFIG_VALUE_0", "https://git.moleculesai.app/molecule-ai/empty-repo.git")
rootDir := t.TempDir()
src := []byte(`workspaces:
- !external
repo: molecule-ai/empty-repo
ref: nonexistent-branch
path: anything.yaml
`)
_, err := resolveYAMLIncludes(src, rootDir)
if err == nil {
t.Fatalf("expected error for nonexistent ref; got nil")
}
if !strings.Contains(err.Error(), "ref") && !strings.Contains(err.Error(), "ls-remote") && !strings.Contains(err.Error(), "not found") {
t.Errorf("error doesn't mention ref/ls-remote: %v", err)
}
}
// ---------- helpers ----------
func mustGit(t *testing.T, cwd string, args ...string) {
t.Helper()
cmd := exec.Command("git", args...)
if cwd != "" {
cmd.Dir = cwd
}
// Ensure user.email/name are set globally for non-cwd commands too.
cmd.Env = append(os.Environ(),
"GIT_AUTHOR_EMAIL=test@example.com",
"GIT_AUTHOR_NAME=Integration Test",
"GIT_COMMITTER_EMAIL=test@example.com",
"GIT_COMMITTER_NAME=Integration Test",
)
if out, err := cmd.CombinedOutput(); err != nil {
t.Fatalf("git %s: %v\n%s", strings.Join(args, " "), err, string(out))
}
}
func mustWriteFile(t *testing.T, path, content string) {
t.Helper()
if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
t.Fatal(err)
}
if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
t.Fatal(err)
}
}
// Verify gitFetcher.Fetch direct invocation (no resolver wrapping) for
// the cache-hit path, exercising the bare API against a local bare-repo.
func TestGitFetcher_DirectFetch_CacheHit(t *testing.T) {
if _, err := exec.LookPath("git"); err != nil {
t.Skipf("git binary not found: %v", err)
}
if runtime.GOOS == "windows" {
t.Skip("skipping on windows")
}
fixtures := t.TempDir()
barePath := filepath.Join(fixtures, "direct.git")
workPath := filepath.Join(fixtures, "w")
mustGit(t, "", "init", "--bare", "-b", "main", barePath)
mustGit(t, "", "clone", barePath, workPath)
mustGit(t, workPath, "config", "user.email", "t@e")
mustGit(t, workPath, "config", "user.name", "T")
mustWriteFile(t, filepath.Join(workPath, "marker.txt"), "hello")
mustGit(t, workPath, "add", ".")
mustGit(t, workPath, "commit", "-m", "seed")
mustGit(t, workPath, "push", "origin", "main")
t.Setenv("GIT_CONFIG_COUNT", "1")
t.Setenv("GIT_CONFIG_KEY_0", "url."+barePath+".insteadOf")
t.Setenv("GIT_CONFIG_VALUE_0", "https://git.moleculesai.app/molecule-ai/direct.git")
rootDir := t.TempDir()
g := &gitFetcher{}
ctx := context.Background()
cacheDir1, sha1, err := g.Fetch(ctx, rootDir, "git.moleculesai.app", "molecule-ai/direct", "main")
if err != nil {
t.Fatalf("first Fetch: %v", err)
}
if sha1 == "" || len(sha1) < 7 {
t.Errorf("expected SHA-like string, got %q", sha1)
}
if _, err := os.Stat(filepath.Join(cacheDir1, "marker.txt")); err != nil {
t.Errorf("first fetch missing marker.txt: %v", err)
}
// Second call: cache hit, returns same dir + sha, no re-clone.
stamp := filepath.Join(cacheDir1, ".not-clobbered-by-second-fetch")
if err := os.WriteFile(stamp, []byte("x"), 0o644); err != nil {
t.Fatal(err)
}
cacheDir2, sha2, err := g.Fetch(ctx, rootDir, "git.moleculesai.app", "molecule-ai/direct", "main")
if err != nil {
t.Fatalf("second Fetch: %v", err)
}
if cacheDir2 != cacheDir1 || sha2 != sha1 {
t.Errorf("cache miss on second call: %q/%q vs %q/%q", cacheDir1, sha1, cacheDir2, sha2)
}
if _, err := os.Stat(stamp); err != nil {
t.Errorf("cache hit not honored — stamp file disappeared: %v", err)
}
}
// TestGitFetcher_RejectsRefWithDoubleDot: defense-in-depth on ref input.
// safeRefPattern allows '.' as a regex character, so ".." would match
// without an explicit deny. Verify it's rejected even though git itself
// would also reject the resulting clone.
func TestGitFetcher_RejectsRefWithDoubleDot(t *testing.T) {
rootDir := t.TempDir()
src := []byte(`workspaces:
- !external
repo: molecule-ai/x
ref: foo..bar
path: x.yaml
`)
_, err := resolveYAMLIncludes(src, rootDir)
if err == nil {
t.Fatalf("expected '..' rejection")
}
if !strings.Contains(err.Error(), "..") {
t.Errorf("expected '..' in error; got %v", err)
}
}
// TestGitFetcher_CacheValidatedByCompleteMarker: a partially-written
// cache (the .git dir exists but no .complete marker) is treated as
// cache-miss and re-fetched. Catches the broken-cache-permanence bug.
func TestGitFetcher_CacheValidatedByCompleteMarker(t *testing.T) {
if _, err := exec.LookPath("git"); err != nil {
t.Skipf("git not found: %v", err)
}
if runtime.GOOS == "windows" {
t.Skip("skipping on windows")
}
fixtures := t.TempDir()
barePath := filepath.Join(fixtures, "test.git")
workPath := filepath.Join(fixtures, "w")
mustGit(t, "", "init", "--bare", "-b", "main", barePath)
mustGit(t, "", "clone", barePath, workPath)
mustGit(t, workPath, "config", "user.email", "t@e")
mustGit(t, workPath, "config", "user.name", "T")
mustWriteFile(t, filepath.Join(workPath, "good.txt"), "from-network")
mustGit(t, workPath, "add", ".")
mustGit(t, workPath, "commit", "-m", "seed")
mustGit(t, workPath, "push", "origin", "main")
t.Setenv("GIT_CONFIG_COUNT", "1")
t.Setenv("GIT_CONFIG_KEY_0", "url."+barePath+".insteadOf")
t.Setenv("GIT_CONFIG_VALUE_0", "https://git.moleculesai.app/molecule-ai/marker-test.git")
rootDir := t.TempDir()
g := &gitFetcher{}
// First fetch — populates the cache (creates .complete marker).
cacheDir1, _, err := g.Fetch(context.Background(), rootDir, "git.moleculesai.app", "molecule-ai/marker-test", "main")
if err != nil {
t.Fatalf("first Fetch: %v", err)
}
marker := filepath.Join(cacheDir1, cacheCompleteMarker)
if _, err := os.Stat(marker); err != nil {
t.Fatalf("first fetch should have written .complete marker: %v", err)
}
// Now simulate a partial cache: delete the marker but leave .git
// in place. The next Fetch should treat this as cache-miss and
// re-fetch (NOT silently use the partial cache).
if err := os.Remove(marker); err != nil {
t.Fatal(err)
}
// Drop a sentinel file the second fetch will clobber if it re-fetches.
sentinel := filepath.Join(cacheDir1, "_should_be_clobbered")
if err := os.WriteFile(sentinel, []byte("partial"), 0o644); err != nil {
t.Fatal(err)
}
cacheDir2, _, err := g.Fetch(context.Background(), rootDir, "git.moleculesai.app", "molecule-ai/marker-test", "main")
if err != nil {
t.Fatalf("second Fetch: %v", err)
}
if cacheDir1 != cacheDir2 {
t.Errorf("cache dirs differ across fetches: %q vs %q", cacheDir1, cacheDir2)
}
if _, err := os.Stat(filepath.Join(cacheDir2, cacheCompleteMarker)); err != nil {
t.Errorf("re-fetch should have re-written .complete marker: %v", err)
}
if _, err := os.Stat(sentinel); err == nil {
t.Errorf("sentinel still present — re-fetch did NOT clobber partial cache")
}
}

View File

@ -0,0 +1,331 @@
package handlers
import (
"context"
"os"
"path/filepath"
"strings"
"testing"
"gopkg.in/yaml.v3"
)
// fakeFetcher pre-stages a "fetched" repo at a fixed path inside the
// rootDir's .external-cache, bypassing the real git clone. Tests
// inject this via SetExternalFetcherForTest to exercise the resolver
// + path-rewrite logic without network.
type fakeFetcher struct {
// content maps "<host>/<repo>@<ref>" → a function that materializes
// repo content under cacheDir. Returns the fake SHA to use.
content map[string]func(cacheDir string) (sha string, err error)
}
func (f *fakeFetcher) Fetch(ctx context.Context, rootDir, host, repoPath, ref string) (string, string, error) {
key := host + "/" + repoPath + "@" + ref
stage, ok := f.content[key]
if !ok {
return "", "", &fakeNotFoundError{key: key}
}
// Use a stable SHA for the test so cache dir is deterministic.
cacheDir := filepath.Join(rootDir, ".external-cache", safeRepoCacheDir(host, repoPath), "deadbeef")
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
return "", "", err
}
sha, err := stage(cacheDir)
if err != nil {
return "", "", err
}
return cacheDir, sha, nil
}
type fakeNotFoundError struct{ key string }
func (e *fakeNotFoundError) Error() string {
return "fake fetcher: no content registered for " + e.key
}
// stageFiles writes a map of relative-path → content into cacheDir,
// returning a fake SHA. Helper for fakeFetcher closures.
func stageFiles(cacheDir string, files map[string]string) error {
if err := os.MkdirAll(filepath.Join(cacheDir, ".git"), 0o755); err != nil {
return err
}
for path, content := range files {
full := filepath.Join(cacheDir, path)
if err := os.MkdirAll(filepath.Dir(full), 0o755); err != nil {
return err
}
if err := os.WriteFile(full, []byte(content), 0o644); err != nil {
return err
}
}
return nil
}
// TestResolveExternalMapping_HappyPath: a parent template with an
// !external entry resolves cleanly into the fetched workspace + path-
// rewrites files_dir + relative !include refs into the cache prefix.
func TestResolveExternalMapping_HappyPath(t *testing.T) {
tmp := t.TempDir()
// Stub fetcher: "fetched" content has a workspace.yaml that uses
// files_dir + nested !include relative to the fetched repo's root.
fake := &fakeFetcher{
content: map[string]func(string) (string, error){
"git.moleculesai.app/molecule-ai/molecule-dev-department@main": func(cacheDir string) (string, error) {
return "deadbeef", stageFiles(cacheDir, map[string]string{
"dev-lead/workspace.yaml": `name: Dev Lead
files_dir: dev-lead
children:
- !include ./core-lead/workspace.yaml
`,
"dev-lead/core-lead/workspace.yaml": `name: Core Platform Lead
files_dir: dev-lead/core-lead
`,
})
},
},
}
cleanup := SetExternalFetcherForTest(fake)
defer cleanup()
src := []byte(`name: Parent
workspaces:
- !external
repo: molecule-ai/molecule-dev-department
ref: main
path: dev-lead/workspace.yaml
`)
out, err := resolveYAMLIncludes(src, tmp)
if err != nil {
t.Fatalf("resolveYAMLIncludes: %v", err)
}
var tmpl OrgTemplate
if err := yaml.Unmarshal(out, &tmpl); err != nil {
t.Fatalf("unmarshal: %v", err)
}
if len(tmpl.Workspaces) != 1 {
t.Fatalf("workspaces: %+v", tmpl.Workspaces)
}
dev := tmpl.Workspaces[0]
if dev.Name != "Dev Lead" {
t.Errorf("dev.Name = %q; want Dev Lead", dev.Name)
}
// files_dir should be cache-prefixed.
wantPrefix := filepath.Join(".external-cache", "git.moleculesai.app__molecule-ai__molecule-dev-department", "deadbeef")
if !strings.HasPrefix(dev.FilesDir, wantPrefix) {
t.Errorf("dev.FilesDir = %q; want prefix %q", dev.FilesDir, wantPrefix)
}
if !strings.HasSuffix(dev.FilesDir, "dev-lead") {
t.Errorf("dev.FilesDir = %q; want suffix dev-lead", dev.FilesDir)
}
// Nested child: files_dir cache-prefixed, name Core Platform Lead.
if len(dev.Children) != 1 {
t.Fatalf("dev.Children: %+v", dev.Children)
}
core := dev.Children[0]
if core.Name != "Core Platform Lead" {
t.Errorf("core.Name = %q; want Core Platform Lead", core.Name)
}
if !strings.HasPrefix(core.FilesDir, wantPrefix) {
t.Errorf("core.FilesDir = %q; want prefix %q", core.FilesDir, wantPrefix)
}
if !strings.HasSuffix(core.FilesDir, filepath.Join("dev-lead", "core-lead")) {
t.Errorf("core.FilesDir = %q; want suffix dev-lead/core-lead", core.FilesDir)
}
}
// TestResolveExternalMapping_AllowlistRejection: hostile yaml pointing
// at a non-allowlisted repo gets rejected.
func TestResolveExternalMapping_AllowlistRejection(t *testing.T) {
tmp := t.TempDir()
fake := &fakeFetcher{content: map[string]func(string) (string, error){}}
cleanup := SetExternalFetcherForTest(fake)
defer cleanup()
// Default allowlist is git.moleculesai.app/molecule-ai/*.
// github.com/foo/bar is NOT in it.
src := []byte(`workspaces:
- !external
repo: foo/bar
ref: main
path: x.yaml
url: github.com
`)
_, err := resolveYAMLIncludes(src, tmp)
if err == nil {
t.Fatalf("expected allowlist rejection, got nil")
}
if !strings.Contains(err.Error(), "MOLECULE_EXTERNAL_REPO_ALLOWLIST") {
t.Errorf("expected allowlist error; got %v", err)
}
}
// TestResolveExternalMapping_PathTraversalRejection: hostile yaml
// with `path: ../../etc/passwd` gets rejected before fetch.
func TestResolveExternalMapping_PathTraversalRejection(t *testing.T) {
tmp := t.TempDir()
fake := &fakeFetcher{content: map[string]func(string) (string, error){}}
cleanup := SetExternalFetcherForTest(fake)
defer cleanup()
src := []byte(`workspaces:
- !external
repo: molecule-ai/dev-department
ref: main
path: ../../etc/passwd
`)
_, err := resolveYAMLIncludes(src, tmp)
if err == nil {
t.Fatalf("expected path traversal rejection, got nil")
}
if !strings.Contains(err.Error(), "relative-and-down-only") {
t.Errorf("expected path traversal error; got %v", err)
}
}
// TestResolveExternalMapping_BadRefRejection: non-allowlisted ref chars.
func TestResolveExternalMapping_BadRefRejection(t *testing.T) {
tmp := t.TempDir()
fake := &fakeFetcher{content: map[string]func(string) (string, error){}}
cleanup := SetExternalFetcherForTest(fake)
defer cleanup()
src := []byte(`workspaces:
- !external
repo: molecule-ai/dev-department
ref: "main; rm -rf /"
path: foo.yaml
`)
_, err := resolveYAMLIncludes(src, tmp)
if err == nil || !strings.Contains(err.Error(), "disallowed characters") {
t.Errorf("expected ref-validation error; got %v", err)
}
}
// TestResolveExternalMapping_MissingRequiredFields: repo / ref / path
// are all required.
func TestResolveExternalMapping_MissingRequiredFields(t *testing.T) {
tmp := t.TempDir()
fake := &fakeFetcher{content: map[string]func(string) (string, error){}}
cleanup := SetExternalFetcherForTest(fake)
defer cleanup()
cases := []string{
// missing repo
`workspaces:
- !external
ref: main
path: x.yaml
`,
// missing ref
`workspaces:
- !external
repo: molecule-ai/x
path: x.yaml
`,
// missing path
`workspaces:
- !external
repo: molecule-ai/x
ref: main
`,
}
for i, src := range cases {
_, err := resolveYAMLIncludes([]byte(src), tmp)
if err == nil {
t.Errorf("case %d: expected required-field error, got nil", i)
} else if !strings.Contains(err.Error(), "required") {
t.Errorf("case %d: want 'required' in error; got %v", i, err)
}
}
}
// TestRewriteFilesDir: verify the path-rewrite walker
// prefixes files_dir scalars. !include scalars are NOT rewritten —
// they resolve relative to their containing file's dir, which post-
// fetch is naturally inside the cache.
func TestRewriteFilesDir(t *testing.T) {
src := `name: Foo
files_dir: dev-lead
children:
- !include ./bar/workspace.yaml
- !include other-team.yaml
inner:
files_dir: dev-lead/sub
`
var n yaml.Node
if err := yaml.Unmarshal([]byte(src), &n); err != nil {
t.Fatal(err)
}
rewriteFilesDir(&n, ".external-cache/foo/bar")
out, err := yaml.Marshal(&n)
if err != nil {
t.Fatal(err)
}
got := string(out)
for _, want := range []string{
"files_dir: .external-cache/foo/bar/dev-lead",
"files_dir: .external-cache/foo/bar/dev-lead/sub",
// !include preserved as-is; resolves naturally via subDir.
"!include ./bar/workspace.yaml",
"!include other-team.yaml",
} {
if !strings.Contains(got, want) {
t.Errorf("missing %q in:\n%s", want, got)
}
}
}
// TestRewriteFilesDir_Idempotent: re-running the rewriter
// on already-prefixed files_dir doesn't double-prefix.
func TestRewriteFilesDir_Idempotent(t *testing.T) {
src := `files_dir: .external-cache/foo/bar/dev-lead
inner:
files_dir: .external-cache/foo/bar/dev-lead/sub
`
var n yaml.Node
if err := yaml.Unmarshal([]byte(src), &n); err != nil {
t.Fatal(err)
}
rewriteFilesDir(&n, ".external-cache/foo/bar")
out, _ := yaml.Marshal(&n)
got := string(out)
if strings.Contains(got, ".external-cache/foo/bar/.external-cache") {
t.Errorf("double-prefix detected:\n%s", got)
}
// Should still be valid (single-prefixed) afterwards.
for _, want := range []string{
"files_dir: .external-cache/foo/bar/dev-lead",
"files_dir: .external-cache/foo/bar/dev-lead/sub",
} {
if !strings.Contains(got, want) {
t.Errorf("expected unchanged %q in:\n%s", want, got)
}
}
}
// TestAllowlistedHostPath: env-var override + glob matching.
func TestAllowlistedHostPath(t *testing.T) {
t.Setenv("MOLECULE_EXTERNAL_REPO_ALLOWLIST", "")
if !allowlistedHostPath("git.moleculesai.app", "molecule-ai/foo") {
t.Error("default allowlist should accept molecule-ai/*")
}
if allowlistedHostPath("github.com", "molecule-ai/foo") {
t.Error("default allowlist should reject github.com")
}
t.Setenv("MOLECULE_EXTERNAL_REPO_ALLOWLIST", "github.com/me/*,git.moleculesai.app/*")
if !allowlistedHostPath("github.com", "me/x") {
t.Error("override should accept github.com/me/*")
}
if !allowlistedHostPath("git.moleculesai.app", "any/repo") {
t.Error("override should accept git.moleculesai.app/*")
}
if allowlistedHostPath("github.com", "evil/x") {
t.Error("override should reject github.com/evil/*")
}
}

View File

@ -76,6 +76,12 @@ func expandNode(n *yaml.Node, currentDir, rootDir string, visited map[string]boo
return resolveIncludeScalar(n, currentDir, rootDir, visited, depth)
}
// `!external`-tagged mapping: gitops cross-repo subtree composition.
// See org_external.go (internal#77 / task #222).
if n.Kind == yaml.MappingNode && n.Tag == "!external" {
return resolveExternalMapping(n, currentDir, rootDir, visited, depth)
}
for _, child := range n.Content {
if err := expandNode(child, currentDir, rootDir, visited, depth); err != nil {
return err

View File

@ -0,0 +1,136 @@
package handlers
import (
"os"
"path/filepath"
"testing"
"gopkg.in/yaml.v3"
)
// Phase 5 (RFC internal#77 dev-department extraction):
// Proves a parent org template can compose a subtree from a sibling repo
// via a directory symlink. Pattern that gets shipped:
//
// /org-templates/parent-template/ ← imported by POST /org/import
// org.yaml (workspaces: !include dev/dev-lead/workspace.yaml)
// dev → /org-templates/molecule-dev-department/ (symlink)
// /org-templates/molecule-dev-department/ (sibling repo)
// dev-lead/
// workspace.yaml (children: !include ./core-platform/workspace.yaml)
// core-platform/
// workspace.yaml
//
// resolveYAMLIncludes resolves paths via filepath.Abs/Rel (no symlink
// following at the path-string layer), so the security check passes. The
// actual file open uses os.ReadFile, which DOES follow symlinks — so the
// content from the sibling repo gets inlined. This test pins that contract.
func TestResolveYAMLIncludes_FollowsDirectorySymlink(t *testing.T) {
tmp := t.TempDir()
// Subtree repo: dev-department/dev-lead/...
devDept := filepath.Join(tmp, "molecule-dev-department")
devLead := filepath.Join(devDept, "dev-lead")
corePlatform := filepath.Join(devLead, "core-platform")
if err := os.MkdirAll(corePlatform, 0o755); err != nil {
t.Fatal(err)
}
// dev-lead/workspace.yaml — uses `./core-platform/workspace.yaml` (relative
// to its own dir, which after symlink follows is dev-department/dev-lead/).
devLeadYAML := []byte(`name: Dev Lead
tier: 3
children:
- !include ./core-platform/workspace.yaml
`)
if err := os.WriteFile(filepath.Join(devLead, "workspace.yaml"), devLeadYAML, 0o644); err != nil {
t.Fatal(err)
}
if err := os.WriteFile(filepath.Join(corePlatform, "workspace.yaml"), []byte("name: Core Platform\ntier: 3\n"), 0o644); err != nil {
t.Fatal(err)
}
// Parent template: parent/, with `dev` symlink → ../molecule-dev-department/
parent := filepath.Join(tmp, "parent-template")
if err := os.MkdirAll(parent, 0o755); err != nil {
t.Fatal(err)
}
// Symlink TARGET is a relative path (matches operator-side deploy
// convention where both repos are cloned as siblings under a shared
// /org-templates/ dir).
if err := os.Symlink("../molecule-dev-department", filepath.Join(parent, "dev")); err != nil {
t.Skipf("symlinks unsupported on this fs: %v", err)
}
// Parent's org.yaml: !include into the symlinked subtree.
src := []byte(`name: Parent
workspaces:
- !include dev/dev-lead/workspace.yaml
`)
out, err := resolveYAMLIncludes(src, parent)
if err != nil {
t.Fatalf("resolveYAMLIncludes through symlink failed: %v", err)
}
var tmpl OrgTemplate
if err := yaml.Unmarshal(out, &tmpl); err != nil {
t.Fatalf("unmarshal: %v", err)
}
if len(tmpl.Workspaces) != 1 {
t.Fatalf("expected 1 workspace, got %d", len(tmpl.Workspaces))
}
if tmpl.Workspaces[0].Name != "Dev Lead" {
t.Fatalf("workspace[0].Name = %q; want Dev Lead", tmpl.Workspaces[0].Name)
}
kids := tmpl.Workspaces[0].Children
if len(kids) != 1 {
t.Fatalf("expected 1 child workspace, got %d", len(kids))
}
if kids[0].Name != "Core Platform" {
t.Fatalf("child[0].Name = %q; want Core Platform — symlink-aware nested !include broken", kids[0].Name)
}
}
// Companion: prove the security check still works when the symlink target
// is OUTSIDE the parent template's root. This is the "hostile symlink"
// case — an org.yaml that tries to slip in arbitrary files from /etc.
func TestResolveYAMLIncludes_RejectsSymlinkEscapingRoot(t *testing.T) {
tmp := t.TempDir()
parent := filepath.Join(tmp, "parent-template")
outside := filepath.Join(tmp, "outside")
if err := os.MkdirAll(parent, 0o755); err != nil {
t.Fatal(err)
}
if err := os.MkdirAll(outside, 0o755); err != nil {
t.Fatal(err)
}
if err := os.WriteFile(filepath.Join(outside, "evil.yaml"), []byte("name: Evil\n"), 0o644); err != nil {
t.Fatal(err)
}
// Symlink that escapes the parent root via `../outside/...`. The path
// STRING `evil` resolves to parent/evil — passes the rel2 check. But
// because filepath.Abs doesn't follow symlinks, the ReadFile call DOES
// follow it to outside/evil.yaml. This is the trade-off the symlink
// approach accepts: the security boundary is a deployment-layer
// invariant, not a code-layer one. Documented in dev-department/README.
if err := os.Symlink(filepath.Join(outside, "evil.yaml"), filepath.Join(parent, "evil.yaml")); err != nil {
t.Skipf("symlinks unsupported on this fs: %v", err)
}
src := []byte("workspaces:\n - !include evil.yaml\n")
out, err := resolveYAMLIncludes(src, parent)
if err != nil {
// If the resolver is later hardened to refuse symlink targets
// outside the root (e.g. via filepath.EvalSymlinks), this test
// will start failing — and the dev-department symlink approach
// would need to be updated accordingly.
t.Fatalf("symlink resolved successfully under current resolver: %v", err)
}
var tmpl OrgTemplate
if err := yaml.Unmarshal(out, &tmpl); err != nil {
t.Fatalf("unmarshal: %v", err)
}
if len(tmpl.Workspaces) != 1 || tmpl.Workspaces[0].Name != "Evil" {
t.Fatalf("expected Evil workspace via symlink; got %+v", tmpl.Workspaces)
}
}

View File

@ -765,6 +765,21 @@ func ApplyTierConfig(hostCfg *container.HostConfig, cfg WorkspaceConfig, configM
// CopyTemplateToContainer copies files from a host directory into /configs in the container.
func (p *Provisioner) CopyTemplateToContainer(ctx context.Context, containerID, templatePath string) error {
// Resolve symlinks at the root before walking. filepath.Walk does
// NOT follow a symlink that IS the root — it Lstats the path, sees
// a symlink (non-directory), and emits exactly one entry without
// descending. With cross-repo composition (parent template's
// dev-lead → ../sibling-repo/dev-lead/, see internal#77), the
// caller routinely passes a symlink as templatePath. Without this
// resolution the workspace's /configs/ mount lands empty.
//
// Security: templatePath has already passed resolveInsideRoot's
// path-string check at the call site — the trust boundary is the
// operator-side /org-templates/ filesystem layout, not this
// resolution step.
if resolved, err := filepath.EvalSymlinks(templatePath); err == nil {
templatePath = resolved
}
var buf bytes.Buffer
tw := tar.NewWriter(&buf)