Compare commits

..

No commits in common. "main" and "chore/runtime-version-file" have entirely different histories.

7 changed files with 41 additions and 1173 deletions

View File

@ -1,232 +0,0 @@
name: CI
# Ported from .github/workflows/ci.yml on 2026-05-11 per internal#326
# (Class-A root: cross-repo `uses:` blocker for Gitea 1.22.6 —
# feedback_gitea_cross_repo_uses_blocked).
#
# Root cause of the main-red CI on this repo:
#   The .github/ original used
#     uses: molecule-ai/molecule-ci/.github/workflows/validate-workspace-template.yml@main
#   which Gitea 1.22.6 rejects (DEFAULT_ACTIONS_URL=github → 404 against
#   the remote repo even though it lives on the same Gitea instance).
#   Gitea reads .github/ as a fallback when .gitea/ is absent
#   (reference_per_repo_gitea_vs_github_actions_dir), so the .github/
#   workflow was firing on Gitea and failing in 1s.
#
# Fix shape: inline the validation logic directly. The canonical
# validator in molecule-ai/molecule-ci already self-clones into the
# runner via a direct HTTPS `git clone` step (validate-workspace-template.yml
# does this verbatim) — so the inline port is just "do that clone +
# invoke the validator script in-place", preserving the
# single-source-of-truth property (each CI run still fetches the
# canonical validator fresh).
#
# Four-surface migration audit (feedback_gitea_actions_migration_audit_pattern):
#   1. YAML — no `workflow_dispatch.inputs`; no `merge_group`; preserved
#      `on: [push, pull_request]` from the original. Added workflow-level
#      env.GITHUB_SERVER_URL (feedback_act_runner_github_server_url).
#   2. Cache — `actions/setup-python` `cache: pip` preserved; works against
#      Gitea's built-in cache server when runner.cache is configured.
#   3. Token — uses auto-injected GITHUB_TOKEN (Gitea-aliased). Validator
#      job needs only `contents: read` (no write to issues/PRs).
#   4. Docs — anonymous git-clone of molecule-ci (no token in URL); the
#      molecule-ci repo is public on the Gitea instance.
#
# Fork-PR semantics: validate-runtime is intentionally skipped on fork
# PRs because pip-install + docker-build + adapter-import are arbitrary
# code execution. Internal PRs and main pushes get full coverage. The
# `github.event.pull_request.head.repo.fork` field is null for non-PR
# events; the `!= true` comparison defaults to running.
#
# Cross-links:
#   - internal#326 — parent tracking issue
#   - molecule-ai/molecule-ci/.github/workflows/validate-workspace-template.yml — pattern source
#   - molecule-ai/molecule-core/.gitea/workflows/ci.yml — Gitea port style reference
#
# NOTE: generic YAML 1.1 parsers read the bare `on` key as boolean true;
# GitHub's/Gitea's workflow loaders handle it. Suppress yamllint `truthy`
# here rather than quoting the key.
on: [push, pull_request]
env:
  # Belt-and-suspenders against the runner-default trap
  # (feedback_act_runner_github_server_url). Runners are configured
  # with this env via /opt/molecule/runners/config.yaml runner.envs,
  # but pinning at the workflow level protects against a runner
  # regenerated without the config file.
  GITHUB_SERVER_URL: https://git.moleculesai.app
# Defense-in-depth on the GITHUB_TOKEN scope. The validate-runtime job
# runs untrusted-by-design code from the calling repo — pip-installs
# requirements.txt (post-install hooks), imports adapter.py, and
# docker-builds the Dockerfile. Each primitive can execute arbitrary
# code with the token in env. Pinning `contents: read` means the worst
# a malicious template PR can do with the token is read public repo
# state — no write to issues, no push to branches, no comment-spam.
permissions:
  contents: read
jobs:
  validate-static:
    name: Template validation (static)
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      # Canonical validator script lives in molecule-ci, fetched fresh on
      # every run. Anonymous fetch of the public molecule-ci repo — no
      # token needed; no actions/checkout cross-repo idiosyncrasies.
      - name: Fetch molecule-ci canonical scripts
        run: git clone --depth 1 https://git.moleculesai.app/molecule-ai/molecule-ci.git .molecule-ci-canonical
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      # Secret scan — the most important check. Always runs, including
      # on fork PRs (no third-party code executes here).
      #
      # Scanner shape (the heredoc below is a runtime string — documented
      # here rather than inline): walks GITHUB_WORKSPACE, skipping vendored
      # dirs (SKIP_DIRS) and non-text extensions, applies each regex in
      # PATTERNS per line, drops matches on lines containing '...' or
      # <example markers, and exits 1 with ::error annotations on any hit.
      #
      # NOTE(review): the unquoted patterns 7–8 (ghp_/sk-ant-) are strict
      # supersets of the quoted patterns 1–2, so a quoted token is reported
      # twice — harmless (still exits 1) but noisy. Also, the '...'
      # false-positive guard suppresses ANY line containing an ellipsis,
      # including one carrying a real secret — confirm that trade-off is
      # intended before tightening.
      - name: Check for secrets
        run: |
          python3 - << 'PYEOF'
          import os, re, sys
          from pathlib import Path
          PATTERNS = [
              re.compile(r'''["']sk-ant-[a-zA-Z0-9]{50,}["']'''),
              re.compile(r'''["']ghp_[a-zA-Z0-9]{36,}["']'''),
              re.compile(r'''["']AKIA[A-Z0-9]{16}["']'''),
              re.compile(r'''["'][a-zA-Z0-9/+=]{40}["']'''),
              re.compile(r'''["']sk_test_[a-zA-Z0-9]{24,}["']'''),
              re.compile(r'''["']Bearer\s+[a-zA-Z0-9_.-]{20,}["']'''),
              re.compile(r'''ghp_[a-zA-Z0-9]{36,}'''),
              re.compile(r'''sk-ant-[a-zA-Z0-9]{50,}'''),
          ]
          SKIP_DIRS = {'.molecule-ci', '.molecule-ci-canonical', '.git', 'node_modules', '__pycache__'}
          EXTENSIONS = {'.yaml', '.yml', '.md', '.py', '.sh'}
          def is_false_positive(line):
              ctx = line.lower()
              return '...' in ctx or '<example' in ctx or '</example' in ctx
          root = Path(os.environ.get('GITHUB_WORKSPACE', '.'))
          warnings = []
          for dirpath, dirnames, filenames in os.walk(root):
              dirnames[:] = [d for d in dirnames if d not in SKIP_DIRS]
              for filename in filenames:
                  if Path(filename).suffix not in EXTENSIONS:
                      continue
                  filepath = Path(dirpath) / filename
                  try:
                      with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
                          for lineno, line in enumerate(f.readlines(), 1):
                              for pattern in PATTERNS:
                                  for match in pattern.finditer(line):
                                      if not is_false_positive(line):
                                          warnings.append(f"  {filepath}:{lineno}: {match.group(0)[:40]}...")
                  except Exception:
                      pass
          if warnings:
              print("::error::Potential secret found in committed files:")
              for w in warnings:
                  print(w)
              sys.exit(1)
          else:
              print("::notice::No secrets detected")
          PYEOF
      # Static-only validator — file existence checks, YAML parse,
      # AST inspection of adapter.py (no import). Doesn't execute any
      # third-party code; safe on fork PRs.
      - run: pip install pyyaml -q
      - run: python3 .molecule-ci-canonical/scripts/validate-workspace-template.py --static-only
validate-runtime:
name: Template validation (runtime)
runs-on: ubuntu-latest
timeout-minutes: 15
needs: validate-static
# Skip when the PR comes from a fork — those are external,
# untrusted, and would let attackers run pip install / docker build
# / adapter.py import on our runner.
if: github.event.pull_request.head.repo.fork != true
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Fetch molecule-ci canonical scripts
run: git clone --depth 1 https://git.moleculesai.app/molecule-ai/molecule-ci.git .molecule-ci-canonical
- uses: actions/setup-python@v5
with:
python-version: "3.11"
cache: "pip"
cache-dependency-path: requirements.txt
- run: pip install pyyaml -q
# Install the template's runtime dependencies so the validator's
# check_adapter_runtime_load() can import adapter.py the same way
# the workspace container does at boot. Without this, a
# syntactically-valid adapter that ImportErrors on a missing
# transitive dep would build clean and crash on first user prompt.
- if: hashFiles('requirements.txt') != ''
run: pip install -q -r requirements.txt
- if: hashFiles('requirements.txt') == ''
run: pip install -q molecule-ai-workspace-runtime
- run: python3 .molecule-ci-canonical/scripts/validate-workspace-template.py
- name: Docker build smoke test
if: hashFiles('Dockerfile') != ''
run: |
# Graceful skip when the runner's job-container can't reach the
# Docker daemon (e.g. /var/run/docker.sock not mounted into the
# act job container, or the in-container uid not in the docker
# group). Without this guard, CI stays red even when the
# template's Dockerfile is fine — see internal#222 for the
# proper runner-config fix.
if ! docker info >/dev/null 2>&1; then
echo "::warning::docker daemon unreachable from runner job container — skipping Docker build smoke (runner-config gap, not a template issue)."
exit 0
fi
docker build -t template-test . --no-cache 2>&1 | tail -5 && echo "Docker build succeeded"
# Aggregator that emits a single `validate` check name — matches the
# historical required-check name on this repo's branch protection.
validate:
name: validate
runs-on: ubuntu-latest
needs: [validate-static, validate-runtime]
if: always()
timeout-minutes: 1
steps:
- name: Aggregate
run: |
static="${{ needs.validate-static.result }}"
runtime="${{ needs.validate-runtime.result }}"
echo "validate-static: $static"
echo "validate-runtime: $runtime"
if [ "$static" != "success" ]; then
echo "::error::validate-static did not succeed: $static"
exit 1
fi
# Treat `skipped` as a pass for fork-PR semantics (validate-runtime
# is intentionally skipped on forks; static coverage is the gate).
if [ "$runtime" != "success" ] && [ "$runtime" != "skipped" ]; then
echo "::error::validate-runtime did not succeed: $runtime"
exit 1
fi
echo "::notice::Template validation aggregate passed (static=$static, runtime=$runtime)"
  tests:
    name: Adapter unit tests
    runs-on: ubuntu-latest
    timeout-minutes: 5
    # NOTE(review): unlike validate-runtime, this job has no fork guard
    # (`head.repo.fork`), yet `pytest tests/` executes PR-authored test
    # code. Blast radius is limited by the workflow-level
    # `permissions: contents: read`, but confirm running fork-PR test
    # code on the runner is intended.
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      # pyyaml is the runtime dep that adapter.py's _load_providers reads
      # /configs/config.yaml through. In production it arrives transitively
      # via molecule-ai-workspace-runtime; in this minimal test env we
      # install it explicitly so the YAML-loading code path is actually
      # exercised (without it, _load_providers' broad except-Exception
      # swallows the ImportError and silently falls back to _BUILTIN_PROVIDERS,
      # which is exactly the behavior that bit us 2026-04-30 when CI
      # claimed green on a build that couldn't route any third-party model).
      - run: pip install -q pytest pytest-asyncio pyyaml
      # Tests live under tests/ with their own pytest.ini that anchors
      # rootdir there — keeps pytest from importing the package
      # __init__.py (which does `from .adapter import ...` for runtime
      # discovery and can't be satisfied without molecule_runtime
      # installed). See tests/pytest.ini for the full rationale.
      - run: python3 -m pytest tests/ -v

View File

@ -43,19 +43,6 @@ RUN pip install --no-cache-dir -r requirements.txt && \
# Copy adapter code # Copy adapter code
COPY adapter.py . COPY adapter.py .
COPY __init__.py . COPY __init__.py .
# Provider registry. The adapter's _load_providers walks 4 paths:
# 1. /opt/adapter/config.yaml — provisioner-managed canonical
# 2. os.path.dirname(__file__)/config.yaml — alongside adapter.py (this image)
# 3. ${WORKSPACE_CONFIG_PATH}/config.yaml — workspace per-instance overrides
# 4. _BUILTIN_PROVIDERS — oauth + anthropic-api only
# On this image /opt/adapter/ is never populated by the platform
# provisioner, so path 2 (/app/config.yaml) is the load-bearing one.
# Without this COPY the file isn't in the image, all 3 file paths fail,
# and _load_providers falls through to _BUILTIN_PROVIDERS — every
# MiniMax/GLM/Kimi/DeepSeek model silently routes to anthropic-oauth →
# "Not logged in. Please run /login" at first LLM call. Caused the
# canary's 38h chronic red on 2026-05-07/08 (molecule-core#129).
COPY config.yaml .
# Adapter-specific executor — owned by THIS template (universal-runtime # Adapter-specific executor — owned by THIS template (universal-runtime
# refactor, molecule-core task #87). Lives alongside adapter.py so # refactor, molecule-core task #87). Lives alongside adapter.py so
# Python's import system picks the local /app/claude_sdk_executor.py # Python's import system picks the local /app/claude_sdk_executor.py

View File

@ -147,118 +147,36 @@ def _normalize_provider(entry: dict):
} }
# Canonical install path the platform provisioner is contracted to clone
# the template repo into. Hardcoded so the adapter's config.yaml lookup
# is invariant across Docker (mounted /app→/opt/adapter) and EC2-host
# (cloned by molecule-controlplane's ec2.go) install paths — robust
# against the site-packages copy that bit us 2026-05-04 11:08Z.
_CANONICAL_ADAPTER_DIR = "/opt/adapter"
# Adjacent-to-adapter.py path. Module-level so tests can monkeypatch it
# to redirect the path-2 lookup at a controlled tmp dir. Production code
# resolves this once at import time and never touches it again — same
# semantics as before.
_TEMPLATE_DIR = os.path.dirname(os.path.abspath(__file__))
def _load_providers(config_path: str) -> tuple: def _load_providers(config_path: str) -> tuple:
"""Load the provider registry from the template's bundled config.yaml. """Load the provider registry from /configs/config.yaml.
The providers list is a TEMPLATE concern it describes which The YAML's top-level ``providers:`` list is the canonical source —
models/auth-modes this runtime image supports and ships in the canvas Config tab reads the same list to populate its Provider
template's own config.yaml alongside adapter.py. The per-workspace dropdown so the UI and the adapter never disagree on what's
``${WORKSPACE_CONFIG_PATH}/config.yaml`` (default ``/configs/``) available. Falls back to ``_BUILTIN_PROVIDERS`` (oauth + anthropic-api)
only contains workspace-specific overrides (model, runtime, skills, if the file is missing, malformed, or has no providers section, so a
prompt files) and does NOT carry a providers section. bare-bones workspace still boots with the historical defaults.
Two-step incident history: Per-entry isolation: a single bad provider entry is dropped with a
Pre-2026-05-04 09:00Z: only checked ``config_path``, fell back warning; the rest of the registry survives. Used to be a generator
to ``_BUILTIN_PROVIDERS`` (oauth + anthropic-api). Every inside tuple(...) that propagated any AttributeError out and reverted
MiniMax / GLM / Kimi / DeepSeek model resolved to the whole registry to builtins exactly the silent-fallback failure
``anthropic-oauth`` and crashed at first LLM call with mode this file's existence was meant to fix.
"Not logged in. Please run /login". Fixed by adding a
template-bundled lookup using
``os.path.dirname(os.path.abspath(__file__))``.
2026-05-04 11:08Z: that ``__file__`` lookup misses on EC2-host
installs because the provisioner copies adapter.py to
``/opt/molecule-venv/lib/python3.12/site-packages/``
site-packages wins over PYTHONPATH=/opt/adapter (which the
host install doesn't set), so __file__ resolves to the venv
path WITHOUT an adjacent config.yaml. Same silent fallback
to anthropic-oauth + same "Not logged in" symptom.
2026-05-08 (#129): the multi-path lookup that fixed both of
the above was lost in a post-suspension migration cycle (the
Gitea main branch never carried the fix even though the
:latest image had it baked in from a prior build). Canary
chronic red for 38h before this commit restored the lookup.
Resolution order:
1. ``/opt/adapter/config.yaml`` canonical provisioner-managed
install dir. Hardcoded because the platform contract is
"provisioner clones template repo into /opt/adapter"; this
is invariant across Docker (mounted /app/opt/adapter) and
EC2-host (cloned by ec2.go) install paths. Robust against
site-packages copy.
2. Adjacent to ``adapter.__file__`` works in dev/test where
the canonical path doesn't exist. Also covers the Docker
image's /app/config.yaml (bundled by Dockerfile #6).
3. Per-workspace ``${config_path}/config.yaml`` fallback for
operator-shipped overrides on a private deployment that
wants a custom providers list.
4. ``_BUILTIN_PROVIDERS`` oauth + anthropic-api defaults so a
bare-bones workspace still boots even with no config.yaml
anywhere.
Per-entry isolation: a single bad provider entry is dropped with
a warning; the rest of the registry survives.
""" """
canonical_yaml = os.path.join(_CANONICAL_ADAPTER_DIR, "config.yaml") yaml_path = os.path.join(config_path, "config.yaml")
template_yaml = os.path.join(_TEMPLATE_DIR, "config.yaml")
workspace_yaml = os.path.join(config_path, "config.yaml")
# Deduplicate while preserving order — _CANONICAL_ADAPTER_DIR and
# the __file__ dir collide in dev/test (when imported from
# /opt/adapter directly), and workspace_yaml may also collide if
# config_path == /opt/adapter in tests.
seen = set()
candidates = []
for path in (canonical_yaml, template_yaml, workspace_yaml):
if path not in seen:
seen.add(path)
candidates.append(path)
raw = None
chosen_path = None
try: try:
import yaml # transitive dep via molecule-ai-workspace-runtime import yaml # transitive dep via molecule-ai-workspace-runtime
except ImportError: with open(yaml_path, "r") as f:
logger.warning("providers: yaml import failed; using builtins") data = yaml.safe_load(f) or {}
except FileNotFoundError:
logger.info("providers: %s not found, using builtin defaults", yaml_path)
return _BUILTIN_PROVIDERS
except Exception as exc: # noqa: BLE001 — defensive: never block boot on YAML
logger.warning("providers: failed to load from %s (%s); using builtins", yaml_path, exc)
return _BUILTIN_PROVIDERS return _BUILTIN_PROVIDERS
for yaml_path in candidates: raw = data.get("providers") if isinstance(data, dict) else None
try: if not isinstance(raw, list) or not raw:
with open(yaml_path, "r") as f:
data = yaml.safe_load(f) or {}
except FileNotFoundError:
logger.info("providers: %s not found, trying next candidate", yaml_path)
continue
except Exception as exc: # noqa: BLE001 — defensive: never block boot on YAML
logger.warning(
"providers: failed to load from %s (%s); trying next candidate",
yaml_path, exc,
)
continue
candidate_raw = data.get("providers") if isinstance(data, dict) else None
if isinstance(candidate_raw, list) and candidate_raw:
raw = candidate_raw
chosen_path = yaml_path
break
if raw is None:
logger.info(
"providers: no providers section found in %s; using builtin defaults",
" or ".join(candidates),
)
return _BUILTIN_PROVIDERS return _BUILTIN_PROVIDERS
parsed = [] parsed = []
@ -272,139 +190,11 @@ def _load_providers(config_path: str) -> tuple:
parsed.append(normalized) parsed.append(normalized)
if not parsed: if not parsed:
logger.warning("providers: no valid entries in %s; using builtins", chosen_path) logger.warning("providers: no valid entries in %s; using builtins", yaml_path)
return _BUILTIN_PROVIDERS return _BUILTIN_PROVIDERS
logger.info("providers: loaded %d entries from %s", len(parsed), chosen_path)
return tuple(parsed) return tuple(parsed)
# Aliases for `MODEL_PROVIDER` env values that should map to a registry
# provider name. The persona env files use shorter / friendlier slugs
# than the registry's canonical names — without this alias map a value
# like ``MODEL_PROVIDER=claude-code`` would fall through to YAML-based
# resolution and (when the YAML doesn't pin a provider) hit the
# model-prefix matcher with the operator-picked MODEL, mis-routing a
# lead workspace through MiniMax even though its CLAUDE_CODE_OAUTH_TOKEN
# was clearly meant to be used.
#
# Maintain this list in sync with the persona env file convention:
# - ``claude-code`` → ``anthropic-oauth`` (Claude Code subscription path)
# - ``anthropic`` → ``anthropic-api`` (direct Anthropic API key)
# Provider names already in the registry alias to themselves implicitly
# (the ``in registry`` check catches them before this map is consulted).
_PROVIDER_SLUG_ALIASES = {
"claude-code": "anthropic-oauth",
"anthropic": "anthropic-api",
}
def _resolve_model_and_provider_from_env(
yaml_model: str,
yaml_provider: str,
providers: tuple,
) -> tuple:
"""Reconcile model + provider from env vars vs YAML, with the persona-env
convention winning over the legacy ``MODEL_PROVIDER``-as-model-id usage.
The persona env files (``~/.molecule-ai/personas/<name>/env`` on the host,
sourced into each workspace container at provision time) declare TWO env
vars with distinct semantics:
* ``MODEL`` the model id (e.g. ``MiniMax-M2.7-highspeed``, ``opus``).
* ``MODEL_PROVIDER`` the provider slug (e.g. ``minimax``,
``claude-code``, ``anthropic``).
The legacy ``workspace/config.py`` (in molecule-ai-workspace-runtime)
historically interpreted ``MODEL_PROVIDER`` as the *model id* a name
chosen before there was a separate ``MODEL`` env var. When both env vars
are set with the persona convention, the legacy code reads
``MODEL_PROVIDER=minimax`` into ``runtime_config.model``, which then
fails to match any registry prefix (``minimax-`` requires a hyphen
suffix) and silently falls through to providers[0] (``anthropic-oauth``).
OAuth-token-less workspaces then wedge at ``query.initialize()`` because
the claude CLI can't authenticate. This is the 2026-05-08 dev-tree
incident 22/27 non-lead workspaces stuck in ``degraded``.
Resolution order (this function):
1. ``MODEL`` env var picked_model. Authoritative when set; the
persona env always sets it alongside ``MODEL_PROVIDER`` so the
model id never has to be inferred.
2. ``MODEL_PROVIDER`` env var explicit_provider, BUT only when the
value matches a known provider name in the registry. This guards
against the legacy case where some callers still set
``MODEL_PROVIDER`` to a model id (e.g. canvas Save+Restart prior to
this fix). If the value isn't a registered provider name and YAML
didn't supply a model, treat it as a model id for back-compat.
3. YAML ``runtime_config.model`` / ``provider`` used for any field
the env didn't supply. Carries the operator's canvas selection
on workspaces that haven't yet adopted the persona env shape.
Returns ``(picked_model, explicit_provider_name)``. Either may be
empty/None the caller (``setup``) handles the empty cases via
``_resolve_provider``'s registry fallback.
"""
env_model = (os.environ.get("MODEL") or "").strip()
env_provider = (os.environ.get("MODEL_PROVIDER") or "").strip()
provider_names_lower = {p.get("name", "").lower() for p in providers}
# Detect whether MODEL_PROVIDER carries the persona-convention slug
# (provider name) vs. the legacy convention (model id). Persona-
# convention wins when the value matches a registered provider; we
# fall back to legacy interpretation only when it doesn't.
#
# First, apply the alias map so persona-friendly slugs like
# ``claude-code`` resolve to the canonical registry name
# ``anthropic-oauth``. Without this, a lead workspace's
# ``MODEL_PROVIDER=claude-code`` env would fall through to the model-
# prefix matcher, see ``MODEL=MiniMax-M2.7`` and mis-route to MiniMax
# even though the operator's intent (and the OAuth token they set)
# was the OAuth subscription path.
env_provider_resolved = _PROVIDER_SLUG_ALIASES.get(
env_provider.lower(), env_provider,
) if env_provider else ""
env_provider_is_slug = (
bool(env_provider_resolved)
and env_provider_resolved.lower() in provider_names_lower
)
# Picked model resolution
if env_model:
picked_model = env_model
elif env_provider and not env_provider_is_slug:
# Legacy: MODEL_PROVIDER env carried the model id. Honor it so
# canvas Save+Restart workflows that predate this fix keep working.
picked_model = env_provider
else:
picked_model = yaml_model or ""
# Explicit provider resolution — env wins when it's a registered slug
# (after alias mapping), otherwise fall back to YAML.
#
# YAML aliasing: the molecule-runtime wheel (config.py) auto-derives
# ``runtime_config.provider`` from the YAML/default model slug — the
# default model ``anthropic:claude-opus-4-7`` yields ``anthropic`` as
# the inferred provider. Without applying the alias map here, that
# auto-derived ``anthropic`` slug fails registry lookup and the
# adapter raises ValueError ("provider='anthropic' but it is not in
# the providers registry"), wedging the workspace at boot. The alias
# map already handles this for the env-var path above; mirror the
# same treatment for the YAML path so the runtime-wheel default
# produces a registered provider name in both cases. Caught
# 2026-05-09 on staging-cplead-2 — every workspace booted with
# ``configuration_status=not_configured`` because the YAML provider
# ``anthropic`` was passed through verbatim instead of being aliased
# to ``anthropic-api``.
if env_provider_is_slug:
explicit_provider = env_provider_resolved
elif yaml_provider:
yp_lower = yaml_provider.lower()
explicit_provider = _PROVIDER_SLUG_ALIASES.get(yp_lower, yaml_provider)
else:
explicit_provider = None
return picked_model, explicit_provider
def _strip_provider_prefix(model: str) -> str: def _strip_provider_prefix(model: str) -> str:
"""Strip LangChain-style "<provider>:<model>" prefix from a model id. """Strip LangChain-style "<provider>:<model>" prefix from a model id.
@ -490,28 +280,13 @@ def _project_vendor_auth(provider: dict) -> None:
return return
def _resolve_provider( def _resolve_provider(model: str, providers: tuple) -> dict:
model: str,
providers: tuple,
explicit_provider: str = None,
) -> dict:
"""Return the provider entry matching this model id. """Return the provider entry matching this model id.
If ``explicit_provider`` is given (set via the ``provider:`` field in Match is case-insensitive: prefix wins over alias when both could
workspace config.yaml or runtime_config), look up by name first. If the apply. Unknown ids fall back to the first provider in the registry
named provider is not in the registry, RAISE ``ValueError`` with an (by convention, the OAuth/safest default anthropic-oauth in both
actionable message silent fallback to ``providers[0]`` is the bug _BUILTIN_PROVIDERS and the shipped config.yaml).
that motivated #180 (workspace operator picks ``provider: minimax``
in the canvas Config tab, the adapter ignores it, the Claude SDK
silently keeps using ``CLAUDE_CODE_OAUTH_TOKEN`` and the operator has
no way to tell from the canvas that their provider switch did
nothing).
Without an explicit name: match is case-insensitive, prefix wins over
alias when both could apply, and unknown ids fall back to the first
provider in the registry (by convention, the OAuth/safest default
``anthropic-oauth`` in both _BUILTIN_PROVIDERS and the shipped
config.yaml).
Pre-condition: ``providers`` is non-empty. _load_providers always Pre-condition: ``providers`` is non-empty. _load_providers always
returns at least one entry (built-ins when YAML is missing or every returns at least one entry (built-ins when YAML is missing or every
@ -523,44 +298,6 @@ def _resolve_provider(
"_load_providers must always return at least one entry " "_load_providers must always return at least one entry "
"(falling back to _BUILTIN_PROVIDERS when needed)" "(falling back to _BUILTIN_PROVIDERS when needed)"
) )
# Explicit provider name takes precedence — fail fast if it's not in
# the registry. Anything else would silently route the operator's
# picked provider through the wrong auth/base_url path. The error
# message tells them exactly which two paths fix it.
if explicit_provider:
ep_lower = explicit_provider.lower()
for provider in providers:
if provider["name"].lower() == ep_lower:
return provider
names = ", ".join(p["name"] for p in providers)
raise ValueError(
f"claude-code adapter: workspace config picks "
f"provider='{explicit_provider}' but it is not in the "
f"providers registry.\n"
f"\n"
f"Known providers: {names}\n"
f"\n"
f"Two ways to fix:\n"
f" (a) Add '{explicit_provider}' to /configs/config.yaml as a "
f"providers: entry. Required keys:\n"
f" providers:\n"
f" - name: {explicit_provider}\n"
f" auth_mode: third_party_anthropic_compat\n"
f" base_url: https://... # provider's Anthropic-compat endpoint\n"
f" auth_env: [{explicit_provider.upper()}_API_KEY]\n"
f" model_prefixes: [...]\n"
f" (b) Switch the workspace runtime template to one that "
f"natively supports {explicit_provider} (CrewAI, LangGraph, or "
f"DeepAgents read provider/model from runtime_config and route "
f"directly without needing an Anthropic-compat shim).\n"
f"\n"
f"Note: claude-code SDK speaks the Anthropic API protocol. "
f"Providers that only expose OpenAI-compatible endpoints "
f"(MiniMax, GLM, Kimi, DeepSeek native APIs) need either an "
f"Anthropic-compat proxy in front, or option (b)."
)
if not model: if not model:
return providers[0] return providers[0]
m = model.lower() m = model.lower()
@ -663,52 +400,9 @@ class ClaudeCodeAdapter(BaseAdapter):
# validation + ANTHROPIC_BASE_URL routing from that single decision. # validation + ANTHROPIC_BASE_URL routing from that single decision.
rc = config.runtime_config rc = config.runtime_config
if isinstance(rc, dict): if isinstance(rc, dict):
yaml_model = rc.get("model") or "" picked_model = rc.get("model") or "sonnet"
yaml_provider_name = rc.get("provider") or ""
else: else:
yaml_model = getattr(rc, "model", None) or "" picked_model = getattr(rc, "model", None) or "sonnet"
yaml_provider_name = getattr(rc, "provider", None) or ""
# Also honor the top-level `provider:` field in /configs/config.yaml.
# The canvas Config-tab Provider dropdown writes there (not into
# runtime_config) on some legacy paths. Either source is canonical;
# whichever is set wins. Root cause of #180: the adapter used to
# ignore both, silently routing every non-Anthropic provider pick
# through anthropic-oauth.
if not yaml_provider_name:
yaml_path = os.path.join(config.config_path, "config.yaml")
try:
import yaml # transitive dep via molecule-ai-workspace-runtime
with open(yaml_path, "r") as f:
data = yaml.safe_load(f) or {}
if isinstance(data, dict):
val = data.get("provider")
if isinstance(val, str) and val.strip():
yaml_provider_name = val.strip()
except FileNotFoundError:
pass
except Exception as exc: # noqa: BLE001 — defensive: never block boot
logger.warning(
"providers: failed to read top-level provider: from %s (%s); "
"falling back to model-based resolution",
yaml_path, exc,
)
# Reconcile env vars (persona convention: MODEL=<id>,
# MODEL_PROVIDER=<slug>) against YAML. Env wins over YAML — the
# persona env files are the canonical per-agent provider mapping
# (Phase 2 mapping 2026-05-08), and the workspace-runtime wheel's
# legacy ``MODEL_PROVIDER``-as-model-id reading would otherwise
# silently route non-leads to providers[0] = anthropic-oauth.
# Documented in detail at _resolve_model_and_provider_from_env.
picked_model, explicit_provider_name = _resolve_model_and_provider_from_env(
yaml_model=yaml_model,
yaml_provider=yaml_provider_name,
providers=providers,
)
if not picked_model:
picked_model = "sonnet"
# NOTE: do NOT strip the provider prefix here. The pre-fix routing # NOTE: do NOT strip the provider prefix here. The pre-fix routing
# behavior — `anthropic:claude-opus-4-7` falls through to # behavior — `anthropic:claude-opus-4-7` falls through to
# providers[0] (anthropic-oauth) when no model_prefixes match — is # providers[0] (anthropic-oauth) when no model_prefixes match — is
@ -717,15 +411,7 @@ class ClaudeCodeAdapter(BaseAdapter):
# `anthropic-api` provider and the CLI then hangs at `initialize` # `anthropic-api` provider and the CLI then hangs at `initialize`
# because ANTHROPIC_API_KEY isn't set. The strip belongs only at # because ANTHROPIC_API_KEY isn't set. The strip belongs only at
# the CLI invocation site (create_executor below). # the CLI invocation site (create_executor below).
# provider = _resolve_provider(picked_model, providers)
# Pass the explicit provider name through so _resolve_provider
# raises ValueError with an actionable message (instead of silently
# routing to providers[0]) when an operator picks a provider that
# isn't in the registry. See #180.
provider = _resolve_provider(
picked_model, providers,
explicit_provider=explicit_provider_name,
)
auth_env_options = provider["auth_env"] auth_env_options = provider["auth_env"]
# Project the per-vendor API key (MINIMAX_API_KEY, GLM_API_KEY, # Project the per-vendor API key (MINIMAX_API_KEY, GLM_API_KEY,
@ -836,26 +522,9 @@ class ClaudeCodeAdapter(BaseAdapter):
# RuntimeConfig dataclass. Read `model` defensively from either shape. # RuntimeConfig dataclass. Read `model` defensively from either shape.
rc = config.runtime_config rc = config.runtime_config
if isinstance(rc, dict): if isinstance(rc, dict):
yaml_model = rc.get("model") or "" explicit_model = rc.get("model") or ""
yaml_provider = rc.get("provider") or ""
else: else:
yaml_model = getattr(rc, "model", None) or "" explicit_model = getattr(rc, "model", None) or ""
yaml_provider = getattr(rc, "provider", None) or ""
# Reconcile against env vars (persona convention: MODEL=<id>,
# MODEL_PROVIDER=<slug>) using the same helper that ``setup`` uses,
# so the executor and the boot banner agree on the picked model.
# Without this, a workspace whose env says ``MODEL=MiniMax-M2.7``
# but whose runtime wheel pre-dates the persona-env fix would set
# runtime_config.model="minimax" (the slug, mistakenly read by the
# legacy ``MODEL_PROVIDER``-as-model-id path); this helper restores
# the correct model id before it reaches the SDK.
providers = _load_providers(config.config_path)
explicit_model, _ = _resolve_model_and_provider_from_env(
yaml_model=yaml_model,
yaml_provider=yaml_provider,
providers=providers,
)
explicit_model = _strip_provider_prefix(explicit_model) explicit_model = _strip_provider_prefix(explicit_model)
# Pre-validation: detect the misconfiguration combo that drove the # Pre-validation: detect the misconfiguration combo that drove the
@ -886,7 +555,7 @@ class ClaudeCodeAdapter(BaseAdapter):
"The default fallback ('sonnet') is an Anthropic-native " "The default fallback ('sonnet') is an Anthropic-native "
"alias; non-Anthropic shims (MiniMax, OpenAI gateways, " "alias; non-Anthropic shims (MiniMax, OpenAI gateways, "
"etc.) won't recognize it and the SDK --print probe will " "etc.) won't recognize it and the SDK --print probe will "
"hang for 30s before timing out. Fix: set MODEL " "hang for 30s before timing out. Fix: set MODEL_PROVIDER "
"as a workspace secret (canvas: Save+Restart with model " "as a workspace secret (canvas: Save+Restart with model "
"picked) or set runtime_config.model in /configs/config.yaml." "picked) or set runtime_config.model in /configs/config.yaml."
) )

View File

@ -1,89 +0,0 @@
"""Shared pytest fixtures + import shims for the adapter test suite.
`adapter.py` imports at module load:
- molecule_runtime.adapters.base (BaseAdapter, AdapterConfig, RuntimeCapabilities)
- molecule_runtime.plugins (lazy in setup(), but stubbed proactively)
- a2a.server.agent_execution (AgentExecutor)
- claude_sdk_executor (lazy in create_executor(), stubbed proactively)
In production those arrive transitively via molecule-ai-workspace-runtime.
The CI runner only installs `pytest pytest-asyncio pyyaml`, so the import
chain would fail with ModuleNotFoundError before any test collects —
exactly the failure that broke CI on the #180 fix branch (PR #4) and
caused the merge wall to block on a green local but red Gitea CI.
Putting the stub installer here (collected before any test module is
imported, per pytest semantics) means every test file can do
`from adapter import ...` at module top without a per-file boilerplate
copy. It also forces a single shape for the stubs so two files can't
silently disagree on whether `BaseAdapter` has
`install_plugins_via_registry` (see test_adapter_prevalidate's
async-setup tests, which need the method to exist on the parent class).
"""
import os
import sys
import types
from dataclasses import dataclass
from unittest.mock import MagicMock
@dataclass
class _StubRuntimeCapabilities:
    """Minimal stand-in for ``molecule_runtime.adapters.base.RuntimeCapabilities``."""

    # Conservative default: the stub advertises no native-session support.
    provides_native_session: bool = False
@dataclass
class _StubAdapterConfig:
    """Minimal stand-in for ``molecule_runtime.adapters.base.AdapterConfig``."""

    # adapter.py reads runtime_config defensively (dict or attribute
    # object), so None is a safe default; tests set what they need.
    runtime_config: object = None
    # Config directory; tests typically point this at a tmp_path.
    config_path: str = "/tmp/configs"
    system_prompt: str = ""
    heartbeat: object = None
class _StubBaseAdapter:
async def install_plugins_via_registry(self, *_args, **_kwargs):
pass
def _install_stubs() -> None:
    """Install the smallest set of import shims that adapter.py needs.

    Idempotent per top-level package: each shim family is only built when
    its root module is absent from ``sys.modules``, so re-running never
    clobbers stubs that are already in place.
    """
    if "molecule_runtime" not in sys.modules:
        # Build leaf-to-root, then register every dotted name so both
        # ``import molecule_runtime.adapters.base`` and attribute access
        # through the root module resolve to the same objects.
        base = types.ModuleType("molecule_runtime.adapters.base")
        base.BaseAdapter = _StubBaseAdapter
        base.AdapterConfig = _StubAdapterConfig
        base.RuntimeCapabilities = _StubRuntimeCapabilities

        adapters = types.ModuleType("molecule_runtime.adapters")
        adapters.base = base

        plugins = types.ModuleType("molecule_runtime.plugins")
        plugins.load_plugins = lambda **_kwargs: []

        root = types.ModuleType("molecule_runtime")
        root.adapters = adapters
        root.plugins = plugins

        sys.modules["molecule_runtime"] = root
        sys.modules["molecule_runtime.adapters"] = adapters
        sys.modules["molecule_runtime.adapters.base"] = base
        sys.modules["molecule_runtime.plugins"] = plugins

    if "a2a" not in sys.modules:
        agent_execution = types.ModuleType("a2a.server.agent_execution")
        # A bare class is enough — adapter.py only subclasses/references it.
        agent_execution.AgentExecutor = type("AgentExecutor", (), {})

        server = types.ModuleType("a2a.server")
        server.agent_execution = agent_execution

        a2a_root = types.ModuleType("a2a")
        a2a_root.server = server

        sys.modules["a2a"] = a2a_root
        sys.modules["a2a.server"] = server
        sys.modules["a2a.server.agent_execution"] = agent_execution

    if "claude_sdk_executor" not in sys.modules:
        executor_mod = types.ModuleType("claude_sdk_executor")
        executor_mod.ClaudeSDKExecutor = MagicMock(name="ClaudeSDKExecutor")
        sys.modules["claude_sdk_executor"] = executor_mod
# Run at conftest import time — pytest collects conftest.py before any
# test module, so the stubs are in sys.modules before `from adapter
# import ...` ever executes.
_install_stubs()
# adapter.py lives in the parent dir of tests/ (template root). pytest's
# `--import-mode=importlib` + tests/pytest.ini anchoring rootdir at
# tests/ means the parent isn't on sys.path automatically. Add it here
# once so every test file can do `from adapter import ...` cleanly.
# Prepend (insert at index 0) so the template's adapter.py shadows any
# installed module that happens to share the name.
_PARENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if _PARENT_DIR not in sys.path:
    sys.path.insert(0, _PARENT_DIR)

View File

@ -514,15 +514,8 @@ async def test_setup_auth_token_alone_satisfies_third_party_check(
# ---- _load_providers / _resolve_provider unit tests ---- # ---- _load_providers / _resolve_provider unit tests ----
def test_load_providers_returns_builtin_when_yaml_missing(tmp_path, monkeypatch): def test_load_providers_returns_builtin_when_yaml_missing(tmp_path):
"""FileNotFoundError path returns the in-code defaults verbatim. """FileNotFoundError path returns the in-code defaults verbatim."""
Monkeypatches the canonical + template paths to a non-existent dir
so only the workspace config_path is in scope. Without this, the
multi-path lookup picks up the repo-root config.yaml that ships
with the template (path 2 finds the bundled providers list and
returns it instead of falling through to builtins).
"""
_install_stubs() _install_stubs()
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if parent_dir not in sys.path: if parent_dir not in sys.path:
@ -530,10 +523,6 @@ def test_load_providers_returns_builtin_when_yaml_missing(tmp_path, monkeypatch)
sys.modules.pop("adapter", None) sys.modules.pop("adapter", None)
import adapter as adapter_module import adapter as adapter_module
nonexistent = str(tmp_path / "_isolate_canonical")
monkeypatch.setattr(adapter_module, "_CANONICAL_ADAPTER_DIR", nonexistent)
monkeypatch.setattr(adapter_module, "_TEMPLATE_DIR", nonexistent)
result = adapter_module._load_providers(str(tmp_path)) result = adapter_module._load_providers(str(tmp_path))
assert result == adapter_module._BUILTIN_PROVIDERS assert result == adapter_module._BUILTIN_PROVIDERS
@ -587,12 +576,8 @@ async def test_setup_routes_extra_providers(
assert os.environ.get("ANTHROPIC_BASE_URL") == expected_url assert os.environ.get("ANTHROPIC_BASE_URL") == expected_url
def test_load_providers_falls_back_on_malformed_yaml(tmp_path, caplog, monkeypatch): def test_load_providers_falls_back_on_malformed_yaml(tmp_path, caplog):
"""Malformed YAML → log warning + fallback (don't kill boot). """Malformed YAML → log warning + fallback (don't kill boot)."""
Isolated from the multi-path lookup by pinning canonical + template
dirs at a non-existent path; only the workspace config_path is read.
"""
_install_stubs() _install_stubs()
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if parent_dir not in sys.path: if parent_dir not in sys.path:
@ -600,10 +585,6 @@ def test_load_providers_falls_back_on_malformed_yaml(tmp_path, caplog, monkeypat
sys.modules.pop("adapter", None) sys.modules.pop("adapter", None)
import adapter as adapter_module import adapter as adapter_module
nonexistent = str(tmp_path / "_isolate_canonical")
monkeypatch.setattr(adapter_module, "_CANONICAL_ADAPTER_DIR", nonexistent)
monkeypatch.setattr(adapter_module, "_TEMPLATE_DIR", nonexistent)
(tmp_path / "config.yaml").write_text("providers: [not valid yaml: {{{") (tmp_path / "config.yaml").write_text("providers: [not valid yaml: {{{")
import logging import logging
@ -641,7 +622,7 @@ def test_resolve_provider_minimax_prefix_matches_minimax_provider():
assert result2["name"] == "minimax" assert result2["name"] == "minimax"
def test_load_providers_drops_bad_entry_keeps_rest(tmp_path, caplog, monkeypatch): def test_load_providers_drops_bad_entry_keeps_rest(tmp_path, caplog):
"""Per-entry isolation: one malformed entry shouldn't nuke the registry. """Per-entry isolation: one malformed entry shouldn't nuke the registry.
Pre-fix: ``_load_providers`` built the registry via a generator inside Pre-fix: ``_load_providers`` built the registry via a generator inside
@ -653,9 +634,6 @@ def test_load_providers_drops_bad_entry_keeps_rest(tmp_path, caplog, monkeypatch
Post-fix: per-entry try/except drops the bad entry with a warning, Post-fix: per-entry try/except drops the bad entry with a warning,
rest of the registry survives. rest of the registry survives.
Isolated from the multi-path lookup so only the test's tmp config.yaml
is read.
""" """
_install_stubs() _install_stubs()
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@ -664,10 +642,6 @@ def test_load_providers_drops_bad_entry_keeps_rest(tmp_path, caplog, monkeypatch
sys.modules.pop("adapter", None) sys.modules.pop("adapter", None)
import adapter as adapter_module import adapter as adapter_module
nonexistent = str(tmp_path / "_isolate_canonical")
monkeypatch.setattr(adapter_module, "_CANONICAL_ADAPTER_DIR", nonexistent)
monkeypatch.setattr(adapter_module, "_TEMPLATE_DIR", nonexistent)
yaml_with_typo = textwrap.dedent(""" yaml_with_typo = textwrap.dedent("""
providers: providers:
- name: good-zai - name: good-zai
@ -716,7 +690,7 @@ def test_load_providers_drops_bad_entry_keeps_rest(tmp_path, caplog, monkeypatch
) )
def test_load_providers_string_as_prefix_does_not_split_into_chars(tmp_path, caplog, monkeypatch): def test_load_providers_string_as_prefix_does_not_split_into_chars(tmp_path, caplog):
"""A YAML field declared as list-of-strings but written as a bare """A YAML field declared as list-of-strings but written as a bare
string (operator forgot brackets) used to silently iterate over string (operator forgot brackets) used to silently iterate over
characters ``('m','i','m','o','-')``. Post-fix: non-list value characters ``('m','i','m','o','-')``. Post-fix: non-list value
@ -731,10 +705,6 @@ def test_load_providers_string_as_prefix_does_not_split_into_chars(tmp_path, cap
sys.modules.pop("adapter", None) sys.modules.pop("adapter", None)
import adapter as adapter_module import adapter as adapter_module
nonexistent = str(tmp_path / "_isolate_canonical")
monkeypatch.setattr(adapter_module, "_CANONICAL_ADAPTER_DIR", nonexistent)
monkeypatch.setattr(adapter_module, "_TEMPLATE_DIR", nonexistent)
yaml_str_prefix = textwrap.dedent(""" yaml_str_prefix = textwrap.dedent("""
providers: providers:
- name: typo-prefix - name: typo-prefix
@ -753,7 +723,7 @@ def test_load_providers_string_as_prefix_does_not_split_into_chars(tmp_path, cap
) )
def test_load_providers_drops_entry_without_name(tmp_path, caplog, monkeypatch): def test_load_providers_drops_entry_without_name(tmp_path, caplog):
"""An entry without ``name`` is operator error — no silent fallback """An entry without ``name`` is operator error — no silent fallback
to ``<unnamed>``. Drop the entry with a warning so the boot log to ``<unnamed>``. Drop the entry with a warning so the boot log
surfaces the typo. surfaces the typo.
@ -765,10 +735,6 @@ def test_load_providers_drops_entry_without_name(tmp_path, caplog, monkeypatch):
sys.modules.pop("adapter", None) sys.modules.pop("adapter", None)
import adapter as adapter_module import adapter as adapter_module
nonexistent = str(tmp_path / "_isolate_canonical")
monkeypatch.setattr(adapter_module, "_CANONICAL_ADAPTER_DIR", nonexistent)
monkeypatch.setattr(adapter_module, "_TEMPLATE_DIR", nonexistent)
yaml_no_name = textwrap.dedent(""" yaml_no_name = textwrap.dedent("""
providers: providers:
- name: good - name: good

View File

@ -1,287 +0,0 @@
"""Tests for ``_resolve_model_and_provider_from_env`` — the env-vs-YAML
reconciliation that fixes the 2026-05-08 dev-tree wedge incident.
Symptom: 22/27 non-lead workspaces (minimax tier) wedged on
``Control request timeout: initialize`` because the runtime wheel's
``workspace/config.py`` interpreted ``MODEL_PROVIDER=minimax`` as the
*model id* instead of the provider slug. ``model="minimax"`` failed to
match the ``minimax-`` registry prefix, fell through to providers[0]
(anthropic-oauth), demanded ``CLAUDE_CODE_OAUTH_TOKEN`` (unset on
non-leads), and the claude CLI hung at SDK init.
The persona env files (``~/.molecule-ai/personas/<name>/env``) declare
the new convention:
* ``MODEL`` — model id (e.g. ``MiniMax-M2.7-highspeed``)
* ``MODEL_PROVIDER`` — provider slug (e.g. ``minimax``)
These tests cover the matrix of (env shape) × (YAML shape) so a future
contributor can't silently regress the wedge fix.
"""
import pytest
from adapter import (
_BUILTIN_PROVIDERS,
_resolve_model_and_provider_from_env,
)
# A registry that contains both anthropic-oauth (providers[0]) and
# minimax/zai (third-party slugs) — matches the shipped config.yaml.
_THIRD_PARTY_PROVIDERS = (
    {
        "name": "minimax",
        "auth_mode": "third_party_anthropic_compat",
        "model_prefixes": ("minimax-",),
        "model_aliases": (),
        "base_url": "https://api.minimax.io/anthropic",
        "auth_env": ("MINIMAX_API_KEY",),
    },
    {
        "name": "zai",
        "auth_mode": "third_party_anthropic_compat",
        "model_prefixes": ("glm-",),
        "model_aliases": (),
        "base_url": "https://api.z.ai/api/anthropic",
        "auth_env": ("GLM_API_KEY",),
    },
)
_REGISTRY = _BUILTIN_PROVIDERS + _THIRD_PARTY_PROVIDERS
def _clear_env(monkeypatch):
monkeypatch.delenv("MODEL", raising=False)
monkeypatch.delenv("MODEL_PROVIDER", raising=False)
# ------------------------------------------------------------------
# Persona env convention: MODEL=<id>, MODEL_PROVIDER=<slug>
# ------------------------------------------------------------------
def test_persona_env_minimax_resolves_correctly(monkeypatch):
    """The 2026-05-08 wedge regression test: the persona env shape
    (MODEL=<id>, MODEL_PROVIDER=<slug>) must come back as the model id
    ``MiniMax-M2.7-highspeed`` — not the slug ``minimax`` — together
    with the explicit provider ``minimax``."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL_PROVIDER", "minimax")
    monkeypatch.setenv("MODEL", "MiniMax-M2.7-highspeed")
    resolved = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved == ("MiniMax-M2.7-highspeed", "minimax")
def test_persona_env_lead_claude_code_resolves_correctly(monkeypatch):
    """Lead persona env (MODEL=opus, MODEL_PROVIDER=claude-code):
    ``claude-code`` is the persona-friendly alias for the canonical
    ``anthropic-oauth`` registry name. Resolution must go through the
    alias map so the lead boots via the OAuth subscription path even
    when MODEL is a non-Anthropic id (e.g. an operator who picked
    MiniMax in canvas but whose persona env still pins claude-code)."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL_PROVIDER", "claude-code")
    monkeypatch.setenv("MODEL", "opus")
    resolved_model, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved_model == "opus"
    assert resolved_provider == "anthropic-oauth"  # claude-code → alias map
def test_persona_env_lead_with_minimax_model_routes_via_oauth(monkeypatch):
    """A lead whose persona pins MODEL_PROVIDER=claude-code but whose
    YAML/canvas selection happens to be a MiniMax model still routes via
    OAuth — the persona's provider pin wins over the model-prefix
    matcher. Without the alias map, the fall-through mis-routed leads to
    MiniMax even when their CLAUDE_CODE_OAUTH_TOKEN was set."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL_PROVIDER", "claude-code")
    monkeypatch.setenv("MODEL", "MiniMax-M2.7")
    resolved = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved == ("MiniMax-M2.7", "anthropic-oauth")
def test_anthropic_alias_resolves_to_anthropic_api(monkeypatch):
    """``MODEL_PROVIDER=anthropic`` aliases to ``anthropic-api`` — the
    direct Anthropic API-key path."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL_PROVIDER", "anthropic")
    monkeypatch.setenv("MODEL", "claude-opus-4-7")
    resolved = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved == ("claude-opus-4-7", "anthropic-api")
def test_persona_env_glm_resolves_correctly(monkeypatch):
    """GLM persona env shape resolves to the ``zai`` provider with the
    model id passed through unchanged."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL_PROVIDER", "zai")
    monkeypatch.setenv("MODEL", "GLM-4.6")
    resolved = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved == ("GLM-4.6", "zai")
def test_env_provider_slug_case_insensitive(monkeypatch):
    """Operator typos like ``MiniMax`` (mixed case) still resolve: the
    slug is passed through verbatim and the caller compares provider
    names case-insensitively."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL_PROVIDER", "MiniMax")  # mixed case on purpose
    monkeypatch.setenv("MODEL", "MiniMax-M2.7-highspeed")
    _, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved_provider == "MiniMax"
# ------------------------------------------------------------------
# Legacy convention: MODEL_PROVIDER=<model-id>, MODEL unset
# ------------------------------------------------------------------
def test_legacy_model_provider_as_model_id_still_works(monkeypatch):
    """Pre-2026-05-08 canvas Save+Restart shape: MODEL_PROVIDER carried
    the model id directly (``MODEL_PROVIDER=MiniMax-M2.7-...``) with no
    MODEL env. Must keep working so existing canvas users don't break
    overnight."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL_PROVIDER", "MiniMax-M2.7-highspeed")
    resolved_model, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="", providers=_REGISTRY,
    )
    # Not a registered provider name → treated as a legacy
    # model-id-in-MODEL_PROVIDER value, with no explicit provider.
    assert resolved_model == "MiniMax-M2.7-highspeed"
    assert resolved_provider is None
# ------------------------------------------------------------------
# Env wins over YAML
# ------------------------------------------------------------------
def test_env_model_wins_over_yaml_model(monkeypatch):
    """Env MODEL beats the YAML model when both are set."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL", "GLM-4.6")
    resolved_model, _ = _resolve_model_and_provider_from_env(
        yaml_model="MiniMax-M2.7", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved_model == "GLM-4.6"
def test_env_provider_wins_over_yaml_provider(monkeypatch):
    """Env MODEL_PROVIDER (when it is a registered slug) beats the YAML
    provider."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL_PROVIDER", "zai")
    monkeypatch.setenv("MODEL", "GLM-4.6")
    _, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="minimax", providers=_REGISTRY,
    )
    assert resolved_provider == "zai"
# ------------------------------------------------------------------
# YAML fallback (no env)
# ------------------------------------------------------------------
def test_no_env_falls_back_to_yaml(monkeypatch):
    """A workspace whose env sets neither MODEL nor MODEL_PROVIDER falls
    back to the YAML config — preserves existing operator workflows."""
    _clear_env(monkeypatch)
    resolved = _resolve_model_and_provider_from_env(
        yaml_model="claude-sonnet-4-6",
        yaml_provider="anthropic-api",
        providers=_REGISTRY,
    )
    assert resolved == ("claude-sonnet-4-6", "anthropic-api")
def test_no_env_no_yaml_returns_empty(monkeypatch):
    """Pure default path — the caller (``setup``) substitutes ``sonnet``."""
    _clear_env(monkeypatch)
    resolved_model, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved_model == ""
    assert resolved_provider is None
def test_yaml_provider_anthropic_is_aliased_to_anthropic_api(monkeypatch):
    """Regression for the 2026-05-09 staging-cplead-2 incident: the
    molecule-runtime wheel auto-derives ``runtime_config.provider =
    "anthropic"`` from the default model slug ``anthropic:claude-opus-4-7``,
    and the adapter used to reject that with ``ValueError: provider=
    'anthropic' but it is not in the providers registry`` — even though
    ``anthropic`` is already in ``_PROVIDER_SLUG_ALIASES`` for the
    env-var path. The YAML path must mirror the alias map so the wheel
    default produces a registered provider name."""
    _clear_env(monkeypatch)
    _, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="anthropic", providers=_REGISTRY,
    )
    assert resolved_provider == "anthropic-api", (
        f"yaml_provider='anthropic' must resolve through the alias map to "
        f"'anthropic-api'; got {resolved_provider!r}. Without this aliasing "
        f"the wheel-default workspace boot wedges at adapter.setup()."
    )
def test_yaml_provider_claude_code_is_aliased_to_anthropic_oauth(monkeypatch):
    """Symmetric coverage: the persona-friendly ``claude-code`` slug in
    the YAML ``provider:`` field must alias to ``anthropic-oauth``
    exactly like the env-var path does. Leads that pin the OAuth path in
    YAML (instead of via env) must not wedge."""
    _clear_env(monkeypatch)
    _, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="claude-code", providers=_REGISTRY,
    )
    assert resolved_provider == "anthropic-oauth"
def test_yaml_provider_unknown_passes_through_for_actionable_error(monkeypatch):
    """An unaliased, unknown YAML provider (``yaml_provider="mystery"``)
    must NOT be silently swapped to providers[0] — it has to reach
    ``_resolve_provider`` untouched so the adapter raises the actionable
    ``Known providers: ...`` message. The alias map is a convenience for
    the two persona-convention slugs only; everything else keeps its
    original semantics."""
    _clear_env(monkeypatch)
    _, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="", yaml_provider="mystery", providers=_REGISTRY,
    )
    assert resolved_provider == "mystery"
# ------------------------------------------------------------------
# Whitespace / empty-value defensive cases
# ------------------------------------------------------------------
def test_whitespace_only_env_treated_as_unset(monkeypatch):
    """Whitespace-only env values behave exactly like unset ones —
    resolution falls back to the YAML model."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL", "   ")
    monkeypatch.setenv("MODEL_PROVIDER", "   ")
    resolved_model, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="opus", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved_model == "opus"
    assert resolved_provider is None
def test_empty_env_value_treated_as_unset(monkeypatch):
    """Empty-string env values behave exactly like unset ones."""
    _clear_env(monkeypatch)
    monkeypatch.setenv("MODEL", "")
    monkeypatch.setenv("MODEL_PROVIDER", "")
    resolved_model, resolved_provider = _resolve_model_and_provider_from_env(
        yaml_model="sonnet", yaml_provider="", providers=_REGISTRY,
    )
    assert resolved_model == "sonnet"
    assert resolved_provider is None

View File

@ -1,146 +0,0 @@
"""Tests for the provider-resolution path that was silent-failing on #180.
Regression coverage: when an operator picks a provider in the canvas Config
tab that isn't in the registry, the adapter must raise ValueError with an
actionable message NOT silently fall through to providers[0]
(anthropic-oauth) and then have the Claude SDK hit the user's OAuth quota
under a different name.
These tests mirror the production failure mode reported by Hongming
2026-05-07 17:35: workspace config.yaml had `provider: minimax` set, the
adapter ignored it entirely, the SDK kept calling the Anthropic API with
CLAUDE_CODE_OAUTH_TOKEN, hit the OAuth quota, and the canvas surfaced
"Agent error (Exception)" with no clue why.
Import-shim setup (sys.path + molecule_runtime / a2a / claude_sdk_executor
stubs) lives in tests/conftest.py shared with test_adapter_prevalidate
so the two stub installers can't disagree on shape (e.g. BaseAdapter
having install_plugins_via_registry).
"""
import pytest
from adapter import (
_BUILTIN_PROVIDERS,
_resolve_provider,
)
def test_resolve_with_no_explicit_provider_falls_back_to_model_match():
    """No explicit provider → resolution is driven by the model id: the
    ``claude-`` prefix matches the anthropic-api entry."""
    resolved = _resolve_provider("claude-opus-4-7", _BUILTIN_PROVIDERS)
    assert resolved["name"] == "anthropic-api"
def test_resolve_with_no_explicit_provider_falls_back_to_default():
    """Unknown model and no explicit provider → providers[0]
    (anthropic-oauth), the registry default."""
    resolved = _resolve_provider("unknown-model", _BUILTIN_PROVIDERS)
    assert resolved["name"] == "anthropic-oauth"
def test_resolve_with_explicit_provider_in_registry_returns_match():
    """Explicit name lookup wins over model-based resolution: even
    though ``claude-opus-4-7`` would prefix-match anthropic-api, the
    explicit provider name takes priority."""
    resolved = _resolve_provider(
        "claude-opus-4-7", _BUILTIN_PROVIDERS,
        explicit_provider="anthropic-oauth",
    )
    assert resolved["name"] == "anthropic-oauth"
def test_resolve_with_explicit_provider_case_insensitive():
    """Provider-name matching ignores case — operators write
    'Anthropic-OAuth', 'ANTHROPIC-OAUTH', and similar."""
    resolved = _resolve_provider(
        "sonnet", _BUILTIN_PROVIDERS,
        explicit_provider="ANTHROPIC-OAUTH",
    )
    assert resolved["name"] == "anthropic-oauth"
def test_resolve_with_explicit_provider_not_in_registry_raises():
    """The #180 regression test: an explicit non-registry provider must
    raise an actionable ValueError, never fall through."""
    with pytest.raises(ValueError) as exc_info:
        _resolve_provider(
            "MiniMax-M2.7-highspeed", _BUILTIN_PROVIDERS,
            explicit_provider="minimax",
        )
    message = str(exc_info.value)
    # Names the bad provider so the operator knows what they typed.
    assert "minimax" in message
    # Lists the known providers so the operator knows what's available.
    assert "anthropic-oauth" in message
    assert "anthropic-api" in message
    # Gives actionable next steps — not just "not found".
    assert "providers:" in message or "Add" in message
    assert "Switch" in message or "runtime" in message
def test_resolve_with_explicit_provider_does_not_silent_fallback():
    """Specifically: must not return providers[0] when explicit_provider
    is bogus.

    This is the exact silent-fallback path that caused the user-visible
    bug: operator picks 'minimax' → adapter returns anthropic-oauth →
    SDK uses CLAUDE_CODE_OAUTH_TOKEN → hits quota.
    """
    # Fix for dead-code assert: statements placed after the raising call
    # *inside* a ``pytest.raises`` block never execute when the test
    # passes, so the old in-block defense-in-depth assert was
    # unreachable. Use a sentinel captured outside the context manager
    # instead — a regression to silent fallback now reports the exact
    # provider that leaked out, not just "DID NOT RAISE".
    result = None
    with pytest.raises(ValueError):
        result = _resolve_provider(
            "anything", _BUILTIN_PROVIDERS,
            explicit_provider="minimax",
        )
    assert result is None, (
        "REGRESSION: silent fallback to default provider when explicit "
        "provider name is not in registry — this is the #180 bug; got "
        f"{result!r} instead of ValueError."
    )
def test_resolve_with_explicit_provider_in_custom_registry():
    """A third-party provider added to the registry is found by explicit
    name lookup, with its configuration returned intact."""
    minimax_entry = {
        "name": "minimax",
        "auth_mode": "third_party_anthropic_compat",
        "model_prefixes": ("minimax-",),
        "model_aliases": (),
        "base_url": "https://api.minimaxi.com/anthropic-compat",
        "auth_env": ("MINIMAX_API_KEY",),
    }
    registry = _BUILTIN_PROVIDERS + (minimax_entry,)
    resolved = _resolve_provider(
        "MiniMax-M2.7-highspeed", registry,
        explicit_provider="minimax",
    )
    assert resolved["name"] == "minimax"
    assert resolved["base_url"] == "https://api.minimaxi.com/anthropic-compat"
    assert "MINIMAX_API_KEY" in resolved["auth_env"]
def test_resolve_empty_providers_raises():
    """Precondition preserved: an empty providers tuple must raise."""
    with pytest.raises(ValueError, match="empty providers tuple"):
        _resolve_provider("anything", ())
def test_resolve_explicit_empty_string_treated_as_no_explicit():
    """``provider: ''`` (the canvas can write an empty provider field)
    falls back to model-based resolution instead of raising — the user
    clearly didn't intend to break their workspace, so forgiving beats
    failing here."""
    resolved = _resolve_provider(
        "claude-opus-4-7", _BUILTIN_PROVIDERS,
        explicit_provider="",
    )
    assert resolved["name"] == "anthropic-api"  # model-based fallback
def test_resolve_explicit_none_treated_as_no_explicit():
    """``explicit_provider=None`` (the default) → model-based resolution."""
    resolved = _resolve_provider(
        "claude-opus-4-7", _BUILTIN_PROVIDERS,
        explicit_provider=None,
    )
    assert resolved["name"] == "anthropic-api"