feat: upgrade default workspace model to claude-opus-4-7 (#727)

Replace the anthropic:claude-sonnet-4-6 default with anthropic:claude-opus-4-7
across config, handlers, env example, and litellm proxy config. All tests updated
to match the new default; the sonnet-4-6 alias is kept in litellm_config.yml for
pinned workspaces.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Molecule AI Backend Engineer 2026-04-17 15:30:57 +00:00
parent 63212130e3
commit ebfafb9139
8 changed files with 20 additions and 15 deletions

View File

@@ -71,7 +71,7 @@ CEREBRAS_API_KEY= # Cerebras API key (cloud.cerebras.ai). Use with
GOOGLE_API_KEY= # Google AI API key (aistudio.google.com). Use with model: google_genai:gemini-2.5-flash GOOGLE_API_KEY= # Google AI API key (aistudio.google.com). Use with model: google_genai:gemini-2.5-flash
MAX_TOKENS=2048 # Max output tokens for OpenRouter requests (default: 2048) MAX_TOKENS=2048 # Max output tokens for OpenRouter requests (default: 2048)
LANGGRAPH_RECURSION_LIMIT=500 # LangGraph/DeepAgents max ReAct steps per turn (lib default: 25; raised to 500 — PM fan-out to 6+ reports + synthesis routinely exceeds 100) LANGGRAPH_RECURSION_LIMIT=500 # LangGraph/DeepAgents max ReAct steps per turn (lib default: 25; raised to 500 — PM fan-out to 6+ reports + synthesis routinely exceeds 100)
MODEL_PROVIDER=anthropic:claude-sonnet-4-6 # Format: provider:model. Providers: anthropic, openai, openrouter, groq, cerebras, google_genai, ollama MODEL_PROVIDER=anthropic:claude-opus-4-7 # Format: provider:model. Providers: anthropic, openai, openrouter, groq, cerebras, google_genai, ollama
# ---- Workspace tier resource limits (issue #14) ---- # ---- Workspace tier resource limits (issue #14) ----
# Per-tier memory/CPU caps applied to each workspace Docker container. # Per-tier memory/CPU caps applied to each workspace Docker container.

View File

@@ -30,6 +30,11 @@ model_list:
model: anthropic/claude-sonnet-4-6 model: anthropic/claude-sonnet-4-6
api_key: ${ANTHROPIC_API_KEY} api_key: ${ANTHROPIC_API_KEY}
- model_name: claude-opus-4-7
litellm_params:
model: anthropic/claude-opus-4-7
api_key: ${ANTHROPIC_API_KEY}
# ── OpenAI ───────────────────────────────────────────────────────────────── # ── OpenAI ─────────────────────────────────────────────────────────────────
- model_name: gpt-4o - model_name: gpt-4o
litellm_params: litellm_params:

View File

@@ -338,7 +338,7 @@ func (h *OrgHandler) createWorkspaceTree(ws OrgWorkspace, parentID *string, defa
if runtime == "claude-code" { if runtime == "claude-code" {
model = "sonnet" model = "sonnet"
} else { } else {
model = "anthropic:claude-sonnet-4-6" model = "anthropic:claude-opus-4-7"
} }
} }
tier := ws.Tier tier := ws.Tier

View File

@@ -189,7 +189,7 @@ func TestOrgDefaults_Model_FallbackClaudeCode(t *testing.T) {
if runtime == "claude-code" { if runtime == "claude-code" {
model = "sonnet" model = "sonnet"
} else { } else {
model = "anthropic:claude-sonnet-4-6" model = "anthropic:claude-opus-4-7"
} }
} }
if model != "sonnet" { if model != "sonnet" {
@@ -211,11 +211,11 @@ func TestOrgDefaults_Model_FallbackDeepAgents(t *testing.T) {
if runtime == "claude-code" { if runtime == "claude-code" {
model = "sonnet" model = "sonnet"
} else { } else {
model = "anthropic:claude-sonnet-4-6" model = "anthropic:claude-opus-4-7"
} }
} }
if model != "anthropic:claude-sonnet-4-6" { if model != "anthropic:claude-opus-4-7" {
t.Errorf("deepagents with empty model should get 'anthropic:claude-sonnet-4-6', got %q", model) t.Errorf("deepagents with empty model should get 'anthropic:claude-opus-4-7', got %q", model)
} }
} }
@@ -227,11 +227,11 @@ func TestOrgDefaults_Model_FallbackLangGraph(t *testing.T) {
if runtime == "claude-code" { if runtime == "claude-code" {
model = "sonnet" model = "sonnet"
} else { } else {
model = "anthropic:claude-sonnet-4-6" model = "anthropic:claude-opus-4-7"
} }
} }
if model != "anthropic:claude-sonnet-4-6" { if model != "anthropic:claude-opus-4-7" {
t.Errorf("langgraph with empty model should get 'anthropic:claude-sonnet-4-6', got %q", model) t.Errorf("langgraph with empty model should get 'anthropic:claude-opus-4-7', got %q", model)
} }
} }

View File

@@ -417,7 +417,7 @@ func (h *WorkspaceHandler) ensureDefaultConfig(workspaceID string, payload model
if runtime == "claude-code" { if runtime == "claude-code" {
model = "sonnet" model = "sonnet"
} else { } else {
model = "anthropic:claude-sonnet-4-6" model = "anthropic:claude-opus-4-7"
} }
} }

View File

@@ -215,7 +215,7 @@ func TestEnsureDefaultConfig_LangGraph(t *testing.T) {
if !contains(content, "tier: 1") { if !contains(content, "tier: 1") {
t.Errorf("config.yaml missing tier, got:\n%s", content) t.Errorf("config.yaml missing tier, got:\n%s", content)
} }
if !contains(content, `model: "anthropic:claude-sonnet-4-6"`) { if !contains(content, `model: "anthropic:claude-opus-4-7"`) {
t.Errorf("config.yaml should use default langgraph model, got:\n%s", content) t.Errorf("config.yaml should use default langgraph model, got:\n%s", content)
} }
} }
@@ -354,7 +354,7 @@ func TestEnsureDefaultConfig_EmptyRuntimeDefaultsToLangGraph(t *testing.T) {
if !contains(configYAML, "runtime: langgraph") { if !contains(configYAML, "runtime: langgraph") {
t.Errorf("empty runtime should default to langgraph, got:\n%s", configYAML) t.Errorf("empty runtime should default to langgraph, got:\n%s", configYAML)
} }
if !contains(configYAML, `model: "anthropic:claude-sonnet-4-6"`) { if !contains(configYAML, `model: "anthropic:claude-opus-4-7"`) {
t.Errorf("langgraph default model should be anthropic (quoted), got:\n%s", configYAML) t.Errorf("langgraph default model should be anthropic (quoted), got:\n%s", configYAML)
} }
} }

View File

@@ -197,7 +197,7 @@ class WorkspaceConfig:
description: str = "" description: str = ""
version: str = "1.0.0" version: str = "1.0.0"
tier: int = 1 tier: int = 1
model: str = "anthropic:claude-sonnet-4-6" model: str = "anthropic:claude-opus-4-7"
runtime: str = "langgraph" # langgraph | claude-code | codex | ollama | custom runtime: str = "langgraph" # langgraph | claude-code | codex | ollama | custom
runtime_config: RuntimeConfig = field(default_factory=RuntimeConfig) runtime_config: RuntimeConfig = field(default_factory=RuntimeConfig)
initial_prompt: str = "" initial_prompt: str = ""
@@ -251,7 +251,7 @@ def load_config(config_path: Optional[str] = None) -> WorkspaceConfig:
raw = yaml.safe_load(f) or {} raw = yaml.safe_load(f) or {}
# Override model from env if provided # Override model from env if provided
model = os.environ.get("MODEL_PROVIDER", raw.get("model", "anthropic:claude-sonnet-4-6")) model = os.environ.get("MODEL_PROVIDER", raw.get("model", "anthropic:claude-opus-4-7"))
runtime = raw.get("runtime", "langgraph") runtime = raw.get("runtime", "langgraph")
runtime_raw = raw.get("runtime_config", {}) runtime_raw = raw.get("runtime_config", {})

View File

@@ -52,7 +52,7 @@ def test_load_config_defaults(tmp_path):
assert cfg.description == "" assert cfg.description == ""
assert cfg.version == "1.0.0" assert cfg.version == "1.0.0"
assert cfg.tier == 1 assert cfg.tier == 1
assert cfg.model == "anthropic:claude-sonnet-4-6" assert cfg.model == "anthropic:claude-opus-4-7"
assert cfg.skills == [] assert cfg.skills == []
assert cfg.tools == [] assert cfg.tools == []
assert cfg.prompt_files == [] assert cfg.prompt_files == []