feat: add Vercel Sandbox backend

Adds Vercel Sandbox as a supported Hermes terminal backend alongside
existing providers (Local, Docker, Modal, SSH, Daytona, Singularity).

Uses the Vercel Python SDK to create/manage cloud microVMs, supports
snapshot-based filesystem persistence keyed by task_id, and integrates
with the existing BaseEnvironment shell contract and FileSyncManager
for credential/skill syncing.

Based on #17127 by @scotttrinh, cherry-picked onto current main.
This commit is contained in:
Scott Trinh 2026-04-29 18:20:53 +05:30 committed by kshitij
parent 810d98e892
commit 5a1d4f6804
32 changed files with 2241 additions and 44 deletions

1
cli.py
View File

@ -497,6 +497,7 @@ def load_cli_config() -> Dict[str, Any]:
"singularity_image": "TERMINAL_SINGULARITY_IMAGE",
"modal_image": "TERMINAL_MODAL_IMAGE",
"daytona_image": "TERMINAL_DAYTONA_IMAGE",
"vercel_runtime": "TERMINAL_VERCEL_RUNTIME",
# SSH config
"ssh_host": "TERMINAL_SSH_HOST",
"ssh_user": "TERMINAL_SSH_USER",

View File

@ -267,6 +267,7 @@ if _config_path.exists():
"singularity_image": "TERMINAL_SINGULARITY_IMAGE",
"modal_image": "TERMINAL_MODAL_IMAGE",
"daytona_image": "TERMINAL_DAYTONA_IMAGE",
"vercel_runtime": "TERMINAL_VERCEL_RUNTIME",
"ssh_host": "TERMINAL_SSH_HOST",
"ssh_user": "TERMINAL_SSH_USER",
"ssh_port": "TERMINAL_SSH_PORT",

View File

@ -499,6 +499,7 @@ DEFAULT_CONFIG = {
"singularity_image": "docker://nikolaik/python-nodejs:python3.11-nodejs20",
"modal_image": "nikolaik/python-nodejs:python3.11-nodejs20",
"daytona_image": "nikolaik/python-nodejs:python3.11-nodejs20",
"vercel_runtime": "node24",
# Container resource limits (docker, singularity, modal, daytona, vercel_sandbox — ignored for local/ssh; vercel_sandbox ignores container_disk)
"container_cpu": 1,
"container_memory": 5120, # MB (default 5GB)
@ -4193,6 +4194,9 @@ def show_config():
print(f" Daytona image: {terminal.get('daytona_image', 'nikolaik/python-nodejs:python3.11-nodejs20')}")
daytona_key = get_env_value('DAYTONA_API_KEY')
print(f" API key: {'configured' if daytona_key else '(not set)'}")
elif terminal.get('backend') == 'vercel_sandbox':
print(f" Vercel runtime: {terminal.get('vercel_runtime', 'node24')}")
print(f" Vercel auth: {'configured' if get_env_value('VERCEL_OIDC_TOKEN') or (get_env_value('VERCEL_TOKEN') and get_env_value('VERCEL_PROJECT_ID') and get_env_value('VERCEL_TEAM_ID')) else '(not set)'}")
elif terminal.get('backend') == 'ssh':
ssh_host = get_env_value('TERMINAL_SSH_HOST')
ssh_user = get_env_value('TERMINAL_SSH_USER')
@ -4385,6 +4389,7 @@ def set_config_value(key: str, value: str):
"terminal.singularity_image": "TERMINAL_SINGULARITY_IMAGE",
"terminal.modal_image": "TERMINAL_MODAL_IMAGE",
"terminal.daytona_image": "TERMINAL_DAYTONA_IMAGE",
"terminal.vercel_runtime": "TERMINAL_VERCEL_RUNTIME",
"terminal.docker_mount_cwd_to_workspace": "TERMINAL_DOCKER_MOUNT_CWD_TO_WORKSPACE",
"terminal.docker_run_as_host_user": "TERMINAL_DOCKER_RUN_AS_HOST_USER",
"terminal.cwd": "TERMINAL_CWD",

View File

@ -8,6 +8,7 @@ import os
import sys
import subprocess
import shutil
import importlib.util
from pathlib import Path
from hermes_cli.config import get_project_root, get_hermes_home, get_env_path
@ -30,6 +31,7 @@ load_dotenv(PROJECT_ROOT / ".env", override=False, encoding="utf-8")
from hermes_cli.colors import Colors, color
from hermes_cli.models import _HERMES_USER_AGENT
from hermes_cli.vercel_auth import describe_vercel_auth
from hermes_constants import OPENROUTER_MODELS_URL
from utils import base_url_host_matches
@ -863,6 +865,48 @@ def run_doctor(args):
check_fail("daytona SDK not installed", "(pip install daytona)")
issues.append("Install daytona SDK: pip install daytona")
# Vercel Sandbox (if using vercel_sandbox backend)
if terminal_env == "vercel_sandbox":
runtime = os.getenv("TERMINAL_VERCEL_RUNTIME", "node24").strip() or "node24"
if runtime in {"node24", "node22", "python3.13"}:
check_ok("Vercel runtime", f"({runtime})")
else:
check_fail("Vercel runtime unsupported", f"({runtime}; use node24, node22, or python3.13)")
issues.append("Set TERMINAL_VERCEL_RUNTIME to node24, node22, or python3.13")
disk = os.getenv("TERMINAL_CONTAINER_DISK", "51200").strip()
if disk in ("", "0", "51200"):
check_ok("Vercel disk setting", "(uses platform default)")
else:
check_fail("Vercel custom disk unsupported", "(reset terminal.container_disk to 51200)")
issues.append("Vercel Sandbox does not support custom container_disk; use the shared default 51200")
if importlib.util.find_spec("vercel") is not None:
check_ok("vercel SDK", "(installed)")
else:
check_fail("vercel SDK not installed", "(pip install 'hermes-agent[vercel]')")
issues.append("Install the Vercel optional dependency: pip install 'hermes-agent[vercel]'")
auth_status = describe_vercel_auth()
if auth_status.ok:
check_ok("Vercel auth", f"({auth_status.label})")
elif auth_status.label.startswith("partial"):
check_fail("Vercel auth incomplete", f"({auth_status.label})")
issues.append("Set VERCEL_TOKEN, VERCEL_PROJECT_ID, and VERCEL_TEAM_ID together")
else:
check_fail("Vercel auth not configured", f"({auth_status.label})")
issues.append(
"Configure Vercel Sandbox auth with VERCEL_TOKEN, VERCEL_PROJECT_ID, and VERCEL_TEAM_ID"
)
for line in auth_status.detail_lines:
check_info(f"Vercel auth {line}")
persistent = os.getenv("TERMINAL_CONTAINER_PERSISTENT", "true").lower() in ("1", "true", "yes", "on")
if persistent:
check_info("Vercel persistence: snapshot filesystem only; live processes do not survive sandbox recreation")
else:
check_info("Vercel persistence: ephemeral filesystem")
# Node.js + agent-browser (for browser automation tools)
if shutil.which("node"):
check_ok("Node.js")

View File

@ -12,6 +12,7 @@ Config files are stored in ~/.hermes/ for easy access.
"""
import importlib.util
import json
import logging
import os
import shutil
@ -139,6 +140,7 @@ from hermes_cli.config import (
load_config,
save_config,
save_env_value,
remove_env_value,
get_env_value,
ensure_hermes_home,
)
@ -655,6 +657,99 @@ def _prompt_container_resources(config: dict):
pass
def _prompt_vercel_sandbox_settings(config: dict):
    """Prompt for Vercel Sandbox settings without exposing unsupported disk sizing."""
    terminal = config.setdefault("terminal", {})
    supported = {"node24", "node22", "python3.13"}

    print()
    print_info("Vercel Sandbox settings:")
    print_info(" Filesystem persistence uses Vercel snapshots.")
    print_info(" Snapshots restore files only; live processes do not continue after sandbox recreation.")

    # Runtime: fall back to the previous (or default) value on unsupported input.
    previous_runtime = terminal.get("vercel_runtime") or "node24"
    chosen = prompt(" Runtime (node24, node22, python3.13)", previous_runtime).strip() or previous_runtime
    if chosen not in supported:
        print_warning(f"Unsupported Vercel runtime '{chosen}', keeping {previous_runtime}.")
        chosen = previous_runtime if previous_runtime in supported else "node24"
    terminal["vercel_runtime"] = chosen
    save_env_value("TERMINAL_VERCEL_RUNTIME", chosen)

    # Snapshot persistence toggle (filesystem only — never live processes).
    persist_default = "yes" if terminal.get("container_persistent", True) else "no"
    answer = prompt(
        " Persist filesystem with snapshots? (yes/no)", persist_default
    )
    terminal["container_persistent"] = answer.lower() in ("yes", "true", "y", "1")

    # CPU / memory: silently keep the previous value when the reply does not parse.
    cpu_reply = prompt(" CPU cores", str(terminal.get("container_cpu", 1)))
    try:
        terminal["container_cpu"] = float(cpu_reply)
    except ValueError:
        pass
    mem_reply = prompt(" Memory in MB (5120 = 5GB)", str(terminal.get("container_memory", 5120)))
    try:
        terminal["container_memory"] = int(mem_reply)
    except ValueError:
        pass

    # Vercel does not allow custom disk sizing — always pin the shared default.
    if terminal.get("container_disk", 51200) not in (0, 51200):
        print_warning("Vercel Sandbox does not support custom disk sizing; resetting container_disk to 51200.")
    terminal["container_disk"] = 51200

    print()
    print_info("Vercel authentication:")
    print_info(" Use a long-lived Vercel access token plus project/team IDs.")
    linked_project = _read_nearest_vercel_project()
    if linked_project:
        print_info(" Found defaults in nearest .vercel/project.json.")
    # Access-token auth supersedes any stale development-only OIDC token.
    remove_env_value("VERCEL_OIDC_TOKEN")
    token = prompt(" Vercel access token", get_env_value("VERCEL_TOKEN") or "", password=True)
    project = prompt(
        " Vercel project ID",
        get_env_value("VERCEL_PROJECT_ID") or linked_project.get("projectId", ""),
    )
    team = prompt(
        " Vercel team ID",
        get_env_value("VERCEL_TEAM_ID") or linked_project.get("orgId", ""),
    )
    if token:
        save_env_value("VERCEL_TOKEN", token)
    if project:
        save_env_value("VERCEL_PROJECT_ID", project)
    if team:
        save_env_value("VERCEL_TEAM_ID", team)
def _read_nearest_vercel_project(start: Path | None = None) -> dict[str, str]:
"""Read project/team defaults from the nearest Vercel link file."""
current = (start or Path.cwd()).resolve()
if current.is_file():
current = current.parent
for directory in (current, *current.parents):
project_file = directory / ".vercel" / "project.json"
if not project_file.exists():
continue
try:
data = json.loads(project_file.read_text(encoding="utf-8"))
except (OSError, json.JSONDecodeError):
return {}
if not isinstance(data, dict):
return {}
return {
key: value
for key, value in {
"projectId": data.get("projectId"),
"orgId": data.get("orgId"),
}.items()
if isinstance(value, str) and value.strip()
}
return {}
# Tool categories and provider config are now in tools_config.py (shared
# between `hermes tools` and `hermes setup tools`).
@ -1190,11 +1285,12 @@ def setup_terminal_backend(config: dict):
"Modal - serverless cloud sandbox",
"SSH - run on a remote machine",
"Daytona - persistent cloud development environment",
"Vercel Sandbox - cloud microVM with snapshot filesystem persistence",
]
idx_to_backend = {0: "local", 1: "docker", 2: "modal", 3: "ssh", 4: "daytona"}
backend_to_idx = {"local": 0, "docker": 1, "modal": 2, "ssh": 3, "daytona": 4}
idx_to_backend = {0: "local", 1: "docker", 2: "modal", 3: "ssh", 4: "daytona", 5: "vercel_sandbox"}
backend_to_idx = {"local": 0, "docker": 1, "modal": 2, "ssh": 3, "daytona": 4, "vercel_sandbox": 5}
next_idx = 5
next_idx = 6
if is_linux:
terminal_choices.append("Singularity/Apptainer - HPC-friendly container")
idx_to_backend[next_idx] = "singularity"
@ -1443,6 +1539,39 @@ def setup_terminal_backend(config: dict):
_prompt_container_resources(config)
elif selected_backend == "vercel_sandbox":
print_success("Terminal backend: Vercel Sandbox")
print_info("Cloud microVM sandboxes with snapshot-backed filesystem persistence.")
print_info("Requires the optional SDK: pip install 'hermes-agent[vercel]'")
try:
__import__("vercel")
except ImportError:
print_info("Installing vercel SDK...")
import subprocess
uv_bin = shutil.which("uv")
if uv_bin:
result = subprocess.run(
[uv_bin, "pip", "install", "--python", sys.executable, "vercel"],
capture_output=True,
text=True,
)
else:
result = subprocess.run(
[sys.executable, "-m", "pip", "install", "vercel"],
capture_output=True,
text=True,
)
if result.returncode == 0:
print_success("vercel SDK installed")
else:
print_warning("Install failed — run manually: pip install 'hermes-agent[vercel]'")
if result.stderr:
print_info(f" Error: {result.stderr.strip().splitlines()[-1]}")
_prompt_vercel_sandbox_settings(config)
elif selected_backend == "ssh":
print_success("Terminal backend: SSH")
print_info("Run commands on a remote machine via SSH.")
@ -1496,6 +1625,8 @@ def setup_terminal_backend(config: dict):
save_env_value("TERMINAL_ENV", selected_backend)
if selected_backend == "modal":
save_env_value("TERMINAL_MODAL_MODE", config["terminal"].get("modal_mode", "auto"))
if selected_backend == "vercel_sandbox":
save_env_value("TERMINAL_VERCEL_RUNTIME", config["terminal"].get("vercel_runtime", "node24"))
save_config(config)
print()
print_success(f"Terminal backend set to: {selected_backend}")

View File

@ -7,16 +7,18 @@ Shows the status of all Hermes Agent components.
import os
import sys
import subprocess # noqa: F401 — re-exported for tests that monkeypatch status.subprocess to guard against regressions
import importlib.util
from pathlib import Path
PROJECT_ROOT = Path(__file__).parent.parent.resolve()
from hermes_cli.auth import AuthError, resolve_provider
from hermes_cli.colors import Colors, color
from hermes_cli.config import cfg_get, get_env_path, get_env_value, get_hermes_home, load_config
from hermes_cli.config import get_env_path, get_env_value, get_hermes_home, load_config
from hermes_cli.models import provider_label
from hermes_cli.nous_subscription import get_nous_subscription_features
from hermes_cli.runtime_provider import resolve_requested_provider
from hermes_cli.vercel_auth import describe_vercel_auth
from hermes_constants import OPENROUTER_MODELS_URL
from tools.tool_backend_helpers import managed_nous_tools_enabled
@ -300,15 +302,10 @@ def show_status(args):
print()
print(color("◆ Terminal Backend", Colors.CYAN, Colors.BOLD))
terminal_cfg = config.get("terminal", {}) if isinstance(config.get("terminal"), dict) else {}
terminal_env = os.getenv("TERMINAL_ENV", "")
if not terminal_env:
# Fall back to config file value when env var isn't set
# (hermes status doesn't go through cli.py's config loading)
try:
_cfg = load_config()
terminal_env = cfg_get(_cfg, "terminal", "backend", default="local")
except Exception:
terminal_env = "local"
terminal_env = terminal_cfg.get("backend", "local")
print(f" Backend: {terminal_env}")
if terminal_env == "ssh":
@ -322,6 +319,23 @@ def show_status(args):
elif terminal_env == "daytona":
daytona_image = os.getenv("TERMINAL_DAYTONA_IMAGE", "nikolaik/python-nodejs:python3.11-nodejs20")
print(f" Daytona Image: {daytona_image}")
elif terminal_env == "vercel_sandbox":
runtime = os.getenv("TERMINAL_VERCEL_RUNTIME") or terminal_cfg.get("vercel_runtime") or "node24"
persist = os.getenv("TERMINAL_CONTAINER_PERSISTENT")
if persist is None:
persist_enabled = bool(terminal_cfg.get("container_persistent", True))
else:
persist_enabled = persist.lower() in ("1", "true", "yes", "on")
auth_status = describe_vercel_auth()
sdk_ok = importlib.util.find_spec("vercel") is not None
sdk_label = "installed" if sdk_ok else "missing (install: pip install 'hermes-agent[vercel]')"
print(f" Runtime: {runtime}")
print(f" SDK: {check_mark(sdk_ok)} {sdk_label}")
print(f" Auth: {check_mark(auth_status.ok)} {auth_status.label}")
for line in auth_status.detail_lines:
print(f" Auth detail: {line}")
print(f" Persistence: {'snapshot filesystem' if persist_enabled else 'ephemeral filesystem'}")
print(" Processes: live processes do not survive cleanup, snapshots, or sandbox recreation")
sudo_password = os.getenv("SUDO_PASSWORD", "")
print(f" Sudo: {check_mark(bool(sudo_password))} {'enabled' if sudo_password else 'disabled'}")

70
hermes_cli/vercel_auth.py Normal file
View File

@ -0,0 +1,70 @@
"""Helpers for reporting Vercel Sandbox authentication state."""
from __future__ import annotations
import os
from dataclasses import dataclass
_TOKEN_TUPLE_VARS = ("VERCEL_TOKEN", "VERCEL_PROJECT_ID", "VERCEL_TEAM_ID")
@dataclass(frozen=True)
class VercelAuthStatus:
ok: bool
label: str
detail_lines: tuple[str, ...]
def _present(name: str) -> bool:
return bool(os.getenv(name))
def describe_vercel_auth() -> VercelAuthStatus:
"""Return Vercel auth status without exposing secret values."""
has_oidc = _present("VERCEL_OIDC_TOKEN")
token_states = {name: _present(name) for name in _TOKEN_TUPLE_VARS}
present_token_vars = tuple(name for name, present in token_states.items() if present)
missing_token_vars = tuple(name for name, present in token_states.items() if not present)
if has_oidc:
details = [
"mode: OIDC",
"active env: VERCEL_OIDC_TOKEN",
"note: OIDC tokens are development-only; use access-token auth for deployments and long-running processes",
]
if present_token_vars:
details.append(f"also present: {', '.join(present_token_vars)}")
return VercelAuthStatus(True, "OIDC token via VERCEL_OIDC_TOKEN", tuple(details))
if not missing_token_vars:
return VercelAuthStatus(
True,
"access token + project/team via VERCEL_TOKEN, VERCEL_PROJECT_ID, VERCEL_TEAM_ID",
(
"mode: access token",
"active env: VERCEL_TOKEN, VERCEL_PROJECT_ID, VERCEL_TEAM_ID",
),
)
if present_token_vars:
return VercelAuthStatus(
False,
f"partial access-token auth (missing {', '.join(missing_token_vars)})",
(
"mode: incomplete access token",
f"present env: {', '.join(present_token_vars)}",
f"missing env: {', '.join(missing_token_vars)}",
"recommended: set VERCEL_TOKEN, VERCEL_PROJECT_ID, and VERCEL_TEAM_ID together",
),
)
return VercelAuthStatus(
False,
"not configured",
(
"recommended: set VERCEL_TOKEN, VERCEL_PROJECT_ID, and VERCEL_TEAM_ID",
"development-only alternative: set VERCEL_OIDC_TOKEN",
),
)

View File

@ -253,7 +253,12 @@ _SCHEMA_OVERRIDES: Dict[str, Dict[str, Any]] = {
"terminal.backend": {
"type": "select",
"description": "Terminal execution backend",
"options": ["local", "docker", "ssh", "modal", "daytona", "singularity"],
"options": ["local", "docker", "ssh", "modal", "daytona", "vercel_sandbox", "singularity"],
},
"terminal.vercel_runtime": {
"type": "select",
"description": "Vercel Sandbox runtime",
"options": ["node24", "node22", "python3.13"],
},
"terminal.modal_mode": {
"type": "select",
@ -339,6 +344,7 @@ _CATEGORY_MERGE: Dict[str, str] = {
"human_delay": "display",
"dashboard": "display",
"code_execution": "agent",
"prompt_caching": "agent",
}
# Display order for tabs — unlisted categories sort alphabetically after these.

View File

@ -39,6 +39,7 @@ dependencies = [
[project.optional-dependencies]
modal = ["modal>=1.0.0,<2"]
daytona = ["daytona>=0.148.0,<1"]
vercel = ["vercel>=0.5.7,<0.6.0"]
dev = ["debugpy>=1.8.0,<2", "pytest>=9.0.2,<10", "pytest-asyncio>=1.3.0,<2", "pytest-xdist>=3.0,<4", "mcp>=1.2.0,<2", "ty>=0.0.1a29,<0.0.22", "ruff"]
messaging = ["python-telegram-bot[webhooks]>=22.6,<23", "discord.py[voice]>=2.7.1,<3", "aiohttp>=3.13.3,<4", "slack-bolt>=1.18.0,<2", "slack-sdk>=3.27.0,<4", "qrcode>=7.0,<8"]
cron = ["croniter>=6.0.0,<7"]
@ -100,6 +101,7 @@ yc-bench = ["yc-bench @ git+https://github.com/collinear-ai/yc-bench.git@bfb0c88
all = [
"hermes-agent[modal]",
"hermes-agent[daytona]",
"hermes-agent[vercel]",
"hermes-agent[messaging]",
# matrix: python-olm (required by matrix-nio[e2e]) is upstream-broken on
# modern macOS (archived libolm, C++ errors with Clang 21+). On Linux the

View File

@ -296,6 +296,30 @@ class TestRootLevelProviderOverride:
# Root-level "opencode-go" must NOT leak through
assert cfg["model"]["provider"] != "opencode-go"
    def test_terminal_vercel_runtime_bridged_to_env(self, tmp_path, monkeypatch):
        """Classic CLI must expose terminal.vercel_runtime to terminal_tool.py."""
        import yaml
        # Isolate HERMES_HOME so the test never reads the developer's real config.
        hermes_home = tmp_path / ".hermes"
        hermes_home.mkdir()
        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
        # Start clean: the config bridge itself must set this env var.
        monkeypatch.delenv("TERMINAL_VERCEL_RUNTIME", raising=False)
        config_path = hermes_home / "config.yaml"
        config_path.write_text(yaml.safe_dump({
            "terminal": {
                "backend": "vercel_sandbox",
                "vercel_runtime": "python3.13",
            },
        }))
        import cli
        monkeypatch.setattr(cli, "_hermes_home", hermes_home)
        cfg = cli.load_cli_config()
        # Value survives loading and is mirrored into the process environment.
        assert cfg["terminal"]["vercel_runtime"] == "python3.13"
        assert os.environ["TERMINAL_VERCEL_RUNTIME"] == "python3.13"
def test_normalize_root_model_keys_moves_to_model(self):
"""_normalize_root_model_keys migrates root keys into model section."""
from hermes_cli.config import _normalize_root_model_keys

View File

@ -33,6 +33,11 @@ def _simulate_config_bridge(cfg: dict, initial_env: dict | None = None):
"backend": "TERMINAL_ENV",
"cwd": "TERMINAL_CWD",
"timeout": "TERMINAL_TIMEOUT",
"vercel_runtime": "TERMINAL_VERCEL_RUNTIME",
"container_persistent": "TERMINAL_CONTAINER_PERSISTENT",
"container_cpu": "TERMINAL_CONTAINER_CPU",
"container_memory": "TERMINAL_CONTAINER_MEMORY",
"container_disk": "TERMINAL_CONTAINER_DISK",
}
for cfg_key, env_var in terminal_env_map.items():
if cfg_key in terminal_cfg:
@ -240,3 +245,24 @@ class TestTildeExpansion:
}
result = _simulate_config_bridge(cfg)
assert result["TERMINAL_CWD"] == os.path.expanduser("~/nested")
class TestVercelTerminalBridge:
    """Config→env bridging for the Vercel Sandbox terminal backend."""

    def test_vercel_terminal_settings_bridge(self):
        """All vercel-related terminal settings are exported as env strings."""
        cfg = {
            "terminal": {
                "backend": "vercel_sandbox",
                "vercel_runtime": "python3.13",
                "container_persistent": True,
                "container_cpu": 2,
                "container_memory": 4096,
                "container_disk": 51200,
            }
        }
        # A pre-existing, unrelated env var must not interfere with the bridge.
        result = _simulate_config_bridge(cfg, {"MESSAGING_CWD": "/from/env"})
        assert result["TERMINAL_ENV"] == "vercel_sandbox"
        assert result["TERMINAL_VERCEL_RUNTIME"] == "python3.13"
        # Values are stringified exactly as str() renders them.
        assert result["TERMINAL_CONTAINER_PERSISTENT"] == "True"
        assert result["TERMINAL_CONTAINER_CPU"] == "2"
        assert result["TERMINAL_CONTAINER_MEMORY"] == "4096"
        assert result["TERMINAL_CONTAINER_DISK"] == "51200"

View File

@ -161,6 +161,38 @@ def test_check_gateway_service_linger_skips_when_service_not_installed(monkeypat
assert issues == []
def test_doctor_reports_vercel_backend_diagnostics(monkeypatch, tmp_path):
    """Doctor surfaces runtime/disk/auth findings for the vercel_sandbox backend."""
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("TERMINAL_VERCEL_RUNTIME", "python3.13")
    # Custom disk sizes are unsupported on Vercel and must be flagged.
    monkeypatch.setenv("TERMINAL_CONTAINER_DISK", "2048")
    # Deliberately incomplete token tuple: VERCEL_PROJECT_ID missing.
    monkeypatch.setenv("VERCEL_TOKEN", "super-secret-value")
    monkeypatch.delenv("VERCEL_PROJECT_ID", raising=False)
    monkeypatch.setenv("VERCEL_TEAM_ID", "team")
    # Pretend the vercel SDK is importable without actually installing it.
    monkeypatch.setattr(doctor_mod.importlib.util, "find_spec", lambda name: object() if name == "vercel" else None)
    fake_model_tools = types.SimpleNamespace(
        check_tool_availability=lambda *a, **kw: ([], []),
        TOOLSET_REQUIREMENTS={},
    )
    monkeypatch.setitem(sys.modules, "model_tools", fake_model_tools)
    buf = io.StringIO()
    with contextlib.redirect_stdout(buf):
        doctor_mod.run_doctor(Namespace(fix=False))
    out = buf.getvalue()
    assert "Vercel runtime" in out
    assert "python3.13" in out
    assert "Vercel custom disk unsupported" in out
    assert "Vercel auth incomplete" in out
    assert "VERCEL_PROJECT_ID" in out
    assert "Vercel auth mode: incomplete access token" in out
    assert "Vercel auth present env: VERCEL_TOKEN, VERCEL_TEAM_ID" in out
    assert "Vercel auth missing env: VERCEL_PROJECT_ID" in out
    # Secret values must never be echoed to the console.
    assert "super-secret-value" not in out
    assert "snapshot filesystem only" in out
# ── Memory provider section (doctor should only check the *active* provider) ──

View File

@ -127,6 +127,13 @@ class TestConfigYamlRouting:
or "TERMINAL_DOCKER_MOUNT_CWD_TO_WORKSPACE=True" in env_content
)
    def test_terminal_vercel_runtime_goes_to_config_and_env(self, _isolated_hermes_home):
        """Setting terminal.vercel_runtime writes both config.yaml and .env."""
        set_config_value("terminal.vercel_runtime", "python3.13")
        config = _read_config(_isolated_hermes_home)
        env_content = _read_env(_isolated_hermes_home)
        assert "vercel_runtime: python3.13" in config
        assert "TERMINAL_VERCEL_RUNTIME=python3.13" in env_content
# ---------------------------------------------------------------------------
# Empty / falsy values — regression tests for #4277

View File

@ -1,5 +1,6 @@
"""Tests for setup.py configuration flows."""
import json
import os
import sys
import types
@ -480,6 +481,83 @@ def test_modal_setup_persists_direct_mode_when_user_chooses_their_own_account(tm
assert config["terminal"]["modal_mode"] == "direct"
def test_vercel_setup_configures_access_token_auth(tmp_path, monkeypatch):
    """Selecting the Vercel backend walks token auth and clears stale OIDC creds."""
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))
    # Stale OIDC token should be removed in favour of the access-token tuple.
    monkeypatch.setenv("VERCEL_OIDC_TOKEN", "old-oidc")
    # Stub the SDK so setup skips its install step.
    monkeypatch.setitem(sys.modules, "vercel", types.ModuleType("vercel"))
    config = load_config()

    def fake_prompt_choice(question, choices, default=0):
        if question == "Select terminal backend:":
            return 5  # menu index of vercel_sandbox
        raise AssertionError(f"Unexpected prompt_choice call: {question}")

    # Answers in prompt order: runtime, persist, cpu, memory, token, project, team.
    prompt_values = iter(["python3.13", "yes", "2", "4096", "token", "project", "team"])
    monkeypatch.setattr("hermes_cli.setup.prompt_choice", fake_prompt_choice)
    monkeypatch.setattr("hermes_cli.setup.prompt", lambda *args, **kwargs: next(prompt_values))
    from hermes_cli.setup import setup_terminal_backend
    setup_terminal_backend(config)
    assert config["terminal"]["backend"] == "vercel_sandbox"
    assert config["terminal"]["vercel_runtime"] == "python3.13"
    # Disk sizing is not configurable on Vercel; setup pins the shared default.
    assert config["terminal"]["container_disk"] == 51200
    assert os.environ["TERMINAL_VERCEL_RUNTIME"] == "python3.13"
    assert "VERCEL_OIDC_TOKEN" not in os.environ
    assert os.environ["VERCEL_TOKEN"] == "token"
    assert os.environ["VERCEL_PROJECT_ID"] == "project"
    assert os.environ["VERCEL_TEAM_ID"] == "team"
def test_vercel_setup_prefills_project_and_team_from_link_file(tmp_path, monkeypatch):
    """Setup pre-fills project/team IDs from the nearest .vercel/project.json."""
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))
    project_root = tmp_path / "project"
    nested = project_root / "app" / "src"
    nested.mkdir(parents=True)
    vercel_dir = project_root / ".vercel"
    vercel_dir.mkdir()
    (vercel_dir / "project.json").write_text(
        json.dumps({"projectId": "linked-project", "orgId": "linked-team"}),
        encoding="utf-8",
    )
    # Run from a nested directory so the link file is found by walking parents.
    monkeypatch.chdir(nested)
    monkeypatch.setitem(sys.modules, "vercel", types.ModuleType("vercel"))
    config = load_config()
    # Out-of-range disk value must be reset to the shared default during setup.
    config["terminal"]["container_disk"] = 999

    def fake_prompt_choice(question, choices, default=0):
        if question == "Select terminal backend:":
            return 5  # menu index of vercel_sandbox
        raise AssertionError(f"Unexpected prompt_choice call: {question}")

    # Blank project/team answers mean "accept the pre-filled default".
    prompt_values = iter(["node24", "no", "1", "5120", "token", "", ""])
    defaults = {}

    def fake_prompt(message, default="", **kwargs):
        defaults[message] = default  # record what setup offered as the default
        value = next(prompt_values)
        return value or default

    monkeypatch.setattr("hermes_cli.setup.prompt_choice", fake_prompt_choice)
    monkeypatch.setattr("hermes_cli.setup.prompt", fake_prompt)
    from hermes_cli.setup import setup_terminal_backend
    setup_terminal_backend(config)
    assert config["terminal"]["backend"] == "vercel_sandbox"
    assert config["terminal"]["container_persistent"] is False
    assert config["terminal"]["container_disk"] == 51200
    assert "VERCEL_OIDC_TOKEN" not in os.environ
    assert os.environ["VERCEL_TOKEN"] == "token"
    # Blank answers fell back to the link-file defaults.
    assert os.environ["VERCEL_PROJECT_ID"] == "linked-project"
    assert os.environ["VERCEL_TEAM_ID"] == "linked-team"
    assert defaults[" Vercel project ID"] == "linked-project"
    assert defaults[" Vercel team ID"] == "linked-team"
def test_resolve_hermes_chat_argv_prefers_which(monkeypatch):
from hermes_cli import setup as setup_mod

View File

@ -79,3 +79,33 @@ def test_show_status_reports_nous_auth_error(monkeypatch, capsys, tmp_path):
assert "Error: Refresh session has been revoked" in output
assert "Access exp:" in output
assert "Key exp:" in output
def test_show_status_reports_vercel_backend_contract(monkeypatch, capsys, tmp_path):
    """`hermes status` reports runtime, SDK, auth, and persistence for Vercel."""
    from hermes_cli import status as status_mod
    import hermes_cli.auth as auth_mod
    import hermes_cli.gateway as gateway_mod
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("TERMINAL_VERCEL_RUNTIME", "python3.13")
    monkeypatch.setenv("TERMINAL_CONTAINER_PERSISTENT", "true")
    monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
    # Pretend the SDK is installed without importing the real package.
    monkeypatch.setattr(status_mod.importlib.util, "find_spec", lambda name: object() if name == "vercel" else None)
    monkeypatch.setattr(status_mod, "load_config", lambda: {"terminal": {"backend": "vercel_sandbox"}}, raising=False)
    # Neutralize auth/gateway probes that would touch the network or real state.
    monkeypatch.setattr(auth_mod, "get_nous_auth_status", lambda: {}, raising=False)
    monkeypatch.setattr(auth_mod, "get_codex_auth_status", lambda: {}, raising=False)
    monkeypatch.setattr(auth_mod, "get_qwen_auth_status", lambda: {}, raising=False)
    monkeypatch.setattr(gateway_mod, "find_gateway_pids", lambda exclude_pids=None: [], raising=False)
    status_mod.show_status(SimpleNamespace(all=False, deep=False))
    output = capsys.readouterr().out
    assert "Backend: vercel_sandbox" in output
    assert "Runtime: python3.13" in output
    assert "Auth:" in output and "OIDC token via VERCEL_OIDC_TOKEN" in output
    assert "Auth detail: mode: OIDC" in output
    assert "Auth detail: active env: VERCEL_OIDC_TOKEN" in output
    # The token value itself must never be printed.
    assert "oidc-token" not in output
    assert "snapshot filesystem" in output
    assert "live processes do not survive" in output

View File

@ -371,6 +371,12 @@ class TestBuildSchemaFromConfig:
assert entry["type"] == "select"
assert "options" in entry
assert "local" in entry["options"]
assert "vercel_sandbox" in entry["options"]
runtime_entry = CONFIG_SCHEMA["terminal.vercel_runtime"]
assert runtime_entry["type"] == "select"
assert "node24" in runtime_entry["options"]
assert "python3.13" in runtime_entry["options"]
assert len(runtime_entry["options"]) >= 3
def test_empty_prefix_produces_correct_keys(self):
from hermes_cli.web_server import _build_schema_from_config

View File

@ -73,6 +73,10 @@ class TestContainerSkip:
result = check_all_command_guards("rm -rf /", "daytona")
assert result["approved"] is True
    def test_vercel_sandbox_skips_both(self):
        """Vercel Sandbox is a remote microVM, so host command guards are bypassed."""
        result = check_all_command_guards("rm -rf /", "vercel_sandbox")
        assert result["approved"] is True
# ---------------------------------------------------------------------------
# tirith allow + safe command

View File

@ -132,6 +132,10 @@ class TestProviderEnvBlocklist:
"MODAL_TOKEN_ID": "modal-id",
"MODAL_TOKEN_SECRET": "modal-secret",
"DAYTONA_API_KEY": "daytona-key",
"VERCEL_OIDC_TOKEN": "vercel-oidc-token",
"VERCEL_TOKEN": "vercel-token",
"VERCEL_PROJECT_ID": "vercel-project",
"VERCEL_TEAM_ID": "vercel-team",
}
result_env = _run_with_env(extra_os_env=leaked_vars)
@ -287,6 +291,10 @@ class TestBlocklistCoverage:
"MODAL_TOKEN_ID",
"MODAL_TOKEN_SECRET",
"DAYTONA_API_KEY",
"VERCEL_OIDC_TOKEN",
"VERCEL_TOKEN",
"VERCEL_PROJECT_ID",
"VERCEL_TEAM_ID",
}
assert extras.issubset(_HERMES_PROVIDER_ENV_BLOCKLIST)

View File

@ -7,6 +7,7 @@ Covers the bugs discovered while setting up TBLite evaluation:
4. ensurepip fix in Modal image builder
5. No swe-rex dependency uses native Modal SDK
6. /home/ added to host prefix check
7. Vercel sandbox cwd normalization
"""
import os
@ -101,6 +102,26 @@ class TestCwdHandling:
config = _tt_mod._get_env_config()
assert config["cwd"] == "/root"
    def test_host_path_replaced_for_vercel_sandbox(self, monkeypatch):
        """Host paths should be discarded for Vercel Sandbox."""
        monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
        # A host-only absolute path cannot exist inside the remote microVM.
        monkeypatch.setenv("TERMINAL_CWD", "/Users/someone/projects")
        config = _tt_mod._get_env_config()
        assert config["cwd"] == "/vercel/sandbox"
    def test_relative_path_replaced_for_vercel_sandbox(self, monkeypatch):
        """Relative cwd should not map into a remote Vercel sandbox."""
        monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
        monkeypatch.setenv("TERMINAL_CWD", "src")
        config = _tt_mod._get_env_config()
        assert config["cwd"] == "/vercel/sandbox"
    def test_default_cwd_is_workspace_root_for_vercel_sandbox(self, monkeypatch):
        """With no TERMINAL_CWD at all, Vercel defaults to its workspace root."""
        monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
        monkeypatch.delenv("TERMINAL_CWD", raising=False)
        config = _tt_mod._get_env_config()
        assert config["cwd"] == "/vercel/sandbox"
@pytest.mark.parametrize("backend", ["modal", "docker", "singularity", "daytona"])
def test_default_cwd_is_root_for_container_backends(self, backend, monkeypatch):
"""Container backends should default to /root, not ~."""

View File

@ -1,6 +1,8 @@
import importlib
import logging
import pytest
terminal_tool_module = importlib.import_module("tools.terminal_tool")
@ -8,11 +10,24 @@ def _clear_terminal_env(monkeypatch):
"""Remove terminal env vars that could affect requirements checks."""
keys = [
"TERMINAL_ENV",
"TERMINAL_CONTAINER_CPU",
"TERMINAL_CONTAINER_DISK",
"TERMINAL_CONTAINER_MEMORY",
"TERMINAL_DOCKER_FORWARD_ENV",
"TERMINAL_DOCKER_VOLUMES",
"TERMINAL_LIFETIME_SECONDS",
"TERMINAL_MODAL_MODE",
"TERMINAL_SSH_HOST",
"TERMINAL_SSH_PORT",
"TERMINAL_SSH_USER",
"TERMINAL_TIMEOUT",
"TERMINAL_VERCEL_RUNTIME",
"MODAL_TOKEN_ID",
"MODAL_TOKEN_SECRET",
"VERCEL_OIDC_TOKEN",
"VERCEL_TOKEN",
"VERCEL_PROJECT_ID",
"VERCEL_TEAM_ID",
"HOME",
"USERPROFILE",
]
@ -176,3 +191,126 @@ def test_modal_backend_managed_mode_without_feature_flag_logs_clear_error(monkey
"paid Nous subscription is required" in record.getMessage()
for record in caplog.records
)
def test_vercel_backend_without_sdk_logs_specific_error(monkeypatch, caplog):
    """Missing vercel SDK must fail requirements with an actionable message."""
    _clear_terminal_env(monkeypatch)
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    # Simulate the SDK being absent regardless of what is really installed.
    monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: None)
    with caplog.at_level(logging.ERROR):
        ok = terminal_tool_module.check_terminal_requirements()
    assert ok is False
    assert any(
        "vercel is required for the Vercel Sandbox terminal backend" in record.getMessage()
        for record in caplog.records
    )
def test_vercel_backend_without_auth_logs_specific_error(monkeypatch, caplog):
_clear_terminal_env(monkeypatch)
monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: object())
with caplog.at_level(logging.ERROR):
ok = terminal_tool_module.check_terminal_requirements()
assert ok is False
assert any(
"no supported auth configuration was found" in record.getMessage()
for record in caplog.records
)
def test_vercel_backend_accepts_oidc_auth(monkeypatch):
    """An OIDC token alone satisfies the Vercel auth requirement."""
    _clear_terminal_env(monkeypatch)
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
    monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: object())
    assert terminal_tool_module.check_terminal_requirements() is True


def test_vercel_backend_accepts_token_tuple_auth(monkeypatch):
    """The token + project-id + team-id triple is the alternative auth path."""
    _clear_terminal_env(monkeypatch)
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("VERCEL_TOKEN", "token")
    monkeypatch.setenv("VERCEL_PROJECT_ID", "project")
    monkeypatch.setenv("VERCEL_TEAM_ID", "team")
    monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: object())
    assert terminal_tool_module.check_terminal_requirements() is True


@pytest.mark.parametrize("runtime", ["node24", "node22", "python3.13"])
def test_vercel_backend_accepts_supported_runtimes(monkeypatch, runtime):
    """Each supported runtime string passes the requirements check."""
    _clear_terminal_env(monkeypatch)
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("TERMINAL_VERCEL_RUNTIME", runtime)
    monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
    monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: object())
    assert terminal_tool_module.check_terminal_requirements() is True


def test_vercel_backend_accepts_blank_runtime(monkeypatch):
    """A whitespace-only runtime is accepted (presumably treated as unset — see checker)."""
    _clear_terminal_env(monkeypatch)
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("TERMINAL_VERCEL_RUNTIME", " ")
    monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
    monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: object())
    assert terminal_tool_module.check_terminal_requirements() is True
def test_vercel_backend_rejects_unsupported_runtime(monkeypatch, caplog):
    """An unknown runtime fails with a message listing the supported set."""
    _clear_terminal_env(monkeypatch)
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("TERMINAL_VERCEL_RUNTIME", "node20")
    monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
    monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: object())
    with caplog.at_level(logging.ERROR):
        ok = terminal_tool_module.check_terminal_requirements()
    assert ok is False
    assert any(
        "Vercel Sandbox runtime 'node20' is not supported" in record.getMessage()
        and "node24, node22, python3.13" in record.getMessage()
        for record in caplog.records
    )


def test_vercel_backend_rejects_nondefault_disk(monkeypatch, caplog):
    """Disk-size overrides are rejected up front for the Vercel backend."""
    _clear_terminal_env(monkeypatch)
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("TERMINAL_CONTAINER_DISK", "8192")
    monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
    monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: object())
    with caplog.at_level(logging.ERROR):
        ok = terminal_tool_module.check_terminal_requirements()
    assert ok is False
    assert any(
        "does not support custom TERMINAL_CONTAINER_DISK=8192" in record.getMessage()
        for record in caplog.records
    )


def test_vercel_backend_rejects_malformed_disk_without_raising(monkeypatch, caplog):
    """A non-numeric disk value degrades to a logged error, not an exception."""
    _clear_terminal_env(monkeypatch)
    monkeypatch.setenv("TERMINAL_ENV", "vercel_sandbox")
    monkeypatch.setenv("TERMINAL_CONTAINER_DISK", "large")
    monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
    monkeypatch.setattr(terminal_tool_module.importlib.util, "find_spec", lambda _name: object())
    with caplog.at_level(logging.ERROR):
        ok = terminal_tool_module.check_terminal_requirements()
    assert ok is False
    assert any(
        "Invalid value for TERMINAL_CONTAINER_DISK" in record.getMessage()
        for record in caplog.records
    )

View File

@ -49,3 +49,68 @@ class TestTerminalRequirements:
assert "terminal" in names
assert "execute_code" in names
    def test_terminal_and_execute_code_tools_resolve_for_vercel_sandbox(self, monkeypatch):
        """A valid vercel_sandbox config exposes both the terminal and execute_code tools."""
        monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
        monkeypatch.setattr(
            terminal_tool_module,
            "_get_env_config",
            lambda: {"env_type": "vercel_sandbox", "container_disk": 51200},
        )
        monkeypatch.setattr(
            terminal_tool_module.importlib.util,
            "find_spec",
            lambda _name: object(),
        )
        tools = get_tool_definitions(enabled_toolsets=["terminal", "code_execution"], quiet_mode=True)
        names = {tool["function"]["name"] for tool in tools}
        assert "terminal" in names
        assert "execute_code" in names

    def test_terminal_and_execute_code_tools_hide_for_unsupported_vercel_runtime(self, monkeypatch):
        """An unsupported runtime hides both tools instead of failing later."""
        monkeypatch.setenv("VERCEL_OIDC_TOKEN", "oidc-token")
        monkeypatch.setattr(
            terminal_tool_module,
            "_get_env_config",
            lambda: {
                "env_type": "vercel_sandbox",
                "container_disk": 51200,
                "vercel_runtime": "node20",
            },
        )
        monkeypatch.setattr(
            terminal_tool_module.importlib.util,
            "find_spec",
            lambda _name: object(),
        )
        tools = get_tool_definitions(enabled_toolsets=["terminal", "code_execution"], quiet_mode=True)
        names = {tool["function"]["name"] for tool in tools}
        assert "terminal" not in names
        assert "execute_code" not in names

    def test_terminal_and_execute_code_tools_hide_for_vercel_without_auth(self, monkeypatch):
        """With no Vercel auth in the environment, both tools are hidden."""
        monkeypatch.delenv("VERCEL_OIDC_TOKEN", raising=False)
        monkeypatch.delenv("VERCEL_TOKEN", raising=False)
        monkeypatch.delenv("VERCEL_PROJECT_ID", raising=False)
        monkeypatch.delenv("VERCEL_TEAM_ID", raising=False)
        monkeypatch.setattr(
            terminal_tool_module,
            "_get_env_config",
            lambda: {
                "env_type": "vercel_sandbox",
                "container_disk": 51200,
                "vercel_runtime": "node22",
            },
        )
        monkeypatch.setattr(
            terminal_tool_module.importlib.util,
            "find_spec",
            lambda _name: object(),
        )
        tools = get_tool_definitions(enabled_toolsets=["terminal", "code_execution"], quiet_mode=True)
        names = {tool["function"]["name"] for tool in tools}
        assert "terminal" not in names
        assert "execute_code" not in names

View File

@ -0,0 +1,623 @@
"""Unit tests for the Vercel Sandbox terminal backend."""
from __future__ import annotations
import importlib
import io
import re
import sys
import tarfile
import threading
import types
from dataclasses import dataclass
from enum import StrEnum
from pathlib import Path
from types import SimpleNamespace
import pytest
class _FakeRunResult:
def __init__(self, output: str | bytes = "", exit_code: int = 0):
self._output = output
self.exit_code = exit_code
def output(self) -> str | bytes:
return self._output
class _FakeSandboxStatus(StrEnum):
    """Mirror of the SDK's sandbox lifecycle states used by the fakes below."""

    PENDING = "pending"
    RUNNING = "running"
    STOPPING = "stopping"
    STOPPED = "stopped"
    FAILED = "failed"
    ABORTED = "aborted"
    SNAPSHOTTING = "snapshotting"


@dataclass(frozen=True)
class _FakeSnapshot:
    """Minimal snapshot object — only the id the backend reads back."""

    snapshot_id: str
class _FakeSandbox:
    """In-memory double of an SDK sandbox.

    Records every call (run_command, write_files, download_file, stop,
    snapshot, wait_for_status) and lets tests queue *side effects* — an
    Exception to raise, a callable to invoke, or a canned return value —
    consumed FIFO per call site.
    """

    def __init__(
        self,
        *,
        cwd: str = "/vercel/sandbox",
        home: str = "/home/vercel",
        status: _FakeSandboxStatus = _FakeSandboxStatus.RUNNING,
    ):
        # Mirrors the SDK shape: metadata lives on .sandbox, transport on .client.
        self.sandbox = SimpleNamespace(cwd=cwd, id="sb-123")
        self.status = status
        self.home = home
        self.closed = 0  # count of client.close() invocations
        self.client = SimpleNamespace(close=self._close)
        self.run_command_calls: list[tuple[str, list[str], dict]] = []
        self.run_command_side_effects: list[object] = []
        self.write_files_calls: list[list[dict[str, object]]] = []
        self.write_files_side_effects: list[object] = []
        self.download_file_calls: list[tuple[str, Path]] = []
        self.download_file_side_effects: list[object] = []
        self.download_file_content = b""  # bytes the default download_file writes
        self.stop_calls: list[tuple[tuple, dict]] = []
        self.snapshot_calls: list[tuple[tuple, dict]] = []
        self.snapshot_side_effects: list[object] = []
        self.snapshot_id = "snap_default"
        self.refresh_calls = 0
        self.wait_for_status_calls: list[tuple[object, object, object]] = []
        self.wait_for_status_side_effects: list[object] = []

    def _close(self) -> None:
        # Bound as client.close so the backend's close path is observable.
        self.closed += 1

    def refresh(self) -> None:
        self.refresh_calls += 1

    def wait_for_status(self, status: _FakeSandboxStatus | str, *, timeout, poll_interval) -> None:
        """Record the wait; a queued effect may raise instead of transitioning."""
        self.wait_for_status_calls.append((status, timeout, poll_interval))
        if self.wait_for_status_side_effects:
            effect = self.wait_for_status_side_effects.pop(0)
            if isinstance(effect, Exception):
                raise effect
            if callable(effect):
                effect(status, timeout, poll_interval)
            return
        # Default: immediately reach the requested status.
        self.status = _FakeSandboxStatus(status)

    def run_command(self, cmd: str, args: list[str] | None = None, **kwargs):
        """Record the call; with no queued effect, answer the $HOME probe or return empty output."""
        args = list(args or [])
        self.run_command_calls.append((cmd, args, kwargs))
        if self.run_command_side_effects:
            effect = self.run_command_side_effects.pop(0)
            if isinstance(effect, Exception):
                raise effect
            if callable(effect):
                return effect(cmd, args, kwargs)
            return effect
        # Backend invokes `bash -c <script>`: script is the second argument.
        script = args[1] if len(args) > 1 else ""
        if 'printf %s "$HOME"' in script:
            return _FakeRunResult(self.home)
        return _FakeRunResult("")

    def write_files(self, files: list[dict[str, object]]) -> None:
        self.write_files_calls.append(files)
        if self.write_files_side_effects:
            effect = self.write_files_side_effects.pop(0)
            if isinstance(effect, Exception):
                raise effect
            if callable(effect):
                effect(files)

    def download_file(self, remote_path: str, local_path) -> str:
        """Record the download; the default writes download_file_content to local_path."""
        destination = Path(local_path)
        self.download_file_calls.append((remote_path, destination))
        if self.download_file_side_effects:
            effect = self.download_file_side_effects.pop(0)
            if isinstance(effect, Exception):
                raise effect
            if callable(effect):
                return effect(remote_path, destination)
        destination.write_bytes(self.download_file_content)
        return str(destination.resolve())

    def stop(self, *args, **kwargs) -> None:
        self.stop_calls.append((args, kwargs))

    def snapshot(self, *args, **kwargs):
        """Record the snapshot; a queued effect may be an id string, object, or error."""
        self.snapshot_calls.append((args, kwargs))
        if self.snapshot_side_effects:
            effect = self.snapshot_side_effects.pop(0)
            if isinstance(effect, Exception):
                raise effect
            if callable(effect):
                return effect(*args, **kwargs)
            if isinstance(effect, str):
                return _FakeSnapshot(effect)
            return effect
        return _FakeSnapshot(self.snapshot_id)
@dataclass(frozen=True)
class _FakeResources:
    """Stand-in for vercel.sandbox.Resources (vcpus/memory request)."""

    vcpus: float | None = None
    memory: int | None = None


@dataclass(frozen=True)
class _FakeWriteFile:
    """Stand-in for vercel.sandbox.WriteFile (remote path + raw content)."""

    path: str
    content: bytes
class _FakeSDK:
    """Fake of the SDK's sandbox factory: records create() kwargs, serves fakes."""

    def __init__(self):
        self.create_kwargs: list[dict[str, object]] = []
        self.create_side_effects: list[object] = []
        self.sandboxes: list[_FakeSandbox] = []

    @property
    def current(self) -> _FakeSandbox:
        """The most recently created sandbox."""
        return self.sandboxes[-1]

    def create(self, **kwargs):
        """Record kwargs, then honor a queued effect (exception or canned sandbox).

        Any other queued value is consumed and ignored; a fresh fake sandbox
        is produced in that case and whenever the queue is empty.
        """
        self.create_kwargs.append(kwargs)
        if self.create_side_effects:
            queued = self.create_side_effects.pop(0)
            if isinstance(queued, Exception):
                raise queued
            if isinstance(queued, _FakeSandbox):
                self.sandboxes.append(queued)
                return queued
        fresh = _FakeSandbox()
        self.sandboxes.append(fresh)
        return fresh
def _cwd_result(body: str = "", *, cwd: str = "/vercel/sandbox", exit_code: int = 0):
    """Build a run_command side effect emitting *body* plus the backend's CWD-marker protocol."""

    def _result(_cmd: str, args: list[str], _kwargs: dict):
        script = args[1] if len(args) > 1 else ""
        # Echo back whatever __HERMES_CWD_<token>__ marker the script embeds.
        found = re.search(r"__HERMES_CWD_[A-Za-z0-9]+__", script)
        sentinel = found.group(0) if found else "__HERMES_CWD_MISSING__"
        lead = f"{body}\n\n" if body else "\n"
        return _FakeRunResult(f"{lead}{sentinel}{cwd}{sentinel}\n", exit_code)

    return _result
def _tar_bytes(entries: dict[str, bytes]) -> bytes:
buffer = io.BytesIO()
with tarfile.open(fileobj=buffer, mode="w") as tar:
for name, content in entries.items():
info = tarfile.TarInfo(name)
info.size = len(content)
tar.addfile(info, io.BytesIO(content))
return buffer.getvalue()
@pytest.fixture()
def vercel_sdk(monkeypatch):
    """Install a fake ``vercel``/``vercel.sandbox`` module pair into sys.modules."""
    sdk = _FakeSDK()
    fake_sandbox_module = types.ModuleType("vercel.sandbox")
    fake_sandbox_module.Sandbox = types.SimpleNamespace(create=sdk.create)
    fake_sandbox_module.Resources = _FakeResources
    fake_sandbox_module.WriteFile = _FakeWriteFile
    fake_sandbox_module.SandboxStatus = _FakeSandboxStatus
    fake_root_module = types.ModuleType("vercel")
    fake_root_module.sandbox = fake_sandbox_module
    monkeypatch.setitem(sys.modules, "vercel", fake_root_module)
    monkeypatch.setitem(sys.modules, "vercel.sandbox", fake_sandbox_module)
    return sdk
@pytest.fixture()
def vercel_module(vercel_sdk, monkeypatch):
    """Import tools.environments.vercel_sandbox against the fake SDK.

    The module is reloaded so module-level state is rebuilt after the fakes
    are installed; credential/skill/cache iterators are stubbed to keep the
    initial file sync inert.
    """
    monkeypatch.setattr("tools.environments.base.is_interrupted", lambda: False)
    monkeypatch.setattr("tools.credential_files.get_credential_file_mounts", lambda: [])
    monkeypatch.setattr("tools.credential_files.iter_skills_files", lambda **kwargs: [])
    monkeypatch.setattr("tools.credential_files.iter_cache_files", lambda **kwargs: [])
    module = importlib.import_module("tools.environments.vercel_sandbox")
    return importlib.reload(module)
@pytest.fixture()
def make_env(vercel_module, request):
    """Factory fixture producing VercelSandboxEnvironment instances.

    Every created env is cleaned up at teardown with its sync manager
    detached, so teardown does not attempt a sync-back against the fakes.
    """
    envs = []

    def _cleanup_envs():
        for env in envs:
            env._sync_manager = None  # skip sync-back during teardown
            env.cleanup()

    request.addfinalizer(_cleanup_envs)

    def _factory(**kwargs):
        # Defaults match the common-case backend configuration in these tests.
        kwargs.setdefault("runtime", "node22")
        kwargs.setdefault("cwd", vercel_module.DEFAULT_VERCEL_CWD)
        kwargs.setdefault("timeout", 30)
        kwargs.setdefault("task_id", "task-123")
        env = vercel_module.VercelSandboxEnvironment(**kwargs)
        envs.append(env)
        return env

    return _factory
class TestStartup:
    """Sandbox creation and initial cwd resolution."""

    def test_default_cwd_tracks_remote_workspace_root(self, make_env, vercel_sdk):
        # The environment adopts whatever cwd the created sandbox reports.
        sandbox = _FakeSandbox(cwd="/workspace")
        vercel_sdk.create_side_effects.append(sandbox)
        env = make_env()
        assert env.cwd == "/workspace"

    def test_tilde_cwd_resolves_against_remote_home(self, make_env, vercel_sdk):
        # "~" resolves against the remote $HOME, not the local one.
        sandbox = _FakeSandbox(home="/home/custom")
        vercel_sdk.create_side_effects.append(sandbox)
        env = make_env(cwd="~")
        assert env.cwd == "/home/custom"

    def test_pending_sandbox_timeout_raises_descriptive_error(
        self, make_env, vercel_sdk
    ):
        # A sandbox stuck in PENDING surfaces a clear RuntimeError.
        sandbox = _FakeSandbox(status=_FakeSandboxStatus.PENDING)
        sandbox.wait_for_status_side_effects.append(TimeoutError("still pending"))
        vercel_sdk.create_side_effects.append(sandbox)
        with pytest.raises(RuntimeError, match="Sandbox did not reach running state"):
            make_env()
class TestFileSync:
    """Credential/skill file syncing between host and sandbox."""

    def test_initial_sync_uploads_managed_files_under_remote_home(
        self, make_env, vercel_sdk, monkeypatch, tmp_path
    ):
        src = tmp_path / "token.txt"
        src.write_text("secret-token")
        monkeypatch.setattr(
            "tools.credential_files.get_credential_file_mounts",
            lambda: [
                {
                    "host_path": str(src),
                    "container_path": "/root/.hermes/credentials/token.txt",
                }
            ],
        )
        monkeypatch.setattr("tools.credential_files.iter_skills_files", lambda **kwargs: [])
        monkeypatch.setattr("tools.credential_files.iter_cache_files", lambda **kwargs: [])
        make_env()
        # The /root/... container path is remapped under the sandbox home.
        uploaded = vercel_sdk.current.write_files_calls[0]
        assert uploaded == [
            {
                "path": "/home/vercel/.hermes/credentials/token.txt",
                "content": b"secret-token",
            }
        ]

    def test_execute_resyncs_changed_managed_files(
        self, make_env, vercel_sdk, monkeypatch, tmp_path
    ):
        src = tmp_path / "token.txt"
        src.write_text("secret-token")
        monkeypatch.setattr(
            "tools.credential_files.get_credential_file_mounts",
            lambda: [
                {
                    "host_path": str(src),
                    "container_path": "/root/.hermes/credentials/token.txt",
                }
            ],
        )
        monkeypatch.setattr("tools.credential_files.iter_skills_files", lambda **kwargs: [])
        monkeypatch.setattr("tools.credential_files.iter_cache_files", lambda **kwargs: [])
        env = make_env()
        # Change the host file after the initial sync, then force a resync.
        src.write_text("updated-secret-token")
        monkeypatch.setenv("HERMES_FORCE_FILE_SYNC", "1")
        vercel_sdk.current.run_command_side_effects.append(_cwd_result("hello"))
        result = env.execute("echo hello")
        assert result == {"output": "hello\n", "returncode": 0}
        # The changed file is re-uploaded as part of the execute path.
        assert vercel_sdk.current.write_files_calls[-1] == [
            {
                "path": "/home/vercel/.hermes/credentials/token.txt",
                "content": b"updated-secret-token",
            }
        ]

    def test_cleanup_syncs_back_snapshots_closes_and_is_idempotent(
        self, make_env, vercel_module, vercel_sdk, monkeypatch, tmp_path
    ):
        hermes_home = tmp_path / ".hermes"
        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
        src = tmp_path / "token.txt"
        src.write_text("host-token")
        monkeypatch.setattr(
            "tools.credential_files.get_credential_file_mounts",
            lambda: [
                {
                    "host_path": str(src),
                    "container_path": "/root/.hermes/credentials/token.txt",
                }
            ],
        )
        monkeypatch.setattr(
            "tools.credential_files.iter_skills_files",
            lambda **kwargs: [],
        )
        monkeypatch.setattr(
            "tools.credential_files.iter_cache_files",
            lambda **kwargs: [],
        )
        env = make_env()
        sandbox = vercel_sdk.current
        sandbox.snapshot_id = "snap_cleanup"
        # Remote tarball: the mapped file (updated), a new sibling in the same
        # mapped directory, and a file outside any mapping (must be ignored).
        vercel_sdk.current.download_file_content = _tar_bytes(
            {
                "home/vercel/.hermes/credentials/token.txt": b"remote-token",
                "home/vercel/.hermes/credentials/new.txt": b"new-remote",
                "home/vercel/.hermes/unmapped/skip.txt": b"skip",
            }
        )
        env.cleanup()
        env.cleanup()  # second call must be a no-op
        assert src.read_text() == "remote-token"
        assert (tmp_path / "new.txt").read_text() == "new-remote"
        assert not (tmp_path / "skip.txt").exists()
        assert len(sandbox.snapshot_calls) == 1
        assert len(sandbox.stop_calls) == 1  # always stop after snapshot to avoid resource leaks
        assert sandbox.closed == 1
        assert vercel_module._load_snapshots() == {"task-123": "snap_cleanup"}

    def test_cleanup_sync_back_failure_from_download_does_not_block_snapshot(
        self, make_env, vercel_sdk, monkeypatch, tmp_path
    ):
        src = tmp_path / "token.txt"
        src.write_text("host-token")
        monkeypatch.setattr(
            "tools.credential_files.get_credential_file_mounts",
            lambda: [
                {
                    "host_path": str(src),
                    "container_path": "/root/.hermes/credentials/token.txt",
                }
            ],
        )
        monkeypatch.setattr(
            "tools.credential_files.iter_skills_files",
            lambda **kwargs: [],
        )
        monkeypatch.setattr(
            "tools.credential_files.iter_cache_files",
            lambda **kwargs: [],
        )
        env = make_env()
        sandbox = vercel_sdk.current
        # Queue three failing tar attempts, each followed by a success result
        # (presumably the per-retry cleanup command — confirm against file_sync).
        sandbox.run_command_side_effects.extend(
            [
                _FakeRunResult("tar failed", exit_code=2),
                _FakeRunResult(""),
                _FakeRunResult("tar failed", exit_code=2),
                _FakeRunResult(""),
                _FakeRunResult("tar failed", exit_code=2),
                _FakeRunResult(""),
            ]
        )
        monkeypatch.setattr("tools.environments.file_sync.time.sleep", lambda _delay: None)
        env.cleanup()
        # Host file untouched, no download attempted, snapshot still taken.
        assert src.read_text() == "host-token"
        assert len(sandbox.snapshot_calls) == 1
        assert sandbox.closed == 1
        assert len(sandbox.download_file_calls) == 0
class TestExecute:
    """Command execution, cwd tracking, and unhealthy-sandbox recovery."""

    def test_execute_runs_command_from_workspace_root_and_updates_cwd(
        self, make_env, vercel_sdk
    ):
        env = make_env()
        vercel_sdk.current.run_command_side_effects.append(
            _cwd_result("/tmp", cwd="/tmp")
        )
        result = env.execute("pwd", cwd="/tmp")
        assert result == {"output": "/tmp\n", "returncode": 0}
        assert env.cwd == "/tmp"
        # The remote invocation is `bash -c <script>` anchored at the
        # workspace root; the script itself cds to the requested cwd.
        cmd, args, kwargs = vercel_sdk.current.run_command_calls[-1]
        assert cmd == "bash"
        assert args[0] == "-c"
        assert "cd /tmp" in args[1]
        assert kwargs["cwd"] == "/vercel/sandbox"

    @pytest.mark.parametrize(
        ("make_unhealthy", "label"),
        [
            (
                lambda sandbox: setattr(
                    sandbox, "status", _FakeSandboxStatus.STOPPED
                ),
                "terminal state",
            ),
            (
                lambda sandbox: setattr(
                    sandbox,
                    "refresh",
                    lambda: (_ for _ in ()).throw(RuntimeError("refresh failed")),
                ),
                "refresh failure",
            ),
        ],
        ids=["terminal-state", "refresh-failure"],
    )
    def test_execute_recreates_unhealthy_sandbox_before_running_command(
        self, make_env, vercel_sdk, make_unhealthy, label
    ):
        env = make_env()
        original = vercel_sdk.current
        make_unhealthy(original)
        replacement = _FakeSandbox()
        # The replacement must answer the $HOME probe before the actual command.
        replacement.run_command_side_effects.extend(
            [
                _FakeRunResult(replacement.home),
                _cwd_result("hello"),
            ]
        )
        vercel_sdk.create_side_effects.append(replacement)
        result = env.execute("echo hello")
        assert result == {"output": "hello\n", "returncode": 0}, label
        assert original.closed == 1
        assert vercel_sdk.current is replacement

    def test_run_bash_handle_uses_captured_sandbox_for_exec_and_cancel(
        self, make_env
    ):
        env = make_env()
        original = env._sandbox
        assert original is not None
        replacement = _FakeSandbox()
        started = threading.Event()
        release = threading.Event()

        def blocking_command(_cmd: str, _args: list[str], _kwargs: dict):
            started.set()
            release.wait(timeout=5)
            return _FakeRunResult("done")

        original.run_command_side_effects.append(blocking_command)
        handle = env._run_bash("echo done")
        assert started.wait(timeout=1)
        # Swap the env's sandbox mid-flight: kill() must target the sandbox
        # that actually started the command, not the current one.
        env._sandbox = replacement
        handle.kill()
        release.set()
        assert handle.wait(timeout=2) == 0
        assert len(original.stop_calls) == 1
        assert replacement.stop_calls == []
        cmd, args, kwargs = original.run_command_calls[-1]
        assert cmd == "bash"
        assert args == ["-c", "echo done"]
        assert kwargs["cwd"] == "/vercel/sandbox"
class TestSnapshotPersistence:
    """Snapshot save/restore keyed by task_id under HERMES_HOME."""

    def test_create_restores_from_saved_snapshot(
        self, make_env, vercel_module, vercel_sdk, monkeypatch, tmp_path
    ):
        hermes_home = tmp_path / ".hermes"
        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
        vercel_module._store_snapshot("task-123", "snap_saved")
        restored = _FakeSandbox(cwd="/restored")
        vercel_sdk.create_side_effects.append(restored)
        env = make_env()
        assert env.cwd == "/restored"
        # The first create call carries the snapshot source.
        assert vercel_sdk.create_kwargs[0]["source"] == {
            "type": "snapshot",
            "snapshot_id": "snap_saved",
        }
        assert vercel_module._load_snapshots() == {"task-123": "snap_saved"}

    def test_restore_failure_prunes_snapshot_and_falls_back_to_fresh_sandbox(
        self, make_env, vercel_module, vercel_sdk, monkeypatch, tmp_path
    ):
        hermes_home = tmp_path / ".hermes"
        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
        vercel_module._store_snapshot("task-123", "snap_stale")
        fresh = _FakeSandbox(cwd="/fresh")
        # First create (the restore) fails; the fallback creates a fresh sandbox.
        vercel_sdk.create_side_effects.extend(
            [RuntimeError("snapshot missing"), fresh]
        )
        env = make_env()
        assert env.cwd == "/fresh"
        assert vercel_sdk.create_kwargs[0]["source"] == {
            "type": "snapshot",
            "snapshot_id": "snap_stale",
        }
        assert "source" not in vercel_sdk.create_kwargs[1]
        # The stale mapping is pruned so later runs don't retry it.
        assert vercel_module._load_snapshots() == {}

    def test_cleanup_stops_when_snapshot_fails_without_storing_metadata(
        self, make_env, vercel_module, vercel_sdk, monkeypatch, tmp_path
    ):
        hermes_home = tmp_path / ".hermes"
        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
        env = make_env()
        sandbox = vercel_sdk.current
        sandbox.snapshot_side_effects.append(RuntimeError("snapshot failed"))
        env.cleanup()
        assert len(sandbox.snapshot_calls) == 1
        assert len(sandbox.stop_calls) == 1
        assert sandbox.closed == 1
        assert vercel_module._load_snapshots() == {}

    def test_non_persistent_cleanup_stops_without_snapshot(
        self, make_env, vercel_module, vercel_sdk, monkeypatch, tmp_path
    ):
        hermes_home = tmp_path / ".hermes"
        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
        env = make_env(persistent_filesystem=False)
        sandbox = vercel_sdk.current
        env.cleanup()
        assert sandbox.snapshot_calls == []
        assert len(sandbox.stop_calls) == 1
        assert sandbox.closed == 1
        assert vercel_module._load_snapshots() == {}

    def test_persistent_cleanup_without_task_id_stops_without_snapshot(
        self, make_env, vercel_module, vercel_sdk, monkeypatch, tmp_path
    ):
        hermes_home = tmp_path / ".hermes"
        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
        env = make_env(task_id="")
        sandbox = vercel_sdk.current
        env.cleanup()
        assert sandbox.snapshot_calls == []
        assert len(sandbox.stop_calls) == 1
        assert sandbox.closed == 1
        assert vercel_module._load_snapshots() == {}
class TestCleanup:
    """Cleanup resilience."""

    def test_cleanup_continues_when_sync_back_raises(self, make_env, vercel_sdk):
        env = make_env()
        sandbox = vercel_sdk.current

        class FailingSyncManager:
            def sync_back(self):
                raise RuntimeError("download failed")

        # A sync-back failure must not prevent snapshotting or client close.
        env._sync_manager = FailingSyncManager()
        env.cleanup()
        assert len(sandbox.snapshot_calls) == 1
        assert sandbox.closed == 1

View File

@ -782,7 +782,7 @@ def check_dangerous_command(command: str, env_type: str,
Returns:
{"approved": True/False, "message": str or None, ...}
"""
if env_type in ("docker", "singularity", "modal", "daytona"):
if env_type in ("docker", "singularity", "modal", "daytona", "vercel_sandbox"):
return {"approved": True, "message": None}
# Hardline floor: commands with no recovery path (rm -rf /, mkfs, dd
@ -907,7 +907,7 @@ def check_all_command_guards(command: str, env_type: str,
other was shown to the user.
"""
# Skip containers for both checks
if env_type in ("docker", "singularity", "modal", "daytona"):
if env_type in ("docker", "singularity", "modal", "daytona", "vercel_sandbox"):
return {"approved": True, "message": None}
# Hardline floor: unconditional block for catastrophic commands

View File

@ -73,7 +73,24 @@ MAX_STDERR_BYTES = 10_000 # 10 KB
def check_sandbox_requirements() -> bool:
"""Code execution sandbox requires a POSIX OS for Unix domain sockets."""
return SANDBOX_AVAILABLE
if not SANDBOX_AVAILABLE:
return False
try:
from tools.terminal_tool import (
_check_vercel_sandbox_requirements,
_get_env_config,
)
config = _get_env_config()
except Exception:
logger.debug("Could not resolve terminal config for execute_code availability", exc_info=True)
return False
if config.get("env_type") == "vercel_sandbox":
return _check_vercel_sandbox_requirements(config)
return True
# ---------------------------------------------------------------------------
@ -481,12 +498,13 @@ def _get_or_create_env(task_id: str):
cwd = overrides.get("cwd") or config["cwd"]
container_config = None
if env_type in ("docker", "singularity", "modal", "daytona"):
if env_type in ("docker", "singularity", "modal", "daytona", "vercel_sandbox"):
container_config = {
"container_cpu": config.get("container_cpu", 1),
"container_memory": config.get("container_memory", 5120),
"container_disk": config.get("container_disk", 51200),
"container_persistent": config.get("container_persistent", True),
"vercel_runtime": config.get("vercel_runtime", ""),
"docker_volumes": config.get("docker_volumes", []),
"docker_run_as_host_user": config.get("docker_run_as_host_user", False),
}

View File

@ -100,6 +100,10 @@ def _build_provider_env_blocklist() -> frozenset:
"MODAL_TOKEN_ID",
"MODAL_TOKEN_SECRET",
"DAYTONA_API_KEY",
"VERCEL_OIDC_TOKEN",
"VERCEL_TOKEN",
"VERCEL_PROJECT_ID",
"VERCEL_TEAM_ID",
})
return frozenset(blocked)

View File

@ -0,0 +1,627 @@
"""Vercel Sandbox execution environment.
Uses the Vercel Python SDK to run commands in cloud sandboxes through Hermes'
shared ``BaseEnvironment`` shell contract. When persistence is enabled, the
backend stores task-scoped snapshot metadata under ``HERMES_HOME`` and restores
new sandboxes from those snapshots on later task reuse.
"""
from __future__ import annotations
from functools import cache
from dataclasses import dataclass
from datetime import timedelta
import logging
import math
import os
import shlex
import threading
import time
from pathlib import Path
from typing import TYPE_CHECKING, Any
import httpx
from hermes_constants import get_hermes_home
from tools.environments.base import (
BaseEnvironment,
_ThreadedProcessHandle,
_load_json_store,
_save_json_store,
)
from tools.environments.file_sync import (
FileSyncManager,
iter_sync_files,
quoted_rm_command,
)
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from vercel.sandbox import Resources, Sandbox, SandboxStatus, WriteFile
# Remote workspace root of a Vercel sandbox; also the default/fallback cwd.
DEFAULT_VERCEL_CWD = "/vercel/sandbox"
# Disk size (MB) shared with the other container backends; _build_create_params
# rejects any other value because Vercel disk is not configurable here.
_DEFAULT_CONTAINER_DISK_MB = 51200
# Retry budgets for SDK calls that may fail transiently.
_CREATE_RETRY_ATTEMPTS = 3
_WRITE_RETRY_ATTEMPTS = 3
# HTTP status codes treated as retryable by _is_transient_vercel_error.
_TRANSIENT_STATUS_CODES = frozenset({408, 425, 429, 500, 502, 503, 504})
# Linear backoff step used by _retry_vercel_call (step * attempt number).
_RETRY_BACKOFF_STEP = timedelta(milliseconds=100)
# Floors and poll settings for sandbox lifecycle waits.
_MIN_SANDBOX_TIMEOUT = timedelta(minutes=5)
_MIN_RUNNING_WAIT = timedelta(seconds=1)
_RUNNING_WAIT_TIMEOUT = timedelta(seconds=30)
_RUNNING_WAIT_POLL_INTERVAL = timedelta(milliseconds=250)
_STOP_TIMEOUT = timedelta(seconds=15)
_STOP_POLL_INTERVAL = timedelta(milliseconds=500)
# JSON store under HERMES_HOME mapping task_id -> snapshot id.
_SNAPSHOT_STORE_NAME = "vercel_sandbox_snapshots.json"
def _exception_chain(exc: BaseException) -> list[BaseException]:
chain: list[BaseException] = []
current: BaseException | None = exc
seen: set[int] = set()
while current is not None and id(current) not in seen:
chain.append(current)
seen.add(id(current))
current = current.__cause__ or current.__context__
return chain
def _extract_status_code(exc: BaseException) -> int | None:
response = getattr(exc, "response", None)
for value in (getattr(exc, "status_code", None), getattr(response, "status_code", None)):
if isinstance(value, int):
return value
return None
def _is_transient_vercel_error(exc: BaseException) -> bool:
    """Return True when any error in the chain looks retryable.

    Retryable means: a transient HTTP status (429/5xx/etc.), an httpx
    transport failure, or an exception class whose name suggests rate
    limiting or a server-side error.
    """
    for candidate in _exception_chain(exc):
        if _extract_status_code(candidate) in _TRANSIENT_STATUS_CODES:
            return True
        if isinstance(
            candidate,
            (httpx.NetworkError, httpx.ProtocolError, httpx.ReadError),
        ):
            return True
        lowered = type(candidate).__name__.lower()
        if "ratelimit" in lowered or "servererror" in lowered:
            return True
    return False
def _retry_vercel_call(
label: str,
callback,
*,
attempts: int,
):
backoff_seconds = _RETRY_BACKOFF_STEP.total_seconds()
for attempt in range(1, attempts + 1):
try:
return callback()
except Exception as exc:
if attempt >= attempts or not _is_transient_vercel_error(exc):
raise
logger.warning(
"Vercel: %s failed (%s); retrying %d/%d",
label,
exc,
attempt,
attempts,
)
time.sleep(backoff_seconds * attempt)
def _coerce_text(value: Any) -> str:
if value is None:
return ""
if isinstance(value, bytes):
return value.decode("utf-8", errors="replace")
return str(value)
def _extract_result_output(result: Any) -> str:
    """Best-effort extraction of textual output from an SDK result object.

    Falls back to coercing the result itself when it has no callable
    ``output()`` (TypeError also covers a non-callable ``output`` attribute).
    """
    try:
        return _coerce_text(result.output())
    except (AttributeError, TypeError):
        return _coerce_text(result)
def _extract_result_returncode(result: Any) -> int:
try:
exit_code = result.exit_code
except AttributeError:
try:
exit_code = result.returncode
except AttributeError:
return 1
return exit_code if isinstance(exit_code, int) else 1
def _snapshot_store_path() -> Path:
    """Location of the task->snapshot JSON store under HERMES_HOME."""
    return get_hermes_home() / _SNAPSHOT_STORE_NAME


def _load_snapshots() -> dict:
    """Read the snapshot store (presumably {} when missing — see _load_json_store)."""
    return _load_json_store(_snapshot_store_path())


def _save_snapshots(data: dict) -> None:
    """Persist the snapshot store via the shared JSON helper."""
    _save_json_store(_snapshot_store_path(), data)
def _get_snapshot_id(task_id: str) -> str | None:
    """Look up the stored snapshot id for *task_id*; None for blank/missing/invalid entries."""
    if not task_id:
        return None
    candidate = _load_snapshots().get(task_id)
    if isinstance(candidate, str) and candidate:
        return candidate
    return None
def _store_snapshot(task_id: str, snapshot_id: str) -> None:
    """Persist the task_id -> snapshot_id mapping; no-op when either is blank."""
    if task_id and snapshot_id:
        data = _load_snapshots()
        data[task_id] = snapshot_id
        _save_snapshots(data)
def _delete_snapshot(task_id: str, snapshot_id: str | None = None) -> None:
    """Drop the stored snapshot for *task_id*.

    When *snapshot_id* is given, delete only if it matches the stored value,
    so a newer snapshot written for the same task is not clobbered.
    """
    if not task_id:
        return
    data = _load_snapshots()
    current = data.get(task_id)
    if current is None:
        return
    if snapshot_id in (None, current):
        data.pop(task_id, None)
        _save_snapshots(data)
def _extract_snapshot_id(snapshot: Any) -> str | None:
for attr in ("snapshot_id", "snapshotId", "id"):
value = getattr(snapshot, attr, None)
if isinstance(value, str) and value:
return value
if isinstance(snapshot, dict):
for key in ("snapshot_id", "snapshotId", "id"):
value = snapshot.get(key)
if isinstance(value, str) and value:
return value
return None
@cache
def _sandbox_status_type() -> type[SandboxStatus]:
    """Import and memoize the SDK's SandboxStatus enum (import deferred to use time)."""
    from vercel.sandbox import SandboxStatus
    return SandboxStatus


@cache
def _terminal_sandbox_states() -> frozenset[SandboxStatus]:
    """Lifecycle states treated as terminal (sandbox must be recreated)."""
    SandboxStatus = _sandbox_status_type()
    return frozenset(
        {
            SandboxStatus.ABORTED,
            SandboxStatus.FAILED,
            SandboxStatus.STOPPED,
        }
    )
@dataclass(frozen=True, slots=True)
class _SandboxCreateParams:
    """Arguments forwarded to Sandbox.create for both fresh and restored sandboxes."""

    timeout: timedelta
    runtime: str | None = None
    resources: Resources | None = None
class VercelSandboxEnvironment(BaseEnvironment):
    """Vercel cloud sandbox backend."""

    # Shell stdin delivery strategy consumed by BaseEnvironment.
    _stdin_mode = "heredoc"
    def __init__(
        self,
        runtime: str | None = None,
        cwd: str = DEFAULT_VERCEL_CWD,
        timeout: int = 60,
        cpu: float = 1,
        memory: int = 5120,
        disk: int = _DEFAULT_CONTAINER_DISK_MB,
        persistent_filesystem: bool = True,
        task_id: str = "default",
    ):
        """Create (or restore) a sandbox, sync managed files, and start a session.

        Parameters mirror the other container backends; ``disk`` must stay at
        the shared default (``_build_create_params`` raises otherwise).
        """
        # Keep the caller's cwd so "~" / "" can be resolved once the remote
        # home and workspace root are known.
        requested_cwd = cwd
        super().__init__(cwd=cwd, timeout=timeout)
        self._runtime = runtime or None  # blank string -> let the SDK pick its default runtime
        self._persistent = persistent_filesystem
        self._task_id = task_id
        self._requested_cwd = requested_cwd
        self._lock = threading.Lock()
        self._sandbox: Sandbox | None = None
        self._workspace_root = DEFAULT_VERCEL_CWD
        self._remote_home = DEFAULT_VERCEL_CWD
        self._sync_manager: FileSyncManager | None = None
        # Validate resources/disk before any network call.
        self._create_params = self._build_create_params(cpu=cpu, memory=memory, disk=disk)
        self._sandbox = self._create_sandbox()
        # Waits for RUNNING, detects home/workspace root, builds the sync manager.
        self._configure_attached_sandbox(requested_cwd=requested_cwd)
        self._sync_manager.sync(force=True)
        self.init_session()
    def _build_create_params(self, *, cpu: float, memory: int, disk: int) -> _SandboxCreateParams:
        """Translate shared container settings into Sandbox.create parameters.

        Raises:
            ValueError: when *disk* differs from the shared default — the
                Vercel backend does not support a configurable disk size.
        """
        if disk not in (0, _DEFAULT_CONTAINER_DISK_MB):
            raise ValueError(
                "Vercel Sandbox does not support configurable container_disk. "
                "Use the default shared setting."
            )
        from vercel.sandbox import Resources
        # Clamp the sandbox lifetime to the module floor; non-positive
        # tool timeouts are treated as zero before clamping.
        sandbox_timeout = max(
            timedelta(seconds=max(self.timeout, 0)),
            _MIN_SANDBOX_TIMEOUT,
        )
        # NOTE(review): floor() maps fractional cpu < 1 to vcpus=0 — confirm
        # the SDK treats 0 as "use default" rather than rejecting it.
        vcpus = math.floor(cpu) if cpu > 0 else None
        memory_mb = memory if memory > 0 else None
        # Only build a Resources object when at least one field is set.
        resources = (
            Resources(vcpus=vcpus, memory=memory_mb)
            if vcpus is not None or memory_mb is not None
            else None
        )
        return _SandboxCreateParams(
            timeout=sandbox_timeout,
            runtime=self._runtime,
            resources=resources,
        )
def _create_sandbox(self) -> Sandbox:
from vercel.sandbox import Sandbox
snapshot_id = _get_snapshot_id(self._task_id) if self._persistent else None
if snapshot_id:
try:
return _retry_vercel_call(
"sandbox restore",
lambda: Sandbox.create(
timeout=self._create_params.timeout,
runtime=self._create_params.runtime,
resources=self._create_params.resources,
source={"type": "snapshot", "snapshot_id": snapshot_id},
),
attempts=_CREATE_RETRY_ATTEMPTS,
)
except Exception as exc:
logger.warning(
"Vercel: failed to restore snapshot %s for task %s; "
"falling back to a fresh sandbox: %s",
snapshot_id,
self._task_id,
exc,
)
_delete_snapshot(self._task_id, snapshot_id)
params = self._create_params
return _retry_vercel_call(
"sandbox create",
lambda: Sandbox.create(
timeout=params.timeout,
runtime=params.runtime,
resources=params.resources,
),
attempts=_CREATE_RETRY_ATTEMPTS,
)
def _configure_attached_sandbox(self, *, requested_cwd: str) -> None:
self._wait_for_running()
self._workspace_root = self._detect_workspace_root()
self._remote_home = self._detect_remote_home()
if self._remote_home == "/":
container_base = "/.hermes"
else:
container_base = f"{self._remote_home.rstrip('/')}/.hermes"
self._sync_manager = FileSyncManager(
get_files_fn=lambda: iter_sync_files(container_base),
upload_fn=self._vercel_upload,
delete_fn=self._vercel_delete,
bulk_upload_fn=self._vercel_bulk_upload,
bulk_download_fn=self._vercel_bulk_download,
)
if requested_cwd == "~":
self.cwd = self._remote_home
elif requested_cwd in ("", DEFAULT_VERCEL_CWD):
self.cwd = self._workspace_root
else:
self.cwd = requested_cwd
def _detect_workspace_root(self) -> str:
sandbox = self._sandbox
if sandbox is None:
raise RuntimeError("Vercel sandbox is not attached")
cwd = sandbox.sandbox.cwd
return cwd if cwd.startswith("/") else DEFAULT_VERCEL_CWD
def _detect_remote_home(self) -> str:
sandbox = self._sandbox
if sandbox is None:
raise RuntimeError("Vercel sandbox is not attached")
try:
result = sandbox.run_command(
"sh",
["-lc", 'printf %s "$HOME"'],
cwd=self._workspace_root,
)
except Exception as exc:
logger.debug(
"Vercel: home detection failed for task %s: %s",
self._task_id,
exc,
)
return self._workspace_root
home = _extract_result_output(result).strip()
if home.startswith("/"):
return home
return self._workspace_root
def _wait_for_running(self, timeout: timedelta = _RUNNING_WAIT_TIMEOUT) -> None:
sandbox = self._sandbox
if sandbox is None:
raise RuntimeError("Vercel sandbox is not attached")
SandboxStatus = _sandbox_status_type()
status = sandbox.status
if status is None or status == SandboxStatus.RUNNING:
return
if status in _terminal_sandbox_states():
raise RuntimeError(f"Sandbox entered terminal state: {status}")
try:
sandbox.wait_for_status(
SandboxStatus.RUNNING,
timeout=max(timeout, _MIN_RUNNING_WAIT),
poll_interval=_RUNNING_WAIT_POLL_INTERVAL,
)
except TimeoutError as exc:
status = sandbox.status
if status in _terminal_sandbox_states():
raise RuntimeError(f"Sandbox entered terminal state: {status}") from exc
raise RuntimeError(
f"Sandbox did not reach running state (last status: {status})"
) from exc
def _close_sandbox_client(self, sandbox: Sandbox | None) -> None:
if sandbox is None:
return
try:
sandbox.client.close()
except Exception:
pass
def _stop_sandbox(self, sandbox: Sandbox | None) -> None:
if sandbox is None:
return
try:
sandbox.stop(
blocking=True,
timeout=_STOP_TIMEOUT,
poll_interval=_STOP_POLL_INTERVAL,
)
except TypeError:
try:
sandbox.stop()
except Exception:
pass
except Exception:
pass
def _snapshot_sandbox(self, sandbox: Sandbox) -> str | None:
if not self._persistent or not self._task_id:
return None
try:
snapshot = sandbox.snapshot()
except Exception as exc:
logger.warning(
"Vercel: filesystem snapshot failed for task %s: %s",
self._task_id,
exc,
)
return None
snapshot_id = _extract_snapshot_id(snapshot)
if not snapshot_id:
logger.warning(
"Vercel: filesystem snapshot for task %s did not return a snapshot id",
self._task_id,
)
return None
_store_snapshot(self._task_id, snapshot_id)
logger.info(
"Vercel: saved filesystem snapshot %s for task %s",
snapshot_id,
self._task_id,
)
return snapshot_id
def _ensure_sandbox_ready(self) -> None:
sandbox = self._sandbox
requested_cwd = self.cwd or self._requested_cwd or DEFAULT_VERCEL_CWD
if sandbox is None:
self._sandbox = self._create_sandbox()
self._configure_attached_sandbox(requested_cwd=requested_cwd)
return
try:
sandbox.refresh()
except Exception as exc:
logger.warning(
"Vercel: sandbox refresh failed for task %s: %s; recreating",
self._task_id,
exc,
)
self._close_sandbox_client(sandbox)
self._sandbox = self._create_sandbox()
self._configure_attached_sandbox(requested_cwd=requested_cwd)
return
status = sandbox.status
if status in _terminal_sandbox_states():
logger.warning(
"Vercel: sandbox entered state %s for task %s; recreating",
status,
self._task_id,
)
self._close_sandbox_client(sandbox)
self._sandbox = self._create_sandbox()
self._configure_attached_sandbox(requested_cwd=requested_cwd)
return
self._wait_for_running()
def _vercel_upload(self, host_path: str, remote_path: str) -> None:
self._vercel_bulk_upload([(host_path, remote_path)])
def _vercel_bulk_upload(self, files: list[tuple[str, str]]) -> None:
if not files:
return
payload: list[WriteFile] = [
{
"path": remote_path,
"content": Path(host_path).read_bytes(),
}
for host_path, remote_path in files
]
sandbox = self._sandbox
if sandbox is None:
raise RuntimeError("Vercel sandbox is not attached")
_retry_vercel_call(
"write_files",
lambda: sandbox.write_files(payload),
attempts=_WRITE_RETRY_ATTEMPTS,
)
def _vercel_delete(self, remote_paths: list[str]) -> None:
if not remote_paths:
return
sandbox = self._sandbox
if sandbox is None:
raise RuntimeError("Vercel sandbox is not attached")
result = sandbox.run_command(
"bash",
["-lc", quoted_rm_command(remote_paths)],
cwd=self._workspace_root,
)
if _extract_result_returncode(result) != 0:
raise RuntimeError(
f"Vercel delete failed: {_extract_result_output(result).strip()}"
)
def _vercel_bulk_download(self, dest_tar_path: Path) -> None:
remote_hermes = (
"/.hermes"
if self._remote_home == "/"
else f"{self._remote_home.rstrip('/')}/.hermes"
)
archive_member = remote_hermes.lstrip("/")
remote_tar = f"/tmp/.hermes_sync.{os.getpid()}.tar"
sandbox = self._sandbox
if sandbox is None:
raise RuntimeError("Vercel sandbox is not attached")
try:
result = sandbox.run_command(
"bash",
[
"-lc",
f"tar cf {shlex.quote(remote_tar)} -C / {shlex.quote(archive_member)}",
],
cwd=self._workspace_root,
)
if _extract_result_returncode(result) != 0:
raise RuntimeError(
f"Vercel bulk download failed: {_extract_result_output(result).strip()}"
)
sandbox.download_file(remote_tar, dest_tar_path)
finally:
try:
sandbox.run_command(
"bash",
["-lc", f"rm -f {shlex.quote(remote_tar)}"],
cwd=self._workspace_root,
)
except Exception:
pass
def _before_execute(self) -> None:
with self._lock:
self._ensure_sandbox_ready()
if self._sync_manager is not None:
self._sync_manager.sync()
def _run_bash(
self,
cmd_string: str,
*,
login: bool = False,
timeout: int = 120,
stdin_data: str | None = None,
):
del timeout
del stdin_data
sandbox = self._sandbox
if sandbox is None:
raise RuntimeError("Vercel sandbox is not attached")
workspace_root = self._workspace_root
lock = self._lock
def cancel() -> None:
with lock:
self._stop_sandbox(sandbox)
def exec_fn() -> tuple[str, int]:
result = sandbox.run_command(
"bash",
["-lc" if login else "-c", cmd_string],
cwd=workspace_root,
)
return _extract_result_output(result), _extract_result_returncode(result)
return _ThreadedProcessHandle(exec_fn, cancel_fn=cancel)
def cleanup(self):
with self._lock:
sandbox = self._sandbox
sync_manager = self._sync_manager
if sandbox is not None and sync_manager is not None:
try:
sync_manager.sync_back()
except Exception as exc:
logger.warning(
"Vercel: sync_back failed for task %s: %s",
self._task_id,
exc,
)
self._sandbox = None
self._sync_manager = None
if sandbox is None:
return
snapshot_id = self._snapshot_sandbox(sandbox)
# Always stop the sandbox during cleanup to avoid resource leaks,
# matching the Modal and Daytona patterns.
self._stop_sandbox(sandbox)
self._close_sandbox_client(sandbox)

View File

@ -380,12 +380,13 @@ def _get_file_ops(task_id: str = "default") -> ShellFileOperations:
logger.info("Creating new %s environment for task %s...", env_type, task_id[:8])
container_config = None
if env_type in ("docker", "singularity", "modal", "daytona"):
if env_type in ("docker", "singularity", "modal", "daytona", "vercel_sandbox"):
container_config = {
"container_cpu": config.get("container_cpu", 1),
"container_memory": config.get("container_memory", 5120),
"container_disk": config.get("container_disk", 51200),
"container_persistent": config.get("container_persistent", True),
"vercel_runtime": config.get("vercel_runtime", ""),
"docker_volumes": config.get("docker_volumes", []),
"docker_mount_cwd_to_workspace": config.get("docker_mount_cwd_to_workspace", False),
"docker_forward_env": config.get("docker_forward_env", []),

View File

@ -101,7 +101,9 @@ _PLATFORM_MAP = {
}
_ENV_VAR_NAME_RE = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*$")
_EXCLUDED_SKILL_DIRS = frozenset((".git", ".github", ".hub"))
_REMOTE_ENV_BACKENDS = frozenset({"docker", "singularity", "modal", "ssh", "daytona"})
_REMOTE_ENV_BACKENDS = frozenset(
{"docker", "singularity", "modal", "ssh", "daytona", "vercel_sandbox"}
)
_secret_capture_callback = None

View File

@ -2,16 +2,19 @@
"""
Terminal Tool Module
A terminal tool that executes commands in local, Docker, Modal, SSH, Singularity, and Daytona environments.
Supports local execution, containerized backends, and Modal cloud sandboxes, including managed gateway mode.
A terminal tool that executes commands in local, Docker, Modal, SSH,
Singularity, Daytona, and Vercel Sandbox environments. Supports local
execution, containerized backends, and cloud sandboxes, including managed
Modal mode.
Environment Selection (via TERMINAL_ENV environment variable):
- "local": Execute directly on the host machine (default, fastest)
- "docker": Execute in Docker containers (isolated, requires Docker)
- "modal": Execute in Modal cloud sandboxes (direct Modal or managed gateway)
- "vercel_sandbox": Execute in Vercel Sandbox cloud sandboxes
Features:
- Multiple execution backends (local, docker, modal)
- Multiple execution backends (local, docker, modal, vercel_sandbox)
- Background task support
- VM/container lifecycle management
- Automatic cleanup after inactivity
@ -114,6 +117,68 @@ DISK_USAGE_WARNING_THRESHOLD_GB = _safe_parse_import_env(
float,
"number",
)
_VERCEL_SANDBOX_DEFAULT_CWD = "/vercel/sandbox"
_SUPPORTED_VERCEL_RUNTIMES = ("node24", "node22", "python3.13")
def _is_supported_vercel_runtime(runtime: str) -> bool:
    """Return True for an empty runtime (provider default) or a known runtime name."""
    if not runtime:
        return True
    return runtime in _SUPPORTED_VERCEL_RUNTIMES
def _check_vercel_sandbox_requirements(config: dict[str, Any]) -> bool:
    """Validate Vercel Sandbox terminal backend requirements.

    Checks, in order: a supported runtime name, no custom disk sizing, the
    ``vercel`` SDK being importable, and a usable auth configuration
    (either VERCEL_OIDC_TOKEN, or all of VERCEL_TOKEN / VERCEL_PROJECT_ID /
    VERCEL_TEAM_ID).  Returns False (after logging an error) on the first
    unmet requirement.
    """
    selected_runtime = (config.get("vercel_runtime") or "").strip()
    if not _is_supported_vercel_runtime(selected_runtime):
        logger.error(
            "Vercel Sandbox runtime %r is not supported. "
            "Set TERMINAL_VERCEL_RUNTIME to one of: %s.",
            selected_runtime,
            ", ".join(_SUPPORTED_VERCEL_RUNTIMES),
        )
        return False
    disk_mb = config.get("container_disk", 51200)
    if disk_mb not in (0, 51200):
        logger.error(
            "Vercel Sandbox does not support custom TERMINAL_CONTAINER_DISK=%s. "
            "Use the default shared setting (51200 MB).",
            disk_mb,
        )
        return False
    if importlib.util.find_spec("vercel") is None:
        logger.error(
            "vercel is required for the Vercel Sandbox terminal backend: pip install vercel"
        )
        return False
    # OIDC short-circuits: a short-lived token is accepted for one-off
    # local development regardless of the access-token trio.
    if os.getenv("VERCEL_OIDC_TOKEN"):
        return True
    token_trio = [
        bool(os.getenv(name))
        for name in ("VERCEL_TOKEN", "VERCEL_PROJECT_ID", "VERCEL_TEAM_ID")
    ]
    if all(token_trio):
        return True
    if any(token_trio):
        # Partial token config is an explicit error, not a silent fallback.
        logger.error(
            "Vercel Sandbox backend selected with token auth, but "
            "VERCEL_TOKEN, VERCEL_PROJECT_ID, and VERCEL_TEAM_ID must all "
            "be set together. VERCEL_OIDC_TOKEN is supported for one-off "
            "local development only."
        )
        return False
    logger.error(
        "Vercel Sandbox backend selected but no supported auth configuration "
        "was found. Set VERCEL_TOKEN, VERCEL_PROJECT_ID, and VERCEL_TEAM_ID "
        "for normal use. VERCEL_OIDC_TOKEN is supported for one-off local "
        "development only."
    )
    return False
def _check_disk_usage_warning():
@ -744,9 +809,10 @@ def _transform_sudo_command(command: str | None) -> tuple[str | None, str | None
should prepend sudo_stdin to their stdin_data and pass the merged bytes to
Popen's stdin pipe.
Callers that cannot pipe subprocess stdin (modal, daytona) must embed the
password in the command string themselves; see their execute() methods for
how they handle the non-None sudo_stdin case.
Callers that cannot pipe subprocess stdin (modal, daytona,
vercel_sandbox) must embed the password in the command string
themselves; see their execute() methods for how they handle the
non-None sudo_stdin case.
If SUDO_PASSWORD is not set and in interactive mode (HERMES_INTERACTIVE=1):
Prompts user for password with 45s timeout, caches for session.
@ -910,13 +976,15 @@ def _get_env_config() -> Dict[str, Any]:
mount_docker_cwd = os.getenv("TERMINAL_DOCKER_MOUNT_CWD_TO_WORKSPACE", "false").lower() in ("true", "1", "yes")
# Default cwd: local uses the host's current directory, everything
# else starts in the user's home (~ resolves to whatever account
# is running inside the container/remote).
# Default cwd: local uses the host's current directory, ssh uses the
# remote home, Vercel uses its documented workspace root, and everything
# else starts in the backend's default root-like cwd.
if env_type == "local":
default_cwd = os.getcwd()
elif env_type == "ssh":
default_cwd = "~"
elif env_type == "vercel_sandbox":
default_cwd = _VERCEL_SANDBOX_DEFAULT_CWD
else:
default_cwd = "/root"
@ -938,7 +1006,7 @@ def _get_env_config() -> Dict[str, Any]:
):
host_cwd = candidate
cwd = "/workspace"
elif env_type in ("modal", "docker", "singularity", "daytona") and cwd:
elif env_type in ("modal", "docker", "singularity", "daytona", "vercel_sandbox") and cwd:
# Host paths and relative paths that won't work inside containers
is_host_path = any(cwd.startswith(p) for p in host_prefixes)
is_relative = not os.path.isabs(cwd) # e.g. "." or "src/"
@ -956,6 +1024,7 @@ def _get_env_config() -> Dict[str, Any]:
"singularity_image": os.getenv("TERMINAL_SINGULARITY_IMAGE", f"docker://{default_image}"),
"modal_image": os.getenv("TERMINAL_MODAL_IMAGE", default_image),
"daytona_image": os.getenv("TERMINAL_DAYTONA_IMAGE", default_image),
"vercel_runtime": os.getenv("TERMINAL_VERCEL_RUNTIME", "").strip(),
"cwd": cwd,
"host_cwd": host_cwd,
"docker_mount_cwd_to_workspace": mount_docker_cwd,
@ -974,7 +1043,8 @@ def _get_env_config() -> Dict[str, Any]:
os.getenv("TERMINAL_PERSISTENT_SHELL", "true"),
).lower() in ("true", "1", "yes"),
"local_persistent": os.getenv("TERMINAL_LOCAL_PERSISTENT", "false").lower() in ("true", "1", "yes"),
# Container resource config (applies to docker, singularity, modal, daytona -- ignored for local/ssh)
# Container resource config (applies to docker, singularity, modal,
# daytona, and vercel_sandbox -- ignored for local/ssh)
"container_cpu": _parse_env_var("TERMINAL_CONTAINER_CPU", "1", float, "number"),
"container_memory": _parse_env_var("TERMINAL_CONTAINER_MEMORY", "5120"), # MB (default 5GB)
"container_disk": _parse_env_var("TERMINAL_CONTAINER_DISK", "51200"), # MB (default 50GB)
@ -1002,8 +1072,9 @@ def _create_environment(env_type: str, image: str, cwd: str, timeout: int,
Create an execution environment for sandboxed command execution.
Args:
env_type: One of "local", "docker", "singularity", "modal", "daytona", "ssh"
image: Docker/Singularity/Modal image name (ignored for local/ssh)
env_type: One of "local", "docker", "singularity", "modal",
"daytona", "vercel_sandbox", "ssh"
image: Docker/Singularity/Modal image name (ignored for local/ssh/vercel)
cwd: Working directory
timeout: Default command timeout
ssh_config: SSH connection config (for env_type="ssh")
@ -1107,6 +1178,21 @@ def _create_environment(env_type: str, image: str, cwd: str, timeout: int,
persistent_filesystem=persistent, task_id=task_id,
)
elif env_type == "vercel_sandbox":
from tools.environments.vercel_sandbox import (
VercelSandboxEnvironment as _VercelSandboxEnvironment,
)
return _VercelSandboxEnvironment(
runtime=cc.get("vercel_runtime") or None,
cwd=cwd,
timeout=timeout,
cpu=cpu,
memory=memory,
disk=disk,
persistent_filesystem=persistent,
task_id=task_id,
)
elif env_type == "ssh":
if not ssh_config or not ssh_config.get("host") or not ssh_config.get("user"):
raise ValueError("SSH environment requires ssh_host and ssh_user to be configured")
@ -1120,7 +1206,10 @@ def _create_environment(env_type: str, image: str, cwd: str, timeout: int,
)
else:
raise ValueError(f"Unknown environment type: {env_type}. Use 'local', 'docker', 'singularity', 'modal', 'daytona', or 'ssh'")
raise ValueError(
f"Unknown environment type: {env_type}. Use 'local', 'docker', "
f"'singularity', 'modal', 'daytona', 'vercel_sandbox', or 'ssh'"
)
def _cleanup_inactive_envs(lifetime_seconds: int = 300):
@ -1654,13 +1743,14 @@ def terminal_tool(
}
container_config = None
if env_type in ("docker", "singularity", "modal", "daytona"):
if env_type in ("docker", "singularity", "modal", "daytona", "vercel_sandbox"):
container_config = {
"container_cpu": config.get("container_cpu", 1),
"container_memory": config.get("container_memory", 5120),
"container_disk": config.get("container_disk", 51200),
"container_persistent": config.get("container_persistent", True),
"modal_mode": config.get("modal_mode", "auto"),
"vercel_runtime": config.get("vercel_runtime", ""),
"docker_volumes": config.get("docker_volumes", []),
"docker_mount_cwd_to_workspace": config.get("docker_mount_cwd_to_workspace", False),
"docker_forward_env": config.get("docker_forward_env", []),
@ -1990,10 +2080,10 @@ def terminal_tool(
def check_terminal_requirements() -> bool:
"""Check if all requirements for the terminal tool are met."""
config = _get_env_config()
env_type = config["env_type"]
try:
config = _get_env_config()
env_type = config["env_type"]
if env_type == "local":
return True
@ -2077,6 +2167,9 @@ def check_terminal_requirements() -> bool:
return True
elif env_type == "vercel_sandbox":
return _check_vercel_sandbox_requirements(config)
elif env_type == "daytona":
from daytona import Daytona # noqa: F401 — SDK presence check
return os.getenv("DAYTONA_API_KEY") is not None
@ -2084,7 +2177,7 @@ def check_terminal_requirements() -> bool:
else:
logger.error(
"Unknown TERMINAL_ENV '%s'. Use one of: local, docker, singularity, "
"modal, daytona, ssh.",
"modal, daytona, vercel_sandbox, ssh.",
env_type,
)
return False
@ -2124,7 +2217,11 @@ if __name__ == "__main__":
print("\nEnvironment Variables:")
default_img = "nikolaik/python-nodejs:python3.11-nodejs20"
print(f" TERMINAL_ENV: {os.getenv('TERMINAL_ENV', 'local')} (local/docker/singularity/modal/daytona/ssh)")
print(
" TERMINAL_ENV: "
f"{os.getenv('TERMINAL_ENV', 'local')} "
"(local/docker/singularity/modal/daytona/vercel_sandbox/ssh)"
)
print(f" TERMINAL_DOCKER_IMAGE: {os.getenv('TERMINAL_DOCKER_IMAGE', default_img)}")
print(f" TERMINAL_SINGULARITY_IMAGE: {os.getenv('TERMINAL_SINGULARITY_IMAGE', f'docker://{default_img}')}")
print(f" TERMINAL_MODAL_IMAGE: {os.getenv('TERMINAL_MODAL_IMAGE', default_img)}")

41
uv.lock generated
View File

@ -1934,6 +1934,7 @@ all = [
{ name = "sounddevice" },
{ name = "ty" },
{ name = "uvicorn", extra = ["standard"] },
{ name = "vercel" },
]
bedrock = [
{ name = "boto3" },
@ -2025,6 +2026,9 @@ termux = [
tts-premium = [
{ name = "elevenlabs" },
]
vercel = [
{ name = "vercel" },
]
voice = [
{ name = "faster-whisper" },
{ name = "numpy" },
@ -2089,6 +2093,7 @@ requires-dist = [
{ name = "hermes-agent", extras = ["slack"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["sms"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["tts-premium"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["vercel"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["voice"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["web"], marker = "extra == 'all'" },
{ name = "honcho-ai", marker = "extra == 'honcho'", specifier = ">=2.0.1,<3" },
@ -2133,10 +2138,11 @@ requires-dist = [
{ name = "ty", marker = "extra == 'dev'", specifier = ">=0.0.1a29,<0.0.22" },
{ name = "uvicorn", extras = ["standard"], marker = "extra == 'rl'", specifier = ">=0.24.0,<1" },
{ name = "uvicorn", extras = ["standard"], marker = "extra == 'web'", specifier = ">=0.24.0,<1" },
{ name = "vercel", marker = "extra == 'vercel'", specifier = ">=0.5.7,<0.6.0" },
{ name = "wandb", marker = "extra == 'rl'", specifier = ">=0.15.0,<1" },
{ name = "yc-bench", marker = "python_full_version >= '3.12' and extra == 'yc-bench'", git = "https://github.com/collinear-ai/yc-bench.git?rev=bfb0c88062450f46341bd9a5298903fc2e952a5c" },
]
provides-extras = ["modal", "daytona", "dev", "messaging", "cron", "slack", "matrix", "cli", "tts-premium", "voice", "pty", "honcho", "mcp", "homeassistant", "sms", "acp", "mistral", "bedrock", "termux", "dingtalk", "feishu", "web", "rl", "yc-bench", "all"]
provides-extras = ["modal", "daytona", "vercel", "dev", "messaging", "cron", "slack", "matrix", "cli", "tts-premium", "voice", "pty", "honcho", "mcp", "homeassistant", "sms", "acp", "mistral", "bedrock", "termux", "dingtalk", "feishu", "web", "rl", "yc-bench", "all"]
[[package]]
name = "hf-transfer"
@ -5339,6 +5345,39 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" },
]
[[package]]
name = "vercel"
version = "0.5.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "cbor2" },
{ name = "httpx" },
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "vercel-workers", marker = "python_full_version >= '3.12'" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d7/68/a671ebc656afbb5e25fb88c681b61511cc13670ea771c87b2f711782022b/vercel-0.5.7.tar.gz", hash = "sha256:8070ea1b33962adfed98498f9273f24ea2066a20c74d38643d479d8280801c6e", size = 118597, upload-time = "2026-04-15T17:58:20.424Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/2e/bacf1ccc0ec95464a68398e64bf5e36f859cd51f3e379623f103802f85f1/vercel-0.5.7-py3-none-any.whl", hash = "sha256:90eb2689c34e403db2170fec3eb47e1a91092c200d91baf4b4501fb3e2a44d28", size = 139698, upload-time = "2026-04-15T17:58:18.945Z" },
]
[[package]]
name = "vercel-workers"
version = "0.0.16"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio", marker = "python_full_version >= '3.12'" },
{ name = "httpx", marker = "python_full_version >= '3.12'" },
{ name = "python-dotenv", marker = "python_full_version >= '3.12'" },
{ name = "vercel", marker = "python_full_version >= '3.12'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/73/d8/17ba256fceff42be231ca8ff0567dcf2da54ee8de633e949fa08b9403b1f/vercel_workers-0.0.16.tar.gz", hash = "sha256:38df45dbf42fbae39ffa0e419f0908bf1beb047e38fc5ddd0a479feac340fb8c", size = 51615, upload-time = "2026-04-13T21:23:27.649Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/65/3a/0137d5b157845e1d41a70130d8dce8ba15d8712f34619693cda04ecb8f02/vercel_workers-0.0.16-py3-none-any.whl", hash = "sha256:542be839e46e236a68cc308695ccc3c970d76de72c978d7f416cc6ce09688896", size = 50141, upload-time = "2026-04-13T21:23:28.652Z" },
]
[[package]]
name = "wandb"
version = "0.25.1"

View File

@ -83,11 +83,11 @@ Leaving these unset keeps the legacy defaults (`HERMES_API_TIMEOUT=1800`s, `HERM
## Terminal Backend Configuration
Hermes supports six terminal backends. Each determines where the agent's shell commands actually execute — your local machine, a Docker container, a remote server via SSH, a Modal cloud sandbox, a Daytona workspace, or a Singularity/Apptainer container.
Hermes supports seven terminal backends. Each determines where the agent's shell commands actually execute — your local machine, a Docker container, a remote server via SSH, a Modal cloud sandbox, a Daytona workspace, a Vercel Sandbox, or a Singularity/Apptainer container.
```yaml
terminal:
backend: local # local | docker | ssh | modal | daytona | singularity
backend: local # local | docker | ssh | modal | daytona | vercel_sandbox | singularity
cwd: "." # Working directory ("." = current dir for local, "/root" for containers)
timeout: 180 # Per-command timeout in seconds
env_passthrough: [] # Env var names to forward to sandboxed execution (terminal + execute_code)
@ -96,7 +96,7 @@ terminal:
daytona_image: "nikolaik/python-nodejs:python3.11-nodejs20" # Container image for Daytona backend
```
For cloud sandboxes such as Modal and Daytona, `container_persistent: true` means Hermes will try to preserve filesystem state across sandbox recreation. It does not promise that the same live sandbox, PID space, or background processes will still be running later.
For cloud sandboxes such as Modal, Daytona, and Vercel Sandbox, `container_persistent: true` means Hermes will try to preserve filesystem state across sandbox recreation. It does not promise that the same live sandbox, PID space, or background processes will still be running later.
### Backend Overview
@ -107,6 +107,7 @@ For cloud sandboxes such as Modal and Daytona, `container_persistent: true` mean
| **ssh** | Remote server via SSH | Network boundary | Remote dev, powerful hardware |
| **modal** | Modal cloud sandbox | Full (cloud VM) | Ephemeral cloud compute, evals |
| **daytona** | Daytona workspace | Full (cloud container) | Managed cloud dev environments |
| **vercel_sandbox** | Vercel Sandbox | Full (cloud microVM) | Cloud execution with snapshot-backed filesystem persistence |
| **singularity** | Singularity/Apptainer container | Namespaces (--containall) | HPC clusters, shared machines |
### Local Backend
@ -223,6 +224,49 @@ terminal:
**Disk limit:** Daytona enforces a 10 GiB maximum. Requests above this are capped with a warning.
### Vercel Sandbox Backend
Runs commands in a [Vercel Sandbox](https://vercel.com/docs/vercel-sandbox) cloud microVM. Hermes uses the normal terminal and file tool surfaces; there are no Vercel-specific model-facing tools.
```yaml
terminal:
backend: vercel_sandbox
vercel_runtime: node24 # node24 | node22 | python3.13
cwd: /vercel/sandbox # default workspace root
container_persistent: true # Snapshot/restore filesystem
container_disk: 51200 # Shared default only; custom disk is unsupported
```
**Required install:** Install the optional SDK extra:
```bash
pip install 'hermes-agent[vercel]'
```
**Required authentication:** Configure access-token auth with all three of `VERCEL_TOKEN`, `VERCEL_PROJECT_ID`, and `VERCEL_TEAM_ID`. This is the supported setup for deployments and normal long-running Hermes processes on Render, Railway, Docker, and similar hosts.
For one-off local development, Hermes also accepts short-lived Vercel OIDC tokens:
```bash
VERCEL_OIDC_TOKEN="$(vc project token <project-name>)" hermes chat
```
From a linked Vercel project directory, you can omit the project name:
```bash
VERCEL_OIDC_TOKEN="$(vc project token)" hermes chat
```
OIDC tokens are short-lived and should not be used as the documented deployment path.
**Runtime:** `terminal.vercel_runtime` supports `node24`, `node22`, and `python3.13`. If unset, Hermes defaults to `node24`.
**Persistence:** When `container_persistent: true`, Hermes snapshots the sandbox filesystem during cleanup and restores a later sandbox for the same task from that snapshot. Snapshot contents can include Hermes-synced credentials, skills, and cache files that were copied into the sandbox. This preserves filesystem state only; it does not preserve live sandbox identity, PID space, shell state, or running background processes.
**Background commands:** `terminal(background=true)` uses Hermes' generic non-local background process flow. You can spawn, poll, wait, view logs, and kill processes through the normal process tool while the sandbox is alive. Hermes does not provide native Vercel detached-process recovery after cleanup or restart.
**Disk sizing:** Vercel Sandbox does not currently support Hermes' `container_disk` resource knob. Leave `container_disk` unset or at the shared default `51200`; non-default values fail diagnostics and backend creation instead of being silently ignored.
### Singularity/Apptainer Backend
Runs commands in a [Singularity/Apptainer](https://apptainer.org) container. Designed for HPC clusters and shared machines where Docker isn't available.

View File

@ -64,13 +64,14 @@ The terminal tool can execute commands in different environments:
| `singularity` | HPC containers | Cluster computing, rootless |
| `modal` | Cloud execution | Serverless, scale |
| `daytona` | Cloud sandbox workspace | Persistent remote dev environments |
| `vercel_sandbox` | Vercel Sandbox cloud microVM | Cloud execution with snapshot-backed filesystem persistence |
### Configuration
```yaml
# In ~/.hermes/config.yaml
terminal:
backend: local # or: docker, ssh, singularity, modal, daytona
backend: local # or: docker, ssh, singularity, modal, daytona, vercel_sandbox
cwd: "." # Working directory
timeout: 180 # Command timeout in seconds
```
@ -117,13 +118,41 @@ modal setup
hermes config set terminal.backend modal
```
### Vercel Sandbox
```bash
pip install 'hermes-agent[vercel]'
hermes config set terminal.backend vercel_sandbox
hermes config set terminal.vercel_runtime node24
```
Authenticate with all three of `VERCEL_TOKEN`, `VERCEL_PROJECT_ID`, and `VERCEL_TEAM_ID`. This access-token setup is the supported path for deployments and normal long-running Hermes processes on Render, Railway, Docker, and similar hosts. Supported runtimes are `node24`, `node22`, and `python3.13`; Hermes defaults to `/vercel/sandbox` as the remote workspace root.
For one-off local development, Hermes also accepts short-lived Vercel OIDC tokens:
```bash
VERCEL_OIDC_TOKEN="$(vc project token <project-name>)" hermes chat
```
From a linked Vercel project directory:
```bash
VERCEL_OIDC_TOKEN="$(vc project token)" hermes chat
```
With `container_persistent: true`, Hermes uses Vercel snapshots to preserve filesystem state across sandbox recreation for the same task. This can include Hermes-synced credentials, skills, and cache files inside the sandbox. Snapshots do not preserve live processes, PID space, or the same live sandbox identity.
Background terminal commands use Hermes' generic non-local process flow: spawn, poll, wait, log, and kill work through the normal process tool while the sandbox is alive, but Hermes does not provide native Vercel detached-process recovery after cleanup or restart.
Leave `container_disk` unset or at the shared default `51200`; custom disk sizing is unsupported for Vercel Sandbox and will fail diagnostics/backend creation.
### Container Resources
Configure CPU, memory, disk, and persistence for all container backends:
```yaml
terminal:
backend: docker # or singularity, modal, daytona
backend: docker # or singularity, modal, daytona, vercel_sandbox
container_cpu: 1 # CPU cores (default: 1)
container_memory: 5120 # Memory in MB (default: 5GB)
container_disk: 51200 # Disk in MB (default: 50GB)