# Postgres
POSTGRES_USER=
POSTGRES_PASSWORD=
POSTGRES_DB=molecule
DATABASE_URL=postgres://USER:PASS@postgres:5432/molecule?sslmode=disable

# Redis
REDIS_URL=redis://redis:6379

# Platform
PORT=8080
SECRETS_ENCRYPTION_KEY= # 32-byte key (raw or base64). Leave empty for plaintext (dev only).
CONFIGS_DIR= # Path to workspace-configs-templates/ (auto-discovered if empty)
PLUGINS_DIR= # Path to plugins/ directory (default: /plugins in container)

# Plugin install safeguards (POST /workspaces/:id/plugins)
# All three bound the cost of a single install so a slow/malicious
# source can't tie up a handler. Defaults are sane for typical use.
PLUGIN_INSTALL_BODY_MAX_BYTES=65536 # max request body size (default: 64 KiB)
PLUGIN_INSTALL_FETCH_TIMEOUT=5m # duration string; whole fetch+copy deadline
PLUGIN_INSTALL_MAX_DIR_BYTES=104857600 # max staged-tree size (default: 100 MiB)

# Phase 30.7 — remote-agent liveness threshold. Workspaces with
# runtime='external' are marked offline if their last_heartbeat_at is
# older than this many seconds. Slightly larger than the 60s Redis TTL
# so transient WAN hiccups don't flap online/offline. Set to 0 to use
# the built-in default (90s).
REMOTE_LIVENESS_STALE_AFTER=90

# Canvas
NEXT_PUBLIC_PLATFORM_URL=http://localhost:8080
NEXT_PUBLIC_WS_URL=ws://localhost:8080/ws

# Workspace Runtime
ANTHROPIC_API_KEY=
OPENROUTER_API_KEY= # OpenRouter API key (openrouter.ai). Use with model: openrouter:anthropic/claude-3.5-haiku
GROQ_API_KEY= # Groq API key (console.groq.com). Use with model: groq:llama-3.3-70b-versatile
CEREBRAS_API_KEY= # Cerebras API key (cloud.cerebras.ai). Use with model: cerebras:llama3.1-8b
GOOGLE_API_KEY= # Google AI API key (aistudio.google.com). Use with model: google_genai:gemini-2.5-flash
MAX_TOKENS=2048 # Max output tokens for OpenRouter requests (default: 2048)
LANGGRAPH_RECURSION_LIMIT=500 # LangGraph/DeepAgents max ReAct steps per turn (lib default: 25; raised to 500 — PM fan-out to 6+ reports + synthesis routinely exceeds 100)
MOLECULE_IN_DOCKER= # Set when running the platform inside Docker (accepts 1/0, true/false — anything strconv.ParseBool recognises). Triggers A2A proxy to rewrite 127.0.0.1: agent URLs to Docker bridge hostnames. Auto-detected via /.dockerenv; only set if detection fails (e.g. Podman, custom runtimes) or to force off.
MODEL_PROVIDER=anthropic:claude-sonnet-4-6 # Format: provider:model. Providers: anthropic, openai, openrouter, groq, cerebras, google_genai, ollama

# Social Channels (optional — configure per-workspace via API or Canvas)
TELEGRAM_BOT_TOKEN= # Telegram Bot API token (talk to @BotFather). Used as default for new Telegram channels.

# Langfuse (optional observability)
LANGFUSE_HOST=http://langfuse-web:3000
LANGFUSE_PUBLIC_KEY=
LANGFUSE_SECRET_KEY=

# ---- Operator identity (for org-templates/reno-stars/, see OPERATOR_NOTES.md) ----
# These are NOT consumed by the platform itself — they're documented here so
# operators of the reno-stars template (and any future operator-personalised
# template) know what to set as global_secrets. The platform injects every
# global_secret into every workspace container as an env var; the agent
# system-prompts reference them via ${VAR_NAME}.
OPERATOR_EMAIL= # e.g. you@example.com
OPERATOR_PHONE= # e.g. 555-123-4567 (display only, not used for SMS)
OPERATOR_TELEGRAM_ID= # numeric Telegram user ID (for bot DMs)
GADS_MCC_ID= # Google Ads MCC (manager) account ID, format 123-456-7890
GADS_CUSTOMER_ID= # Google Ads child customer ID, format 987-654-3210
GCP_PROJECT_ID= # Google Cloud project ID (e.g. my-website-123456)
GSC_SERVICE_ACCOUNT= # Search Console reporter service account email