Merge pull request #947 from Molecule-AI/chore/final-cleanup
chore: final cleanup — remove internal tooling, gitignore local config
This commit is contained in:
commit
83c5fd1060
2
.gitignore
vendored
2
.gitignore
vendored
@ -83,6 +83,8 @@ redis_data/
|
||||
# Claude Code (local agent config — not shared)
|
||||
.claude/
|
||||
CLAUDE.md
|
||||
.mcp.json
|
||||
test-results/
|
||||
|
||||
# Workspace instance configs (auto-generated by provisioner, not templates)
|
||||
workspace-configs-templates/ws-*
|
||||
|
||||
@ -1,14 +1,5 @@
|
||||
{
|
||||
"mcpServers": {
|
||||
"awareness-memory": {
|
||||
"type": "stdio",
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@awareness-sdk/local",
|
||||
"mcp"
|
||||
]
|
||||
},
|
||||
"molecule": {
|
||||
"type": "stdio",
|
||||
"command": "npx",
|
||||
@ -1,114 +0,0 @@
|
||||
import { defineConfig } from "vitepress";

// VitePress site configuration for the Molecule AI documentation site.
export default defineConfig({
  title: "Molecule AI",
  description: "The Organizational Operating System for AI Agents",
  // Favicon injected into every page's <head>.
  head: [["link", { rel: "icon", href: "/assets/branding/molecule-icon.png" }]],

  themeConfig: {
    logo: "/assets/branding/molecule-icon.png",

    // Top navigation bar.
    nav: [
      { text: "Guide", link: "/quickstart" },
      { text: "Architecture", link: "/architecture/architecture" },
      { text: "API", link: "/api-protocol/platform-api" },
      { text: "GitHub", link: "https://github.com/molecule-monorepo/molecule" },
    ],

    // Sidebar groups; groups with `collapsed: true` start folded.
    sidebar: [
      {
        text: "Getting Started",
        items: [
          { text: "Overview", link: "/product/overview" },
          { text: "Quickstart", link: "/quickstart" },
          { text: "Core Concepts", link: "/product/core-concepts" },
        ],
      },
      {
        text: "Architecture",
        collapsed: false,
        items: [
          { text: "System Architecture", link: "/architecture/architecture" },
          { text: "Technical Documentation", link: "/architecture/molecule-technical-doc" },
          { text: "Database Schema", link: "/architecture/database-schema" },
          { text: "Workspace Tiers", link: "/architecture/workspace-tiers" },
          { text: "Provisioner", link: "/architecture/provisioner" },
          { text: "Memory System", link: "/architecture/memory" },
          { text: "Event Log", link: "/architecture/event-log" },
          { text: "Technology Choices", link: "/architecture/technology-choices" },
        ],
      },
      {
        text: "API & Protocols",
        collapsed: false,
        items: [
          { text: "Platform API", link: "/api-protocol/platform-api" },
          { text: "A2A Protocol", link: "/api-protocol/a2a-protocol" },
          { text: "Communication Rules", link: "/api-protocol/communication-rules" },
          { text: "WebSocket Events", link: "/api-protocol/websocket-events" },
          { text: "Registry & Heartbeat", link: "/api-protocol/registry-and-heartbeat" },
        ],
      },
      {
        text: "Agent Runtime",
        collapsed: false,
        items: [
          { text: "Workspace Runtime", link: "/agent-runtime/workspace-runtime" },
          { text: "CLI Runtime", link: "/agent-runtime/cli-runtime" },
          { text: "Config Format", link: "/agent-runtime/config-format" },
          { text: "Agent Card", link: "/agent-runtime/agent-card" },
          { text: "Skills System", link: "/agent-runtime/skills" },
          { text: "Team Expansion", link: "/agent-runtime/team-expansion" },
          { text: "System Prompt", link: "/agent-runtime/system-prompt-structure" },
          { text: "Bundle System", link: "/agent-runtime/bundle-system" },
        ],
      },
      {
        text: "Frontend",
        collapsed: true,
        items: [
          { text: "Canvas Engine", link: "/frontend/canvas" },
        ],
      },
      {
        text: "Development",
        collapsed: true,
        items: [
          { text: "Local Development", link: "/development/local-development" },
          { text: "Build Order", link: "/development/build-order" },
          { text: "Observability", link: "/development/observability" },
          { text: "Code Sandbox", link: "/development/code-sandbox" },
          { text: "Constraints & Rules", link: "/development/constraints-and-rules" },
        ],
      },
      {
        text: "Product",
        collapsed: true,
        items: [
          { text: "Product Narrative", link: "/product/molecule-product-doc" },
          { text: "Landing Messaging Report", link: "/product/landing-messaging-report" },
          { text: "PRD", link: "/product/PRD" },
          { text: "SaaS Upgrade Path", link: "/product/saas-upgrade" },
        ],
      },
    ],

    // Built-in client-side search — no external search service required.
    search: {
      provider: "local",
    },

    // "Edit this page" link shown at the bottom of each doc page.
    editLink: {
      pattern: "https://github.com/molecule-monorepo/molecule/edit/main/docs/:path",
      text: "Edit this page on GitHub",
    },

    footer: {
      message: "Released under the MIT License.",
      copyright: "Copyright 2026 Molecule AI",
    },

    socialLinks: [
      { icon: "github", link: "https://github.com/molecule-monorepo/molecule" },
    ],
  },
});
|
||||
2514
docs/package-lock.json
generated
2514
docs/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -1,17 +0,0 @@
|
||||
{
|
||||
"name": "docs",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vitepress dev",
|
||||
"build": "vitepress build",
|
||||
"preview": "vitepress preview"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"vitepress": "^1.6.4"
|
||||
}
|
||||
}
|
||||
@ -1,101 +0,0 @@
|
||||
"""Platform client for registering and maintaining external workspaces."""
|
||||
|
||||
import logging
|
||||
import time
|
||||
import threading
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger("bridge.platform")
|
||||
|
||||
|
||||
class PlatformClient:
    """Manages the external workspace lifecycle with the Molecule AI platform.

    Responsibilities: register (or re-attach to) an external workspace,
    keep it online via periodic heartbeats, and resolve workspace IDs to
    human-readable names.
    """

    def __init__(self, platform_url: str, bridge_port: int, data_dir: Path):
        # Base URL of the platform API, e.g. "http://localhost:8080".
        self.platform_url = platform_url
        # Local port the bridge's A2A server listens on; advertised to the platform.
        self.bridge_port = bridge_port
        self.data_dir = data_dir
        # The workspace ID is persisted to disk so a restarted bridge
        # re-attaches to its existing workspace instead of creating a duplicate.
        self.ws_id_file = data_dir / "workspace_id"
        self.workspace_id = ""

    def register(self, name: str, role: str, tier: int = 3, parent_id: str = "") -> str:
        """Register as an external workspace. Returns workspace ID.

        Reuses the previously persisted workspace if the platform still knows
        it (and it is not "removed"); otherwise creates a fresh workspace and
        registers a minimal agent card for it.

        Raises:
            RuntimeError: if the platform's create response contains no "id".
        """
        # Reuse existing if alive
        if self.ws_id_file.exists():
            ws_id = self.ws_id_file.read_text().strip()
            try:
                resp = httpx.get(f"{self.platform_url}/workspaces/{ws_id}", timeout=5)
                if resp.status_code == 200 and resp.json().get("status") not in ("removed",):
                    # Refresh the callback URL in case the bridge port changed.
                    httpx.patch(f"{self.platform_url}/workspaces/{ws_id}",
                                json={"url": f"http://127.0.0.1:{self.bridge_port}"}, timeout=5)
                    self.workspace_id = ws_id
                    logger.info(f"Reusing workspace {ws_id}")
                    return ws_id
            except Exception:
                # Stale ID or unreachable platform record — fall through and create anew.
                pass

        # Create new
        payload = {
            "name": name,
            "role": role,
            "tier": tier,
            "runtime": "external",
            "external": True,
            "url": f"http://127.0.0.1:{self.bridge_port}",
        }
        if parent_id:
            payload["parent_id"] = parent_id

        resp = httpx.post(f"{self.platform_url}/workspaces", json=payload, timeout=10)
        data = resp.json()
        ws_id = data.get("id", "")
        if not ws_id:
            raise RuntimeError(f"Failed to create workspace: {data}")

        self.ws_id_file.write_text(ws_id)
        self.workspace_id = ws_id
        logger.info(f"Created external workspace: {ws_id}")

        # Register agent card
        httpx.post(f"{self.platform_url}/registry/register", json={
            "workspace_id": ws_id,
            "agent_card": {
                "name": name,
                "description": role,
                "url": f"http://127.0.0.1:{self.bridge_port}",
                "version": "1.0.0",
                "skills": [],
                "capabilities": {"streaming": False, "pushNotifications": False},
            },
        }, timeout=10)

        return ws_id

    def start_heartbeat(self):
        """Send periodic heartbeats to keep the workspace online.

        Runs in a daemon thread, posting every 30 seconds; failures are
        swallowed (best-effort) so a transient platform outage never kills
        the bridge.
        """
        start = time.time()
        def loop():
            while True:
                try:
                    httpx.post(f"{self.platform_url}/registry/heartbeat", json={
                        "workspace_id": self.workspace_id,
                        "error_rate": 0, "sample_error": "",
                        "uptime_seconds": int(time.time() - start),
                        "active_tasks": 0, "current_task": "",
                    }, timeout=5)
                except Exception:
                    pass
                time.sleep(30)
        threading.Thread(target=loop, daemon=True).start()

    def resolve_name(self, workspace_id: str) -> str:
        """Look up workspace name; fall back to the 8-char ID prefix on any failure."""
        try:
            resp = httpx.get(f"{self.platform_url}/workspaces/{workspace_id}", timeout=3)
            if resp.status_code == 200:
                return resp.json().get("name", workspace_id[:8])
        except Exception:
            pass
        return workspace_id[:8]
|
||||
@ -1,213 +0,0 @@
|
||||
"""Message processor interface and built-in implementations.
|
||||
|
||||
To add a new backend:
|
||||
1. Subclass MessageProcessor
|
||||
2. Implement process(message, sender, context) -> str
|
||||
3. Register in PROCESSORS dict
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
|
||||
logger = logging.getLogger("bridge.processor")
|
||||
|
||||
|
||||
class MessageProcessor(ABC):
    """Interface for processing incoming A2A messages.

    An implementation turns one incoming message into one reply string;
    concrete backends are registered in the PROCESSORS dict.
    """

    @abstractmethod
    def process(self, message: str, sender: str, context: dict) -> str:
        """Process an incoming message and return the response text.

        Args:
            message: The incoming message text
            sender: Name of the sending workspace
            context: Additional context (sender_id, workspace_id, etc.)

        Returns:
            Response text to send back via A2A
        """
        ...

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable name for this processor."""
        ...
|
||||
|
||||
|
||||
class ClaudeCodeProcessor(MessageProcessor):
    """Spawns `claude --print` CLI with full codebase access.

    Each message becomes one non-interactive CLI invocation run from
    ``self.cwd``, so the agent can inspect the repository while answering.
    """

    name = "claude-code"

    def __init__(self, cwd: str | None = None, model: str = ""):
        # Default working directory: two levels above this file (repo root).
        self.cwd = cwd or str(Path(__file__).parent.parent.parent)
        self.model = model

    def process(self, message: str, sender: str, context: dict) -> str:
        """Run the claude CLI on *message*; return its answer or an error string.

        Never raises: every failure mode (timeout, missing CLI, non-zero
        exit) is reported back to the asking agent as plain text.
        """
        # Fix: product name was duplicated ("Molecule AI Molecule AI platform");
        # also dropped the f-prefix on strings with no placeholders.
        system_prompt = (
            "You are an AI technical advisor for the Molecule AI platform. "
            f"Agent '{sender}' is asking you a question via A2A protocol. "
            "You have access to the full codebase at the current directory. "
            "Respond concisely and helpfully. Keep responses under 500 words "
            "unless a detailed analysis is needed."
        )

        cmd = ["claude", "--print", "--dangerously-skip-permissions",
               "--system-prompt", system_prompt, "-p", message]
        if self.model:
            cmd.extend(["--model", self.model])

        try:
            result = subprocess.run(
                cmd, capture_output=True, text=True,
                timeout=300, cwd=self.cwd,
            )
            if result.returncode == 0 and result.stdout.strip():
                # Some CLI configurations emit a JSON envelope with a
                # "result" field — unwrap it; otherwise return raw stdout.
                try:
                    out = json.loads(result.stdout)
                    if isinstance(out, dict) and "result" in out:
                        return out["result"]
                except json.JSONDecodeError:
                    pass
                return result.stdout.strip()
            return f"Processing error: {result.stderr.strip()[:200]}"
        except subprocess.TimeoutExpired:
            return "Request timed out (5 min limit)."
        except FileNotFoundError:
            return "Claude CLI not found. Install: npm install -g @anthropic-ai/claude-code"
        except Exception as e:
            return f"Error: {e}"
|
||||
|
||||
|
||||
class OpenAIProcessor(MessageProcessor):
    """Calls OpenAI-compatible API (GPT, local LLM, OpenRouter, etc.)."""

    name = "openai"

    def __init__(self, model: str = "gpt-4.1-mini", base_url: str = "", api_key: str = ""):
        self.model = model
        # Explicit arguments win over environment variables; base_url lets
        # this target any OpenAI-compatible endpoint.
        self.base_url = base_url or os.environ.get("OPENAI_BASE_URL", "https://api.openai.com/v1")
        self.api_key = api_key or os.environ.get("OPENAI_API_KEY", "")
        if not self.api_key:
            logger.warning("OpenAI processor: no API key set (OPENAI_API_KEY env var or --api-key)")

    def process(self, message: str, sender: str, context: dict) -> str:
        """Send *message* to the chat-completions endpoint; return the reply text or an error string."""
        if not self.api_key:
            return "OpenAI API key not configured. Set OPENAI_API_KEY environment variable."
        try:
            # Lazy import: the module loads even without httpx installed,
            # as long as a different backend is selected.
            import httpx
            resp = httpx.post(
                f"{self.base_url}/chat/completions",
                headers={"Authorization": f"Bearer {self.api_key}"},
                json={
                    "model": self.model,
                    "messages": [
                        {"role": "system", "content": f"You are a technical advisor. Agent '{sender}' is asking you a question."},
                        {"role": "user", "content": message},
                    ],
                    "max_tokens": 1000,
                },
                timeout=60,
            )
            data = resp.json()
            return data["choices"][0]["message"]["content"]
        except Exception as e:
            # Network/auth/payload-shape failures are reported as text, not raised.
            return f"OpenAI API error: {e}"
|
||||
|
||||
|
||||
class AnthropicProcessor(MessageProcessor):
    """Calls Anthropic API directly."""

    name = "anthropic"

    def __init__(self, model: str = "claude-sonnet-4-6", api_key: str = ""):
        self.model = model
        # Explicit argument wins over the environment variable.
        self.api_key = api_key or os.environ.get("ANTHROPIC_API_KEY", "")
        if not self.api_key:
            logger.warning("Anthropic processor: no API key set (ANTHROPIC_API_KEY env var)")

    def process(self, message: str, sender: str, context: dict) -> str:
        """Send *message* to the Anthropic Messages API; return the reply text or an error string."""
        if not self.api_key:
            return "Anthropic API key not configured. Set ANTHROPIC_API_KEY environment variable."
        try:
            # Lazy import keeps the module importable without httpx when
            # another backend is in use.
            import httpx
            resp = httpx.post(
                "https://api.anthropic.com/v1/messages",
                headers={
                    "x-api-key": self.api_key,
                    "anthropic-version": "2023-06-01",
                    "content-type": "application/json",
                },
                json={
                    "model": self.model,
                    "max_tokens": 1000,
                    "system": f"You are a technical advisor. Agent '{sender}' is asking you a question.",
                    "messages": [{"role": "user", "content": message}],
                },
                timeout=60,
            )
            data = resp.json()
            return data["content"][0]["text"]
        except Exception as e:
            # Network/auth/payload-shape failures are reported as text, not raised.
            return f"Anthropic API error: {e}"
|
||||
|
||||
|
||||
class HTTPForwardProcessor(MessageProcessor):
    """Forwards the message to an arbitrary HTTP endpoint.

    The endpoint receives a JSON body with "message", "sender", and every
    key of the A2A context; its raw response body becomes the reply.
    """

    name = "http"

    def __init__(self, url: str = "", headers: dict | None = None):
        # Falls back to BRIDGE_FORWARD_URL when no URL is given explicitly.
        self.url = url or os.environ.get("BRIDGE_FORWARD_URL", "")
        self.headers = headers or {}

    def process(self, message: str, sender: str, context: dict) -> str:
        """POST the message to the configured endpoint and return its body text."""
        if not self.url:
            return "HTTP forward URL not configured"

        payload = {"message": message, "sender": sender, **context}
        try:
            import httpx
            reply = httpx.post(
                self.url,
                json=payload,
                headers=self.headers,
                timeout=60,
            )
        except Exception as e:
            return f"HTTP forward error: {e}"
        return reply.text
|
||||
|
||||
|
||||
class EchoProcessor(MessageProcessor):
    """Simple echo for testing — returns the message back."""

    name = "echo"

    def __init__(self, **kwargs):
        # Accepts (and ignores) arbitrary config so create_processor can
        # pass kwargs through uniformly.
        pass

    def process(self, message: str, sender: str, context: dict) -> str:
        """Reply with the incoming text, prefixed so round-trips are visible."""
        return f"Echo from bridge: {message}"
|
||||
|
||||
|
||||
# Registry of available processors, keyed by the --processor CLI name.
PROCESSORS: dict[str, type[MessageProcessor]] = {
    "claude-code": ClaudeCodeProcessor,
    "openai": OpenAIProcessor,
    "anthropic": AnthropicProcessor,
    "http": HTTPForwardProcessor,
    "echo": EchoProcessor,
}


def create_processor(name: str, **kwargs) -> MessageProcessor:
    """Instantiate the processor registered under *name*, passing config kwargs through.

    Raises:
        ValueError: if *name* is not a key of PROCESSORS.
    """
    cls = PROCESSORS.get(name)
    if cls is None:
        raise ValueError(f"Unknown processor: {name}. Available: {list(PROCESSORS.keys())}")
    return cls(**kwargs)
|
||||
@ -1,99 +0,0 @@
|
||||
"""A2A HTTP server for the external workspace bridge."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from http.server import HTTPServer, BaseHTTPRequestHandler
|
||||
from pathlib import Path
|
||||
from typing import Callable
|
||||
|
||||
from .processor import MessageProcessor
|
||||
|
||||
logger = logging.getLogger("bridge.server")
|
||||
|
||||
|
||||
class A2AHandler(BaseHTTPRequestHandler):
    """Handles incoming A2A JSON-RPC requests, delegates to a MessageProcessor."""

    # Injected once by create_server(); stored on the class because the
    # HTTPServer instantiates a fresh handler object per request.
    processor: MessageProcessor
    inbox_path: Path
    resolve_name: Callable[[str], str]

    def log_message(self, format, *args):
        # Silence BaseHTTPRequestHandler's default per-request stderr log.
        pass

    def do_POST(self):
        """Dispatch a JSON-RPC POST: "message/send", "agent/card", or error."""
        body = self.rfile.read(int(self.headers.get("Content-Length", 0)))
        try:
            request = json.loads(body)
        except json.JSONDecodeError:
            self.send_error(400)
            return

        method = request.get("method", "")
        # Echo the caller's request id back; generate one if absent.
        req_id = request.get("id", str(uuid.uuid4()))

        if method == "message/send":
            params = request.get("params", {})
            message = params.get("message", {})
            parts = message.get("parts", [])
            # Only the first text part is consumed.
            text = parts[0].get("text", "") if parts else ""
            sender_id = self.headers.get("X-Workspace-ID", "")
            # Accessed via the class (not self) so the injected function is
            # called as a plain function rather than a bound method.
            sender_name = A2AHandler.resolve_name(sender_id) if sender_id else "canvas"

            logger.info(f"📨 {sender_name}: {text[:80]}")

            # Log to inbox
            entry = {
                "id": req_id, "sender_id": sender_id,
                "sender_name": sender_name, "text": text,
            }
            with open(A2AHandler.inbox_path, "a") as f:
                f.write(json.dumps(entry) + "\n")

            # Process with the configured backend
            context = {"sender_id": sender_id, "sender_name": sender_name}
            response_text = A2AHandler.processor.process(text, sender_name, context)

            self._send_a2a_response(req_id, response_text)

        elif method == "agent/card":
            self._send_json(200, {
                "jsonrpc": "2.0", "id": req_id,
                "result": {
                    "name": A2AHandler.processor.name,
                    "description": f"External agent powered by {A2AHandler.processor.name}",
                    "version": "1.0.0",
                },
            })
        else:
            # Unknown method → JSON-RPC "method not found" error.
            self._send_json(200, {
                "jsonrpc": "2.0", "id": req_id,
                "error": {"code": -32601, "message": f"Unsupported: {method}"},
            })

    def _send_a2a_response(self, req_id, text):
        """Wrap *text* in an A2A agent message envelope and send it."""
        self._send_json(200, {
            "jsonrpc": "2.0", "id": req_id,
            "result": {
                "kind": "message", "messageId": str(uuid.uuid4()),
                "role": "agent",
                "parts": [{"kind": "text", "text": text}],
            },
        })

    def _send_json(self, status, data):
        """Serialize *data* and send it with the given HTTP status."""
        body = json.dumps(data).encode()
        self.send_response(status)
        self.send_header("Content-Type", "application/json")
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)
|
||||
|
||||
|
||||
def create_server(port: int, processor: MessageProcessor, inbox_path: Path, resolve_name) -> HTTPServer:
    """Create an A2A HTTP server with the given processor.

    Configuration lives on the A2AHandler class itself because HTTPServer
    constructs a new handler instance for every request.
    """
    for attr, value in (
        ("processor", processor),
        ("inbox_path", inbox_path),
        ("resolve_name", resolve_name),
    ):
        setattr(A2AHandler, attr, value)
    return HTTPServer(("0.0.0.0", port), A2AHandler)
|
||||
@ -1,215 +0,0 @@
|
||||
"""Unit tests for the external workspace bridge."""
|
||||
|
||||
import json
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
import pytest
|
||||
|
||||
# Add scripts/ to path so bridge package is importable
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from bridge.processor import (
|
||||
MessageProcessor,
|
||||
ClaudeCodeProcessor,
|
||||
OpenAIProcessor,
|
||||
AnthropicProcessor,
|
||||
HTTPForwardProcessor,
|
||||
EchoProcessor,
|
||||
PROCESSORS,
|
||||
create_processor,
|
||||
)
|
||||
|
||||
|
||||
# ─── Processor registry ───
|
||||
|
||||
class TestProcessorRegistry:
    """PROCESSORS registry contents and create_processor() factory behavior."""

    def test_all_processors_registered(self):
        expected = {"claude-code", "openai", "anthropic", "http", "echo"}
        assert set(PROCESSORS.keys()) == expected

    def test_create_processor_valid(self):
        p = create_processor("echo")
        assert isinstance(p, EchoProcessor)

    def test_create_processor_invalid(self):
        with pytest.raises(ValueError, match="Unknown processor"):
            create_processor("nonexistent")

    def test_create_processor_with_kwargs(self):
        # Config kwargs must be forwarded to the processor constructor.
        p = create_processor("openai", model="gpt-4o", api_key="test-key")
        assert p.model == "gpt-4o"
        assert p.api_key == "test-key"
|
||||
|
||||
|
||||
# ─── EchoProcessor ───
|
||||
|
||||
class TestEchoProcessor:
    """EchoProcessor: prefix echo behavior, including the empty-message edge."""

    def test_echoes_message(self):
        p = EchoProcessor()
        result = p.process("hello", "PM", {})
        assert result == "Echo from bridge: hello"

    def test_name(self):
        assert EchoProcessor.name == "echo"

    def test_empty_message(self):
        p = EchoProcessor()
        result = p.process("", "PM", {})
        assert result == "Echo from bridge: "
|
||||
|
||||
|
||||
# ─── OpenAIProcessor ───
|
||||
|
||||
class TestOpenAIProcessor:
    """OpenAIProcessor: key resolution and success/error paths (httpx mocked)."""

    def test_missing_api_key_returns_error(self):
        p = OpenAIProcessor(api_key="")
        result = p.process("hello", "PM", {})
        assert "not configured" in result

    def test_init_reads_env(self, monkeypatch):
        monkeypatch.setenv("OPENAI_API_KEY", "sk-test-123")
        p = OpenAIProcessor()
        assert p.api_key == "sk-test-123"

    def test_init_explicit_key_overrides_env(self, monkeypatch):
        monkeypatch.setenv("OPENAI_API_KEY", "from-env")
        p = OpenAIProcessor(api_key="explicit")
        assert p.api_key == "explicit"

    @patch("httpx.post")
    def test_successful_response(self, mock_post):
        # Minimal chat-completions payload shape.
        mock_resp = MagicMock()
        mock_resp.json.return_value = {
            "choices": [{"message": {"content": "Hello from GPT"}}]
        }
        mock_post.return_value = mock_resp

        p = OpenAIProcessor(api_key="test-key")
        result = p.process("hi", "PM", {})
        assert result == "Hello from GPT"

    @patch("httpx.post", side_effect=Exception("connection refused"))
    def test_api_error(self, mock_post):
        # Transport failures must come back as an error string, not raise.
        p = OpenAIProcessor(api_key="test-key")
        result = p.process("hi", "PM", {})
        assert "OpenAI API error" in result
|
||||
|
||||
|
||||
# ─── AnthropicProcessor ───
|
||||
|
||||
class TestAnthropicProcessor:
    """AnthropicProcessor: key resolution and success path (httpx mocked)."""

    def test_missing_api_key_returns_error(self):
        p = AnthropicProcessor(api_key="")
        result = p.process("hello", "PM", {})
        assert "not configured" in result

    def test_init_reads_env(self, monkeypatch):
        monkeypatch.setenv("ANTHROPIC_API_KEY", "sk-ant-test")
        p = AnthropicProcessor()
        assert p.api_key == "sk-ant-test"

    @patch("httpx.post")
    def test_successful_response(self, mock_post):
        # Minimal Messages API payload shape.
        mock_resp = MagicMock()
        mock_resp.json.return_value = {
            "content": [{"text": "Hello from Claude"}]
        }
        mock_post.return_value = mock_resp

        p = AnthropicProcessor(api_key="test-key")
        result = p.process("hi", "PM", {})
        assert result == "Hello from Claude"
|
||||
|
||||
|
||||
# ─── HTTPForwardProcessor ───
|
||||
|
||||
class TestHTTPForwardProcessor:
    """HTTPForwardProcessor: missing-URL guard and forwarded payload shape."""

    def test_no_url_returns_error(self):
        p = HTTPForwardProcessor(url="")
        result = p.process("hi", "PM", {})
        assert "not configured" in result

    @patch("httpx.post")
    def test_forwards_message(self, mock_post):
        mock_resp = MagicMock()
        mock_resp.text = "forwarded response"
        mock_post.return_value = mock_resp

        p = HTTPForwardProcessor(url="http://my-agent:8000")
        result = p.process("hello", "PM", {"sender_id": "ws-1"})
        assert result == "forwarded response"
        mock_post.assert_called_once()
        # The JSON body must carry both message and sender.
        call_kwargs = mock_post.call_args
        assert call_kwargs[1]["json"]["message"] == "hello"
        assert call_kwargs[1]["json"]["sender"] == "PM"
|
||||
|
||||
|
||||
# ─── ClaudeCodeProcessor ───
|
||||
|
||||
class TestClaudeCodeProcessor:
    """ClaudeCodeProcessor: CLI invocation outcomes (subprocess mocked)."""

    def test_name(self):
        assert ClaudeCodeProcessor.name == "claude-code"

    @patch("subprocess.run")
    def test_successful_response(self, mock_run):
        mock_run.return_value = MagicMock(
            returncode=0, stdout="The answer is 42", stderr=""
        )
        p = ClaudeCodeProcessor(cwd="/tmp")
        result = p.process("what is the answer?", "PM", {})
        assert result == "The answer is 42"

    @patch("subprocess.run")
    def test_json_output_parsed(self, mock_run):
        # A JSON envelope on stdout should be unwrapped to its "result" field.
        mock_run.return_value = MagicMock(
            returncode=0,
            stdout=json.dumps({"result": "parsed answer", "session_id": "abc"}),
            stderr="",
        )
        p = ClaudeCodeProcessor(cwd="/tmp")
        result = p.process("test", "PM", {})
        assert result == "parsed answer"

    @patch("subprocess.run")
    def test_error_exit_code(self, mock_run):
        mock_run.return_value = MagicMock(
            returncode=1, stdout="", stderr="auth error"
        )
        p = ClaudeCodeProcessor(cwd="/tmp")
        result = p.process("test", "PM", {})
        assert "auth error" in result

    @patch("subprocess.run", side_effect=FileNotFoundError)
    def test_claude_not_installed(self, mock_run):
        p = ClaudeCodeProcessor(cwd="/tmp")
        result = p.process("test", "PM", {})
        assert "not found" in result.lower()

    def test_model_flag(self):
        p = ClaudeCodeProcessor(model="opus")
        assert p.model == "opus"
|
||||
|
||||
|
||||
# ─── MessageProcessor interface ───
|
||||
|
||||
class TestMessageProcessorInterface:
    """ABC contract: abstract methods must be implemented before instantiation."""

    def test_cannot_instantiate_abstract(self):
        with pytest.raises(TypeError):
            MessageProcessor()

    def test_subclass_must_implement_process(self):
        # Missing process() keeps the subclass abstract.
        class Incomplete(MessageProcessor):
            name = "incomplete"
        with pytest.raises(TypeError):
            Incomplete()

    def test_valid_subclass(self):
        class Valid(MessageProcessor):
            name = "valid"
            def process(self, message, sender, context):
                return "ok"
        v = Valid()
        assert v.process("x", "y", {}) == "ok"
|
||||
@ -1,138 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""External Workspace Bridge — plug any AI agent into Molecule AI via A2A.
|
||||
|
||||
Registers as an external workspace (no Docker container) and processes
|
||||
incoming A2A messages using a configurable backend processor.
|
||||
|
||||
Usage:
|
||||
# Claude Code backend (default)
|
||||
python3 scripts/claude-code-bridge.py
|
||||
|
||||
# OpenAI API backend
|
||||
python3 scripts/claude-code-bridge.py --processor openai --model gpt-4.1-mini
|
||||
|
||||
# Anthropic API backend
|
||||
python3 scripts/claude-code-bridge.py --processor anthropic --model claude-sonnet-4-6
|
||||
|
||||
# Forward to any HTTP endpoint
|
||||
python3 scripts/claude-code-bridge.py --processor http --url http://my-agent:8000/chat
|
||||
|
||||
# Echo (testing)
|
||||
python3 scripts/claude-code-bridge.py --processor echo
|
||||
|
||||
# Management
|
||||
python3 scripts/claude-code-bridge.py --inbox # Show messages
|
||||
python3 scripts/claude-code-bridge.py --clear # Clear inbox
|
||||
python3 scripts/claude-code-bridge.py --stop # Stop bridge
|
||||
|
||||
Environment variables:
|
||||
PLATFORM_URL Platform API (default: http://localhost:8080)
|
||||
BRIDGE_PORT Listen port (default: 9999)
|
||||
BRIDGE_NAME Workspace name (default: Claude Code Advisor)
|
||||
BRIDGE_PARENT_ID Parent workspace ID for hierarchy
|
||||
OPENAI_API_KEY For --processor openai
|
||||
ANTHROPIC_API_KEY For --processor anthropic
|
||||
BRIDGE_FORWARD_URL For --processor http
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s [bridge] %(message)s")
|
||||
logger = logging.getLogger("bridge")
|
||||
|
||||
BRIDGE_DIR = Path(__file__).parent.parent / ".claude-bridge"
|
||||
INBOX = BRIDGE_DIR / "inbox.jsonl"
|
||||
PID_FILE = BRIDGE_DIR / "bridge.pid"
|
||||
|
||||
BRIDGE_DIR.mkdir(exist_ok=True)
|
||||
|
||||
|
||||
def show_inbox():
    """Print every logged A2A message (sender + first 120 chars) and a count."""
    if not INBOX.exists():
        print("No messages.")
        return
    count = 0
    for raw_line in INBOX.read_text().splitlines():
        try:
            entry = json.loads(raw_line)
        except json.JSONDecodeError:
            # Skip malformed lines rather than aborting the listing.
            continue
        count += 1
        print(f" [{entry.get('sender_name','?')}] {entry.get('text','')[:120]}")
    print(f"\n{count} message(s)" if count else "No messages.")
|
||||
|
||||
|
||||
def main():
    """CLI entry point for the external workspace bridge.

    Flow: management flags (--inbox / --clear / --stop) short-circuit and
    exit; otherwise write a PID file, build the processor, register with
    the platform, start heartbeats, and serve A2A requests until Ctrl-C.
    """
    parser = argparse.ArgumentParser(description="External Workspace Bridge")
    parser.add_argument("--processor", default="claude-code",
                        help="Backend processor: claude-code, openai, anthropic, http, echo")
    parser.add_argument("--model", default="", help="Model name for the processor")
    parser.add_argument("--url", default="", help="URL for http processor")
    parser.add_argument("--name", default=os.environ.get("BRIDGE_NAME", "Claude Code Advisor"))
    parser.add_argument("--role", default="CEO technical advisor — code review, architecture, debugging")
    parser.add_argument("--port", type=int, default=int(os.environ.get("BRIDGE_PORT", "9999")))
    parser.add_argument("--parent-id", default=os.environ.get("BRIDGE_PARENT_ID", ""))
    parser.add_argument("--inbox", action="store_true", help="Show inbox")
    parser.add_argument("--clear", action="store_true", help="Clear inbox")
    parser.add_argument("--stop", action="store_true", help="Stop bridge")
    args = parser.parse_args()

    # Management commands: run and exit without touching the platform.
    if args.inbox:
        show_inbox()
        return
    if args.clear:
        INBOX.unlink(missing_ok=True)
        print("Inbox cleared")
        return
    if args.stop:
        if PID_FILE.exists():
            try:
                # Signal the previously recorded bridge process.
                os.kill(int(PID_FILE.read_text().strip()), signal.SIGTERM)
                print("Bridge stopped")
            except ProcessLookupError:
                print("Bridge was not running")
            PID_FILE.unlink(missing_ok=True)
        return

    # Import here to keep --inbox/--stop fast
    from bridge.processor import create_processor
    from bridge.platform import PlatformClient
    from bridge.server import create_server

    PID_FILE.write_text(str(os.getpid()))

    # Create processor
    kwargs = {}
    if args.model:
        kwargs["model"] = args.model
    if args.url:
        kwargs["url"] = args.url
    processor = create_processor(args.processor, **kwargs)
    logger.info(f"Processor: {args.processor} ({type(processor).__name__})")

    # Register with platform
    platform_url = os.environ.get("PLATFORM_URL", "http://localhost:8080")
    client = PlatformClient(platform_url, args.port, BRIDGE_DIR)
    ws_id = client.register(args.name, args.role, parent_id=args.parent_id)
    client.start_heartbeat()

    # Start A2A server
    server = create_server(args.port, processor, INBOX, client.resolve_name)
    logger.info(f"Listening on :{args.port} | Workspace: {ws_id}")
    logger.info(f"Agents see '{args.name}' in list_peers → delegate_task to interact")

    try:
        server.serve_forever()
    except KeyboardInterrupt:
        # Clean shutdown on Ctrl-C: stop serving, drop the PID file.
        server.shutdown()
        PID_FILE.unlink(missing_ok=True)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@ -1,95 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Deduplicate hook entries in .claude/settings.json across all workspace containers.
|
||||
|
||||
Root cause: molecule_runtime's _deep_merge_hooks() uses unconditional list.extend()
|
||||
when merging plugin settings-fragment.json files. On every plugin install/reinstall
|
||||
each hook handler is appended again, producing 3-4x duplicates that cause every
|
||||
hook to fire 3-4x per event.
|
||||
|
||||
This script fixes the live settings.json in every running workspace container via
|
||||
the shared /proc/<PID>/root filesystem (no docker CLI required), then validates the
|
||||
output is clean JSON. Safe to re-run — idempotent (already-clean files are skipped).
|
||||
|
||||
Upstream fix needed: molecule_runtime.plugins_registry.builtins._deep_merge_hooks()
|
||||
should deduplicate by (matcher, frozenset(commands)) before writing. Tracked in
|
||||
molecule-core issue (filed separately).
|
||||
|
||||
Usage:
|
||||
python3 scripts/dedup_settings_hooks.py [--dry-run]
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import glob
|
||||
import json
|
||||
import sys
|
||||
|
||||
# Global flag: when --dry-run is passed, report what would change without
# writing any settings.json file back to disk.
DRY_RUN = "--dry-run" in sys.argv
|
||||
|
||||
|
||||
def dedup_settings(data: dict) -> tuple[dict, dict[str, tuple[int, int]]]:
    """Remove duplicate hook handlers from a parsed settings mapping.

    Two handlers count as duplicates when they share the same ``matcher``
    string and the same set of hook ``command`` strings; the first
    occurrence of each pair is kept, later ones are dropped.

    Returns:
        A ``(deduped_data, stats)`` pair, where ``stats[event]`` is the
        ``(before_count, after_count)`` handler tally for that event.
        Input without a ``"hooks"`` key is returned unchanged with empty
        stats.
    """
    if "hooks" not in data:
        return data, {}
    deduped_hooks: dict = {}
    stats: dict[str, tuple[int, int]] = {}
    for event, handlers in data["hooks"].items():
        seen_keys: set = set()
        unique: list = []
        for entry in handlers:
            # Identity of a handler: its matcher plus the set of commands
            # its nested hooks run.
            fingerprint = (
                entry.get("matcher", ""),
                frozenset(h.get("command", "") for h in entry.get("hooks", [])),
            )
            if fingerprint in seen_keys:
                continue
            seen_keys.add(fingerprint)
            unique.append(entry)
        deduped_hooks[event] = unique
        stats[event] = (len(handlers), len(unique))
    # Shallow-merge so every non-"hooks" key passes through untouched.
    return {**data, "hooks": deduped_hooks}, stats
|
||||
|
||||
|
||||
def main() -> None:
    """Dedupe hooks in every running workspace container's settings.json.

    Containers are reached through the host's ``/proc/<PID>/root``
    filesystem, so no docker CLI is needed. Honors the module-level
    DRY_RUN flag (report only, no writes). Exits 1 if any settings file
    could not be read or parsed; idempotent otherwise.
    """
    settings_paths = sorted(glob.glob("/proc/*/root/configs/.claude/settings.json"))

    fixed: list[tuple[str, dict]] = []
    already_clean: list[str] = []
    errors: list[tuple[str, str]] = []

    for settings_path in settings_paths:
        try:
            with open(settings_path) as fh:
                original = json.load(fh)
            cleaned, stats = dedup_settings(original)
            # A file changed iff any event's handler count shrank.
            if any(before != after for before, after in stats.values()):
                if not DRY_RUN:
                    with open(settings_path, "w") as fh:
                        json.dump(cleaned, fh, indent=2)
                        fh.write("\n")
                fixed.append((settings_path, stats))
            else:
                already_clean.append(settings_path)
        except PermissionError as exc:
            errors.append((settings_path, f"PermissionError: {exc}"))
        except json.JSONDecodeError as exc:
            errors.append((settings_path, f"JSONDecodeError: {exc}"))
        except Exception as exc:  # keep going: one bad container must not stop the sweep
            errors.append((settings_path, str(exc)))

    mode = "[DRY RUN] " if DRY_RUN else ""
    print(f"{mode}Fixed: {len(fixed)}")
    for settings_path, stats in fixed:
        # Path shape is /proc/<PID>/root/... so index 2 is the container PID.
        pid = settings_path.split("/")[2]
        summary = ", ".join(f"{ev}: {b}→{a}" for ev, (b, a) in stats.items() if b != a)
        print(f" PID {pid}: {summary}")
    print(f"{mode}Already clean: {len(already_clean)}")
    if errors:
        print(f"Errors: {len(errors)}")
        for settings_path, err in errors:
            print(f" {settings_path}: {err}", file=sys.stderr)
        sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@ -1,67 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Verify settings.json hook deduplication across all workspace containers.
|
||||
|
||||
Exits 0 if all containers have clean (no-duplicate) hook lists.
|
||||
Exits 1 if any container still has duplicate hook entries.
|
||||
|
||||
Usage:
|
||||
python3 scripts/verify_settings_hooks.py
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import glob
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
def has_duplicates(data: dict) -> tuple[bool, dict[str, tuple[int, int]]]:
    """Check a parsed settings mapping for duplicate hook handlers.

    Handlers are identified by their ``matcher`` string plus the set of
    ``command`` strings in their nested hooks.

    Returns:
        ``(duplicate_found, stats)`` where ``stats[event]`` is the
        ``(total, unique)`` handler tally for that event.
    """
    per_event: dict[str, tuple[int, int]] = {}
    found = False
    for event, handlers in data.get("hooks", {}).items():
        fingerprints: set = set()
        for entry in handlers:
            fingerprint = (
                entry.get("matcher", ""),
                frozenset(h.get("command", "") for h in entry.get("hooks", [])),
            )
            if fingerprint in fingerprints:
                found = True
            fingerprints.add(fingerprint)
        per_event[event] = (len(handlers), len(fingerprints))
    return found, per_event
|
||||
|
||||
|
||||
def main() -> None:
    """Scan every workspace container and fail if any has duplicate hooks.

    Reads each container's settings.json via /proc/<PID>/root, prints a
    summary, and exits 1 when any container is dirty or unreadable.
    """
    settings_paths = sorted(glob.glob("/proc/*/root/configs/.claude/settings.json"))

    dirty: list[tuple[str, dict]] = []
    clean = 0
    errors: list[tuple[str, str]] = []

    for settings_path in settings_paths:
        try:
            with open(settings_path) as fh:
                parsed = json.load(fh)
            dup, stats = has_duplicates(parsed)
            if dup:
                dirty.append((settings_path, stats))
            else:
                clean += 1
        except Exception as exc:  # record and keep scanning the rest
            errors.append((settings_path, str(exc)))

    print(f"Clean: {clean} Dirty: {len(dirty)} Errors: {len(errors)}")
    for settings_path, stats in dirty:
        # Path shape is /proc/<PID>/root/... so index 2 is the container PID.
        pid = settings_path.split("/")[2]
        summary = ", ".join(
            f"{ev}: {total} total/{unique} unique" for ev, (total, unique) in stats.items()
        )
        print(f" DIRTY PID {pid}: {summary}")
    for settings_path, err in errors:
        print(f" ERROR {settings_path}: {err}", file=sys.stderr)

    if dirty or errors:
        sys.exit(1)
|
||||
|
||||
|
||||
# Script entry point: run the verification sweep when executed directly.
if __name__ == "__main__":
    main()
|
||||
Loading…
Reference in New Issue
Block a user