"""
OpenCode Agent Runtime
======================
Wraps OpenCode CLI as the AI brain for Aetheel, inspired by OpenClaw's cli-runner.ts.
Two modes of operation:
1. **SDK Mode** (preferred) — Connects to a running `opencode serve` instance
via the official Python SDK (`opencode-ai`). Persistent sessions, low latency.
2. **CLI Mode** (fallback) — Spawns `opencode run` as a subprocess for each
request. No persistent server needed, but higher per-request latency.
Architecture (modeled after OpenClaw):
- OpenClaw's `cli-runner.ts` runs CLI agents as subprocesses with configurable
backends (claude-cli, codex-cli, etc.) via `runCommandWithTimeout()`.
- OpenClaw's `cli-backends.ts` defines backend configs with command, args,
output format, model aliases, session handling, etc.
- We replicate this pattern for OpenCode, but leverage OpenCode's `serve` mode
and its Python SDK for a cleaner integration.
Session Management:
- Each Slack thread maps to an OpenCode session (via `conversation_id`).
- Sessions are created on first message and reused for follow-ups.
- This mirrors OpenClaw's session isolation strategy.
Usage:
from agent.opencode_runtime import OpenCodeRuntime
runtime = OpenCodeRuntime(mode="sdk")
response = runtime.chat("What is Python?", session_id="slack-thread-123")
"""
import json
import logging
import os
import shutil
import subprocess
import threading
import time
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import Any, Callable
logger = logging.getLogger("aetheel.agent")
def _resolve_opencode_command(explicit: str | None = None) -> str:
"""
Resolve the opencode binary path.
Python subprocesses don't source ~/.zshrc or ~/.bashrc, so paths like
~/.opencode/bin won't be in PATH. This function checks common install
locations to find the binary automatically.
Priority:
1. Explicit path (from OPENCODE_COMMAND env var)
2. shutil.which (already in system PATH)
3. ~/.opencode/bin/opencode (official installer default)
4. ~/.local/bin/opencode (common Linux/macOS location)
5. npm global installs (npx-style locations)
"""
cmd = explicit or "opencode"
# If explicit path is absolute and exists, use it directly
if os.path.isabs(cmd) and os.path.isfile(cmd):
return cmd
# Try system PATH first
found = shutil.which(cmd)
if found:
return found
# Check common install locations
home = Path.home()
candidates = [
home / ".opencode" / "bin" / "opencode", # official installer
home / ".local" / "bin" / "opencode", # common Linux/macOS
Path("/usr/local/bin/opencode"), # Homebrew / manual
Path("/opt/homebrew/bin/opencode"), # Homebrew (Apple Silicon)
]
for candidate in candidates:
if candidate.is_file() and os.access(candidate, os.X_OK):
logger.info(f"Auto-discovered opencode at: {candidate}")
return str(candidate)
# Return the original command (will fail at runtime with a clear error)
return cmd
# ---------------------------------------------------------------------------
# Configuration
# ---------------------------------------------------------------------------
class RuntimeMode(Enum):
    """Transport used to reach OpenCode: persistent server (SDK) or one-shot CLI."""

    SDK = "sdk"  # talk to a running `opencode serve` via the Python SDK
    CLI = "cli"  # spawn `opencode run` as a subprocess per request
@dataclass
class OpenCodeConfig:
    """
    Runtime configuration for talking to OpenCode.

    Modeled after OpenClaw's CliBackendConfig in cli-backends.ts.
    """

    # --- Connection ---
    mode: RuntimeMode = RuntimeMode.CLI
    server_url: str = "http://localhost:4096"
    server_password: str | None = None
    server_username: str = "opencode"
    # --- CLI settings (CLI mode, mirroring OpenClaw's DEFAULT_CLAUDE_BACKEND) ---
    command: str = "opencode"
    timeout_seconds: int = 120
    # --- Model selection ---
    model: str | None = None  # e.g. "anthropic/claude-sonnet-4-20250514"
    provider: str | None = None  # e.g. "anthropic"
    # --- Agent behavior ---
    system_prompt: str | None = None
    workspace_dir: str | None = None
    format: str = "json"  # output format: "default" (formatted) or "json" (raw events)
    # --- Session handling ---
    auto_create_sessions: bool = True
    session_ttl_hours: int = 24

    @classmethod
    def from_env(cls) -> "OpenCodeConfig":
        """Build a config from OPENCODE_* (and AETHEEL_*) environment variables."""
        env = os.environ
        requested_mode = env.get("OPENCODE_MODE", "cli").lower()
        return cls(
            mode=RuntimeMode.SDK if requested_mode == "sdk" else RuntimeMode.CLI,
            server_url=env.get("OPENCODE_SERVER_URL", "http://localhost:4096"),
            server_password=env.get("OPENCODE_SERVER_PASSWORD"),
            server_username=env.get("OPENCODE_SERVER_USERNAME", "opencode"),
            command=_resolve_opencode_command(env.get("OPENCODE_COMMAND")),
            timeout_seconds=int(env.get("OPENCODE_TIMEOUT", "120")),
            model=env.get("OPENCODE_MODEL"),
            provider=env.get("OPENCODE_PROVIDER"),
            system_prompt=env.get("OPENCODE_SYSTEM_PROMPT"),
            workspace_dir=env.get("OPENCODE_WORKSPACE", env.get("AETHEEL_WORKSPACE")),
            format=env.get("OPENCODE_FORMAT", "json"),
        )
# ---------------------------------------------------------------------------
# Agent Response
# ---------------------------------------------------------------------------
@dataclass
class AgentResponse:
"""Response from the agent runtime."""
text: str
session_id: str | None = None
model: str | None = None
provider: str | None = None
duration_ms: int = 0
usage: dict | None = None
error: str | None = None
@property
def ok(self) -> bool:
return self.error is None and bool(self.text.strip())
# ---------------------------------------------------------------------------
# Session Store
# ---------------------------------------------------------------------------
class SessionStore:
"""
Maps external IDs (e.g., Slack thread_ts) to OpenCode session IDs.
Mirrors OpenClaw's session isolation: each channel thread gets its own session.
"""
def __init__(self):
self._sessions: dict[str, dict] = {}
self._lock = threading.Lock()
def get(self, external_id: str) -> str | None:
"""Get the OpenCode session ID for an external conversation ID."""
with self._lock:
entry = self._sessions.get(external_id)
if entry:
entry["last_used"] = time.time()
return entry["session_id"]
return None
def set(self, external_id: str, session_id: str) -> None:
"""Map an external ID to an OpenCode session ID."""
with self._lock:
self._sessions[external_id] = {
"session_id": session_id,
"created": time.time(),
"last_used": time.time(),
}
def remove(self, external_id: str) -> None:
"""Remove -a session mapping."""
with self._lock:
self._sessions.pop(external_id, None)
def cleanup(self, ttl_hours: int = 24) -> int:
"""Remove stale sessions older than ttl_hours. Returns count removed."""
cutoff = time.time() - (ttl_hours * 3600)
removed = 0
with self._lock:
stale = [
k
for k, v in self._sessions.items()
if v["last_used"] < cutoff
]
for k in stale:
del self._sessions[k]
removed += 1
return removed
@property
def count(self) -> int:
with self._lock:
return len(self._sessions)
# ---------------------------------------------------------------------------
# OpenCode Runtime
# ---------------------------------------------------------------------------
class OpenCodeRuntime:
    """
    OpenCode Agent Runtime — the AI brain for Aetheel.

    Inspired by OpenClaw's `runCliAgent()` in cli-runner.ts:
    - Resolves the CLI backend config
    - Builds CLI args (model, session, system prompt, etc.)
    - Runs the command with a timeout
    - Parses the JSON or text output
    - Returns structured results

    We adapt this for OpenCode's two modes:
    - SDK mode: uses the opencode-ai Python SDK to talk to `opencode serve`
    - CLI mode: spawns `opencode run` subprocess (like OpenClaw's approach)
    """

    def __init__(self, config: OpenCodeConfig | None = None):
        """Initialize the runtime from an explicit config or the environment."""
        self._config = config or OpenCodeConfig.from_env()
        self._sessions = SessionStore()
        self._sdk_client = None
        self._sdk_available = False
        # Validate OpenCode is available
        self._validate_installation()
        # Try to initialize SDK client if in SDK mode
        if self._config.mode == RuntimeMode.SDK:
            self._init_sdk_client()
        logger.info(
            f"OpenCode runtime initialized "
            f"(mode={self._config.mode.value}, "
            f"model={self._config.model or 'default'})"
        )

    # -------------------------------------------------------------------
    # Public API
    # -------------------------------------------------------------------
    def chat(
        self,
        message: str,
        conversation_id: str | None = None,
        system_prompt: str | None = None,
    ) -> AgentResponse:
        """
        Send a message to the AI agent and get a response.

        This is the main entry point, used by the Slack adapter's message handler.

        Args:
            message: The user's message text
            conversation_id: External conversation ID (e.g., Slack thread_ts)
                for session isolation
            system_prompt: Optional per-request system prompt override

        Returns:
            AgentResponse with the AI's reply (never raises — failures are
            reported via AgentResponse.error).
        """
        started = time.time()
        if not message.strip():
            return AgentResponse(
                text="", error="Empty message", duration_ms=0
            )
        try:
            # Route to the appropriate mode. SDK mode falls back to CLI
            # when the SDK client failed to initialize.
            if self._config.mode == RuntimeMode.SDK and self._sdk_available:
                result = self._chat_sdk(message, conversation_id, system_prompt)
            else:
                result = self._chat_cli(message, conversation_id, system_prompt)
            result.duration_ms = int((time.time() - started) * 1000)
            return result
        except Exception as e:
            # Last-resort boundary: convert any unexpected failure into a
            # structured error response so callers never see an exception.
            duration_ms = int((time.time() - started) * 1000)
            logger.error(f"Agent error: {e}", exc_info=True)
            return AgentResponse(
                text="",
                error=str(e),
                duration_ms=duration_ms,
            )

    def get_status(self) -> dict:
        """Get the runtime status (for the /status command)."""
        status = {
            "mode": self._config.mode.value,
            "model": self._config.model or "default",
            "provider": self._config.provider or "auto",
            "active_sessions": self._sessions.count,
            "opencode_available": self._is_opencode_available(),
        }
        if self._config.mode == RuntimeMode.SDK:
            status["server_url"] = self._config.server_url
            status["sdk_connected"] = self._sdk_available
        return status

    def cleanup_sessions(self) -> int:
        """Clean up stale sessions. Returns count removed."""
        return self._sessions.cleanup(self._config.session_ttl_hours)

    # -------------------------------------------------------------------
    # CLI Mode: Subprocess execution
    # (mirrors OpenClaw's runCliAgent → runCommandWithTimeout pattern)
    # -------------------------------------------------------------------
    def _chat_cli(
        self,
        message: str,
        conversation_id: str | None = None,
        system_prompt: str | None = None,
    ) -> AgentResponse:
        """
        Run OpenCode in CLI mode via `opencode run`.

        This mirrors OpenClaw's cli-runner.ts:
        1. Build the CLI args (like buildCliArgs)
        2. Run the command with a timeout
        3. Parse the output (like parseCliJson)
        4. Return structured results
        """
        # Build CLI args — modeled after OpenClaw's buildCliArgs()
        args = self._build_cli_args(message, conversation_id, system_prompt)
        logger.info(
            f"CLI exec: {self._config.command} run "
            f"(prompt_chars={len(message)}, "
            f"session={conversation_id or 'new'})"
        )
        try:
            # Run the command — mirrors OpenClaw's runCommandWithTimeout().
            # List args + shell=False: the prompt is never shell-interpreted.
            result = subprocess.run(
                args,
                capture_output=True,
                text=True,
                timeout=self._config.timeout_seconds,
                cwd=self._config.workspace_dir or os.getcwd(),
                env=self._build_cli_env(),
            )
            stdout = result.stdout.strip()
            stderr = result.stderr.strip()
            if result.returncode != 0:
                # Mirror OpenClaw's error classification
                error_text = stderr or stdout or "CLI command failed"
                logger.error(
                    f"CLI failed (code={result.returncode}): {error_text[:200]}"
                )
                return AgentResponse(
                    text="",
                    error=f"OpenCode CLI error: {error_text[:500]}",
                )
            # Parse the output — mirrors OpenClaw's parseCliJson/parseCliJsonl
            response_text = self._parse_cli_output(stdout)
            if not response_text:
                response_text = stdout  # fallback to raw output
            # Extract session ID if returned
            session_id = self._extract_session_id(stdout)
            if session_id and conversation_id:
                self._sessions.set(conversation_id, session_id)
            return AgentResponse(
                text=response_text,
                session_id=session_id,
                model=self._config.model,
            )
        except FileNotFoundError:
            # The opencode binary is missing — give an actionable error
            # instead of the generic "[Errno 2] No such file or directory".
            logger.error(f"opencode binary not found: {self._config.command}")
            return AgentResponse(
                text="",
                error=(
                    f"OpenCode binary not found: {self._config.command}. "
                    f"Install with: curl -fsSL https://opencode.ai/install | bash"
                ),
            )
        except subprocess.TimeoutExpired:
            logger.error(
                f"CLI timeout after {self._config.timeout_seconds}s"
            )
            return AgentResponse(
                text="",
                error=f"Request timed out after {self._config.timeout_seconds}s",
            )

    def _build_cli_args(
        self,
        message: str,
        conversation_id: str | None = None,
        system_prompt: str | None = None,
    ) -> list[str]:
        """
        Build CLI arguments for `opencode run`.

        Modeled after OpenClaw's buildCliArgs() in cli-runner/helpers.ts:
        - base args (command + run)
        - model arg (--model)
        - session arg (--session / --continue)
        - system prompt (prepended to message as an XML-tagged block)
        - format arg (--format)
        - the prompt itself
        """
        args = [self._config.command, "run"]
        # Model selection
        if self._config.model:
            args.extend(["--model", self._config.model])
        # Session continuity — like OpenClaw's sessionArg
        existing_session = None
        if conversation_id:
            existing_session = self._sessions.get(conversation_id)
        if existing_session:
            # Continue an existing session
            args.extend(["--continue", "--session", existing_session])
        # For new conversations, OpenCode creates a new session automatically
        # Output format — use JSON for structured parsing, default for plain text
        # Valid choices: "default" (formatted), "json" (raw JSON events)
        if self._config.format and self._config.format in ("default", "json"):
            args.extend(["--format", self._config.format])
        # Build the full prompt — prepend system prompt if provided.
        # `opencode run` has no --system-prompt flag, so we inject it as an
        # XML-tagged block ahead of the user message; the tags keep the two
        # roles distinguishable to the model (previously the tags were
        # missing and the system prompt blended into the user text).
        if system_prompt:
            full_message = (
                f"<system>\n{system_prompt}\n</system>\n\n"
                f"<user>\n{message}\n</user>"
            )
        else:
            full_message = message
        # The prompt message (must come last as a positional arg)
        args.append(full_message)
        return args

    def _build_cli_env(self) -> dict[str, str]:
        """
        Build environment variables for the CLI subprocess.

        Note: OpenCode reads OPENCODE_* env vars as config overrides and
        tries to parse their values as JSON. We must NOT set arbitrary
        OPENCODE_* vars here — only pass through the parent environment.
        """
        env = os.environ.copy()
        return env

    def _parse_cli_output(self, stdout: str) -> str:
        """
        Parse CLI output to extract the response text.

        OpenCode's `--format json` emits JSONL (one JSON object per line):
            {"type":"step_start", "sessionID":"ses_...", "part":{...}}
            {"type":"text", "sessionID":"ses_...", "part":{"type":"text","text":"Hello!"}}
            {"type":"step_finish","sessionID":"ses_...", "part":{"type":"step-finish",...}}

        We extract text from events where type == "text" and part.text exists.
        Non-JSON lines (e.g. `--format default` output) are kept verbatim.
        """
        if not stdout.strip():
            return ""
        # Parse JSONL lines — collect text from "text" type events
        lines = stdout.strip().split("\n")
        texts = []
        for line in lines:
            line = line.strip()
            if not line:
                continue
            try:
                event = json.loads(line)
                if not isinstance(event, dict):
                    continue
                # OpenCode event format: extract text from part.text
                event_type = event.get("type", "")
                part = event.get("part", {})
                if event_type == "text" and isinstance(part, dict):
                    text = part.get("text", "")
                    if text:
                        texts.append(text)
                    continue
                # Fallback: try generic text extraction (for non-OpenCode formats)
                text = self._collect_text(event)
                if text:
                    texts.append(text)
            except json.JSONDecodeError:
                # Not JSON — might be plain text output (--format default)
                texts.append(line)
        if texts:
            return "\n".join(texts)
        # Final fallback to raw text
        return stdout.strip()

    def _collect_text(self, value: Any) -> str:
        """
        Recursively collect text from a parsed JSON object.

        Adapted from OpenClaw's collectText() from helpers.ts,
        with awareness of OpenCode's event structure.
        """
        if not value:
            return ""
        if isinstance(value, str):
            return value
        if isinstance(value, list):
            return "".join(self._collect_text(item) for item in value)
        if isinstance(value, dict):
            # Skip OpenCode event wrapper — dig into "part" first
            if "part" in value and isinstance(value["part"], dict):
                part = value["part"]
                if "text" in part and isinstance(part["text"], str):
                    return part["text"]
            # Try common text fields
            if "content" in value and isinstance(value["content"], str):
                return value["content"]
            if "content" in value and isinstance(value["content"], list):
                return "".join(
                    self._collect_text(item) for item in value["content"]
                )
            if "message" in value and isinstance(value["message"], dict):
                return self._collect_text(value["message"])
            if "result" in value:
                return self._collect_text(value["result"])
        return ""

    def _extract_session_id(self, stdout: str) -> str | None:
        """
        Extract session ID from CLI output.

        OpenCode includes sessionID in every JSONL event line:
            {"type":"text", "sessionID":"ses_abc123", ...}

        We grab it from the first event that has one.
        """
        lines = stdout.strip().split("\n")
        for line in lines:
            line = line.strip()
            if not line:
                continue
            try:
                event = json.loads(line)
                if not isinstance(event, dict):
                    continue
                # OpenCode format: top-level sessionID
                session_id = event.get("sessionID")
                if isinstance(session_id, str) and session_id.strip():
                    return session_id.strip()
                # Fallback: check nested part.sessionID
                part = event.get("part", {})
                if isinstance(part, dict):
                    session_id = part.get("sessionID")
                    if isinstance(session_id, str) and session_id.strip():
                        return session_id.strip()
            except json.JSONDecodeError:
                continue
        return None

    # -------------------------------------------------------------------
    # SDK Mode: OpenCode serve API
    # (enhanced version of CLI mode, using the official Python SDK)
    # -------------------------------------------------------------------
    def _init_sdk_client(self) -> None:
        """Initialize the OpenCode Python SDK client (best-effort).

        Sets self._sdk_available; on any failure the runtime silently
        degrades to CLI mode rather than raising.
        """
        try:
            from opencode_ai import Opencode

            kwargs: dict[str, Any] = {
                "base_url": self._config.server_url,
            }
            if self._config.server_password:
                import httpx

                # HTTP basic auth for password-protected `opencode serve`.
                kwargs["http_client"] = httpx.Client(
                    auth=(
                        self._config.server_username,
                        self._config.server_password,
                    )
                )
            self._sdk_client = Opencode(**kwargs)
            # Test connectivity
            try:
                self._sdk_client.app.get()
                self._sdk_available = True
                logger.info(
                    f"SDK connected to {self._config.server_url}"
                )
            except Exception as e:
                logger.warning(
                    f"SDK connection test failed: {e}. "
                    f"Will fall back to CLI mode."
                )
                self._sdk_available = False
        except ImportError:
            logger.warning(
                "opencode-ai SDK not installed. "
                "Install with: pip install opencode-ai. "
                "Falling back to CLI mode."
            )
            self._sdk_available = False

    def _chat_sdk(
        self,
        message: str,
        conversation_id: str | None = None,
        system_prompt: str | None = None,
    ) -> AgentResponse:
        """
        Chat using the OpenCode Python SDK.

        Uses the server API:
        1. Create or reuse a session (POST /session)
        2. Send a message (POST /session/:id/message → client.session.chat)
        3. Parse the AssistantMessage response

        Any SDK failure falls back to CLI mode rather than raising.
        """
        if not self._sdk_client:
            return self._chat_cli(message, conversation_id, system_prompt)
        try:
            # Resolve or create session
            session_id = None
            if conversation_id:
                session_id = self._sessions.get(conversation_id)
            if not session_id:
                # Create a new session
                session = self._sdk_client.session.create()
                session_id = session.id
                if conversation_id:
                    self._sessions.set(conversation_id, session_id)
                logger.info(f"SDK: created session {session_id}")
            # Build message parts
            parts = [{"type": "text", "text": message}]
            # Build chat params
            chat_kwargs: dict[str, Any] = {"parts": parts}
            if self._config.model:
                chat_kwargs["model"] = self._config.model
            if system_prompt:
                chat_kwargs["system"] = system_prompt
            # Send message and get response
            logger.info(
                f"SDK chat: session={session_id[:8]}... "
                f"prompt_chars={len(message)}"
            )
            response = self._sdk_client.session.chat(
                session_id, **chat_kwargs
            )
            # Extract text from the AssistantMessage response
            response_text = self._extract_sdk_response_text(response)
            return AgentResponse(
                text=response_text,
                session_id=session_id,
                model=self._config.model,
            )
        except Exception as e:
            logger.warning(
                f"SDK chat failed: {e}. Falling back to CLI mode."
            )
            # Graceful fallback to CLI mode
            return self._chat_cli(message, conversation_id, system_prompt)

    def _extract_sdk_response_text(self, response: Any) -> str:
        """Extract text content from the SDK's AssistantMessage response."""
        # The response is an AssistantMessage which has parts
        if hasattr(response, "parts"):
            texts = []
            for part in response.parts:
                if hasattr(part, "text"):
                    texts.append(part.text)
                elif hasattr(part, "content"):
                    texts.append(str(part.content))
            return "\n".join(texts).strip()
        # Fallback: try to get text directly
        if hasattr(response, "text"):
            return response.text.strip()
        # Last resort: stringify
        return str(response).strip()

    # -------------------------------------------------------------------
    # Validation & Utilities
    # -------------------------------------------------------------------
    def _validate_installation(self) -> None:
        """Check that OpenCode CLI is installed and accessible (logs only)."""
        cmd = self._config.command
        # If the resolved command doesn't exist, try resolving again
        if not os.path.isfile(cmd) and not shutil.which(cmd):
            resolved = _resolve_opencode_command()
            if resolved != "opencode" and os.path.isfile(resolved):
                self._config.command = resolved
                logger.info(f"Resolved opencode binary: {resolved}")
            else:
                logger.warning(
                    f"'{cmd}' not found. "
                    f"Install with: curl -fsSL https://opencode.ai/install | bash "
                    f"or: npm install -g opencode-ai"
                )
                if self._config.mode == RuntimeMode.CLI:
                    logger.warning(
                        "CLI mode requires opencode to be installed. "
                        "If using SDK mode, set OPENCODE_MODE=sdk."
                    )

    def _is_opencode_available(self) -> bool:
        """Check if OpenCode CLI is available by running `opencode --version`."""
        try:
            result = subprocess.run(
                [self._config.command, "--version"],
                capture_output=True,
                text=True,
                timeout=5,
            )
            return result.returncode == 0
        except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
            return False
# ---------------------------------------------------------------------------
# System Prompt Builder
# (Mirrors OpenClaw's buildSystemPrompt in cli-runner/helpers.ts)
# ---------------------------------------------------------------------------
def build_aetheel_system_prompt(
user_name: str | None = None,
channel_name: str | None = None,
is_dm: bool = False,
extra_context: str | None = None,
) -> str:
"""
Build the system prompt for Aetheel.
Like OpenClaw's buildAgentSystemPrompt(), this constructs a comprehensive
prompt that gives the AI its identity, capabilities, and context.
"""
lines = [
"You are Aetheel — a personal AI assistant that lives inside Slack.",
"",
"# Identity",
"- Your name is Aetheel",
"- You ARE a Slack bot — you are already running inside Slack right now",
"- You have your own Slack bot token and can send messages to any channel",
"- You have a persistent memory system with identity files (SOUL.md, USER.md, MEMORY.md)",
"- You can read and update your memory files across sessions",
"",
"# Your Capabilities",
"- **Direct messaging**: You are already in the user's Slack workspace — no setup needed",
"- **Memory**: You have SOUL.md (your personality), USER.md (user profile), MEMORY.md (long-term memory)",
"- **Session logs**: Conversations are automatically saved to daily/ session files",
"- **Reminders**: You can schedule messages to be sent later using action tags (see below)",
"",
"# Action Tags",
"You can perform actions by including special tags in your response.",
"The system will parse these tags and execute the actions automatically.",
"",
"## Reminders",
"To schedule a reminder, include this tag anywhere in your response:",
"```",
"[ACTION:remind||]",
"```",
"Example: `[ACTION:remind|2|Time to drink water! 💧]` — sends a Slack message in 2 minutes",
"Example: `[ACTION:remind|30|Stand up and stretch! 🧘]` — sends a message in 30 minutes",
"",
"When scheduling a reminder, confirm to the user that it's been set,",
"and include the action tag in your response (it will be hidden from the user).",
"",
"# Guidelines",
"- Be helpful, concise, and friendly",
"- Use Slack formatting (bold with *text*, code with `text`, etc.)",
"- Keep responses focused and relevant",
"- If you don't know something, say so honestly",
"- Avoid extremely long responses unless asked for detail",
"- NEVER ask for Slack tokens, webhook URLs, or API keys — you already have them",
"- NEVER suggest the user 'set up' Slack — you ARE the Slack bot",
"",
"# Context",
]
if user_name:
lines.append(f"- You are chatting with: {user_name}")
if channel_name and not is_dm:
lines.append(f"- Channel: #{channel_name}")
if is_dm:
lines.append("- This is a direct message (private conversation)")
if extra_context:
lines.append("")
lines.append(extra_context)
return "\n".join(lines)