Add containerized agent execution with Apple Container
- Agents run in isolated Linux VMs via Apple Container
- All groups get Bash access (safe — sandboxed in container)
- Browser automation via agent-browser + Chromium
- Per-group configurable additional directory mounts
- File-based IPC for messages and scheduled tasks
- Container image with Node.js 22, Chromium, agent-browser

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -6,5 +6,10 @@ export const GROUPS_DIR = './groups';
|
||||
export const DATA_DIR = './data';
|
||||
export const MAIN_GROUP_FOLDER = 'main';
|
||||
|
||||
// Container configuration
|
||||
export const CONTAINER_IMAGE = process.env.CONTAINER_IMAGE || 'nanoclaw-agent:latest';
|
||||
export const CONTAINER_TIMEOUT = parseInt(process.env.CONTAINER_TIMEOUT || '300000', 10); // 5 minutes default
|
||||
export const IPC_POLL_INTERVAL = 1000; // Check IPC directories every second
|
||||
|
||||
export const TRIGGER_PATTERN = new RegExp(`^@${ASSISTANT_NAME}\\b`, 'i');
|
||||
export const CLEAR_COMMAND = '/clear';
|
||||
|
||||
265
src/container-runner.ts
Normal file
265
src/container-runner.ts
Normal file
@@ -0,0 +1,265 @@
|
||||
/**
|
||||
* Container Runner for NanoClaw
|
||||
* Spawns agent execution in Apple Container and handles IPC
|
||||
*/
|
||||
|
||||
import { spawn } from 'child_process';
import fs from 'fs';
import os from 'os';
import path from 'path';
import pino from 'pino';
import {
  CONTAINER_IMAGE,
  CONTAINER_TIMEOUT,
  GROUPS_DIR,
  DATA_DIR
} from './config.js';
import { RegisteredGroup } from './types.js';
|
||||
|
||||
const logger = pino({
|
||||
level: process.env.LOG_LEVEL || 'info',
|
||||
transport: { target: 'pino-pretty', options: { colorize: true } }
|
||||
});
|
||||
|
||||
/** Payload serialized as JSON to the container agent's stdin. */
export interface ContainerInput {
  // The prompt the agent should act on.
  prompt: string;
  // Existing Claude session to resume, if any.
  sessionId?: string;
  // Group's folder name under GROUPS_DIR.
  groupFolder: string;
  // Chat JID the request originated from.
  chatJid: string;
  // True when running on behalf of the main (admin) group.
  isMain: boolean;
}

/** Result payload parsed from the last line of the container's stdout. */
export interface ContainerOutput {
  status: 'success' | 'error';
  // Agent's final text result; null when none was produced.
  result: string | null;
  // New session id to persist for the next run, if it changed.
  newSessionId?: string;
  // Error detail, populated when status is 'error'.
  error?: string;
}

/** A single host-to-container bind mount. */
interface VolumeMount {
  hostPath: string;
  containerPath: string;
  // Mount read-only when true; defaults to read-write.
  readonly?: boolean;
}
|
||||
|
||||
function buildVolumeMounts(group: RegisteredGroup, isMain: boolean): VolumeMount[] {
|
||||
const mounts: VolumeMount[] = [];
|
||||
const homeDir = process.env.HOME || '/Users/gavriel';
|
||||
|
||||
// Group's working directory (read-write)
|
||||
mounts.push({
|
||||
hostPath: path.join(GROUPS_DIR, group.folder),
|
||||
containerPath: '/workspace/group',
|
||||
readonly: false
|
||||
});
|
||||
|
||||
// Global CLAUDE.md (read-only for non-main, read-write for main)
|
||||
const globalClaudeMd = path.join(GROUPS_DIR, 'CLAUDE.md');
|
||||
if (fs.existsSync(globalClaudeMd)) {
|
||||
mounts.push({
|
||||
hostPath: globalClaudeMd,
|
||||
containerPath: '/workspace/global/CLAUDE.md',
|
||||
readonly: !isMain
|
||||
});
|
||||
}
|
||||
|
||||
// Claude sessions directory (for session persistence)
|
||||
const claudeDir = path.join(homeDir, '.claude');
|
||||
if (fs.existsSync(claudeDir)) {
|
||||
mounts.push({
|
||||
hostPath: claudeDir,
|
||||
containerPath: '/root/.claude',
|
||||
readonly: false
|
||||
});
|
||||
}
|
||||
|
||||
// Gmail MCP credentials
|
||||
const gmailDir = path.join(homeDir, '.gmail-mcp');
|
||||
if (fs.existsSync(gmailDir)) {
|
||||
mounts.push({
|
||||
hostPath: gmailDir,
|
||||
containerPath: '/root/.gmail-mcp',
|
||||
readonly: false
|
||||
});
|
||||
}
|
||||
|
||||
// IPC directory for messages and tasks
|
||||
const ipcDir = path.join(DATA_DIR, 'ipc');
|
||||
fs.mkdirSync(path.join(ipcDir, 'messages'), { recursive: true });
|
||||
fs.mkdirSync(path.join(ipcDir, 'tasks'), { recursive: true });
|
||||
mounts.push({
|
||||
hostPath: ipcDir,
|
||||
containerPath: '/workspace/ipc',
|
||||
readonly: false
|
||||
});
|
||||
|
||||
// Additional mounts from group config
|
||||
if (group.containerConfig?.additionalMounts) {
|
||||
for (const mount of group.containerConfig.additionalMounts) {
|
||||
// Resolve home directory in path
|
||||
const hostPath = mount.hostPath.startsWith('~')
|
||||
? path.join(homeDir, mount.hostPath.slice(1))
|
||||
: mount.hostPath;
|
||||
|
||||
if (fs.existsSync(hostPath)) {
|
||||
mounts.push({
|
||||
hostPath,
|
||||
containerPath: `/workspace/extra/${mount.containerPath}`,
|
||||
readonly: mount.readonly !== false // Default to readonly for safety
|
||||
});
|
||||
} else {
|
||||
logger.warn({ hostPath }, 'Additional mount path does not exist, skipping');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return mounts;
|
||||
}
|
||||
|
||||
function buildContainerArgs(mounts: VolumeMount[]): string[] {
|
||||
const args: string[] = ['run', '-i', '--rm'];
|
||||
|
||||
// Add volume mounts
|
||||
for (const mount of mounts) {
|
||||
const mode = mount.readonly ? ':ro' : '';
|
||||
args.push('-v', `${mount.hostPath}:${mount.containerPath}${mode}`);
|
||||
}
|
||||
|
||||
// Add the image name
|
||||
args.push(CONTAINER_IMAGE);
|
||||
|
||||
return args;
|
||||
}
|
||||
|
||||
export async function runContainerAgent(
|
||||
group: RegisteredGroup,
|
||||
input: ContainerInput
|
||||
): Promise<ContainerOutput> {
|
||||
const startTime = Date.now();
|
||||
|
||||
// Ensure group directory exists
|
||||
const groupDir = path.join(GROUPS_DIR, group.folder);
|
||||
fs.mkdirSync(groupDir, { recursive: true });
|
||||
|
||||
// Build volume mounts
|
||||
const mounts = buildVolumeMounts(group, input.isMain);
|
||||
const containerArgs = buildContainerArgs(mounts);
|
||||
|
||||
logger.info({
|
||||
group: group.name,
|
||||
mountCount: mounts.length,
|
||||
isMain: input.isMain
|
||||
}, 'Spawning container agent');
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const container = spawn('container', containerArgs, {
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
});
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
// Send input JSON to container stdin
|
||||
container.stdin.write(JSON.stringify(input));
|
||||
container.stdin.end();
|
||||
|
||||
container.stdout.on('data', (data) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
|
||||
container.stderr.on('data', (data) => {
|
||||
stderr += data.toString();
|
||||
// Log container stderr in real-time
|
||||
const lines = data.toString().trim().split('\n');
|
||||
for (const line of lines) {
|
||||
if (line) logger.debug({ container: group.folder }, line);
|
||||
}
|
||||
});
|
||||
|
||||
// Timeout handler
|
||||
const timeout = setTimeout(() => {
|
||||
logger.error({ group: group.name }, 'Container timeout, killing');
|
||||
container.kill('SIGKILL');
|
||||
resolve({
|
||||
status: 'error',
|
||||
result: null,
|
||||
error: `Container timed out after ${CONTAINER_TIMEOUT}ms`
|
||||
});
|
||||
}, group.containerConfig?.timeout || CONTAINER_TIMEOUT);
|
||||
|
||||
container.on('close', (code) => {
|
||||
clearTimeout(timeout);
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
if (code !== 0) {
|
||||
logger.error({
|
||||
group: group.name,
|
||||
code,
|
||||
duration,
|
||||
stderr: stderr.slice(-500)
|
||||
}, 'Container exited with error');
|
||||
|
||||
resolve({
|
||||
status: 'error',
|
||||
result: null,
|
||||
error: `Container exited with code ${code}: ${stderr.slice(-200)}`
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse JSON output from stdout
|
||||
try {
|
||||
// Find the JSON line (last non-empty line should be the output)
|
||||
const lines = stdout.trim().split('\n');
|
||||
const jsonLine = lines[lines.length - 1];
|
||||
const output: ContainerOutput = JSON.parse(jsonLine);
|
||||
|
||||
logger.info({
|
||||
group: group.name,
|
||||
duration,
|
||||
status: output.status,
|
||||
hasResult: !!output.result
|
||||
}, 'Container completed');
|
||||
|
||||
resolve(output);
|
||||
} catch (err) {
|
||||
logger.error({
|
||||
group: group.name,
|
||||
stdout: stdout.slice(-500),
|
||||
error: err
|
||||
}, 'Failed to parse container output');
|
||||
|
||||
resolve({
|
||||
status: 'error',
|
||||
result: null,
|
||||
error: `Failed to parse container output: ${err instanceof Error ? err.message : String(err)}`
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
container.on('error', (err) => {
|
||||
clearTimeout(timeout);
|
||||
logger.error({ group: group.name, error: err }, 'Container spawn error');
|
||||
resolve({
|
||||
status: 'error',
|
||||
result: null,
|
||||
error: `Container spawn error: ${err.message}`
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Export task snapshot for container IPC
|
||||
export function writeTasksSnapshot(tasks: Array<{
|
||||
id: string;
|
||||
groupFolder: string;
|
||||
prompt: string;
|
||||
schedule_type: string;
|
||||
schedule_value: string;
|
||||
status: string;
|
||||
next_run: string | null;
|
||||
}>): void {
|
||||
const ipcDir = path.join(DATA_DIR, 'ipc');
|
||||
fs.mkdirSync(ipcDir, { recursive: true });
|
||||
const tasksFile = path.join(ipcDir, 'current_tasks.json');
|
||||
fs.writeFileSync(tasksFile, JSON.stringify(tasks, null, 2));
|
||||
}
|
||||
215
src/index.ts
215
src/index.ts
@@ -4,7 +4,6 @@ import makeWASocket, {
|
||||
makeCacheableSignalKeyStore,
|
||||
WASocket
|
||||
} from '@whiskeysockets/baileys';
|
||||
import { query } from '@anthropic-ai/claude-agent-sdk';
|
||||
import pino from 'pino';
|
||||
import { exec } from 'child_process';
|
||||
import fs from 'fs';
|
||||
@@ -18,12 +17,13 @@ import {
|
||||
DATA_DIR,
|
||||
TRIGGER_PATTERN,
|
||||
CLEAR_COMMAND,
|
||||
MAIN_GROUP_FOLDER
|
||||
MAIN_GROUP_FOLDER,
|
||||
IPC_POLL_INTERVAL
|
||||
} from './config.js';
|
||||
import { RegisteredGroup, Session, NewMessage } from './types.js';
|
||||
import { initDatabase, storeMessage, getNewMessages, getMessagesSince } from './db.js';
|
||||
import { createSchedulerMcp } from './scheduler-mcp.js';
|
||||
import { initDatabase, storeMessage, getNewMessages, getMessagesSince, getAllTasks } from './db.js';
|
||||
import { startSchedulerLoop } from './scheduler.js';
|
||||
import { runContainerAgent, writeTasksSnapshot } from './container-runner.js';
|
||||
|
||||
const logger = pino({
|
||||
level: process.env.LOG_LEVEL || 'info',
|
||||
@@ -118,59 +118,46 @@ async function processMessage(msg: NewMessage): Promise<void> {
|
||||
}
|
||||
|
||||
async function runAgent(group: RegisteredGroup, prompt: string, chatJid: string): Promise<string | null> {
|
||||
const groupDir = path.join(GROUPS_DIR, group.folder);
|
||||
fs.mkdirSync(groupDir, { recursive: true });
|
||||
|
||||
const isMain = group.folder === MAIN_GROUP_FOLDER;
|
||||
const sessionId = sessions[group.folder];
|
||||
let newSessionId: string | undefined;
|
||||
let result: string | null = null;
|
||||
|
||||
// Create scheduler MCP with current group context
|
||||
const schedulerMcp = createSchedulerMcp({
|
||||
groupFolder: group.folder,
|
||||
chatJid,
|
||||
isMain,
|
||||
sendMessage
|
||||
});
|
||||
|
||||
// Main channel gets Bash access for admin tasks (querying DB, etc.)
|
||||
const baseTools = ['Read', 'Write', 'Edit', 'Glob', 'Grep', 'WebSearch', 'WebFetch', 'mcp__nanoclaw__*', 'mcp__gmail__*'];
|
||||
const allowedTools = isMain ? [...baseTools, 'Bash'] : baseTools;
|
||||
// Update tasks snapshot for container to read
|
||||
const tasks = getAllTasks();
|
||||
writeTasksSnapshot(tasks.map(t => ({
|
||||
id: t.id,
|
||||
groupFolder: t.group_folder,
|
||||
prompt: t.prompt,
|
||||
schedule_type: t.schedule_type,
|
||||
schedule_value: t.schedule_value,
|
||||
status: t.status,
|
||||
next_run: t.next_run
|
||||
})));
|
||||
|
||||
try {
|
||||
for await (const message of query({
|
||||
const output = await runContainerAgent(group, {
|
||||
prompt,
|
||||
options: {
|
||||
cwd: groupDir,
|
||||
resume: sessionId,
|
||||
allowedTools,
|
||||
permissionMode: 'bypassPermissions',
|
||||
settingSources: ['project'],
|
||||
mcpServers: {
|
||||
nanoclaw: schedulerMcp,
|
||||
gmail: { command: 'npx', args: ['-y', '@gongrzhe/server-gmail-autoauth-mcp'] }
|
||||
}
|
||||
}
|
||||
})) {
|
||||
if (message.type === 'system' && message.subtype === 'init') {
|
||||
newSessionId = message.session_id;
|
||||
}
|
||||
if ('result' in message && message.result) {
|
||||
result = message.result as string;
|
||||
}
|
||||
sessionId,
|
||||
groupFolder: group.folder,
|
||||
chatJid,
|
||||
isMain
|
||||
});
|
||||
|
||||
// Update session if changed
|
||||
if (output.newSessionId) {
|
||||
sessions[group.folder] = output.newSessionId;
|
||||
saveJson(path.join(DATA_DIR, 'sessions.json'), sessions);
|
||||
}
|
||||
|
||||
if (output.status === 'error') {
|
||||
logger.error({ group: group.name, error: output.error }, 'Container agent error');
|
||||
return null;
|
||||
}
|
||||
|
||||
return output.result;
|
||||
} catch (err) {
|
||||
logger.error({ group: group.name, err }, 'Agent error');
|
||||
return null;
|
||||
}
|
||||
|
||||
if (newSessionId) {
|
||||
sessions[group.folder] = newSessionId;
|
||||
saveJson(path.join(DATA_DIR, 'sessions.json'), sessions);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function sendMessage(jid: string, text: string): Promise<void> {
|
||||
@@ -182,6 +169,139 @@ async function sendMessage(jid: string, text: string): Promise<void> {
|
||||
}
|
||||
}
|
||||
|
||||
// IPC watcher for container messages and tasks
|
||||
function startIpcWatcher(): void {
|
||||
const messagesDir = path.join(DATA_DIR, 'ipc', 'messages');
|
||||
const tasksDir = path.join(DATA_DIR, 'ipc', 'tasks');
|
||||
|
||||
fs.mkdirSync(messagesDir, { recursive: true });
|
||||
fs.mkdirSync(tasksDir, { recursive: true });
|
||||
|
||||
const processIpcFiles = async () => {
|
||||
// Process pending messages
|
||||
try {
|
||||
const messageFiles = fs.readdirSync(messagesDir).filter(f => f.endsWith('.json'));
|
||||
for (const file of messageFiles) {
|
||||
const filePath = path.join(messagesDir, file);
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
|
||||
if (data.type === 'message' && data.chatJid && data.text) {
|
||||
await sendMessage(data.chatJid, `${ASSISTANT_NAME}: ${data.text}`);
|
||||
logger.info({ chatJid: data.chatJid }, 'IPC message sent');
|
||||
}
|
||||
fs.unlinkSync(filePath);
|
||||
} catch (err) {
|
||||
logger.error({ file, err }, 'Error processing IPC message');
|
||||
// Move to error directory instead of deleting
|
||||
const errorDir = path.join(DATA_DIR, 'ipc', 'errors');
|
||||
fs.mkdirSync(errorDir, { recursive: true });
|
||||
fs.renameSync(filePath, path.join(errorDir, file));
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error reading IPC messages directory');
|
||||
}
|
||||
|
||||
// Process pending task operations
|
||||
try {
|
||||
const taskFiles = fs.readdirSync(tasksDir).filter(f => f.endsWith('.json'));
|
||||
for (const file of taskFiles) {
|
||||
const filePath = path.join(tasksDir, file);
|
||||
try {
|
||||
const data = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
|
||||
await processTaskIpc(data);
|
||||
fs.unlinkSync(filePath);
|
||||
} catch (err) {
|
||||
logger.error({ file, err }, 'Error processing IPC task');
|
||||
const errorDir = path.join(DATA_DIR, 'ipc', 'errors');
|
||||
fs.mkdirSync(errorDir, { recursive: true });
|
||||
fs.renameSync(filePath, path.join(errorDir, file));
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error({ err }, 'Error reading IPC tasks directory');
|
||||
}
|
||||
|
||||
setTimeout(processIpcFiles, IPC_POLL_INTERVAL);
|
||||
};
|
||||
|
||||
processIpcFiles();
|
||||
logger.info('IPC watcher started');
|
||||
}
|
||||
|
||||
async function processTaskIpc(data: {
|
||||
type: string;
|
||||
taskId?: string;
|
||||
prompt?: string;
|
||||
schedule_type?: string;
|
||||
schedule_value?: string;
|
||||
groupFolder?: string;
|
||||
chatJid?: string;
|
||||
isMain?: boolean;
|
||||
}): Promise<void> {
|
||||
// Import db functions dynamically to avoid circular deps
|
||||
const { createTask, updateTask, deleteTask } = await import('./db.js');
|
||||
const { CronExpressionParser } = await import('cron-parser');
|
||||
|
||||
switch (data.type) {
|
||||
case 'schedule_task':
|
||||
if (data.prompt && data.schedule_type && data.schedule_value && data.groupFolder && data.chatJid) {
|
||||
const scheduleType = data.schedule_type as 'cron' | 'interval' | 'once';
|
||||
|
||||
// Calculate next run time
|
||||
let nextRun: string | null = null;
|
||||
if (scheduleType === 'cron') {
|
||||
const interval = CronExpressionParser.parse(data.schedule_value);
|
||||
nextRun = interval.next().toISOString();
|
||||
} else if (scheduleType === 'interval') {
|
||||
const ms = parseInt(data.schedule_value, 10);
|
||||
nextRun = new Date(Date.now() + ms).toISOString();
|
||||
} else if (scheduleType === 'once') {
|
||||
nextRun = data.schedule_value; // ISO timestamp
|
||||
}
|
||||
|
||||
const taskId = `task-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
|
||||
createTask({
|
||||
id: taskId,
|
||||
group_folder: data.groupFolder,
|
||||
chat_jid: data.chatJid,
|
||||
prompt: data.prompt,
|
||||
schedule_type: scheduleType,
|
||||
schedule_value: data.schedule_value,
|
||||
next_run: nextRun,
|
||||
status: 'active',
|
||||
created_at: new Date().toISOString()
|
||||
});
|
||||
logger.info({ taskId, groupFolder: data.groupFolder }, 'Task created via IPC');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'pause_task':
|
||||
if (data.taskId) {
|
||||
updateTask(data.taskId, { status: 'paused' });
|
||||
logger.info({ taskId: data.taskId }, 'Task paused via IPC');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'resume_task':
|
||||
if (data.taskId) {
|
||||
updateTask(data.taskId, { status: 'active' });
|
||||
logger.info({ taskId: data.taskId }, 'Task resumed via IPC');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'cancel_task':
|
||||
if (data.taskId) {
|
||||
deleteTask(data.taskId);
|
||||
logger.info({ taskId: data.taskId }, 'Task cancelled via IPC');
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.warn({ type: data.type }, 'Unknown IPC task type');
|
||||
}
|
||||
}
|
||||
|
||||
async function connectWhatsApp(): Promise<void> {
|
||||
const authDir = path.join(STORE_DIR, 'auth');
|
||||
fs.mkdirSync(authDir, { recursive: true });
|
||||
@@ -219,7 +339,8 @@ async function connectWhatsApp(): Promise<void> {
|
||||
}
|
||||
} else if (connection === 'open') {
|
||||
logger.info('Connected to WhatsApp');
|
||||
startSchedulerLoop({ sendMessage });
|
||||
startSchedulerLoop({ sendMessage, registeredGroups: () => registeredGroups });
|
||||
startIpcWatcher();
|
||||
startMessageLoop();
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import { query } from '@anthropic-ai/claude-agent-sdk';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import pino from 'pino';
|
||||
import { CronExpressionParser } from 'cron-parser';
|
||||
import { getDueTasks, updateTaskAfterRun, logTaskRun, getTaskById } from './db.js';
|
||||
import { createSchedulerMcp } from './scheduler-mcp.js';
|
||||
import { ScheduledTask } from './types.js';
|
||||
import { GROUPS_DIR, SCHEDULER_POLL_INTERVAL } from './config.js';
|
||||
import { getDueTasks, updateTaskAfterRun, logTaskRun, getTaskById, getAllTasks } from './db.js';
|
||||
import { ScheduledTask, RegisteredGroup } from './types.js';
|
||||
import { GROUPS_DIR, SCHEDULER_POLL_INTERVAL, DATA_DIR } from './config.js';
|
||||
import { runContainerAgent, writeTasksSnapshot } from './container-runner.js';
|
||||
|
||||
const logger = pino({
|
||||
level: process.env.LOG_LEVEL || 'info',
|
||||
@@ -15,6 +14,7 @@ const logger = pino({
|
||||
|
||||
export interface SchedulerDependencies {
|
||||
sendMessage: (jid: string, text: string) => Promise<void>;
|
||||
registeredGroups: () => Record<string, RegisteredGroup>;
|
||||
}
|
||||
|
||||
async function runTask(task: ScheduledTask, deps: SchedulerDependencies): Promise<void> {
|
||||
@@ -24,37 +24,53 @@ async function runTask(task: ScheduledTask, deps: SchedulerDependencies): Promis
|
||||
|
||||
logger.info({ taskId: task.id, group: task.group_folder }, 'Running scheduled task');
|
||||
|
||||
// Create the scheduler MCP with task's group context
|
||||
const schedulerMcp = createSchedulerMcp({
|
||||
groupFolder: task.group_folder,
|
||||
chatJid: task.chat_jid,
|
||||
isMain: false, // Scheduled tasks run in their group's context, not as main
|
||||
sendMessage: deps.sendMessage
|
||||
});
|
||||
// Find the group config for this task
|
||||
const groups = deps.registeredGroups();
|
||||
const group = Object.values(groups).find(g => g.folder === task.group_folder);
|
||||
|
||||
if (!group) {
|
||||
logger.error({ taskId: task.id, groupFolder: task.group_folder }, 'Group not found for task');
|
||||
logTaskRun({
|
||||
task_id: task.id,
|
||||
run_at: new Date().toISOString(),
|
||||
duration_ms: Date.now() - startTime,
|
||||
status: 'error',
|
||||
result: null,
|
||||
error: `Group not found: ${task.group_folder}`
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Update tasks snapshot for container to read
|
||||
const tasks = getAllTasks();
|
||||
writeTasksSnapshot(tasks.map(t => ({
|
||||
id: t.id,
|
||||
groupFolder: t.group_folder,
|
||||
prompt: t.prompt,
|
||||
schedule_type: t.schedule_type,
|
||||
schedule_value: t.schedule_value,
|
||||
status: t.status,
|
||||
next_run: t.next_run
|
||||
})));
|
||||
|
||||
let result: string | null = null;
|
||||
let error: string | null = null;
|
||||
|
||||
try {
|
||||
for await (const message of query({
|
||||
const output = await runContainerAgent(group, {
|
||||
prompt: task.prompt,
|
||||
options: {
|
||||
cwd: groupDir,
|
||||
allowedTools: ['Read', 'Write', 'Edit', 'Glob', 'Grep', 'WebSearch', 'WebFetch', 'mcp__nanoclaw__*', 'mcp__gmail__*'],
|
||||
permissionMode: 'bypassPermissions',
|
||||
settingSources: ['project'],
|
||||
mcpServers: {
|
||||
nanoclaw: schedulerMcp,
|
||||
gmail: { command: 'npx', args: ['-y', '@gongrzhe/server-gmail-autoauth-mcp'] }
|
||||
}
|
||||
}
|
||||
})) {
|
||||
if ('result' in message && message.result) {
|
||||
result = message.result as string;
|
||||
}
|
||||
groupFolder: task.group_folder,
|
||||
chatJid: task.chat_jid,
|
||||
isMain: false // Scheduled tasks run in their group's context
|
||||
});
|
||||
|
||||
if (output.status === 'error') {
|
||||
error = output.error || 'Unknown error';
|
||||
} else {
|
||||
result = output.result;
|
||||
}
|
||||
|
||||
logger.info({ taskId: task.id, durationMs: Date.now() - startTime }, 'Task completed successfully');
|
||||
logger.info({ taskId: task.id, durationMs: Date.now() - startTime }, 'Task completed');
|
||||
} catch (err) {
|
||||
error = err instanceof Error ? err.message : String(err);
|
||||
logger.error({ taskId: task.id, error }, 'Task failed');
|
||||
|
||||
13
src/types.ts
13
src/types.ts
@@ -1,8 +1,21 @@
|
||||
/** An extra host path to bind-mount into a group's container. */
export interface AdditionalMount {
  hostPath: string; // Absolute path on host (supports ~ for home)
  containerPath: string; // Path inside container (under /workspace/extra/)
  readonly?: boolean; // Default: true for safety
}

/** Per-group container execution settings. */
export interface ContainerConfig {
  additionalMounts?: AdditionalMount[];
  timeout?: number; // Default: 300000 (5 minutes)
  // NOTE(review): presumably extra env vars for the container; not
  // consumed by the runner shown here — verify before relying on it.
  env?: Record<string, string>;
}

/** A chat group registered with the assistant. */
export interface RegisteredGroup {
  name: string;
  folder: string; // Folder name under GROUPS_DIR
  trigger: string;
  added_at: string;
  containerConfig?: ContainerConfig; // Absent = default container settings
}
|
||||
|
||||
export interface Session {
|
||||
|
||||
Reference in New Issue
Block a user