Skills engine v0.1 + multi-channel infrastructure (#307)
* refactor: multi-channel infrastructure with explicit channel/is_group tracking - Add channels[] array and findChannel() routing in index.ts, replacing hardcoded whatsapp.* calls with channel-agnostic callbacks - Add channel TEXT and is_group INTEGER columns to chats table with COALESCE upsert to protect existing values from null overwrites - is_group defaults to 0 (safe: unknown chats excluded from groups) - WhatsApp passes explicit channel='whatsapp' and isGroup to onChatMetadata - getAvailableGroups filters on is_group instead of JID pattern matching - findChannel logs warnings instead of silently dropping unroutable JIDs - Migration backfills channel/is_group from JID patterns for existing DBs Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> * feat: skills engine v0.1 — deterministic skill packages with rerere resolution Three-way merge engine for applying skill packages on top of a core codebase. Skills declare which files they add/modify, and the engine uses git merge-file for conflict detection with git rerere for automatic resolution of previously-seen conflicts. Key components: - apply: three-way merge with backup/rollback safety net - replay: clean-slate replay for uninstall and rebase - update: core version updates with deletion detection - rebase: bake applied skills into base (one-way) - manifest: validation with path traversal protection - resolution-cache: pre-computed rerere resolutions - structured: npm deps, env vars, docker-compose merging - CI: per-skill test matrix with conflict detection 151 unit tests covering merge, rerere, backup, replay, uninstall, update, rebase, structured ops, and edge cases. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> * feat: add Discord and Telegram skill packages Skill packages for adding Discord and Telegram channels to NanoClaw. 
Each package includes: - Channel implementation (add/src/channels/) - Three-way merge targets for index.ts, config.ts, routing.test.ts - Intent docs explaining merge invariants - Standalone integration tests - manifest.yaml with dependency/conflict declarations Applied via: npx tsx scripts/apply-skill.ts .claude/skills/add-discord These are inert until applied — no runtime impact. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> * remove unused docs (skills-system-status, implementation-guide) Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> --------- Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
92
skills-engine/__tests__/apply.test.ts
Normal file
92
skills-engine/__tests__/apply.test.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import { applySkill } from '../apply.js';
|
||||
import {
|
||||
cleanup,
|
||||
createMinimalState,
|
||||
createSkillPackage,
|
||||
createTempDir,
|
||||
initGitRepo,
|
||||
setupNanoclawDir,
|
||||
} from './test-helpers.js';
|
||||
|
||||
describe('apply', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
setupNanoclawDir(tmpDir);
|
||||
createMinimalState(tmpDir);
|
||||
initGitRepo(tmpDir);
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
it('rejects when min_skills_system_version is too high', async () => {
|
||||
const skillDir = createSkillPackage(tmpDir, {
|
||||
skill: 'future-skill',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: [],
|
||||
modifies: [],
|
||||
min_skills_system_version: '99.0.0',
|
||||
});
|
||||
|
||||
const result = await applySkill(skillDir);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('99.0.0');
|
||||
});
|
||||
|
||||
it('executes post_apply commands on success', async () => {
|
||||
const markerFile = path.join(tmpDir, 'post-apply-marker.txt');
|
||||
const skillDir = createSkillPackage(tmpDir, {
|
||||
skill: 'post-test',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/newfile.ts'],
|
||||
modifies: [],
|
||||
addFiles: { 'src/newfile.ts': 'export const x = 1;' },
|
||||
post_apply: [`echo "applied" > "${markerFile}"`],
|
||||
});
|
||||
|
||||
const result = await applySkill(skillDir);
|
||||
expect(result.success).toBe(true);
|
||||
expect(fs.existsSync(markerFile)).toBe(true);
|
||||
expect(fs.readFileSync(markerFile, 'utf-8').trim()).toBe('applied');
|
||||
});
|
||||
|
||||
it('rolls back on post_apply failure', async () => {
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
const existingFile = path.join(tmpDir, 'src/existing.ts');
|
||||
fs.writeFileSync(existingFile, 'original content');
|
||||
|
||||
// Set up base for the modified file
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
|
||||
fs.mkdirSync(baseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'existing.ts'), 'original content');
|
||||
|
||||
const skillDir = createSkillPackage(tmpDir, {
|
||||
skill: 'bad-post',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/added.ts'],
|
||||
modifies: [],
|
||||
addFiles: { 'src/added.ts': 'new file' },
|
||||
post_apply: ['false'], // always fails
|
||||
});
|
||||
|
||||
const result = await applySkill(skillDir);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('post_apply');
|
||||
|
||||
// Added file should be cleaned up
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src/added.ts'))).toBe(false);
|
||||
});
|
||||
});
|
||||
77
skills-engine/__tests__/backup.test.ts
Normal file
77
skills-engine/__tests__/backup.test.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { createBackup, restoreBackup, clearBackup } from '../backup.js';
|
||||
import { createTempDir, setupNanoclawDir, cleanup } from './test-helpers.js';
|
||||
|
||||
describe('backup', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
setupNanoclawDir(tmpDir);
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
it('createBackup copies files and restoreBackup puts them back', () => {
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(tmpDir, 'src', 'app.ts'), 'original content');
|
||||
|
||||
createBackup(['src/app.ts']);
|
||||
|
||||
fs.writeFileSync(path.join(tmpDir, 'src', 'app.ts'), 'modified content');
|
||||
expect(fs.readFileSync(path.join(tmpDir, 'src', 'app.ts'), 'utf-8')).toBe('modified content');
|
||||
|
||||
restoreBackup();
|
||||
expect(fs.readFileSync(path.join(tmpDir, 'src', 'app.ts'), 'utf-8')).toBe('original content');
|
||||
});
|
||||
|
||||
it('createBackup skips missing files without error', () => {
|
||||
expect(() => createBackup(['does-not-exist.ts'])).not.toThrow();
|
||||
});
|
||||
|
||||
it('clearBackup removes backup directory', () => {
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(tmpDir, 'src', 'app.ts'), 'content');
|
||||
createBackup(['src/app.ts']);
|
||||
|
||||
const backupDir = path.join(tmpDir, '.nanoclaw', 'backup');
|
||||
expect(fs.existsSync(backupDir)).toBe(true);
|
||||
|
||||
clearBackup();
|
||||
expect(fs.existsSync(backupDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('createBackup writes tombstone for non-existent files', () => {
|
||||
createBackup(['src/newfile.ts']);
|
||||
|
||||
const tombstone = path.join(tmpDir, '.nanoclaw', 'backup', 'src', 'newfile.ts.tombstone');
|
||||
expect(fs.existsSync(tombstone)).toBe(true);
|
||||
});
|
||||
|
||||
it('restoreBackup deletes files with tombstone markers', () => {
|
||||
// Create backup first — file doesn't exist yet, so tombstone is written
|
||||
createBackup(['src/added.ts']);
|
||||
|
||||
// Now the file gets created (simulating skill apply)
|
||||
const filePath = path.join(tmpDir, 'src', 'added.ts');
|
||||
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
fs.writeFileSync(filePath, 'new content');
|
||||
expect(fs.existsSync(filePath)).toBe(true);
|
||||
|
||||
// Restore should delete the file (tombstone means it didn't exist before)
|
||||
restoreBackup();
|
||||
expect(fs.existsSync(filePath)).toBe(false);
|
||||
});
|
||||
|
||||
it('restoreBackup is no-op when backup dir is empty or missing', () => {
|
||||
clearBackup();
|
||||
expect(() => restoreBackup()).not.toThrow();
|
||||
});
|
||||
});
|
||||
270
skills-engine/__tests__/ci-matrix.test.ts
Normal file
270
skills-engine/__tests__/ci-matrix.test.ts
Normal file
@@ -0,0 +1,270 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { stringify } from 'yaml';
|
||||
|
||||
import {
|
||||
computeOverlapMatrix,
|
||||
extractOverlapInfo,
|
||||
generateMatrix,
|
||||
type SkillOverlapInfo,
|
||||
} from '../../scripts/generate-ci-matrix.js';
|
||||
import { SkillManifest } from '../types.js';
|
||||
import { createTempDir, cleanup } from './test-helpers.js';
|
||||
|
||||
function makeManifest(overrides: Partial<SkillManifest> & { skill: string }): SkillManifest {
|
||||
return {
|
||||
version: '1.0.0',
|
||||
description: 'Test skill',
|
||||
core_version: '1.0.0',
|
||||
adds: [],
|
||||
modifies: [],
|
||||
conflicts: [],
|
||||
depends: [],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe('ci-matrix', () => {
|
||||
describe('computeOverlapMatrix', () => {
|
||||
it('detects overlap from shared modifies entries', () => {
|
||||
const skills: SkillOverlapInfo[] = [
|
||||
{ name: 'telegram', modifies: ['src/config.ts', 'src/index.ts'], npmDependencies: [] },
|
||||
{ name: 'discord', modifies: ['src/config.ts', 'src/router.ts'], npmDependencies: [] },
|
||||
];
|
||||
|
||||
const matrix = computeOverlapMatrix(skills);
|
||||
|
||||
expect(matrix).toHaveLength(1);
|
||||
expect(matrix[0].skills).toEqual(['telegram', 'discord']);
|
||||
expect(matrix[0].reason).toContain('shared modifies');
|
||||
expect(matrix[0].reason).toContain('src/config.ts');
|
||||
});
|
||||
|
||||
it('returns no entry for non-overlapping skills', () => {
|
||||
const skills: SkillOverlapInfo[] = [
|
||||
{ name: 'telegram', modifies: ['src/telegram.ts'], npmDependencies: ['grammy'] },
|
||||
{ name: 'discord', modifies: ['src/discord.ts'], npmDependencies: ['discord.js'] },
|
||||
];
|
||||
|
||||
const matrix = computeOverlapMatrix(skills);
|
||||
|
||||
expect(matrix).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('detects overlap from shared npm dependencies', () => {
|
||||
const skills: SkillOverlapInfo[] = [
|
||||
{ name: 'skill-a', modifies: ['src/a.ts'], npmDependencies: ['lodash', 'zod'] },
|
||||
{ name: 'skill-b', modifies: ['src/b.ts'], npmDependencies: ['zod', 'express'] },
|
||||
];
|
||||
|
||||
const matrix = computeOverlapMatrix(skills);
|
||||
|
||||
expect(matrix).toHaveLength(1);
|
||||
expect(matrix[0].skills).toEqual(['skill-a', 'skill-b']);
|
||||
expect(matrix[0].reason).toContain('shared npm packages');
|
||||
expect(matrix[0].reason).toContain('zod');
|
||||
});
|
||||
|
||||
it('reports both modifies and npm overlap in one entry', () => {
|
||||
const skills: SkillOverlapInfo[] = [
|
||||
{ name: 'skill-a', modifies: ['src/config.ts'], npmDependencies: ['zod'] },
|
||||
{ name: 'skill-b', modifies: ['src/config.ts'], npmDependencies: ['zod'] },
|
||||
];
|
||||
|
||||
const matrix = computeOverlapMatrix(skills);
|
||||
|
||||
expect(matrix).toHaveLength(1);
|
||||
expect(matrix[0].reason).toContain('shared modifies');
|
||||
expect(matrix[0].reason).toContain('shared npm packages');
|
||||
});
|
||||
|
||||
it('handles three skills with pairwise overlaps', () => {
|
||||
const skills: SkillOverlapInfo[] = [
|
||||
{ name: 'a', modifies: ['src/config.ts'], npmDependencies: [] },
|
||||
{ name: 'b', modifies: ['src/config.ts', 'src/router.ts'], npmDependencies: [] },
|
||||
{ name: 'c', modifies: ['src/router.ts'], npmDependencies: [] },
|
||||
];
|
||||
|
||||
const matrix = computeOverlapMatrix(skills);
|
||||
|
||||
// a-b overlap on config.ts, b-c overlap on router.ts, a-c no overlap
|
||||
expect(matrix).toHaveLength(2);
|
||||
expect(matrix[0].skills).toEqual(['a', 'b']);
|
||||
expect(matrix[1].skills).toEqual(['b', 'c']);
|
||||
});
|
||||
|
||||
it('returns empty array for single skill', () => {
|
||||
const skills: SkillOverlapInfo[] = [
|
||||
{ name: 'only', modifies: ['src/config.ts'], npmDependencies: ['zod'] },
|
||||
];
|
||||
|
||||
const matrix = computeOverlapMatrix(skills);
|
||||
|
||||
expect(matrix).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('returns empty array for no skills', () => {
|
||||
const matrix = computeOverlapMatrix([]);
|
||||
expect(matrix).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractOverlapInfo', () => {
|
||||
it('extracts modifies and npm dependencies using dirName', () => {
|
||||
const manifest = makeManifest({
|
||||
skill: 'telegram',
|
||||
modifies: ['src/config.ts'],
|
||||
structured: {
|
||||
npm_dependencies: { grammy: '^1.0.0', zod: '^3.0.0' },
|
||||
},
|
||||
});
|
||||
|
||||
const info = extractOverlapInfo(manifest, 'add-telegram');
|
||||
|
||||
expect(info.name).toBe('add-telegram');
|
||||
expect(info.modifies).toEqual(['src/config.ts']);
|
||||
expect(info.npmDependencies).toEqual(['grammy', 'zod']);
|
||||
});
|
||||
|
||||
it('handles manifest without structured field', () => {
|
||||
const manifest = makeManifest({
|
||||
skill: 'simple',
|
||||
modifies: ['src/index.ts'],
|
||||
});
|
||||
|
||||
const info = extractOverlapInfo(manifest, 'add-simple');
|
||||
|
||||
expect(info.npmDependencies).toEqual([]);
|
||||
});
|
||||
|
||||
it('handles structured without npm_dependencies', () => {
|
||||
const manifest = makeManifest({
|
||||
skill: 'env-only',
|
||||
modifies: [],
|
||||
structured: {
|
||||
env_additions: ['MY_VAR'],
|
||||
},
|
||||
});
|
||||
|
||||
const info = extractOverlapInfo(manifest, 'add-env-only');
|
||||
|
||||
expect(info.npmDependencies).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateMatrix with real filesystem', () => {
|
||||
let tmpDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
function createManifestDir(skillsDir: string, name: string, manifest: Record<string, unknown>): void {
|
||||
const dir = path.join(skillsDir, name);
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify(manifest));
|
||||
}
|
||||
|
||||
it('reads manifests from disk and finds overlaps', () => {
|
||||
const skillsDir = path.join(tmpDir, '.claude', 'skills');
|
||||
|
||||
createManifestDir(skillsDir, 'telegram', {
|
||||
skill: 'telegram',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/telegram.ts'],
|
||||
modifies: ['src/config.ts', 'src/index.ts'],
|
||||
conflicts: [],
|
||||
depends: [],
|
||||
});
|
||||
|
||||
createManifestDir(skillsDir, 'discord', {
|
||||
skill: 'discord',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/discord.ts'],
|
||||
modifies: ['src/config.ts', 'src/index.ts'],
|
||||
conflicts: [],
|
||||
depends: [],
|
||||
});
|
||||
|
||||
const matrix = generateMatrix(skillsDir);
|
||||
|
||||
expect(matrix).toHaveLength(1);
|
||||
expect(matrix[0].skills).toContain('telegram');
|
||||
expect(matrix[0].skills).toContain('discord');
|
||||
});
|
||||
|
||||
it('returns empty matrix when skills dir does not exist', () => {
|
||||
const matrix = generateMatrix(path.join(tmpDir, 'nonexistent'));
|
||||
expect(matrix).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('returns empty matrix for non-overlapping skills on disk', () => {
|
||||
const skillsDir = path.join(tmpDir, '.claude', 'skills');
|
||||
|
||||
createManifestDir(skillsDir, 'alpha', {
|
||||
skill: 'alpha',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/alpha.ts'],
|
||||
modifies: ['src/alpha-config.ts'],
|
||||
conflicts: [],
|
||||
depends: [],
|
||||
});
|
||||
|
||||
createManifestDir(skillsDir, 'beta', {
|
||||
skill: 'beta',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/beta.ts'],
|
||||
modifies: ['src/beta-config.ts'],
|
||||
conflicts: [],
|
||||
depends: [],
|
||||
});
|
||||
|
||||
const matrix = generateMatrix(skillsDir);
|
||||
expect(matrix).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('detects structured npm overlap from disk manifests', () => {
|
||||
const skillsDir = path.join(tmpDir, '.claude', 'skills');
|
||||
|
||||
createManifestDir(skillsDir, 'skill-x', {
|
||||
skill: 'skill-x',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: [],
|
||||
modifies: ['src/x.ts'],
|
||||
conflicts: [],
|
||||
depends: [],
|
||||
structured: {
|
||||
npm_dependencies: { lodash: '^4.0.0' },
|
||||
},
|
||||
});
|
||||
|
||||
createManifestDir(skillsDir, 'skill-y', {
|
||||
skill: 'skill-y',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: [],
|
||||
modifies: ['src/y.ts'],
|
||||
conflicts: [],
|
||||
depends: [],
|
||||
structured: {
|
||||
npm_dependencies: { lodash: '^4.1.0' },
|
||||
},
|
||||
});
|
||||
|
||||
const matrix = generateMatrix(skillsDir);
|
||||
|
||||
expect(matrix).toHaveLength(1);
|
||||
expect(matrix[0].reason).toContain('lodash');
|
||||
});
|
||||
});
|
||||
});
|
||||
43
skills-engine/__tests__/constants.test.ts
Normal file
43
skills-engine/__tests__/constants.test.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
NANOCLAW_DIR,
|
||||
STATE_FILE,
|
||||
BASE_DIR,
|
||||
BACKUP_DIR,
|
||||
LOCK_FILE,
|
||||
CUSTOM_DIR,
|
||||
RESOLUTIONS_DIR,
|
||||
SKILLS_SCHEMA_VERSION,
|
||||
} from '../constants.js';
|
||||
|
||||
describe('constants', () => {
|
||||
const allConstants = {
|
||||
NANOCLAW_DIR,
|
||||
STATE_FILE,
|
||||
BASE_DIR,
|
||||
BACKUP_DIR,
|
||||
LOCK_FILE,
|
||||
CUSTOM_DIR,
|
||||
RESOLUTIONS_DIR,
|
||||
SKILLS_SCHEMA_VERSION,
|
||||
};
|
||||
|
||||
it('all constants are non-empty strings', () => {
|
||||
for (const [name, value] of Object.entries(allConstants)) {
|
||||
expect(value, `${name} should be a non-empty string`).toBeTruthy();
|
||||
expect(typeof value, `${name} should be a string`).toBe('string');
|
||||
}
|
||||
});
|
||||
|
||||
it('path constants use forward slashes and .nanoclaw prefix', () => {
|
||||
const pathConstants = [BASE_DIR, BACKUP_DIR, LOCK_FILE, CUSTOM_DIR, RESOLUTIONS_DIR];
|
||||
for (const p of pathConstants) {
|
||||
expect(p).not.toContain('\\');
|
||||
expect(p).toMatch(/^\.nanoclaw\//);
|
||||
}
|
||||
});
|
||||
|
||||
it('NANOCLAW_DIR is .nanoclaw', () => {
|
||||
expect(NANOCLAW_DIR).toBe('.nanoclaw');
|
||||
});
|
||||
});
|
||||
136
skills-engine/__tests__/customize.test.ts
Normal file
136
skills-engine/__tests__/customize.test.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import {
|
||||
isCustomizeActive,
|
||||
startCustomize,
|
||||
commitCustomize,
|
||||
abortCustomize,
|
||||
} from '../customize.js';
|
||||
import { CUSTOM_DIR } from '../constants.js';
|
||||
import {
|
||||
createTempDir,
|
||||
setupNanoclawDir,
|
||||
createMinimalState,
|
||||
cleanup,
|
||||
writeState,
|
||||
} from './test-helpers.js';
|
||||
import { readState, recordSkillApplication, computeFileHash } from '../state.js';
|
||||
|
||||
describe('customize', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
setupNanoclawDir(tmpDir);
|
||||
createMinimalState(tmpDir);
|
||||
fs.mkdirSync(path.join(tmpDir, CUSTOM_DIR), { recursive: true });
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
it('startCustomize creates pending.yaml and isCustomizeActive returns true', () => {
|
||||
// Need at least one applied skill with file_hashes for snapshot
|
||||
const trackedFile = path.join(tmpDir, 'src', 'app.ts');
|
||||
fs.mkdirSync(path.dirname(trackedFile), { recursive: true });
|
||||
fs.writeFileSync(trackedFile, 'export const x = 1;');
|
||||
recordSkillApplication('test-skill', '1.0.0', {
|
||||
'src/app.ts': computeFileHash(trackedFile),
|
||||
});
|
||||
|
||||
expect(isCustomizeActive()).toBe(false);
|
||||
startCustomize('test customization');
|
||||
expect(isCustomizeActive()).toBe(true);
|
||||
|
||||
const pendingPath = path.join(tmpDir, CUSTOM_DIR, 'pending.yaml');
|
||||
expect(fs.existsSync(pendingPath)).toBe(true);
|
||||
});
|
||||
|
||||
it('abortCustomize removes pending.yaml', () => {
|
||||
const trackedFile = path.join(tmpDir, 'src', 'app.ts');
|
||||
fs.mkdirSync(path.dirname(trackedFile), { recursive: true });
|
||||
fs.writeFileSync(trackedFile, 'export const x = 1;');
|
||||
recordSkillApplication('test-skill', '1.0.0', {
|
||||
'src/app.ts': computeFileHash(trackedFile),
|
||||
});
|
||||
|
||||
startCustomize('test');
|
||||
expect(isCustomizeActive()).toBe(true);
|
||||
|
||||
abortCustomize();
|
||||
expect(isCustomizeActive()).toBe(false);
|
||||
});
|
||||
|
||||
it('commitCustomize with no changes clears pending', () => {
|
||||
const trackedFile = path.join(tmpDir, 'src', 'app.ts');
|
||||
fs.mkdirSync(path.dirname(trackedFile), { recursive: true });
|
||||
fs.writeFileSync(trackedFile, 'export const x = 1;');
|
||||
recordSkillApplication('test-skill', '1.0.0', {
|
||||
'src/app.ts': computeFileHash(trackedFile),
|
||||
});
|
||||
|
||||
startCustomize('no-op');
|
||||
commitCustomize();
|
||||
|
||||
expect(isCustomizeActive()).toBe(false);
|
||||
});
|
||||
|
||||
it('commitCustomize with changes creates patch and records in state', () => {
|
||||
const trackedFile = path.join(tmpDir, 'src', 'app.ts');
|
||||
fs.mkdirSync(path.dirname(trackedFile), { recursive: true });
|
||||
fs.writeFileSync(trackedFile, 'export const x = 1;');
|
||||
recordSkillApplication('test-skill', '1.0.0', {
|
||||
'src/app.ts': computeFileHash(trackedFile),
|
||||
});
|
||||
|
||||
startCustomize('add feature');
|
||||
|
||||
// Modify the tracked file
|
||||
fs.writeFileSync(trackedFile, 'export const x = 2;\nexport const y = 3;');
|
||||
|
||||
commitCustomize();
|
||||
|
||||
expect(isCustomizeActive()).toBe(false);
|
||||
const state = readState();
|
||||
expect(state.custom_modifications).toBeDefined();
|
||||
expect(state.custom_modifications!.length).toBeGreaterThan(0);
|
||||
expect(state.custom_modifications![0].description).toBe('add feature');
|
||||
});
|
||||
|
||||
it('commitCustomize throws descriptive error on diff failure', () => {
|
||||
const trackedFile = path.join(tmpDir, 'src', 'app.ts');
|
||||
fs.mkdirSync(path.dirname(trackedFile), { recursive: true });
|
||||
fs.writeFileSync(trackedFile, 'export const x = 1;');
|
||||
recordSkillApplication('test-skill', '1.0.0', {
|
||||
'src/app.ts': computeFileHash(trackedFile),
|
||||
});
|
||||
|
||||
startCustomize('diff-error test');
|
||||
|
||||
// Modify the tracked file
|
||||
fs.writeFileSync(trackedFile, 'export const x = 2;');
|
||||
|
||||
// Make the base file a directory to cause diff to exit with code 2
|
||||
const baseFilePath = path.join(tmpDir, '.nanoclaw', 'base', 'src', 'app.ts');
|
||||
fs.mkdirSync(baseFilePath, { recursive: true });
|
||||
|
||||
expect(() => commitCustomize()).toThrow(/diff error/i);
|
||||
});
|
||||
|
||||
it('startCustomize while active throws', () => {
|
||||
const trackedFile = path.join(tmpDir, 'src', 'app.ts');
|
||||
fs.mkdirSync(path.dirname(trackedFile), { recursive: true });
|
||||
fs.writeFileSync(trackedFile, 'export const x = 1;');
|
||||
recordSkillApplication('test-skill', '1.0.0', {
|
||||
'src/app.ts': computeFileHash(trackedFile),
|
||||
});
|
||||
|
||||
startCustomize('first');
|
||||
expect(() => startCustomize('second')).toThrow();
|
||||
});
|
||||
});
|
||||
93
skills-engine/__tests__/file-ops.test.ts
Normal file
93
skills-engine/__tests__/file-ops.test.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { executeFileOps } from '../file-ops.js';
|
||||
import { createTempDir, cleanup } from './test-helpers.js';
|
||||
|
||||
describe('file-ops', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
it('rename success', () => {
|
||||
fs.writeFileSync(path.join(tmpDir, 'old.ts'), 'content');
|
||||
const result = executeFileOps([
|
||||
{ type: 'rename', from: 'old.ts', to: 'new.ts' },
|
||||
], tmpDir);
|
||||
expect(result.success).toBe(true);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'new.ts'))).toBe(true);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'old.ts'))).toBe(false);
|
||||
});
|
||||
|
||||
it('move success', () => {
|
||||
fs.writeFileSync(path.join(tmpDir, 'file.ts'), 'content');
|
||||
const result = executeFileOps([
|
||||
{ type: 'move', from: 'file.ts', to: 'sub/file.ts' },
|
||||
], tmpDir);
|
||||
expect(result.success).toBe(true);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'sub', 'file.ts'))).toBe(true);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'file.ts'))).toBe(false);
|
||||
});
|
||||
|
||||
it('delete success', () => {
|
||||
fs.writeFileSync(path.join(tmpDir, 'remove-me.ts'), 'content');
|
||||
const result = executeFileOps([
|
||||
{ type: 'delete', path: 'remove-me.ts' },
|
||||
], tmpDir);
|
||||
expect(result.success).toBe(true);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'remove-me.ts'))).toBe(false);
|
||||
});
|
||||
|
||||
it('rename target exists produces error', () => {
|
||||
fs.writeFileSync(path.join(tmpDir, 'a.ts'), 'a');
|
||||
fs.writeFileSync(path.join(tmpDir, 'b.ts'), 'b');
|
||||
const result = executeFileOps([
|
||||
{ type: 'rename', from: 'a.ts', to: 'b.ts' },
|
||||
], tmpDir);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('delete missing file produces warning not error', () => {
|
||||
const result = executeFileOps([
|
||||
{ type: 'delete', path: 'nonexistent.ts' },
|
||||
], tmpDir);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.warnings.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('move creates destination directory', () => {
|
||||
fs.writeFileSync(path.join(tmpDir, 'src.ts'), 'content');
|
||||
const result = executeFileOps([
|
||||
{ type: 'move', from: 'src.ts', to: 'deep/nested/dir/src.ts' },
|
||||
], tmpDir);
|
||||
expect(result.success).toBe(true);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'deep', 'nested', 'dir', 'src.ts'))).toBe(true);
|
||||
});
|
||||
|
||||
it('path escape produces error', () => {
|
||||
fs.writeFileSync(path.join(tmpDir, 'file.ts'), 'content');
|
||||
const result = executeFileOps([
|
||||
{ type: 'rename', from: 'file.ts', to: '../../escaped.ts' },
|
||||
], tmpDir);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('source missing produces error for rename', () => {
|
||||
const result = executeFileOps([
|
||||
{ type: 'rename', from: 'missing.ts', to: 'new.ts' },
|
||||
], tmpDir);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
60
skills-engine/__tests__/lock.test.ts
Normal file
60
skills-engine/__tests__/lock.test.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { acquireLock, releaseLock, isLocked } from '../lock.js';
|
||||
import { LOCK_FILE } from '../constants.js';
|
||||
import { createTempDir, cleanup } from './test-helpers.js';
|
||||
|
||||
describe('lock', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
fs.mkdirSync(path.join(tmpDir, '.nanoclaw'), { recursive: true });
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
it('acquireLock returns a release function', () => {
|
||||
const release = acquireLock();
|
||||
expect(typeof release).toBe('function');
|
||||
expect(fs.existsSync(path.join(tmpDir, LOCK_FILE))).toBe(true);
|
||||
release();
|
||||
});
|
||||
|
||||
it('releaseLock removes the lock file', () => {
|
||||
acquireLock();
|
||||
expect(fs.existsSync(path.join(tmpDir, LOCK_FILE))).toBe(true);
|
||||
releaseLock();
|
||||
expect(fs.existsSync(path.join(tmpDir, LOCK_FILE))).toBe(false);
|
||||
});
|
||||
|
||||
it('acquire after release succeeds', () => {
|
||||
const release1 = acquireLock();
|
||||
release1();
|
||||
const release2 = acquireLock();
|
||||
expect(typeof release2).toBe('function');
|
||||
release2();
|
||||
});
|
||||
|
||||
it('isLocked returns true when locked', () => {
|
||||
const release = acquireLock();
|
||||
expect(isLocked()).toBe(true);
|
||||
release();
|
||||
});
|
||||
|
||||
it('isLocked returns false when released', () => {
|
||||
const release = acquireLock();
|
||||
release();
|
||||
expect(isLocked()).toBe(false);
|
||||
});
|
||||
|
||||
it('isLocked returns false when no lock exists', () => {
|
||||
expect(isLocked()).toBe(false);
|
||||
});
|
||||
});
|
||||
298
skills-engine/__tests__/manifest.test.ts
Normal file
298
skills-engine/__tests__/manifest.test.ts
Normal file
@@ -0,0 +1,298 @@
|
||||
// Unit tests for manifest.ts: YAML parsing and validation of skill-package
// manifests (required fields, path-safety guards) plus the pre-apply checks
// for core version, dependencies, conflicts, and engine (skills system) version.
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import fs from 'fs';
import path from 'path';
import { stringify } from 'yaml';
import {
  readManifest,
  checkCoreVersion,
  checkDependencies,
  checkConflicts,
  checkSystemVersion,
} from '../manifest.js';
import {
  createTempDir,
  setupNanoclawDir,
  createMinimalState,
  createSkillPackage,
  cleanup,
  // NOTE(review): writeState appears unused in this file — confirm and drop.
  writeState,
} from './test-helpers.js';
import { recordSkillApplication } from '../state.js';

describe('manifest', () => {
  let tmpDir: string;
  // The module under test resolves state relative to process.cwd(), so each
  // test chdirs into a throwaway project dir; remember the original cwd to
  // restore it afterwards.
  const originalCwd = process.cwd();

  beforeEach(() => {
    // Fresh temp project with a .nanoclaw dir and minimal state file.
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    createMinimalState(tmpDir);
    process.chdir(tmpDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });

  it('parses a valid manifest', () => {
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'telegram',
      version: '2.0.0',
      core_version: '1.0.0',
      adds: ['src/telegram.ts'],
      modifies: ['src/config.ts'],
    });
    const manifest = readManifest(skillDir);
    expect(manifest.skill).toBe('telegram');
    expect(manifest.version).toBe('2.0.0');
    expect(manifest.adds).toEqual(['src/telegram.ts']);
    expect(manifest.modifies).toEqual(['src/config.ts']);
  });

  // Each of the following "missing field" tests writes a manifest.yaml by
  // hand (bypassing createSkillPackage) with exactly one required field
  // omitted, and expects readManifest to reject it.
  it('throws on missing skill field', () => {
    const dir = path.join(tmpDir, 'bad-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      version: '1.0.0', core_version: '1.0.0', adds: [], modifies: [],
    }));
    expect(() => readManifest(dir)).toThrow();
  });

  it('throws on missing version field', () => {
    const dir = path.join(tmpDir, 'bad-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test', core_version: '1.0.0', adds: [], modifies: [],
    }));
    expect(() => readManifest(dir)).toThrow();
  });

  it('throws on missing core_version field', () => {
    const dir = path.join(tmpDir, 'bad-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test', version: '1.0.0', adds: [], modifies: [],
    }));
    expect(() => readManifest(dir)).toThrow();
  });

  it('throws on missing adds field', () => {
    const dir = path.join(tmpDir, 'bad-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test', version: '1.0.0', core_version: '1.0.0', modifies: [],
    }));
    expect(() => readManifest(dir)).toThrow();
  });

  it('throws on missing modifies field', () => {
    const dir = path.join(tmpDir, 'bad-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test', version: '1.0.0', core_version: '1.0.0', adds: [],
    }));
    expect(() => readManifest(dir)).toThrow();
  });

  // Path-safety guards: relative traversal and absolute paths in the file
  // lists must be rejected with an 'Invalid path' error.
  it('throws on path traversal in adds', () => {
    const dir = path.join(tmpDir, 'bad-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test', version: '1.0.0', core_version: '1.0.0',
      adds: ['../etc/passwd'], modifies: [],
    }));
    expect(() => readManifest(dir)).toThrow('Invalid path');
  });

  it('throws on path traversal in modifies', () => {
    const dir = path.join(tmpDir, 'bad-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test', version: '1.0.0', core_version: '1.0.0',
      adds: [], modifies: ['../../secret.ts'],
    }));
    expect(() => readManifest(dir)).toThrow('Invalid path');
  });

  it('throws on absolute path in adds', () => {
    const dir = path.join(tmpDir, 'bad-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test', version: '1.0.0', core_version: '1.0.0',
      adds: ['/etc/passwd'], modifies: [],
    }));
    expect(() => readManifest(dir)).toThrow('Invalid path');
  });

  it('defaults conflicts and depends to empty arrays', () => {
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
    });
    const manifest = readManifest(skillDir);
    expect(manifest.conflicts).toEqual([]);
    expect(manifest.depends).toEqual([]);
  });

  it('checkCoreVersion returns warning when manifest targets newer core', () => {
    // Manifest targets core 2.0.0 while the state (createMinimalState) is
    // at 1.0.0 — mismatch should produce a warning, not a hard failure.
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'test',
      version: '1.0.0',
      core_version: '2.0.0',
      adds: [],
      modifies: [],
    });
    const manifest = readManifest(skillDir);
    const result = checkCoreVersion(manifest);
    expect(result.warning).toBeTruthy();
  });

  it('checkCoreVersion returns no warning when versions match', () => {
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
    });
    const manifest = readManifest(skillDir);
    const result = checkCoreVersion(manifest);
    expect(result.ok).toBe(true);
    expect(result.warning).toBeFalsy();
  });

  it('checkDependencies satisfied when deps present', () => {
    // Record the dependency as already applied before checking.
    recordSkillApplication('dep-skill', '1.0.0', {});
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
      depends: ['dep-skill'],
    });
    const manifest = readManifest(skillDir);
    const result = checkDependencies(manifest);
    expect(result.ok).toBe(true);
    expect(result.missing).toEqual([]);
  });

  it('checkDependencies missing when deps not present', () => {
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
      depends: ['missing-skill'],
    });
    const manifest = readManifest(skillDir);
    const result = checkDependencies(manifest);
    expect(result.ok).toBe(false);
    expect(result.missing).toContain('missing-skill');
  });

  it('checkConflicts ok when no conflicts', () => {
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
      conflicts: [],
    });
    const manifest = readManifest(skillDir);
    const result = checkConflicts(manifest);
    expect(result.ok).toBe(true);
    expect(result.conflicting).toEqual([]);
  });

  it('checkConflicts detects conflicting skill', () => {
    // An applied skill listed under 'conflicts' must block application.
    recordSkillApplication('bad-skill', '1.0.0', {});
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
      conflicts: ['bad-skill'],
    });
    const manifest = readManifest(skillDir);
    const result = checkConflicts(manifest);
    expect(result.ok).toBe(false);
    expect(result.conflicting).toContain('bad-skill');
  });

  it('parses new optional fields (author, license, etc)', () => {
    const dir = path.join(tmpDir, 'full-pkg');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
      author: 'tester',
      license: 'MIT',
      min_skills_system_version: '0.1.0',
      tested_with: ['telegram', 'discord'],
      post_apply: ['echo done'],
    }));
    const manifest = readManifest(dir);
    expect(manifest.author).toBe('tester');
    expect(manifest.license).toBe('MIT');
    expect(manifest.min_skills_system_version).toBe('0.1.0');
    expect(manifest.tested_with).toEqual(['telegram', 'discord']);
    expect(manifest.post_apply).toEqual(['echo done']);
  });

  it('checkSystemVersion passes when not set', () => {
    // min_skills_system_version is optional; absence means no constraint.
    const skillDir = createSkillPackage(tmpDir, {
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
    });
    const manifest = readManifest(skillDir);
    const result = checkSystemVersion(manifest);
    expect(result.ok).toBe(true);
  });

  it('checkSystemVersion passes when engine is new enough', () => {
    const dir = path.join(tmpDir, 'sys-ok');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
      min_skills_system_version: '0.1.0',
    }));
    const manifest = readManifest(dir);
    const result = checkSystemVersion(manifest);
    expect(result.ok).toBe(true);
  });

  it('checkSystemVersion fails when engine is too old', () => {
    const dir = path.join(tmpDir, 'sys-fail');
    fs.mkdirSync(dir, { recursive: true });
    fs.writeFileSync(path.join(dir, 'manifest.yaml'), stringify({
      skill: 'test',
      version: '1.0.0',
      core_version: '1.0.0',
      adds: [],
      modifies: [],
      min_skills_system_version: '99.0.0',
    }));
    const manifest = readManifest(dir);
    const result = checkSystemVersion(manifest);
    expect(result.ok).toBe(false);
    // The error message should name the version that could not be met.
    expect(result.error).toContain('99.0.0');
  });
});
|
||||
97
skills-engine/__tests__/merge.test.ts
Normal file
97
skills-engine/__tests__/merge.test.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
import { execSync } from 'child_process';
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { isGitRepo, mergeFile, setupRerereAdapter } from '../merge.js';
|
||||
import { createTempDir, initGitRepo, cleanup } from './test-helpers.js';
|
||||
|
||||
describe('merge', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
it('isGitRepo returns true in a git repo', () => {
|
||||
initGitRepo(tmpDir);
|
||||
expect(isGitRepo()).toBe(true);
|
||||
});
|
||||
|
||||
it('isGitRepo returns false outside a git repo', () => {
|
||||
expect(isGitRepo()).toBe(false);
|
||||
});
|
||||
|
||||
describe('mergeFile', () => {
|
||||
beforeEach(() => {
|
||||
initGitRepo(tmpDir);
|
||||
});
|
||||
|
||||
it('clean merge with no overlapping changes', () => {
|
||||
const base = path.join(tmpDir, 'base.txt');
|
||||
const current = path.join(tmpDir, 'current.txt');
|
||||
const skill = path.join(tmpDir, 'skill.txt');
|
||||
|
||||
fs.writeFileSync(base, 'line1\nline2\nline3\n');
|
||||
fs.writeFileSync(current, 'line1-modified\nline2\nline3\n');
|
||||
fs.writeFileSync(skill, 'line1\nline2\nline3-modified\n');
|
||||
|
||||
const result = mergeFile(current, base, skill);
|
||||
expect(result.clean).toBe(true);
|
||||
expect(result.exitCode).toBe(0);
|
||||
|
||||
const merged = fs.readFileSync(current, 'utf-8');
|
||||
expect(merged).toContain('line1-modified');
|
||||
expect(merged).toContain('line3-modified');
|
||||
});
|
||||
|
||||
it('setupRerereAdapter cleans stale MERGE_HEAD before proceeding', () => {
|
||||
// Simulate a stale MERGE_HEAD from a previous crash
|
||||
const gitDir = execSync('git rev-parse --git-dir', {
|
||||
cwd: tmpDir,
|
||||
encoding: 'utf-8',
|
||||
}).trim();
|
||||
const headHash = execSync('git rev-parse HEAD', {
|
||||
cwd: tmpDir,
|
||||
encoding: 'utf-8',
|
||||
}).trim();
|
||||
fs.writeFileSync(path.join(gitDir, 'MERGE_HEAD'), headHash + '\n');
|
||||
fs.writeFileSync(path.join(gitDir, 'MERGE_MSG'), 'stale merge\n');
|
||||
|
||||
// Write a file for the adapter to work with
|
||||
fs.writeFileSync(path.join(tmpDir, 'test.txt'), 'conflicted content');
|
||||
|
||||
// setupRerereAdapter should not throw despite stale MERGE_HEAD
|
||||
expect(() =>
|
||||
setupRerereAdapter('test.txt', 'base', 'ours', 'theirs'),
|
||||
).not.toThrow();
|
||||
|
||||
// MERGE_HEAD should still exist (newly written by setupRerereAdapter)
|
||||
expect(fs.existsSync(path.join(gitDir, 'MERGE_HEAD'))).toBe(true);
|
||||
});
|
||||
|
||||
it('conflict with overlapping changes', () => {
|
||||
const base = path.join(tmpDir, 'base.txt');
|
||||
const current = path.join(tmpDir, 'current.txt');
|
||||
const skill = path.join(tmpDir, 'skill.txt');
|
||||
|
||||
fs.writeFileSync(base, 'line1\nline2\nline3\n');
|
||||
fs.writeFileSync(current, 'line1-ours\nline2\nline3\n');
|
||||
fs.writeFileSync(skill, 'line1-theirs\nline2\nline3\n');
|
||||
|
||||
const result = mergeFile(current, base, skill);
|
||||
expect(result.clean).toBe(false);
|
||||
expect(result.exitCode).toBeGreaterThan(0);
|
||||
|
||||
const merged = fs.readFileSync(current, 'utf-8');
|
||||
expect(merged).toContain('<<<<<<<');
|
||||
expect(merged).toContain('>>>>>>>');
|
||||
});
|
||||
});
|
||||
});
|
||||
77
skills-engine/__tests__/path-remap.test.ts
Normal file
77
skills-engine/__tests__/path-remap.test.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import { loadPathRemap, recordPathRemap, resolvePathRemap } from '../path-remap.js';
|
||||
import {
|
||||
cleanup,
|
||||
createMinimalState,
|
||||
createTempDir,
|
||||
setupNanoclawDir,
|
||||
} from './test-helpers.js';
|
||||
|
||||
describe('path-remap', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
setupNanoclawDir(tmpDir);
|
||||
createMinimalState(tmpDir);
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
describe('resolvePathRemap', () => {
|
||||
it('returns remapped path when entry exists', () => {
|
||||
const remap = { 'src/old.ts': 'src/new.ts' };
|
||||
expect(resolvePathRemap('src/old.ts', remap)).toBe('src/new.ts');
|
||||
});
|
||||
|
||||
it('returns original path when no remap entry', () => {
|
||||
const remap = { 'src/old.ts': 'src/new.ts' };
|
||||
expect(resolvePathRemap('src/other.ts', remap)).toBe('src/other.ts');
|
||||
});
|
||||
|
||||
it('returns original path when remap is empty', () => {
|
||||
expect(resolvePathRemap('src/file.ts', {})).toBe('src/file.ts');
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadPathRemap', () => {
|
||||
it('returns empty object when no remap in state', () => {
|
||||
const remap = loadPathRemap();
|
||||
expect(remap).toEqual({});
|
||||
});
|
||||
|
||||
it('returns remap from state', () => {
|
||||
recordPathRemap({ 'src/a.ts': 'src/b.ts' });
|
||||
const remap = loadPathRemap();
|
||||
expect(remap).toEqual({ 'src/a.ts': 'src/b.ts' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('recordPathRemap', () => {
|
||||
it('records new remap entries', () => {
|
||||
recordPathRemap({ 'src/old.ts': 'src/new.ts' });
|
||||
expect(loadPathRemap()).toEqual({ 'src/old.ts': 'src/new.ts' });
|
||||
});
|
||||
|
||||
it('merges with existing remap', () => {
|
||||
recordPathRemap({ 'src/a.ts': 'src/b.ts' });
|
||||
recordPathRemap({ 'src/c.ts': 'src/d.ts' });
|
||||
expect(loadPathRemap()).toEqual({
|
||||
'src/a.ts': 'src/b.ts',
|
||||
'src/c.ts': 'src/d.ts',
|
||||
});
|
||||
});
|
||||
|
||||
it('overwrites existing key on conflict', () => {
|
||||
recordPathRemap({ 'src/a.ts': 'src/b.ts' });
|
||||
recordPathRemap({ 'src/a.ts': 'src/c.ts' });
|
||||
expect(loadPathRemap()).toEqual({ 'src/a.ts': 'src/c.ts' });
|
||||
});
|
||||
});
|
||||
});
|
||||
434
skills-engine/__tests__/rebase.test.ts
Normal file
434
skills-engine/__tests__/rebase.test.ts
Normal file
@@ -0,0 +1,434 @@
|
||||
// Tests for rebase.ts: baking applied skills into the base snapshot.
// Scenarios cover single/multiple skills, custom modifications, resolution
// cache clearing, core updates via a new base, conflict handling, and the
// no-skills error path. File contents are crafted so that merges either
// cleanly combine or deliberately conflict.
import fs from 'fs';
import path from 'path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { parse } from 'yaml';

import { rebase } from '../rebase.js';
import {
  cleanup,
  createMinimalState,
  createTempDir,
  initGitRepo,
  setupNanoclawDir,
  writeState,
} from './test-helpers.js';

describe('rebase', () => {
  let tmpDir: string;
  // rebase() operates relative to process.cwd(); restore after each test.
  const originalCwd = process.cwd();

  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    createMinimalState(tmpDir);
    process.chdir(tmpDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });

  it('rebase with one skill: patch created, state updated, rebased_at set', async () => {
    // Set up base file
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
    fs.mkdirSync(baseDir, { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'index.ts'), 'const x = 1;\n');

    // Set up working tree with skill modification
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 1;\nconst y = 2; // added by skill\n',
    );

    // Write state with applied skill
    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'test-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            // Placeholder hash — rebase is expected to recompute it.
            'src/index.ts': 'abc123',
          },
        },
      ],
    });

    initGitRepo(tmpDir);

    const result = await rebase();

    expect(result.success).toBe(true);
    expect(result.filesInPatch).toBeGreaterThan(0);
    expect(result.rebased_at).toBeDefined();
    expect(result.patchFile).toBeDefined();

    // Verify patch file exists
    const patchPath = path.join(tmpDir, '.nanoclaw', 'combined.patch');
    expect(fs.existsSync(patchPath)).toBe(true);

    const patchContent = fs.readFileSync(patchPath, 'utf-8');
    expect(patchContent).toContain('added by skill');

    // Verify state was updated
    const stateContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'state.yaml'),
      'utf-8',
    );
    const state = parse(stateContent);
    expect(state.rebased_at).toBeDefined();
    expect(state.applied_skills).toHaveLength(1);
    expect(state.applied_skills[0].name).toBe('test-skill');

    // File hashes should be updated to actual current values
    const currentHash = state.applied_skills[0].file_hashes['src/index.ts'];
    expect(currentHash).toBeDefined();
    expect(currentHash).not.toBe('abc123'); // Should be recomputed

    // Working tree file should still have the skill's changes
    const workingContent = fs.readFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'utf-8',
    );
    expect(workingContent).toContain('added by skill');
  });

  it('rebase flattens: base updated to match working tree', async () => {
    // Set up base file (clean core)
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
    fs.mkdirSync(baseDir, { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'index.ts'), 'const x = 1;\n');

    // Working tree has skill modification
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 1;\nconst y = 2; // skill\n',
    );

    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'my-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'oldhash',
          },
        },
      ],
    });

    initGitRepo(tmpDir);

    const result = await rebase();
    expect(result.success).toBe(true);

    // Base should now include the skill's changes (flattened)
    const baseContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'base', 'src', 'index.ts'),
      'utf-8',
    );
    expect(baseContent).toContain('skill');
    expect(baseContent).toBe('const x = 1;\nconst y = 2; // skill\n');
  });

  it('rebase with multiple skills + custom mods: all collapsed into single patch', async () => {
    // Set up base files
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
    fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'src', 'index.ts'), 'const x = 1;\n');
    fs.writeFileSync(
      path.join(baseDir, 'src', 'config.ts'),
      'export const port = 3000;\n',
    );

    // Set up working tree with modifications from multiple skills
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 1;\nconst y = 2; // skill-a\n',
    );
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'config.ts'),
      'export const port = 3000;\nexport const host = "0.0.0.0"; // skill-b\n',
    );
    // File added by skill
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'plugin.ts'),
      'export const plugin = true;\n',
    );

    // Write state with multiple skills and custom modifications
    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'skill-a',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'hash-a1',
          },
        },
        {
          name: 'skill-b',
          version: '2.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/config.ts': 'hash-b1',
            'src/plugin.ts': 'hash-b2',
          },
        },
      ],
      custom_modifications: [
        {
          description: 'tweaked config',
          applied_at: new Date().toISOString(),
          files_modified: ['src/config.ts'],
          patch_file: '.nanoclaw/custom/001-tweaked-config.patch',
        },
      ],
    });

    initGitRepo(tmpDir);

    const result = await rebase();

    expect(result.success).toBe(true);
    expect(result.filesInPatch).toBeGreaterThanOrEqual(2);

    // Verify combined patch includes changes from both skills
    const patchContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'combined.patch'),
      'utf-8',
    );
    expect(patchContent).toContain('skill-a');
    expect(patchContent).toContain('skill-b');

    // Verify state: custom_modifications should be cleared
    const stateContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'state.yaml'),
      'utf-8',
    );
    const state = parse(stateContent);
    expect(state.custom_modifications).toBeUndefined();
    expect(state.rebased_at).toBeDefined();

    // applied_skills should still be present (informational)
    expect(state.applied_skills).toHaveLength(2);

    // Base should be flattened — include all skill changes
    const baseIndex = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'base', 'src', 'index.ts'),
      'utf-8',
    );
    expect(baseIndex).toContain('skill-a');

    const baseConfig = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'base', 'src', 'config.ts'),
      'utf-8',
    );
    expect(baseConfig).toContain('skill-b');
  });

  it('rebase clears resolution cache', async () => {
    // Set up base + working tree
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
    fs.mkdirSync(baseDir, { recursive: true });
    fs.writeFileSync(path.join(baseDir, 'index.ts'), 'const x = 1;\n');

    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 1;\n// skill\n',
    );

    // Create a fake resolution cache entry
    const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'skill-a+skill-b');
    fs.mkdirSync(resDir, { recursive: true });
    fs.writeFileSync(path.join(resDir, 'meta.yaml'), 'skills: [skill-a, skill-b]\n');

    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'my-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: { 'src/index.ts': 'hash' },
        },
      ],
    });

    initGitRepo(tmpDir);

    const result = await rebase();
    expect(result.success).toBe(true);

    // Resolution cache should be cleared
    const resolutions = fs.readdirSync(
      path.join(tmpDir, '.nanoclaw', 'resolutions'),
    );
    expect(resolutions).toHaveLength(0);
  });

  it('rebase with new base: base updated, changes merged', async () => {
    // Set up current base (multi-line so changes don't conflict)
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
    fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(baseDir, 'src', 'index.ts'),
      'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\n',
    );

    // Working tree: skill adds at bottom
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nskill change\n',
    );

    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'my-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'oldhash',
          },
        },
      ],
    });

    initGitRepo(tmpDir);

    // New base: core update at top
    const newBase = path.join(tmpDir, 'new-core');
    fs.mkdirSync(path.join(newBase, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(newBase, 'src', 'index.ts'),
      'core v2 header\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\n',
    );

    const result = await rebase(newBase);

    expect(result.success).toBe(true);
    expect(result.patchFile).toBeDefined();

    // Verify base was updated to new core
    const baseContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'base', 'src', 'index.ts'),
      'utf-8',
    );
    expect(baseContent).toContain('core v2 header');

    // Working tree should have both core v2 and skill changes merged
    const workingContent = fs.readFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'utf-8',
    );
    expect(workingContent).toContain('core v2 header');
    expect(workingContent).toContain('skill change');

    // State should reflect rebase
    const stateContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'state.yaml'),
      'utf-8',
    );
    const state = parse(stateContent);
    expect(state.rebased_at).toBeDefined();
  });

  it('rebase with new base: conflict returns backupPending', async () => {
    // Set up current base — short file so changes overlap
    const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
    fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(baseDir, 'src', 'index.ts'),
      'const x = 1;\n',
    );

    // Working tree: skill replaces the same line
    fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'const x = 42; // skill override\n',
    );

    writeState(tmpDir, {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [
        {
          name: 'my-skill',
          version: '1.0.0',
          applied_at: new Date().toISOString(),
          file_hashes: {
            'src/index.ts': 'oldhash',
          },
        },
      ],
    });

    initGitRepo(tmpDir);

    // New base: also changes the same line — guaranteed conflict
    const newBase = path.join(tmpDir, 'new-core');
    fs.mkdirSync(path.join(newBase, 'src'), { recursive: true });
    fs.writeFileSync(
      path.join(newBase, 'src', 'index.ts'),
      'const x = 999; // core v2\n',
    );

    const result = await rebase(newBase);

    expect(result.success).toBe(false);
    expect(result.mergeConflicts).toContain('src/index.ts');
    expect(result.backupPending).toBe(true);
    expect(result.error).toContain('Merge conflicts');

    // combined.patch should still exist
    expect(result.patchFile).toBeDefined();
    const patchPath = path.join(tmpDir, '.nanoclaw', 'combined.patch');
    expect(fs.existsSync(patchPath)).toBe(true);

    // Working tree should have conflict markers (not rolled back)
    const workingContent = fs.readFileSync(
      path.join(tmpDir, 'src', 'index.ts'),
      'utf-8',
    );
    expect(workingContent).toContain('<<<<<<<');
    expect(workingContent).toContain('>>>>>>>');

    // State should NOT be updated yet (conflicts pending)
    const stateContent = fs.readFileSync(
      path.join(tmpDir, '.nanoclaw', 'state.yaml'),
      'utf-8',
    );
    const state = parse(stateContent);
    expect(state.rebased_at).toBeUndefined();
  });

  it('error when no skills applied', async () => {
    // State has no applied skills (created by createMinimalState)
    initGitRepo(tmpDir);

    const result = await rebase();

    expect(result.success).toBe(false);
    expect(result.error).toContain('No skills applied');
    expect(result.filesInPatch).toBe(0);
  });
});
|
||||
297
skills-engine/__tests__/replay.test.ts
Normal file
297
skills-engine/__tests__/replay.test.ts
Normal file
@@ -0,0 +1,297 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import { findSkillDir, replaySkills } from '../replay.js';
|
||||
import {
|
||||
cleanup,
|
||||
createMinimalState,
|
||||
createSkillPackage,
|
||||
createTempDir,
|
||||
initGitRepo,
|
||||
setupNanoclawDir,
|
||||
} from './test-helpers.js';
|
||||
|
||||
describe('replay', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
setupNanoclawDir(tmpDir);
|
||||
createMinimalState(tmpDir);
|
||||
initGitRepo(tmpDir);
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
describe('findSkillDir', () => {
|
||||
it('finds skill directory by name', () => {
|
||||
const skillsRoot = path.join(tmpDir, '.claude', 'skills', 'telegram');
|
||||
fs.mkdirSync(skillsRoot, { recursive: true });
|
||||
const { stringify } = require('yaml');
|
||||
fs.writeFileSync(
|
||||
path.join(skillsRoot, 'manifest.yaml'),
|
||||
stringify({
|
||||
skill: 'telegram',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: [],
|
||||
modifies: [],
|
||||
}),
|
||||
);
|
||||
|
||||
const result = findSkillDir('telegram', tmpDir);
|
||||
expect(result).toBe(skillsRoot);
|
||||
});
|
||||
|
||||
it('returns null for missing skill', () => {
|
||||
const result = findSkillDir('nonexistent', tmpDir);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null when .claude/skills does not exist', () => {
|
||||
const result = findSkillDir('anything', tmpDir);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('replaySkills', () => {
|
||||
it('replays a single skill from base', async () => {
|
||||
// Set up base file
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
|
||||
fs.mkdirSync(baseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'config.ts'), 'base content\n');
|
||||
|
||||
// Set up current file (will be overwritten by replay)
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src', 'config.ts'),
|
||||
'modified content\n',
|
||||
);
|
||||
|
||||
// Create skill package
|
||||
const skillDir = createSkillPackage(tmpDir, {
|
||||
skill: 'telegram',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/telegram.ts'],
|
||||
modifies: ['src/config.ts'],
|
||||
addFiles: { 'src/telegram.ts': 'telegram code\n' },
|
||||
modifyFiles: { 'src/config.ts': 'base content\ntelegram config\n' },
|
||||
});
|
||||
|
||||
const result = await replaySkills({
|
||||
skills: ['telegram'],
|
||||
skillDirs: { telegram: skillDir },
|
||||
projectRoot: tmpDir,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.perSkill.telegram.success).toBe(true);
|
||||
|
||||
// Added file should exist
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src', 'telegram.ts'))).toBe(
|
||||
true,
|
||||
);
|
||||
expect(
|
||||
fs.readFileSync(path.join(tmpDir, 'src', 'telegram.ts'), 'utf-8'),
|
||||
).toBe('telegram code\n');
|
||||
|
||||
// Modified file should be merged from base
|
||||
const config = fs.readFileSync(
|
||||
path.join(tmpDir, 'src', 'config.ts'),
|
||||
'utf-8',
|
||||
);
|
||||
expect(config).toContain('telegram config');
|
||||
});
|
||||
|
||||
it('replays two skills in order', async () => {
|
||||
// Set up base
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
|
||||
fs.mkdirSync(baseDir, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(baseDir, 'config.ts'),
|
||||
'line1\nline2\nline3\nline4\nline5\n',
|
||||
);
|
||||
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src', 'config.ts'),
|
||||
'line1\nline2\nline3\nline4\nline5\n',
|
||||
);
|
||||
|
||||
// Skill 1 adds at top
|
||||
const skill1Dir = createSkillPackage(tmpDir, {
|
||||
skill: 'telegram',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/telegram.ts'],
|
||||
modifies: ['src/config.ts'],
|
||||
addFiles: { 'src/telegram.ts': 'tg code' },
|
||||
modifyFiles: {
|
||||
'src/config.ts': 'telegram import\nline1\nline2\nline3\nline4\nline5\n',
|
||||
},
|
||||
dirName: 'skill-pkg-tg',
|
||||
});
|
||||
|
||||
// Skill 2 adds at bottom
|
||||
const skill2Dir = createSkillPackage(tmpDir, {
|
||||
skill: 'discord',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/discord.ts'],
|
||||
modifies: ['src/config.ts'],
|
||||
addFiles: { 'src/discord.ts': 'dc code' },
|
||||
modifyFiles: {
|
||||
'src/config.ts': 'line1\nline2\nline3\nline4\nline5\ndiscord import\n',
|
||||
},
|
||||
dirName: 'skill-pkg-dc',
|
||||
});
|
||||
|
||||
const result = await replaySkills({
|
||||
skills: ['telegram', 'discord'],
|
||||
skillDirs: { telegram: skill1Dir, discord: skill2Dir },
|
||||
projectRoot: tmpDir,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.perSkill.telegram.success).toBe(true);
|
||||
expect(result.perSkill.discord.success).toBe(true);
|
||||
|
||||
// Both added files should exist
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src', 'telegram.ts'))).toBe(
|
||||
true,
|
||||
);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src', 'discord.ts'))).toBe(
|
||||
true,
|
||||
);
|
||||
|
||||
// Config should have both changes
|
||||
const config = fs.readFileSync(
|
||||
path.join(tmpDir, 'src', 'config.ts'),
|
||||
'utf-8',
|
||||
);
|
||||
expect(config).toContain('telegram import');
|
||||
expect(config).toContain('discord import');
|
||||
});
|
||||
|
||||
it('stops on first conflict and does not process later skills', async () => {
|
||||
// After reset, current=base. Skill 1 merges cleanly (changes line 1).
|
||||
// Skill 2 also changes line 1 differently → conflict with skill 1's result.
|
||||
// Skill 3 should NOT be processed due to break-on-conflict.
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
|
||||
fs.mkdirSync(baseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'config.ts'), 'line1\n');
|
||||
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(tmpDir, 'src', 'config.ts'), 'line1\n');
|
||||
|
||||
// Skill 1: changes line 1 — merges cleanly since current=base after reset
|
||||
const skill1Dir = createSkillPackage(tmpDir, {
|
||||
skill: 'skill-a',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: [],
|
||||
modifies: ['src/config.ts'],
|
||||
modifyFiles: { 'src/config.ts': 'line1-from-skill-a\n' },
|
||||
dirName: 'skill-pkg-a',
|
||||
});
|
||||
|
||||
// Skill 2: also changes line 1 differently → conflict with skill-a's result
|
||||
const skill2Dir = createSkillPackage(tmpDir, {
|
||||
skill: 'skill-b',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: [],
|
||||
modifies: ['src/config.ts'],
|
||||
modifyFiles: { 'src/config.ts': 'line1-from-skill-b\n' },
|
||||
dirName: 'skill-pkg-b',
|
||||
});
|
||||
|
||||
// Skill 3: adds a new file — should be skipped
|
||||
const skill3Dir = createSkillPackage(tmpDir, {
|
||||
skill: 'skill-c',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/newfile.ts'],
|
||||
modifies: [],
|
||||
addFiles: { 'src/newfile.ts': 'should not appear' },
|
||||
dirName: 'skill-pkg-c',
|
||||
});
|
||||
|
||||
const result = await replaySkills({
|
||||
skills: ['skill-a', 'skill-b', 'skill-c'],
|
||||
skillDirs: { 'skill-a': skill1Dir, 'skill-b': skill2Dir, 'skill-c': skill3Dir },
|
||||
projectRoot: tmpDir,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.mergeConflicts).toBeDefined();
|
||||
expect(result.mergeConflicts!.length).toBeGreaterThan(0);
|
||||
// Skill B caused the conflict
|
||||
expect(result.perSkill['skill-b']?.success).toBe(false);
|
||||
// Skill C should NOT have been processed
|
||||
expect(result.perSkill['skill-c']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('returns error for missing skill dir', async () => {
|
||||
const result = await replaySkills({
|
||||
skills: ['missing'],
|
||||
skillDirs: {},
|
||||
projectRoot: tmpDir,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('missing');
|
||||
expect(result.perSkill.missing.success).toBe(false);
|
||||
});
|
||||
|
||||
it('resets files to base before replay', async () => {
|
||||
// Set up base
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
|
||||
fs.mkdirSync(baseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'config.ts'), 'base content\n');
|
||||
|
||||
// Current has drift
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src', 'config.ts'),
|
||||
'drifted content\n',
|
||||
);
|
||||
|
||||
// Also a stale added file
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src', 'stale-add.ts'),
|
||||
'should be removed',
|
||||
);
|
||||
|
||||
const skillDir = createSkillPackage(tmpDir, {
|
||||
skill: 'skill1',
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: ['src/stale-add.ts'],
|
||||
modifies: ['src/config.ts'],
|
||||
addFiles: { 'src/stale-add.ts': 'fresh add' },
|
||||
modifyFiles: { 'src/config.ts': 'base content\nskill addition\n' },
|
||||
});
|
||||
|
||||
const result = await replaySkills({
|
||||
skills: ['skill1'],
|
||||
skillDirs: { skill1: skillDir },
|
||||
projectRoot: tmpDir,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// The added file should have the fresh content (not stale)
|
||||
expect(
|
||||
fs.readFileSync(path.join(tmpDir, 'src', 'stale-add.ts'), 'utf-8'),
|
||||
).toBe('fresh add');
|
||||
});
|
||||
});
|
||||
});
|
||||
283
skills-engine/__tests__/resolution-cache.test.ts
Normal file
283
skills-engine/__tests__/resolution-cache.test.ts
Normal file
@@ -0,0 +1,283 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import crypto from 'crypto';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { parse, stringify } from 'yaml';
|
||||
import {
|
||||
findResolutionDir,
|
||||
loadResolutions,
|
||||
saveResolution,
|
||||
} from '../resolution-cache.js';
|
||||
import { createTempDir, setupNanoclawDir, initGitRepo, cleanup } from './test-helpers.js';
|
||||
|
||||
function sha256(content: string): string {
|
||||
return crypto.createHash('sha256').update(content).digest('hex');
|
||||
}
|
||||
|
||||
const dummyHashes = { base: 'aaa', current: 'bbb', skill: 'ccc' };
|
||||
|
||||
// Tests for the pre-computed resolution cache: saving conflict resolutions,
// locating them (shipped vs. project-level), and hash-gated loading into
// git's rr-cache.
describe('resolution-cache', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();

  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    process.chdir(tmpDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });

  it('findResolutionDir returns null when not found', () => {
    const result = findResolutionDir(['skill-a', 'skill-b'], tmpDir);
    expect(result).toBeNull();
  });

  it('saveResolution creates directory structure with files and meta', () => {
    // Note: skills passed deliberately out of order to verify sorting below.
    saveResolution(
      ['skill-b', 'skill-a'],
      [{ relPath: 'src/config.ts', preimage: 'conflict content', resolution: 'resolved content', inputHashes: dummyHashes }],
      { core_version: '1.0.0' },
      tmpDir,
    );

    // Skills are sorted, so key is "skill-a+skill-b"
    const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'skill-a+skill-b');
    expect(fs.existsSync(resDir)).toBe(true);

    // Check preimage and resolution files exist
    expect(fs.existsSync(path.join(resDir, 'src/config.ts.preimage'))).toBe(true);
    expect(fs.existsSync(path.join(resDir, 'src/config.ts.resolution'))).toBe(true);

    // Check meta.yaml exists and has expected fields
    const metaPath = path.join(resDir, 'meta.yaml');
    expect(fs.existsSync(metaPath)).toBe(true);
    const meta = parse(fs.readFileSync(metaPath, 'utf-8'));
    expect(meta.core_version).toBe('1.0.0');
    expect(meta.skills).toEqual(['skill-a', 'skill-b']);
  });

  it('saveResolution writes file_hashes to meta.yaml', () => {
    // Real digests (not dummies) so the meta round-trip can be compared exactly.
    const hashes = {
      base: sha256('base content'),
      current: sha256('current content'),
      skill: sha256('skill content'),
    };

    saveResolution(
      ['alpha', 'beta'],
      [{ relPath: 'src/config.ts', preimage: 'pre', resolution: 'post', inputHashes: hashes }],
      {},
      tmpDir,
    );

    const resDir = path.join(tmpDir, '.nanoclaw', 'resolutions', 'alpha+beta');
    const meta = parse(fs.readFileSync(path.join(resDir, 'meta.yaml'), 'utf-8'));
    expect(meta.file_hashes).toBeDefined();
    expect(meta.file_hashes['src/config.ts']).toEqual(hashes);
  });

  it('findResolutionDir returns path after save', () => {
    saveResolution(
      ['alpha', 'beta'],
      [{ relPath: 'file.ts', preimage: 'pre', resolution: 'post', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );

    const result = findResolutionDir(['alpha', 'beta'], tmpDir);
    expect(result).not.toBeNull();
    expect(result).toContain('alpha+beta');
  });

  it('findResolutionDir finds shipped resolutions in .claude/resolutions', () => {
    // "Shipped" resolutions live under .claude/resolutions (checked into the repo),
    // as opposed to project-local ones under .nanoclaw/resolutions.
    const shippedDir = path.join(tmpDir, '.claude', 'resolutions', 'alpha+beta');
    fs.mkdirSync(shippedDir, { recursive: true });
    fs.writeFileSync(path.join(shippedDir, 'meta.yaml'), 'skills: [alpha, beta]\n');

    const result = findResolutionDir(['alpha', 'beta'], tmpDir);
    expect(result).not.toBeNull();
    expect(result).toContain('.claude/resolutions/alpha+beta');
  });

  it('findResolutionDir prefers shipped over project-level', () => {
    // Create both shipped and project-level
    const shippedDir = path.join(tmpDir, '.claude', 'resolutions', 'a+b');
    fs.mkdirSync(shippedDir, { recursive: true });
    fs.writeFileSync(path.join(shippedDir, 'meta.yaml'), 'skills: [a, b]\n');

    saveResolution(
      ['a', 'b'],
      [{ relPath: 'f.ts', preimage: 'x', resolution: 'project', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );

    const result = findResolutionDir(['a', 'b'], tmpDir);
    expect(result).toContain('.claude/resolutions/a+b');
  });

  it('skills are sorted so order does not matter', () => {
    saveResolution(
      ['zeta', 'alpha'],
      [{ relPath: 'f.ts', preimage: 'a', resolution: 'b', inputHashes: dummyHashes }],
      {},
      tmpDir,
    );

    // Find with reversed order should still work
    const result = findResolutionDir(['alpha', 'zeta'], tmpDir);
    expect(result).not.toBeNull();

    // Also works with original order
    const result2 = findResolutionDir(['zeta', 'alpha'], tmpDir);
    expect(result2).not.toBeNull();
    expect(result).toBe(result2);
  });

  describe('loadResolutions hash verification', () => {
    // Fixed fixture contents; the tests vary which stored hash (base/current/skill)
    // matches or mismatches the on-disk files.
    const baseContent = 'base file content';
    const currentContent = 'current file content';
    const skillContent = 'skill file content';
    const preimageContent = 'preimage with conflict markers';
    const resolutionContent = 'resolved content';
    const rerereHash = 'abc123def456';

    // Build a shipped resolution dir whose meta.yaml carries the given
    // per-file input hashes.
    function setupResolutionDir(fileHashes: Record<string, any>) {
      // Create a shipped resolution directory
      const resDir = path.join(tmpDir, '.claude', 'resolutions', 'alpha+beta');
      fs.mkdirSync(path.join(resDir, 'src'), { recursive: true });

      // Write preimage, resolution, and hash sidecar
      fs.writeFileSync(path.join(resDir, 'src/config.ts.preimage'), preimageContent);
      fs.writeFileSync(path.join(resDir, 'src/config.ts.resolution'), resolutionContent);
      fs.writeFileSync(path.join(resDir, 'src/config.ts.preimage.hash'), rerereHash);

      // Write meta.yaml
      const meta: any = {
        skills: ['alpha', 'beta'],
        apply_order: ['alpha', 'beta'],
        core_version: '1.0.0',
        resolved_at: new Date().toISOString(),
        tested: true,
        test_passed: true,
        resolution_source: 'maintainer',
        input_hashes: {},
        output_hash: '',
        file_hashes: fileHashes,
      };
      fs.writeFileSync(path.join(resDir, 'meta.yaml'), stringify(meta));

      return resDir;
    }

    // Write the base and current copies of src/config.ts that loadResolutions
    // hashes and compares against meta.file_hashes.
    function setupInputFiles() {
      // Create base file
      fs.mkdirSync(path.join(tmpDir, '.nanoclaw', 'base', 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, '.nanoclaw', 'base', 'src', 'config.ts'), baseContent);

      // Create current file
      fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
      fs.writeFileSync(path.join(tmpDir, 'src', 'config.ts'), currentContent);
    }

    // Minimal skill package layout: only the modify/ copy of the file.
    function createSkillDir() {
      const skillDir = path.join(tmpDir, 'skill-pkg');
      fs.mkdirSync(path.join(skillDir, 'modify', 'src'), { recursive: true });
      fs.writeFileSync(path.join(skillDir, 'modify', 'src', 'config.ts'), skillContent);
      return skillDir;
    }

    beforeEach(() => {
      // A git repo is required because resolutions are seeded into .git/rr-cache.
      initGitRepo(tmpDir);
    });

    it('loads with matching file_hashes', () => {
      setupInputFiles();
      const skillDir = createSkillDir();

      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: sha256(currentContent),
          skill: sha256(skillContent),
        },
      });

      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(true);

      // Verify rr-cache entry was created
      const gitDir = path.join(tmpDir, '.git');
      const cacheEntry = path.join(gitDir, 'rr-cache', rerereHash);
      expect(fs.existsSync(path.join(cacheEntry, 'preimage'))).toBe(true);
      expect(fs.existsSync(path.join(cacheEntry, 'postimage'))).toBe(true);
    });

    it('skips pair with mismatched base hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();

      setupResolutionDir({
        'src/config.ts': {
          base: 'wrong_hash',
          current: sha256(currentContent),
          skill: sha256(skillContent),
        },
      });

      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);

      // rr-cache entry should NOT be created
      const gitDir = path.join(tmpDir, '.git');
      expect(fs.existsSync(path.join(gitDir, 'rr-cache', rerereHash))).toBe(false);
    });

    it('skips pair with mismatched current hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();

      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: 'wrong_hash',
          skill: sha256(skillContent),
        },
      });

      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });

    it('skips pair with mismatched skill hash', () => {
      setupInputFiles();
      const skillDir = createSkillDir();

      setupResolutionDir({
        'src/config.ts': {
          base: sha256(baseContent),
          current: sha256(currentContent),
          skill: 'wrong_hash',
        },
      });

      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });

    it('skips pair with no file_hashes entry for that file', () => {
      setupInputFiles();
      const skillDir = createSkillDir();

      // file_hashes exists but doesn't include src/config.ts
      setupResolutionDir({});

      const result = loadResolutions(['alpha', 'beta'], tmpDir, skillDir);
      expect(result).toBe(false);
    });
  });
});
|
||||
120
skills-engine/__tests__/state.test.ts
Normal file
120
skills-engine/__tests__/state.test.ts
Normal file
@@ -0,0 +1,120 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import {
|
||||
readState,
|
||||
writeState,
|
||||
recordSkillApplication,
|
||||
computeFileHash,
|
||||
compareSemver,
|
||||
recordCustomModification,
|
||||
getCustomModifications,
|
||||
} from '../state.js';
|
||||
import {
|
||||
createTempDir,
|
||||
setupNanoclawDir,
|
||||
createMinimalState,
|
||||
writeState as writeStateHelper,
|
||||
cleanup,
|
||||
} from './test-helpers.js';
|
||||
|
||||
// Tests for state.yaml persistence: read/write round-trips, version gating,
// applied-skill bookkeeping, file hashing, semver comparison, and custom
// modification records.
describe('state', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();

  beforeEach(() => {
    tmpDir = createTempDir();
    setupNanoclawDir(tmpDir);
    process.chdir(tmpDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });

  it('readState/writeState roundtrip', () => {
    const state = {
      skills_system_version: '0.1.0',
      core_version: '1.0.0',
      applied_skills: [],
    };
    writeState(state);
    const result = readState();
    expect(result.skills_system_version).toBe('0.1.0');
    expect(result.core_version).toBe('1.0.0');
    expect(result.applied_skills).toEqual([]);
  });

  it('readState throws when no state file exists', () => {
    expect(() => readState()).toThrow();
  });

  it('readState throws when version is newer than current', () => {
    // A state file written by a future engine version must be rejected
    // rather than silently reinterpreted.
    writeStateHelper(tmpDir, {
      skills_system_version: '99.0.0',
      core_version: '1.0.0',
      applied_skills: [],
    });
    expect(() => readState()).toThrow();
  });

  it('recordSkillApplication adds a skill', () => {
    createMinimalState(tmpDir);
    recordSkillApplication('my-skill', '1.0.0', { 'src/foo.ts': 'abc123' });
    const state = readState();
    expect(state.applied_skills).toHaveLength(1);
    expect(state.applied_skills[0].name).toBe('my-skill');
    expect(state.applied_skills[0].version).toBe('1.0.0');
    expect(state.applied_skills[0].file_hashes).toEqual({ 'src/foo.ts': 'abc123' });
  });

  it('re-applying same skill replaces it', () => {
    // Recording the same skill name twice must upsert, not append.
    createMinimalState(tmpDir);
    recordSkillApplication('my-skill', '1.0.0', { 'a.ts': 'hash1' });
    recordSkillApplication('my-skill', '2.0.0', { 'a.ts': 'hash2' });
    const state = readState();
    expect(state.applied_skills).toHaveLength(1);
    expect(state.applied_skills[0].version).toBe('2.0.0');
    expect(state.applied_skills[0].file_hashes).toEqual({ 'a.ts': 'hash2' });
  });

  it('computeFileHash produces consistent sha256', () => {
    const filePath = path.join(tmpDir, 'hashtest.txt');
    fs.writeFileSync(filePath, 'hello world');
    const hash1 = computeFileHash(filePath);
    const hash2 = computeFileHash(filePath);
    expect(hash1).toBe(hash2);
    // 64 lowercase hex chars = a sha256 digest
    expect(hash1).toMatch(/^[a-f0-9]{64}$/);
  });

  describe('compareSemver', () => {
    it('1.0.0 < 1.1.0', () => {
      expect(compareSemver('1.0.0', '1.1.0')).toBeLessThan(0);
    });

    it('0.9.0 < 0.10.0', () => {
      // Guards against lexicographic comparison of version parts.
      expect(compareSemver('0.9.0', '0.10.0')).toBeLessThan(0);
    });

    it('1.0.0 = 1.0.0', () => {
      expect(compareSemver('1.0.0', '1.0.0')).toBe(0);
    });
  });

  it('recordCustomModification adds to array', () => {
    createMinimalState(tmpDir);
    recordCustomModification('tweak', ['src/a.ts'], 'custom/001-tweak.patch');
    const mods = getCustomModifications();
    expect(mods).toHaveLength(1);
    expect(mods[0].description).toBe('tweak');
    expect(mods[0].files_modified).toEqual(['src/a.ts']);
    expect(mods[0].patch_file).toBe('custom/001-tweak.patch');
  });

  it('getCustomModifications returns empty when none recorded', () => {
    createMinimalState(tmpDir);
    const mods = getCustomModifications();
    expect(mods).toEqual([]);
  });
});
|
||||
204
skills-engine/__tests__/structured.test.ts
Normal file
204
skills-engine/__tests__/structured.test.ts
Normal file
@@ -0,0 +1,204 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import {
|
||||
areRangesCompatible,
|
||||
mergeNpmDependencies,
|
||||
mergeEnvAdditions,
|
||||
mergeDockerComposeServices,
|
||||
} from '../structured.js';
|
||||
import { createTempDir, cleanup } from './test-helpers.js';
|
||||
|
||||
// Tests for structured (non-textual) merges: npm dependency ranges,
// .env additions, and docker-compose service merging.
describe('structured', () => {
  let tmpDir: string;
  const originalCwd = process.cwd();

  beforeEach(() => {
    tmpDir = createTempDir();
    process.chdir(tmpDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    cleanup(tmpDir);
  });

  describe('areRangesCompatible', () => {
    it('identical versions are compatible', () => {
      const result = areRangesCompatible('^1.0.0', '^1.0.0');
      expect(result.compatible).toBe(true);
    });

    it('compatible ^ ranges resolve to higher', () => {
      const result = areRangesCompatible('^1.0.0', '^1.1.0');
      expect(result.compatible).toBe(true);
      expect(result.resolved).toBe('^1.1.0');
    });

    it('incompatible major ^ ranges', () => {
      const result = areRangesCompatible('^1.0.0', '^2.0.0');
      expect(result.compatible).toBe(false);
    });

    it('compatible ~ ranges', () => {
      const result = areRangesCompatible('~1.0.0', '~1.0.3');
      expect(result.compatible).toBe(true);
      expect(result.resolved).toBe('~1.0.3');
    });

    it('mismatched prefixes are incompatible', () => {
      // ^ and ~ express different update policies; the merger refuses to guess.
      const result = areRangesCompatible('^1.0.0', '~1.0.0');
      expect(result.compatible).toBe(false);
    });

    it('handles double-digit version parts numerically', () => {
      // ^1.9.0 vs ^1.10.0 — 10 > 9 numerically, but "9" > "10" as strings
      const result = areRangesCompatible('^1.9.0', '^1.10.0');
      expect(result.compatible).toBe(true);
      expect(result.resolved).toBe('^1.10.0');
    });

    it('handles double-digit patch versions', () => {
      const result = areRangesCompatible('~1.0.9', '~1.0.10');
      expect(result.compatible).toBe(true);
      expect(result.resolved).toBe('~1.0.10');
    });
  });

  describe('mergeNpmDependencies', () => {
    it('adds new dependencies', () => {
      const pkgPath = path.join(tmpDir, 'package.json');
      fs.writeFileSync(pkgPath, JSON.stringify({
        name: 'test',
        dependencies: { existing: '^1.0.0' },
      }, null, 2));

      mergeNpmDependencies(pkgPath, { newdep: '^2.0.0' });

      const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
      expect(pkg.dependencies.newdep).toBe('^2.0.0');
      // Pre-existing entries must be untouched
      expect(pkg.dependencies.existing).toBe('^1.0.0');
    });

    it('resolves compatible ^ ranges', () => {
      const pkgPath = path.join(tmpDir, 'package.json');
      fs.writeFileSync(pkgPath, JSON.stringify({
        name: 'test',
        dependencies: { dep: '^1.0.0' },
      }, null, 2));

      mergeNpmDependencies(pkgPath, { dep: '^1.1.0' });

      const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
      expect(pkg.dependencies.dep).toBe('^1.1.0');
    });

    it('sorts devDependencies after merge', () => {
      const pkgPath = path.join(tmpDir, 'package.json');
      fs.writeFileSync(pkgPath, JSON.stringify({
        name: 'test',
        dependencies: {},
        devDependencies: { zlib: '^1.0.0', acorn: '^2.0.0' },
      }, null, 2));

      mergeNpmDependencies(pkgPath, { middle: '^1.0.0' });

      const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
      const devKeys = Object.keys(pkg.devDependencies);
      // NOTE(review): asserts only that pre-existing dev deps come out sorted;
      // where 'middle' lands (dependencies vs devDependencies) is not pinned here.
      expect(devKeys).toEqual(['acorn', 'zlib']);
    });

    it('throws on incompatible major versions', () => {
      const pkgPath = path.join(tmpDir, 'package.json');
      fs.writeFileSync(pkgPath, JSON.stringify({
        name: 'test',
        dependencies: { dep: '^1.0.0' },
      }, null, 2));

      expect(() => mergeNpmDependencies(pkgPath, { dep: '^2.0.0' })).toThrow();
    });
  });

  describe('mergeEnvAdditions', () => {
    it('adds new variables', () => {
      const envPath = path.join(tmpDir, '.env.example');
      fs.writeFileSync(envPath, 'EXISTING_VAR=value\n');

      mergeEnvAdditions(envPath, ['NEW_VAR']);

      const content = fs.readFileSync(envPath, 'utf-8');
      expect(content).toContain('NEW_VAR=');
      expect(content).toContain('EXISTING_VAR=value');
    });

    it('skips existing variables', () => {
      const envPath = path.join(tmpDir, '.env.example');
      fs.writeFileSync(envPath, 'MY_VAR=original\n');

      mergeEnvAdditions(envPath, ['MY_VAR']);

      const content = fs.readFileSync(envPath, 'utf-8');
      // Should not add duplicate - only 1 occurrence of MY_VAR=
      const matches = content.match(/MY_VAR=/g);
      expect(matches).toHaveLength(1);
    });

    it('recognizes lowercase and mixed-case env vars as existing', () => {
      const envPath = path.join(tmpDir, '.env.example');
      fs.writeFileSync(envPath, 'my_lower_var=value\nMixed_Case=abc\n');

      mergeEnvAdditions(envPath, ['my_lower_var', 'Mixed_Case']);

      const content = fs.readFileSync(envPath, 'utf-8');
      // Should not add duplicates
      const lowerMatches = content.match(/my_lower_var=/g);
      expect(lowerMatches).toHaveLength(1);
      const mixedMatches = content.match(/Mixed_Case=/g);
      expect(mixedMatches).toHaveLength(1);
    });

    it('creates file if it does not exist', () => {
      const envPath = path.join(tmpDir, '.env.example');
      mergeEnvAdditions(envPath, ['NEW_VAR']);

      expect(fs.existsSync(envPath)).toBe(true);
      const content = fs.readFileSync(envPath, 'utf-8');
      expect(content).toContain('NEW_VAR=');
    });
  });

  describe('mergeDockerComposeServices', () => {
    it('adds new services', () => {
      const composePath = path.join(tmpDir, 'docker-compose.yaml');
      fs.writeFileSync(composePath, 'version: "3"\nservices:\n  web:\n    image: nginx\n');

      mergeDockerComposeServices(composePath, {
        redis: { image: 'redis:7' },
      });

      const content = fs.readFileSync(composePath, 'utf-8');
      expect(content).toContain('redis');
    });

    it('skips existing services', () => {
      const composePath = path.join(tmpDir, 'docker-compose.yaml');
      fs.writeFileSync(composePath, 'version: "3"\nservices:\n  web:\n    image: nginx\n');

      mergeDockerComposeServices(composePath, {
        web: { image: 'apache' },
      });

      // Existing 'web' service must win over the incoming definition
      const content = fs.readFileSync(composePath, 'utf-8');
      expect(content).toContain('nginx');
    });

    it('throws on port collision', () => {
      const composePath = path.join(tmpDir, 'docker-compose.yaml');
      fs.writeFileSync(composePath, 'version: "3"\nservices:\n  web:\n    image: nginx\n    ports:\n      - "8080:80"\n');

      // Host port 8080 is already taken by 'web'
      expect(() => mergeDockerComposeServices(composePath, {
        api: { image: 'node', ports: ['8080:3000'] },
      })).toThrow();
    });
  });
});
|
||||
99
skills-engine/__tests__/test-helpers.ts
Normal file
99
skills-engine/__tests__/test-helpers.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import { execSync } from 'child_process';
|
||||
import fs from 'fs';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import { stringify } from 'yaml';
|
||||
|
||||
export function createTempDir(): string {
|
||||
return fs.mkdtempSync(path.join(os.tmpdir(), 'nanoclaw-test-'));
|
||||
}
|
||||
|
||||
export function setupNanoclawDir(tmpDir: string): void {
|
||||
fs.mkdirSync(path.join(tmpDir, '.nanoclaw', 'base', 'src'), { recursive: true });
|
||||
fs.mkdirSync(path.join(tmpDir, '.nanoclaw', 'backup'), { recursive: true });
|
||||
}
|
||||
|
||||
export function writeState(tmpDir: string, state: any): void {
|
||||
const statePath = path.join(tmpDir, '.nanoclaw', 'state.yaml');
|
||||
fs.writeFileSync(statePath, stringify(state), 'utf-8');
|
||||
}
|
||||
|
||||
export function createMinimalState(tmpDir: string): void {
|
||||
writeState(tmpDir, {
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Build an on-disk skill package under tmpDir for tests.
 *
 * Writes manifest.yaml from `opts` (with defaults for the required fields),
 * then materializes `addFiles` under add/ and `modifyFiles` under modify/,
 * mirroring the layout the apply/replay engines expect.
 *
 * Returns the absolute path of the created skill directory.
 */
export function createSkillPackage(tmpDir: string, opts: {
  skill?: string;
  version?: string;
  core_version?: string;
  adds?: string[];
  modifies?: string[];
  addFiles?: Record<string, string>;
  modifyFiles?: Record<string, string>;
  conflicts?: string[];
  depends?: string[];
  test?: string;
  structured?: any;
  file_ops?: any[];
  post_apply?: string[];
  min_skills_system_version?: string;
  dirName?: string;
}): string {
  // dirName lets one test create several distinct packages in the same tmpDir
  const skillDir = path.join(tmpDir, opts.dirName ?? 'skill-pkg');
  fs.mkdirSync(skillDir, { recursive: true });

  const manifest: Record<string, unknown> = {
    skill: opts.skill ?? 'test-skill',
    version: opts.version ?? '1.0.0',
    description: 'Test skill',
    core_version: opts.core_version ?? '1.0.0',
    adds: opts.adds ?? [],
    modifies: opts.modifies ?? [],
    conflicts: opts.conflicts ?? [],
    depends: opts.depends ?? [],
    test: opts.test,
    structured: opts.structured,
    file_ops: opts.file_ops,
  };
  // Optional fields are only emitted when provided, so their absence in the
  // YAML can be asserted by tests.
  if (opts.post_apply) manifest.post_apply = opts.post_apply;
  if (opts.min_skills_system_version) manifest.min_skills_system_version = opts.min_skills_system_version;

  fs.writeFileSync(path.join(skillDir, 'manifest.yaml'), stringify(manifest));

  // Files the skill adds verbatim, rooted at add/
  if (opts.addFiles) {
    const addDir = path.join(skillDir, 'add');
    for (const [relPath, content] of Object.entries(opts.addFiles)) {
      const fullPath = path.join(addDir, relPath);
      fs.mkdirSync(path.dirname(fullPath), { recursive: true });
      fs.writeFileSync(fullPath, content);
    }
  }

  // Merge targets the skill modifies, rooted at modify/
  if (opts.modifyFiles) {
    const modDir = path.join(skillDir, 'modify');
    for (const [relPath, content] of Object.entries(opts.modifyFiles)) {
      const fullPath = path.join(modDir, relPath);
      fs.mkdirSync(path.dirname(fullPath), { recursive: true });
      fs.writeFileSync(fullPath, content);
    }
  }

  return skillDir;
}
|
||||
|
||||
export function initGitRepo(dir: string): void {
|
||||
execSync('git init', { cwd: dir, stdio: 'pipe' });
|
||||
execSync('git config user.email "test@test.com"', { cwd: dir, stdio: 'pipe' });
|
||||
execSync('git config user.name "Test"', { cwd: dir, stdio: 'pipe' });
|
||||
execSync('git config rerere.enabled true', { cwd: dir, stdio: 'pipe' });
|
||||
fs.writeFileSync(path.join(dir, '.gitignore'), 'node_modules\n');
|
||||
execSync('git add -A && git commit -m "init"', { cwd: dir, stdio: 'pipe' });
|
||||
}
|
||||
|
||||
export function cleanup(dir: string): void {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
}
|
||||
261
skills-engine/__tests__/uninstall.test.ts
Normal file
261
skills-engine/__tests__/uninstall.test.ts
Normal file
@@ -0,0 +1,261 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
import { stringify } from 'yaml';
|
||||
|
||||
import { uninstallSkill } from '../uninstall.js';
|
||||
import {
|
||||
cleanup,
|
||||
createTempDir,
|
||||
initGitRepo,
|
||||
setupNanoclawDir,
|
||||
writeState,
|
||||
} from './test-helpers.js';
|
||||
|
||||
describe('uninstall', () => {
|
||||
let tmpDir: string;
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
setupNanoclawDir(tmpDir);
|
||||
initGitRepo(tmpDir);
|
||||
process.chdir(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(originalCwd);
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
function setupSkillPackage(
|
||||
name: string,
|
||||
opts: {
|
||||
adds?: Record<string, string>;
|
||||
modifies?: Record<string, string>;
|
||||
modifiesBase?: Record<string, string>;
|
||||
} = {},
|
||||
): void {
|
||||
const skillDir = path.join(tmpDir, '.claude', 'skills', name);
|
||||
fs.mkdirSync(skillDir, { recursive: true });
|
||||
|
||||
const addsList = Object.keys(opts.adds ?? {});
|
||||
const modifiesList = Object.keys(opts.modifies ?? {});
|
||||
|
||||
fs.writeFileSync(
|
||||
path.join(skillDir, 'manifest.yaml'),
|
||||
stringify({
|
||||
skill: name,
|
||||
version: '1.0.0',
|
||||
core_version: '1.0.0',
|
||||
adds: addsList,
|
||||
modifies: modifiesList,
|
||||
}),
|
||||
);
|
||||
|
||||
if (opts.adds) {
|
||||
const addDir = path.join(skillDir, 'add');
|
||||
for (const [relPath, content] of Object.entries(opts.adds)) {
|
||||
const fullPath = path.join(addDir, relPath);
|
||||
fs.mkdirSync(path.dirname(fullPath), { recursive: true });
|
||||
fs.writeFileSync(fullPath, content);
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.modifies) {
|
||||
const modDir = path.join(skillDir, 'modify');
|
||||
for (const [relPath, content] of Object.entries(opts.modifies)) {
|
||||
const fullPath = path.join(modDir, relPath);
|
||||
fs.mkdirSync(path.dirname(fullPath), { recursive: true });
|
||||
fs.writeFileSync(fullPath, content);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it('returns error for non-applied skill', async () => {
|
||||
writeState(tmpDir, {
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
const result = await uninstallSkill('nonexistent');
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('not applied');
|
||||
});
|
||||
|
||||
it('blocks uninstall after rebase', async () => {
|
||||
writeState(tmpDir, {
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
rebased_at: new Date().toISOString(),
|
||||
applied_skills: [
|
||||
{
|
||||
name: 'telegram',
|
||||
version: '1.0.0',
|
||||
applied_at: new Date().toISOString(),
|
||||
file_hashes: { 'src/config.ts': 'abc' },
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = await uninstallSkill('telegram');
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('Cannot uninstall');
|
||||
expect(result.error).toContain('after rebase');
|
||||
});
|
||||
|
||||
it('returns custom patch warning', async () => {
|
||||
writeState(tmpDir, {
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [
|
||||
{
|
||||
name: 'telegram',
|
||||
version: '1.0.0',
|
||||
applied_at: new Date().toISOString(),
|
||||
file_hashes: {},
|
||||
custom_patch: '.nanoclaw/custom/001.patch',
|
||||
custom_patch_description: 'My tweak',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = await uninstallSkill('telegram');
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.customPatchWarning).toContain('custom patch');
|
||||
expect(result.customPatchWarning).toContain('My tweak');
|
||||
});
|
||||
|
||||
it('uninstalls only skill → files reset to base', async () => {
|
||||
// Set up base
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
|
||||
fs.mkdirSync(baseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'config.ts'), 'base config\n');
|
||||
|
||||
// Set up current files (as if skill was applied)
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src', 'config.ts'),
|
||||
'base config\ntelegram config\n',
|
||||
);
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src', 'telegram.ts'),
|
||||
'telegram code\n',
|
||||
);
|
||||
|
||||
// Set up skill package in .claude/skills/
|
||||
setupSkillPackage('telegram', {
|
||||
adds: { 'src/telegram.ts': 'telegram code\n' },
|
||||
modifies: {
|
||||
'src/config.ts': 'base config\ntelegram config\n',
|
||||
},
|
||||
});
|
||||
|
||||
writeState(tmpDir, {
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [
|
||||
{
|
||||
name: 'telegram',
|
||||
version: '1.0.0',
|
||||
applied_at: new Date().toISOString(),
|
||||
file_hashes: {
|
||||
'src/config.ts': 'abc',
|
||||
'src/telegram.ts': 'def',
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = await uninstallSkill('telegram');
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.skill).toBe('telegram');
|
||||
|
||||
// config.ts should be reset to base
|
||||
expect(
|
||||
fs.readFileSync(path.join(tmpDir, 'src', 'config.ts'), 'utf-8'),
|
||||
).toBe('base config\n');
|
||||
|
||||
// telegram.ts (add-only) should be removed
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src', 'telegram.ts'))).toBe(false);
|
||||
});
|
||||
|
||||
it('uninstalls one of two → other preserved', async () => {
|
||||
// Set up base
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base', 'src');
|
||||
fs.mkdirSync(baseDir, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(baseDir, 'config.ts'),
|
||||
'line1\nline2\nline3\nline4\nline5\n',
|
||||
);
|
||||
|
||||
// Current has both skills applied
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src', 'config.ts'),
|
||||
'telegram import\nline1\nline2\nline3\nline4\nline5\ndiscord import\n',
|
||||
);
|
||||
fs.writeFileSync(path.join(tmpDir, 'src', 'telegram.ts'), 'tg code\n');
|
||||
fs.writeFileSync(path.join(tmpDir, 'src', 'discord.ts'), 'dc code\n');
|
||||
|
||||
// Set up both skill packages
|
||||
setupSkillPackage('telegram', {
|
||||
adds: { 'src/telegram.ts': 'tg code\n' },
|
||||
modifies: {
|
||||
'src/config.ts':
|
||||
'telegram import\nline1\nline2\nline3\nline4\nline5\n',
|
||||
},
|
||||
});
|
||||
|
||||
setupSkillPackage('discord', {
|
||||
adds: { 'src/discord.ts': 'dc code\n' },
|
||||
modifies: {
|
||||
'src/config.ts':
|
||||
'line1\nline2\nline3\nline4\nline5\ndiscord import\n',
|
||||
},
|
||||
});
|
||||
|
||||
writeState(tmpDir, {
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [
|
||||
{
|
||||
name: 'telegram',
|
||||
version: '1.0.0',
|
||||
applied_at: new Date().toISOString(),
|
||||
file_hashes: {
|
||||
'src/config.ts': 'abc',
|
||||
'src/telegram.ts': 'def',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'discord',
|
||||
version: '1.0.0',
|
||||
applied_at: new Date().toISOString(),
|
||||
file_hashes: {
|
||||
'src/config.ts': 'ghi',
|
||||
'src/discord.ts': 'jkl',
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = await uninstallSkill('telegram');
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// discord.ts should still exist
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src', 'discord.ts'))).toBe(true);
|
||||
|
||||
// telegram.ts should be gone
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src', 'telegram.ts'))).toBe(false);
|
||||
|
||||
// config should have discord import but not telegram
|
||||
const config = fs.readFileSync(
|
||||
path.join(tmpDir, 'src', 'config.ts'),
|
||||
'utf-8',
|
||||
);
|
||||
expect(config).toContain('discord import');
|
||||
expect(config).not.toContain('telegram import');
|
||||
});
|
||||
});
|
||||
413
skills-engine/__tests__/update.test.ts
Normal file
413
skills-engine/__tests__/update.test.ts
Normal file
@@ -0,0 +1,413 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import { stringify } from 'yaml';
|
||||
|
||||
import { cleanup, createTempDir, initGitRepo, setupNanoclawDir } from './test-helpers.js';
|
||||
|
||||
// We need to mock process.cwd() since update.ts uses it
|
||||
let tmpDir: string;
|
||||
|
||||
describe('update', () => {
|
||||
beforeEach(() => {
|
||||
tmpDir = createTempDir();
|
||||
setupNanoclawDir(tmpDir);
|
||||
initGitRepo(tmpDir);
|
||||
vi.spyOn(process, 'cwd').mockReturnValue(tmpDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
cleanup(tmpDir);
|
||||
});
|
||||
|
||||
function writeStateFile(state: Record<string, unknown>): void {
|
||||
const statePath = path.join(tmpDir, '.nanoclaw', 'state.yaml');
|
||||
fs.writeFileSync(statePath, stringify(state), 'utf-8');
|
||||
}
|
||||
|
||||
function createNewCoreDir(files: Record<string, string>): string {
|
||||
const newCoreDir = path.join(tmpDir, 'new-core');
|
||||
fs.mkdirSync(newCoreDir, { recursive: true });
|
||||
|
||||
for (const [relPath, content] of Object.entries(files)) {
|
||||
const fullPath = path.join(newCoreDir, relPath);
|
||||
fs.mkdirSync(path.dirname(fullPath), { recursive: true });
|
||||
fs.writeFileSync(fullPath, content);
|
||||
}
|
||||
|
||||
return newCoreDir;
|
||||
}
|
||||
|
||||
describe('previewUpdate', () => {
|
||||
it('detects new files in update', async () => {
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/new-file.ts': 'export const x = 1;',
|
||||
});
|
||||
|
||||
const { previewUpdate } = await import('../update.js');
|
||||
const preview = previewUpdate(newCoreDir);
|
||||
|
||||
expect(preview.filesChanged).toContain('src/new-file.ts');
|
||||
expect(preview.currentVersion).toBe('1.0.0');
|
||||
});
|
||||
|
||||
it('detects changed files vs base', async () => {
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'original');
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'modified',
|
||||
});
|
||||
|
||||
const { previewUpdate } = await import('../update.js');
|
||||
const preview = previewUpdate(newCoreDir);
|
||||
|
||||
expect(preview.filesChanged).toContain('src/index.ts');
|
||||
});
|
||||
|
||||
it('does not list unchanged files', async () => {
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'same content');
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'same content',
|
||||
});
|
||||
|
||||
const { previewUpdate } = await import('../update.js');
|
||||
const preview = previewUpdate(newCoreDir);
|
||||
|
||||
expect(preview.filesChanged).not.toContain('src/index.ts');
|
||||
});
|
||||
|
||||
it('identifies conflict risk with applied skills', async () => {
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'original');
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [
|
||||
{
|
||||
name: 'telegram',
|
||||
version: '1.0.0',
|
||||
applied_at: new Date().toISOString(),
|
||||
file_hashes: { 'src/index.ts': 'abc123' },
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'updated core',
|
||||
});
|
||||
|
||||
const { previewUpdate } = await import('../update.js');
|
||||
const preview = previewUpdate(newCoreDir);
|
||||
|
||||
expect(preview.conflictRisk).toContain('src/index.ts');
|
||||
});
|
||||
|
||||
it('identifies custom patches at risk', async () => {
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'src/config.ts'), 'original');
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
custom_modifications: [
|
||||
{
|
||||
description: 'custom tweak',
|
||||
applied_at: new Date().toISOString(),
|
||||
files_modified: ['src/config.ts'],
|
||||
patch_file: '.nanoclaw/custom/001-tweak.patch',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/config.ts': 'updated core config',
|
||||
});
|
||||
|
||||
const { previewUpdate } = await import('../update.js');
|
||||
const preview = previewUpdate(newCoreDir);
|
||||
|
||||
expect(preview.customPatchesAtRisk).toContain('src/config.ts');
|
||||
});
|
||||
|
||||
it('reads version from package.json in new core', async () => {
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'package.json': JSON.stringify({ version: '2.0.0' }),
|
||||
});
|
||||
|
||||
const { previewUpdate } = await import('../update.js');
|
||||
const preview = previewUpdate(newCoreDir);
|
||||
|
||||
expect(preview.newVersion).toBe('2.0.0');
|
||||
});
|
||||
|
||||
it('detects files deleted in new core', async () => {
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'keep this');
|
||||
fs.writeFileSync(path.join(baseDir, 'src/removed.ts'), 'delete this');
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
// New core only has index.ts — removed.ts is gone
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'keep this',
|
||||
});
|
||||
|
||||
const { previewUpdate } = await import('../update.js');
|
||||
const preview = previewUpdate(newCoreDir);
|
||||
|
||||
expect(preview.filesDeleted).toContain('src/removed.ts');
|
||||
expect(preview.filesChanged).not.toContain('src/removed.ts');
|
||||
});
|
||||
});
|
||||
|
||||
describe('applyUpdate', () => {
|
||||
it('rejects when customize session is active', async () => {
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
// Create the pending.yaml that indicates active customize
|
||||
const customDir = path.join(tmpDir, '.nanoclaw', 'custom');
|
||||
fs.mkdirSync(customDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(customDir, 'pending.yaml'), 'active: true');
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'new content',
|
||||
});
|
||||
|
||||
const { applyUpdate } = await import('../update.js');
|
||||
const result = await applyUpdate(newCoreDir);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('customize session');
|
||||
});
|
||||
|
||||
it('copies new files that do not exist yet', async () => {
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/brand-new.ts': 'export const fresh = true;',
|
||||
});
|
||||
|
||||
const { applyUpdate } = await import('../update.js');
|
||||
const result = await applyUpdate(newCoreDir);
|
||||
|
||||
expect(result.error).toBeUndefined();
|
||||
expect(result.success).toBe(true);
|
||||
expect(
|
||||
fs.readFileSync(path.join(tmpDir, 'src/brand-new.ts'), 'utf-8'),
|
||||
).toBe('export const fresh = true;');
|
||||
});
|
||||
|
||||
it('performs clean three-way merge', async () => {
|
||||
// Set up base
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(baseDir, 'src/index.ts'),
|
||||
'line 1\nline 2\nline 3\n',
|
||||
);
|
||||
|
||||
// Current has user changes at the bottom
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src/index.ts'),
|
||||
'line 1\nline 2\nline 3\nuser addition\n',
|
||||
);
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
// New core changes at the top
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'core update\nline 1\nline 2\nline 3\n',
|
||||
'package.json': JSON.stringify({ version: '2.0.0' }),
|
||||
});
|
||||
|
||||
const { applyUpdate } = await import('../update.js');
|
||||
const result = await applyUpdate(newCoreDir);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.newVersion).toBe('2.0.0');
|
||||
|
||||
const merged = fs.readFileSync(
|
||||
path.join(tmpDir, 'src/index.ts'),
|
||||
'utf-8',
|
||||
);
|
||||
expect(merged).toContain('core update');
|
||||
expect(merged).toContain('user addition');
|
||||
});
|
||||
|
||||
it('updates base directory after successful merge', async () => {
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'old base');
|
||||
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(tmpDir, 'src/index.ts'), 'old base');
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'new base content',
|
||||
});
|
||||
|
||||
const { applyUpdate } = await import('../update.js');
|
||||
await applyUpdate(newCoreDir);
|
||||
|
||||
const newBase = fs.readFileSync(
|
||||
path.join(tmpDir, '.nanoclaw', 'base', 'src/index.ts'),
|
||||
'utf-8',
|
||||
);
|
||||
expect(newBase).toBe('new base content');
|
||||
});
|
||||
|
||||
it('updates core_version in state after success', async () => {
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'package.json': JSON.stringify({ version: '2.0.0' }),
|
||||
});
|
||||
|
||||
const { applyUpdate } = await import('../update.js');
|
||||
const result = await applyUpdate(newCoreDir);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.previousVersion).toBe('1.0.0');
|
||||
expect(result.newVersion).toBe('2.0.0');
|
||||
|
||||
// Verify state file was updated
|
||||
const { readState } = await import('../state.js');
|
||||
const state = readState();
|
||||
expect(state.core_version).toBe('2.0.0');
|
||||
});
|
||||
|
||||
it('restores backup on merge conflict', async () => {
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(baseDir, 'src/index.ts'),
|
||||
'line 1\nline 2\nline 3\n',
|
||||
);
|
||||
|
||||
// Current has conflicting change on same line
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(tmpDir, 'src/index.ts'),
|
||||
'line 1\nuser changed line 2\nline 3\n',
|
||||
);
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
// New core also changes line 2 — guaranteed conflict
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'line 1\ncore changed line 2\nline 3\n',
|
||||
});
|
||||
|
||||
const { applyUpdate } = await import('../update.js');
|
||||
const result = await applyUpdate(newCoreDir);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.mergeConflicts).toContain('src/index.ts');
|
||||
expect(result.backupPending).toBe(true);
|
||||
|
||||
// File should have conflict markers (backup preserved, not restored)
|
||||
const content = fs.readFileSync(
|
||||
path.join(tmpDir, 'src/index.ts'),
|
||||
'utf-8',
|
||||
);
|
||||
expect(content).toContain('<<<<<<<');
|
||||
expect(content).toContain('>>>>>>>');
|
||||
});
|
||||
|
||||
it('removes files deleted in new core', async () => {
|
||||
const baseDir = path.join(tmpDir, '.nanoclaw', 'base');
|
||||
fs.mkdirSync(path.join(baseDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(baseDir, 'src/index.ts'), 'keep');
|
||||
fs.writeFileSync(path.join(baseDir, 'src/removed.ts'), 'old content');
|
||||
|
||||
// Working tree has both files
|
||||
fs.mkdirSync(path.join(tmpDir, 'src'), { recursive: true });
|
||||
fs.writeFileSync(path.join(tmpDir, 'src/index.ts'), 'keep');
|
||||
fs.writeFileSync(path.join(tmpDir, 'src/removed.ts'), 'old content');
|
||||
|
||||
writeStateFile({
|
||||
skills_system_version: '0.1.0',
|
||||
core_version: '1.0.0',
|
||||
applied_skills: [],
|
||||
});
|
||||
|
||||
// New core only has index.ts
|
||||
const newCoreDir = createNewCoreDir({
|
||||
'src/index.ts': 'keep',
|
||||
});
|
||||
|
||||
const { applyUpdate } = await import('../update.js');
|
||||
const result = await applyUpdate(newCoreDir);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src/index.ts'))).toBe(true);
|
||||
expect(fs.existsSync(path.join(tmpDir, 'src/removed.ts'))).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user