Browse Source

cartography

Alvin Unreal 2 months ago
parent
commit
3af008005b
9 changed files with 524 additions and 3 deletions
  1. 9 0
      bun.lock
  2. 79 0
      cartography.md
  3. 3 0
      package.json
  4. 280 0
      scripts/cartography.ts
  5. 3 0
      src/index.ts
  6. 7 0
      src/tools/cartography/index.ts
  7. 80 0
      src/tools/cartography/tool.ts
  8. 2 2
      src/tools/index.ts
  9. 61 1
      src/tools/skill/builtin.ts

+ 9 - 0
bun.lock

@@ -9,8 +9,11 @@
         "@modelcontextprotocol/sdk": "^1.25.1",
         "@opencode-ai/plugin": "^1.1.19",
         "@opencode-ai/sdk": "^1.1.19",
+        "hash-wasm": "^4.12.0",
+        "ignore": "^7.0.5",
         "vscode-jsonrpc": "^8.2.0",
         "vscode-languageserver-protocol": "^3.17.5",
+        "yaml": "^2.8.2",
         "zod": "^4.1.8",
       },
       "devDependencies": {
@@ -146,6 +149,8 @@
 
     "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
 
+    "hash-wasm": ["hash-wasm@4.12.0", "", {}, "sha512-+/2B2rYLb48I/evdOIhP+K/DD2ca2fgBjp6O+GBEnCDk2e4rpeXIK8GvIyRPjTezgmWn9gmKwkQjjx6BtqDHVQ=="],
+
     "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
 
     "hono": ["hono@4.11.4", "", {}, "sha512-U7tt8JsyrxSRKspfhtLET79pU8K+tInj5QZXs1jSugO1Vq5dFj3kmZsRldo29mTBfcjDRVRXrEZ6LS63Cog9ZA=="],
@@ -154,6 +159,8 @@
 
     "iconv-lite": ["iconv-lite@0.7.2", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw=="],
 
+    "ignore": ["ignore@7.0.5", "", {}, "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg=="],
+
     "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
 
     "ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="],
@@ -254,6 +261,8 @@
 
     "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
 
+    "yaml": ["yaml@2.8.2", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A=="],
+
     "zod": ["zod@4.3.5", "", {}, "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g=="],
 
     "zod-to-json-schema": ["zod-to-json-schema@3.25.1", "", { "peerDependencies": { "zod": "^3.25 || ^4" } }, "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA=="],

+ 79 - 0
cartography.md

@@ -0,0 +1,79 @@
+# 🗺️ Cartography Skill
+
+**Objective**: Provide AI agents with a high-fidelity, hierarchical "mental map" of a codebase to enable precise context preparation and flow understanding.
+
+## 🏛️ Core Architecture
+
+Cartography operates through an orchestrated "bottom-up" analysis pattern, combining deterministic hashing with LLM reasoning.
+
+### 1. The Helper Script (`cartography-helper`)
+A lightweight utility designed for the Orchestrator to handle deterministic file operations.
+- **Scanning**: Discovers directory structures while respecting `.gitignore` and default excludes (node_modules, .git, etc.).
+- **Hashing**: Calculates MD5 hashes for individual files and a composite "Folder Hash" (hash of all valid file hashes in that directory).
+- **Compact Frontmatter**: Manages a minimal YAML block in `codemap.md` to track state:
+  ```yaml
+  ---
+  h: [folder_hash]
+  f: [{p: path, h: file_hash}, ...]
+  ---
+  ```
+- **Lifecycle**: If `codemap.md` doesn't exist, it scaffolds it. If it exists but hashes match, it skips processing.
+
+### 2. Orchestration Strategy
+The Orchestrator acts as the "Surveyor General," determining the scope and sequence of the map.
+- **Importance Filtering**: Categorizes folders by project relevance (e.g., `src/`, `app/` are High; `tests/`, `docs/` are Low).
+- **Extension Selection**: Dynamically decides which extensions to track based on the project language (e.g., `.ts` for TypeScript projects, `.py` for Python).
+- **Parallel Execution**: Spawns multiple **Explorer** agents to analyze folders in parallel.
+- **Dependency Chaining**: Ensures sub-folders are mapped *before* parent folders so the parent analysis can reference sub-folder summaries.
+
+### 3. Analysis Pattern (The Explorer)
+Explorers are tasked with generating the human/AI-readable body of the `codemap.md`.
+
+**Capture Requirements:**
+- **Purpose**: 1-2 sentence high-level role of the file.
+- **Key Exports**: Critical components, classes, or functions (excluding signatures).
+- **Dependencies**: Internal project imports that define the relationship between files.
+- **Data Flow**: The narrative journey of data (e.g., `Webhook -> Validator -> Queue`).
+
+**Constraint**: Avoid volatile information like function parameters or line numbers that change frequently.
+
+## 🔄 Operational Workflow
+
+1.  **Discovery Phase**: Orchestrator runs the helper script to scan the root and identifies "High Importance" directories.
+2.  **Initial Hash Check**: The script identifies which folders are "Dirty" (hash mismatch or missing `codemap.md`).
+3.  **Leaf-Node Analysis**: Explorers are dispatched to the deepest sub-folders first.
+4.  **Incremental Update**: 
+    - If a file hash changes, the Explorer re-analyzes only that file and updates the Folder Summary.
+    - If no hashes change, the file is skipped entirely.
+5.  **Hierarchy Assembly**: As sub-folders finish, parent Explorers synthesize those results into higher-level summaries until the Root Codemap is reached.
+
+## 🤖 LLM Prompting Goal
+The resulting `codemap.md` files serve as a "Pre-flight Checklist" for any future agent task. Instead of reading 100 files, an agent reads 1-5 `codemap.md` files to understand exactly where logic lives and how systems interact.
+
+---
+
+## 💬 Design Q&A (Decisions & Logic)
+
+**Q: What is the primary use case?**
+**A:** LLM context preparation. It provides agents with a structured map of the codebase before they begin work, reducing token waste and improving accuracy.
+
+**Q: How are folders prioritized?**
+**A:** Via "Code vs Non-Code" classification. Orchestrator identifies source directories (`src`, `lib`, `app`) as high priority and treats noise (`tests`, `docs`, `dist`) as low priority.
+
+**Q: Why MD5 for hashing?**
+**A:** Speed. The goal is rapid change detection to determine if an LLM needs to re-analyze a file, not cryptographic security.
+
+**Q: What is the "Folder Hash" logic?**
+**A:** It is a hash of all hashes of the "allowed" files within that folder. If any tracked file changes, the folder hash changes, triggering a re-map.
+
+**Q: Why avoid function parameters in the codemap?**
+**A:** They change too often. The codemap focuses on stable architectural "flows" and "purposes" rather than volatile signatures.
+
+**Q: How does the hierarchy work?**
+**A:** One `codemap.md` per folder. Sub-folders must be mapped before their parents so the parent can synthesize the sub-folder's high-level purpose into its own map.
+
+**Q: What is the script's specific responsibility?**
+**A:** The script is deterministic. It calculates hashes, manages the compact frontmatter, and scaffolds the file. It *never* generates the descriptive body; that is reserved for the Explorer agents.
+
+**Q: How is parallelism handled?**
+**A:** Explorers run in parallel for all "leaf" folders (folders with no sub-folders). Once a layer is complete, the Orchestrator moves up the tree.

+ 3 - 0
package.json

@@ -49,8 +49,11 @@
     "@modelcontextprotocol/sdk": "^1.25.1",
     "@opencode-ai/plugin": "^1.1.19",
     "@opencode-ai/sdk": "^1.1.19",
+    "hash-wasm": "^4.12.0",
+    "ignore": "^7.0.5",
     "vscode-jsonrpc": "^8.2.0",
     "vscode-languageserver-protocol": "^3.17.5",
+    "yaml": "^2.8.2",
     "zod": "^4.1.8"
   },
   "devDependencies": {

+ 280 - 0
scripts/cartography.ts

@@ -0,0 +1,280 @@
+#!/usr/bin/env bun
+import { existsSync, readdirSync, readFileSync, writeFileSync } from 'node:fs';
+import { join, resolve } from 'node:path';
+import { createMD5, md5 } from 'hash-wasm';
+import ignore from 'ignore';
+import { parse, stringify } from 'yaml';
+
// A single tracked file: `p` is the path relative to the mapped folder,
// `h` is its MD5 content hash. Keys are deliberately terse to keep the
// codemap.md frontmatter compact (see cartography.md spec).
interface FileEntry {
  p: string;
  h: string;
}

// Frontmatter stored at the top of codemap.md: `h` is the composite
// folder hash, `f` the per-file hashes it was computed from.
interface Frontmatter {
  h: string;
  f: FileEntry[];
}

// Names/patterns always excluded from scanning, regardless of .gitignore.
// NOTE(review): shouldIgnore compares these by substring against the
// relative path, so '*.log' is matched literally (not as a glob) and
// short entries like 'out' can over-match — confirm intended semantics.
const DEFAULT_IGNORE = [
  'node_modules',
  '.git',
  'dist',
  'build',
  '.next',
  'coverage',
  '.turbo',
  'out',
  '*.log',
  '.DS_Store',
];
+
+function parseGitignore(folder: string): ignore.Ignore {
+  const gitignorePath = join(folder, '.gitignore');
+
+  if (existsSync(gitignorePath)) {
+    const content = readFileSync(gitignorePath, 'utf-8');
+    return ignore().add(content.split('\n'));
+  }
+
+  return ignore();
+}
+
+function shouldIgnore(relPath: string, ignorer: ignore.Ignore): boolean {
+  if (DEFAULT_IGNORE.some((pattern) => relPath.includes(pattern))) {
+    return true;
+  }
+  return ignorer.ignores(relPath);
+}
+
+function getFiles(
+  folder: string,
+  extensions: string[],
+  ignorer: ignore.Ignore,
+): string[] {
+  const files: string[] = [];
+
+  function scan(dir: string, base: string = '') {
+    const entries = readdirSync(dir, { withFileTypes: true });
+
+    for (const entry of entries) {
+      const fullPath = join(dir, entry.name);
+      const relPath = base ? join(base, entry.name) : entry.name;
+
+      if (shouldIgnore(relPath, ignorer)) {
+        continue;
+      }
+
+      if (entry.isDirectory()) {
+        scan(fullPath, relPath);
+      } else if (entry.isFile()) {
+        const ext = entry.name.includes('.')
+          ? '.' + entry.name.split('.').pop()!
+          : '';
+        if (extensions.includes(ext)) {
+          files.push(relPath);
+        }
+      }
+    }
+  }
+
+  scan(folder);
+
+  return files.sort((a, b) => a.localeCompare(b));
+}
+
+async function calculateHashes(
+  folder: string,
+  files: string[],
+): Promise<Map<string, string>> {
+  const hashes = new Map<string, string>();
+
+  for (const file of files) {
+    const fullPath = join(folder, file);
+    try {
+      const content = await Bun.file(fullPath).text();
+      hashes.set(file, await md5(content));
+    } catch (error) {
+      console.error(`Failed to hash ${file}:`, error);
+    }
+  }
+
+  return hashes;
+}
+
+async function calculateFolderHash(
+  fileHashes: Map<string, string>,
+): Promise<string> {
+  const hasher = await createMD5();
+  hasher.init();
+
+  const sortedEntries = Array.from(fileHashes.entries()).sort(([a], [b]) =>
+    a.localeCompare(b),
+  );
+
+  for (const [path, hash] of sortedEntries) {
+    hasher.update(`${path}:${hash}|`);
+  }
+
+  return hasher.digest();
+}
+
+interface ParsedFrontmatter {
+  frontmatter: Frontmatter | null;
+  body: string;
+}
+
+function parseFrontmatter(content: string): ParsedFrontmatter {
+  const match = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
+
+  if (!match) {
+    return { frontmatter: null, body: content };
+  }
+
+  try {
+    const frontmatter = parse(match[1]) as Frontmatter;
+    return { frontmatter, body: match[2] };
+  } catch {
+    return { frontmatter: null, body: content };
+  }
+}
+
+function formatFrontmatter(frontmatter: Frontmatter): string {
+  return `---
+h: ${frontmatter.h}
+f:
+${frontmatter.f.map((f) => `  - p: ${f.p}\n    h: ${f.h}`).join('\n')}
+---
+`;
+}
+
+async function updateCodemap(
+  folder: string,
+  extensions: string[],
+): Promise<{ updated: boolean; fileCount: number; changedFiles: string[] }> {
+  const ignorer = parseGitignore(folder);
+  const files = getFiles(folder, extensions, ignorer);
+  const fileHashes = await calculateHashes(folder, files);
+  const folderHash = await calculateFolderHash(fileHashes);
+
+  const codemapPath = join(folder, 'codemap.md');
+  let body = '';
+  let changedFiles: string[] = [];
+
+  if (existsSync(codemapPath)) {
+    const content = readFileSync(codemapPath, 'utf-8');
+    const { frontmatter, body: existingBody } = parseFrontmatter(content);
+
+    if (frontmatter?.h === folderHash) {
+      return { updated: false, fileCount: files.length, changedFiles: [] };
+    }
+
+    body = existingBody;
+
+    if (frontmatter) {
+      const oldHashes = new Map(frontmatter.f.map((f) => [f.p, f.h]));
+
+      for (const [path, hash] of fileHashes) {
+        if (oldHashes.get(path) !== hash) {
+          changedFiles.push(path);
+        }
+      }
+    } else {
+      changedFiles = files;
+    }
+  } else {
+    changedFiles = files;
+  }
+
+  const frontmatter: Frontmatter = {
+    h: folderHash,
+    f: files.map((p) => ({ p, h: fileHashes.get(p)! })),
+  };
+
+  const content = formatFrontmatter(frontmatter) + body;
+  writeFileSync(codemapPath, content, 'utf-8');
+
+  return { updated: true, fileCount: files.length, changedFiles };
+}
+
+async function main() {
+  const command = process.argv[2];
+  const folderArg = process.argv[3];
+  const folder = folderArg ? resolve(folderArg) : process.cwd();
+
+  const extArg = process.argv.find((a) => a.startsWith('--extensions'));
+  const extensions = extArg
+    ? extArg
+        .split('=')[1]
+        .split(',')
+        .map((e) => '.' + e.trim().replace(/^\./, ''))
+    : ['.ts', '.tsx', '.js', '.jsx', '.py', '.go', '.rs'];
+
+  switch (command) {
+    case 'scan': {
+      const ignorer = parseGitignore(folder);
+      const files = getFiles(folder, extensions, ignorer);
+      console.log(JSON.stringify({ folder, files }, null, 2));
+      break;
+    }
+
+    case 'hash': {
+      const ignorer = parseGitignore(folder);
+      const files = getFiles(folder, extensions, ignorer);
+      const fileHashes = await calculateHashes(folder, files);
+      const folderHash = await calculateFolderHash(fileHashes);
+      console.log(
+        JSON.stringify(
+          {
+            folderHash,
+            files: Object.fromEntries(fileHashes),
+          },
+          null,
+          2,
+        ),
+      );
+      break;
+    }
+
+    case 'update': {
+      const result = await updateCodemap(folder, extensions);
+      if (result.updated) {
+        console.log(
+          JSON.stringify(
+            {
+              updated: true,
+              folder,
+              fileCount: result.fileCount,
+              changedFiles: result.changedFiles,
+            },
+            null,
+            2,
+          ),
+        );
+      } else {
+        console.log(
+          JSON.stringify(
+            {
+              updated: false,
+              folder,
+              message: 'No changes detected',
+            },
+            null,
+            2,
+          ),
+        );
+      }
+      break;
+    }
+
+    default:
+      console.error(
+        'Usage: cartography <scan|hash|update> [folder] [--extensions ts,tsx,js]',
+      );
+      process.exit(1);
+  }
+}
+
+main().catch((error) => {
+  console.error('Error:', error);
+  process.exit(1);
+});

+ 3 - 0
src/index.ts

@@ -13,6 +13,7 @@ import {
   ast_grep_replace,
   ast_grep_search,
   createBackgroundTools,
+  createCartographyTool,
   createSkillTools,
   grep,
   lsp_diagnostics,
@@ -54,6 +55,7 @@ const OhMyOpenCodeLite: Plugin = async (ctx) => {
     tmuxConfig,
     config,
   );
+  const cartographyTool = createCartographyTool(ctx);
   const mcps = createBuiltinMcps(config.disabled_mcps);
   const skillMcpManager = SkillMcpManager.getInstance();
   const skillTools = createSkillTools(skillMcpManager, config);
@@ -88,6 +90,7 @@ const OhMyOpenCodeLite: Plugin = async (ctx) => {
       ast_grep_search,
       ast_grep_replace,
       antigravity_quota,
+      cartography: cartographyTool,
       ...skillTools,
     },
 

+ 7 - 0
src/tools/cartography/index.ts

@@ -0,0 +1,7 @@
import { createCartographyTool } from './tool';

export { createCartographyTool };
// Registry-style descriptor so the tool can be looked up by name as
// well as constructed directly via the factory above.
export const cartographyTool = {
  name: 'cartography',
  create: createCartographyTool,
};

+ 80 - 0
src/tools/cartography/tool.ts

@@ -0,0 +1,80 @@
+import { join } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import {
+  type PluginInput,
+  type ToolDefinition,
+  tool,
+} from '@opencode-ai/plugin';
+
+export function createCartographyTool(ctx: PluginInput): ToolDefinition {
+  return tool({
+    description:
+      'Cartography helper script for codebase mapping. Use for directory scanning, hash calculation, and codemap.md generation.',
+    args: {
+      command: tool.schema
+        .enum(['scan', 'hash', 'update'])
+        .describe(
+          'Command to run: scan (list files), hash (calculate hashes), update (generate/update codemap.md)',
+        ),
+      folder: tool.schema
+        .string()
+        .optional()
+        .describe('Target folder path (relative to session directory)'),
+      extensions: tool.schema
+        .string()
+        .optional()
+        .describe(
+          'File extensions to map, comma-separated without dots (e.g., "ts,tsx,js")',
+        ),
+    },
+    execute: async (args, toolContext) => {
+      const sessionDir = await getSessionDirectory(ctx, toolContext);
+
+      const scriptPath = join(
+        fileURLToPath(import.meta.url),
+        '../../../scripts/cartography.ts',
+      );
+
+      const extensions = (args.extensions as string) || 'ts,tsx,js,jsx';
+      const commandArgs = [
+        'run',
+        scriptPath,
+        args.command as string,
+        (args.folder as string) || '.',
+        `--extensions=${extensions}`,
+      ];
+
+      const result = await Bun.$`bun ${commandArgs}`.cwd(sessionDir);
+
+      try {
+        const json = JSON.parse(result.stdout.toString());
+        return JSON.stringify(json);
+      } catch {
+        return JSON.stringify({
+          output: result.stdout.toString(),
+          stderr: result.stderr.toString(),
+        });
+      }
+    },
+  });
+}
+
+async function getSessionDirectory(
+  ctx: PluginInput,
+  toolContext: Record<string, unknown>,
+): Promise<string> {
+  try {
+    const sessionID = toolContext.sessionID as string;
+    const session = await ctx.client.session.get({
+      path: { id: sessionID },
+    });
+
+    if (session?.data?.directory) {
+      return session.data.directory;
+    }
+  } catch (error) {
+    console.error('Failed to get session directory:', error);
+  }
+
+  return ctx.directory;
+}

+ 2 - 2
src/tools/index.ts

@@ -1,7 +1,8 @@
 // AST-grep tools
 export { ast_grep_replace, ast_grep_search } from './ast-grep';
 export { createBackgroundTools } from './background';
-
+// Cartography tool
+export { createCartographyTool } from './cartography';
 // Grep tool (ripgrep-based)
 export { grep } from './grep';
 export {
@@ -11,7 +12,6 @@ export {
   lsp_rename,
   lspManager,
 } from './lsp';
-
 // Antigravity quota tool
 export { antigravity_quota } from './quota';
 

+ 61 - 1
src/tools/skill/builtin.ts

@@ -14,7 +14,7 @@ export const DEFAULT_AGENT_SKILLS: Record<AgentName, string[]> = {
   designer: ['playwright'],
   oracle: [],
   librarian: [],
-  explorer: [],
+  explorer: ['cartography'],
   fixer: [],
 };
 
@@ -149,6 +149,58 @@ Recommended action: [Proceed with simplifications/Minor tweaks only/Already mini
 
 Remember: Perfect is the enemy of good. The simplest code that works is often the best code. Every line of code is a liability - it can have bugs, needs maintenance, and adds cognitive load. Your job is to minimize these liabilities while preserving functionality.`;
 
// System prompt for the cartography skill: instructs an Explorer agent
// how to write the *body* of a folder's codemap.md. The YAML frontmatter
// (hashes) is owned by the deterministic helper script, so the template
// deliberately covers only the markdown content.
const CARTOGRAPHY_TEMPLATE = `# Cartography Skill

You are a code cartographer. Your mission is to create structured codemaps that help LLMs understand codebase structure and flows.

## Your Task

Generate a \`codemap.md\` file for the assigned folder that documents:
- **Purpose**: What this folder contains and its role in the project
- **Per-file analysis**: For each file, document:
  - Purpose (1-2 sentences)
  - Key exports (main functions/classes/components)
  - Dependencies (imports from other project files)
  - Data flows (how data moves through the file)

## Format

Use this structure:

\`\`\`markdown
# [Folder Name]

## Purpose
[What this folder contains and its role in the project]

## Files

### [filename.ext]
**Purpose**: [1-2 sentences]

**Exports**: [main exports]

**Dependencies**: [imports from other project files]

**Data Flow**: [input → processing → output]

### [next file.ext]
...
\`\`\`

## Guidelines

- Focus on **what** and **why**, not implementation details
- Avoid listing function parameters (they change often)
- Document flows and relationships, not signatures
- Be concise but informative
- Reference the frontmatter hashes for change tracking

## Frontmatter

The helper script manages frontmatter with hashes. You only update the body content when needed. Check the frontmatter to see which files have changed since the last update.
`;
+
 const PLAYWRIGHT_TEMPLATE = `# Playwright Browser Automation Skill
 
 This skill provides browser automation capabilities via the Playwright MCP server.
@@ -202,9 +254,17 @@ const playwrightSkill: SkillDefinition = {
   },
 };
 
// Built-in skill registration for cartography. The template supplies the
// Explorer-agent instructions; the deterministic hashing half of the
// feature lives in scripts/cartography.ts.
const cartographySkill: SkillDefinition = {
  name: 'cartography',
  description:
    'Codebase mapping and structure documentation. Generate hierarchical codemaps to help AI agents understand code organization, dependencies, and data flows. Uses parallel Explorers for efficient large-scale analysis.',
  template: CARTOGRAPHY_TEMPLATE,
};
+
 const builtinSkillsMap = new Map<string, SkillDefinition>([
   [yagniEnforcementSkill.name, yagniEnforcementSkill],
   [playwrightSkill.name, playwrightSkill],
+  [cartographySkill.name, cartographySkill],
 ]);
 
 export function getBuiltinSkills(): SkillDefinition[] {