Browse Source

feat: add serve-mode orchestrator prompt injection + fix CJS build for GitHub installs (#261)

* feat: add serve-mode orchestrator prompt injection

Adds experimental.chat.system.transform hook to inject the full
orchestrator system prompt when serving via the OpenCode serve API.

In serve mode, agent state is built from raw config before plugin
config hooks run. The frozen agents map lacks the orchestrator prompt.
This hook injects ORCHESTRATOR_PROMPT at LLM call time, bypassing
the frozen map entirely.

Changes:
- src/agents/orchestrator.ts: export ORCHESTRATOR_PROMPT
- src/index.ts: add sessionAgentMap + chat.message hook to track
  session→agent; add experimental.chat.system.transform hook to
  inject orchestrator prompt for serve-mode sessions

* build: add prepare script for GitHub installs

Adds prepare script with bundled build (--external flags instead of
--packages external) to avoid CJS/ESM interop failures in OpenCode's
embedded Bun runtime when installing from GitHub.

The original build script is unchanged — prepare is a standalone
command used only during GitHub/git installation.

---------

Co-authored-by: plutofog <plutofog@proton.me>
Link 4 days ago
parent
commit
0bc95f7bd5
3 changed files with 40 additions and 1 deletion
  1. 1 0
      package.json
  2. 1 1
      src/agents/orchestrator.ts
  3. 38 0
      src/index.ts

+ 1 - 0
package.json

@@ -37,6 +37,7 @@
   ],
   "scripts": {
     "build": "bun build src/index.ts --outdir dist --target bun --format esm --packages external && bun build src/cli/index.ts --outdir dist/cli --target bun --format esm --packages external && tsc --emitDeclarationOnly && bun run generate-schema",
+    "prepare": "bun build src/index.ts --outdir dist --target bun --format esm --external @ast-grep/napi --external @opencode-ai/plugin --external @opencode-ai/sdk",
     "contributors:add": "all-contributors add",
     "contributors:check": "all-contributors check",
     "contributors:generate": "all-contributors generate",

+ 1 - 1
src/agents/orchestrator.ts

@@ -23,7 +23,7 @@ export function resolvePrompt(
   return base;
 }
 
-const ORCHESTRATOR_PROMPT = `<Role>
+export const ORCHESTRATOR_PROMPT = `<Role>
 You are an AI coding orchestrator that optimizes for quality, speed, cost, and reliability by delegating to specialists when it provides net efficiency gains.
 </Role>
 

+ 38 - 0
src/index.ts

@@ -164,6 +164,9 @@ const OhMyOpenCodeLite: Plugin = async (ctx) => {
     config.fallback?.enabled !== false && Object.keys(runtimeChains).length > 0,
   );
 
+  // Track session → agent mapping for serve-mode system prompt injection
+  const sessionAgentMap = new Map<string, string>();
+
   // Initialize todo-continuation hook (opt-in auto-continue for incomplete todos)
   const todoContinuationHook = createTodoContinuationHook(ctx, {
     maxContinuations: config.todoContinuation?.maxContinuations ?? 5,
@@ -450,6 +453,41 @@ const OhMyOpenCodeLite: Plugin = async (ctx) => {
 
     'chat.headers': chatHeadersHook['chat.headers'],
 
+    // Track which agent each session uses (needed for serve-mode prompt injection)
+    'chat.message': async (input: {
+      sessionID: string;
+      agent?: string;
+    }) => {
+      if (input.agent) {
+        sessionAgentMap.set(input.sessionID, input.agent);
+      }
+    },
+
+    // Inject orchestrator system prompt for serve-mode sessions.
+    // In serve mode, the agent's prompt field may be absent from the agents registry
+    // (built before plugin config hooks run). This hook injects it at LLM call time.
+    'experimental.chat.system.transform': async (
+      input: { sessionID?: string },
+      output: { system: string[] },
+    ): Promise<void> => {
+      const agentName = input.sessionID
+        ? sessionAgentMap.get(input.sessionID)
+        : undefined;
+      if (agentName === 'orchestrator') {
+        const alreadyInjected = output.system.some(
+          (s) =>
+            typeof s === 'string' &&
+            s.includes('<Role>') &&
+            s.includes('orchestrator'),
+        );
+        if (!alreadyInjected) {
+          // Prepend the orchestrator prompt to the system array
+          const { ORCHESTRATOR_PROMPT } = await import('./agents/orchestrator');
+          output.system[0] = ORCHESTRATOR_PROMPT + (output.system[0] ? '\n\n' + output.system[0] : '');
+        }
+      }
+    },
+
     // Inject phase reminder and filter available skills before sending to API (doesn't show in UI)
     'experimental.chat.messages.transform': async (
       input: Record<string, never>,