import type { PluginInput } from '@opencode-ai/plugin';
import type { PluginConfig } from '../config';

/**
 * Interview Manager — Composition root.
 *
 * Two modes:
 *
 * 1. **Dashboard mode** (dashboard:true or port>0):
 *    First process to bind the port becomes the dashboard (dumb aggregator).
 *    Other processes register as sessions and push state to it.
 *    Sessions drive LLM interaction locally, dashboard just serves the web UI.
 *
 * 2. **Per-session mode** (default, port=0, dashboard:false):
 *    Upstream behavior. Each process runs its own interview server on a random
 *    port. Lazy startup on first /interview command.
 */
export declare function createInterviewManager(ctx: PluginInput, config: PluginConfig): {
    /**
     * Registers the /interview command with the host.
     * NOTE(review): original declaration had a bare `Record` (invalid —
     * `Record<K, V>` needs two type arguments); restored as
     * `Record<string, unknown>` — confirm against the implementation.
     */
    registerCommand: (config: Record<string, unknown>) => void;
    /**
     * Hook invoked before a command executes; may mutate `output.parts`.
     * Restored return type `Promise<void>` (bare `Promise` is invalid TS).
     */
    handleCommandExecuteBefore: (input: {
        command: string;
        sessionID: string;
        arguments: string;
    }, output: {
        parts: Array<{
            type: string;
            text?: string;
        }>;
    }) => Promise<void>;
    /**
     * Hook invoked for host events. `properties` restored as
     * `Record<string, unknown>` — verify against the event bus payload types.
     */
    handleEvent: (input: {
        event: {
            type: string;
            properties?: Record<string, unknown>;
        };
    }) => Promise<void>;
};