import { EventEmitter } from 'events';
import { type TaskLoopOptions, type ChatMessage, type ChatSetting, type TaskLoop, type TextMessage, type ToolCallResult, type ToolCall } from '../../task-loop.js';
import { IConnectionArgs, MessageHandler, WebSocketMessage } from './adapter.js';
export declare class TaskLoopAdapter {
    emitter: EventEmitter;
    private messageHandlers;
    private connectionOptions;
    constructor(option?: any);
    /**
     * @description Send a message.
     * @param message - A message containing `command` and `args`.
     */
    postMessage(message: WebSocketMessage): void;
    /**
     * @description Register a handler for incoming messages.
     * @param callback - The message callback.
     * @returns {{ dispose: () => void }} - A disposable listener.
     */
    onDidReceiveMessage(callback: MessageHandler): {
        dispose: () => void;
    };
    /**
     * @description Connect to an MCP server.
     * @param mcpOption - Connection arguments for the MCP server.
     */
    addMcp(mcpOption: IConnectionArgs): void;
}
interface StdioMCPConfig {
    command: string;
    args: string[];
    env?: {
        [key: string]: string;
    };
    cwd?: string;
    description?: string;
    prompts?: string[];
    resources?: string[];
}
interface HttpMCPConfig {
    url: string;
    type?: string;
    env?: {
        [key: string]: string;
    };
    description?: string;
    prompts?: string[];
    resources?: string[];
}
export interface OmAgentConfiguration {
    version: string;
    mcpServers: {
        [key: string]: StdioMCPConfig | HttpMCPConfig;
    };
    defaultLLM: {
        baseURL: string;
        apiToken: string;
        model: string;
    };
}
export interface DefaultLLM {
    baseURL: string;
    apiToken?: string;
    model: string;
}
export interface AinvokeConfig {
    messages: ChatMessage[] | string;
    settings?: Partial<ChatSetting>;
    until?: {
        toolName: string;
        needCall?: boolean;
        forceCall?: boolean;
    };
    reflux?: {
        enabled?: boolean;
        saveDir?: string;
    };
}
export declare function UserMessage(content: string): TextMessage;
export declare function AssistantMessage(content: string): TextMessage;
export declare class OmAgent {
    private _adapter;
    private _loop?;
    private _defaultLLM?;
    constructor();
    /**
     * @description Load the MCP configuration from a file.
     * Supports multiple MCP backends and a default LLM model configuration.
     *
     * @example
     * Example configuration:
     * {
     *   "version": "1.0.0",
     *   "mcpServers": {
     *     "openmemory": {
     *       "command": "npx",
     *       "args": ["-y", "openmemory"],
     *       "env": {
     *         "OPENMEMORY_API_KEY": "YOUR_API_KEY",
     *         "CLIENT_NAME": "openmemory"
     *       },
     *       "description": "An MCP server for long-term memory support"
     *     }
     *   },
     *   "defaultLLM": {
     *     "baseURL": "https://api.openmemory.ai",
     *     "apiToken": "YOUR_API_KEY",
     *     "model": "deepseek-chat"
     *   }
     * }
     *
     * @param configPath - Path to the configuration file.
     */
    loadMcpConfig(configPath: string): void;
    /**
     * @description Add an MCP server.
     */
    addMcpServer(connectionArgs: IConnectionArgs): void;
    getLoop(loopOption?: TaskLoopOptions): Promise<TaskLoop>;
    setDefaultLLM(option: DefaultLLM): void;
    getPrompt(promptId: string, args: Record<string, string>): Promise<string>;
    /**
     * @description Invoke the agent asynchronously with a string or a list of messages.
     * @param messages - Chat messages or a plain string.
     * @param settings - Chat settings and task-loop options.
     */
    private _ainvoke;
    ainvoke({ messages, settings, until, reflux }: AinvokeConfig): Promise<string | ToolCallResult[]>;
}
export {};
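/*
 * A minimal wiring sketch for TaskLoopAdapter, kept in a comment because this
 * is a declaration file. The import paths, the `ws` transport, the direction
 * of the bridge, and the fields of IConnectionArgs (declared in ./adapter.js
 * and not visible here) are all assumptions, not part of this file:
 *
 *   import { WebSocket } from 'ws';
 *   import { TaskLoopAdapter } from './sdk.js';
 *
 *   const adapter = new TaskLoopAdapter();
 *   const socket = new WebSocket('ws://localhost:8080');
 *
 *   socket.on('open', () => {
 *     // Forward messages emitted by the task loop to the transport.
 *     adapter.onDidReceiveMessage((message) => {
 *       socket.send(JSON.stringify(message));
 *     });
 *   });
 *
 *   // Feed inbound transport messages back into the task loop.
 *   socket.on('message', (raw) => {
 *     adapter.postMessage(JSON.parse(raw.toString()));
 *   });
 */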
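/*
 * A minimal usage sketch for OmAgent, assuming a config file shaped like the
 * loadMcpConfig example above. The import path, the prompt id 'hacknews',
 * and its arguments are hypothetical:
 *
 *   import { OmAgent } from './sdk.js';
 *
 *   async function main() {
 *     const agent = new OmAgent();
 *     agent.loadMcpConfig('./mcpconfig.json');
 *
 *     // Render a prompt registered by one of the MCP servers, then run the loop.
 *     const prompt = await agent.getPrompt('hacknews', { topn: '5' });
 *     const reply = await agent.ainvoke({ messages: prompt });
 *     console.log(reply);
 *   }
 */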
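/*
 * Continuing the sketch above, a sample of the richer AinvokeConfig options,
 * assuming the semantics the names suggest: `until` runs the loop until the
 * named tool is called, and `reflux` persists the conversation under
 * `saveDir`. The tool name and directory are hypothetical:
 *
 *   agent.setDefaultLLM({
 *     baseURL: 'https://api.deepseek.com',
 *     apiToken: process.env.API_TOKEN,
 *     model: 'deepseek-chat',
 *   });
 *
 *   const reply = await agent.ainvoke({
 *     messages: [
 *       AssistantMessage('I can save notes with the write_note tool.'),
 *       UserMessage('Save a note that the meeting moved to 3pm.'),
 *     ],
 *     until: { toolName: 'write_note', needCall: true },
 *     reflux: { enabled: true, saveDir: './sessions' },
 *   });
 */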