// SPDX-License-Identifier: MIT
// Copyright contributors to the openassistant project
import { ReactNode, useEffect, useState } from 'react';
import {
MessageModel,
useAssistant,
UseAssistantProps,
} from '@openassistant/core';
import { generateId } from '@openassistant/utils';
import { Message } from '@ai-sdk/ui-utils';
import MessageCard from './message-card';
import PromptInputWithBottomActions from './prompt-input-with-bottom-actions';
import { ChatContainer } from './chat-container';
import {
createWelcomeMessage,
sendImageMessageHandler,
sendTextMessageHandler,
} from './assistant-utils';
/**
 * Props for the AiAssistant component.
 *
 * Extends UseAssistantProps (model/provider/tool configuration from
 * `@openassistant/core`) with UI-specific options for theming, avatars,
 * voice input, and screen capture.
 */
export type AiAssistantProps = UseAssistantProps & {
  /** The color theme of the assistant UI ('dark' or 'light'). */
  theme?: 'dark' | 'light';
  /** The welcome message of the assistant. */
  welcomeMessage: string | ReactNode;
  /** The ideas of the assistant, which will be shown above the prompt input box. */
  ideas?: { title: string; description: string }[];
  /** The callback function to handle refreshing the ideas. */
  onRefreshIdeas?: () => void;
  /** Set the avatar of the user. Accepts a React node or a string (presumably an image URL — confirm against MessageCard). */
  userAvatar?: ReactNode | string;
  /** Set the avatar of the assistant. Accepts a React node or a string (presumably an image URL — confirm against MessageCard). */
  assistantAvatar?: ReactNode | string;
  /** Set the flag to indicate if the message is draggable. */
  isMessageDraggable?: boolean;
  /** Set the flag to indicate if voice (audio-to-text) input is enabled. */
  enableVoice?: boolean;
  /** Set the flag to indicate if the screen capture is enabled. */
  enableScreenCapture?: boolean;
  /**
   * The captured screenshot as a base64 data URL. When it starts with
   * "data:image", the next prompt is sent as an image message instead of a
   * text message, and the screenshot is removed afterwards.
   */
  screenCapturedBase64?: string;
  /** The prompt associated with the captured screenshot. */
  screenCapturedPrompt?: string;
  /** The callback function to handle the screenshot click. */
  onScreenshotClick?: () => void;
  /** The callback function to handle removing the screenshot. */
  onRemoveScreenshot?: () => void;
  /** The callback function to handle feedback; receives the reported question text. */
  onFeedback?: (question: string) => void;
  /** The callback function invoked when the message list is updated. */
  onMessagesUpdated?: (messages: MessageModel[]) => void;
  /** The callback function invoked when a tool call finishes, with its call id and any additional data. */
  onToolFinished?: (toolCallId: string, additionalData: unknown) => void;
  /** The callback function invoked after the chat has been restarted. */
  onRestartChat?: () => void;
  /** The font size of the assistant. */
  fontSize?: string;
  /** The class name applied to bot (assistant) messages. */
  botMessageClassName?: string;
  /** The class name applied to user messages. */
  userMessageClassName?: string;
  /** The link to the GitHub issue page. */
  githubIssueLink?: string;
  /** The flag to indicate if markdown rendering is enabled. */
  useMarkdown?: boolean;
  /** The flag to indicate if the tools are shown. */
  showTools?: boolean;
  /** The initial messages of the assistant, used both as the initial UI message list and (via rebuildMessages) as the hook's history. */
  initialMessages?: MessageModel[];
};
/**
 * Converts persisted chat history (MessageModel[]) into the Message format
 * consumed by the useAssistant hook.
 *
 * Outgoing entries become 'user' messages (parts kept as-is); incoming
 * entries become 'assistant' messages with each part's `additionalData`
 * cleared. Incoming entries that carry no parts are dropped.
 */
function rebuildMessages(historyMessages: MessageModel[]): Message[] {
  const rebuilt: Message[] = [];
  for (const historyMessage of historyMessages) {
    const parts = historyMessage.messageContent?.parts;
    if (historyMessage.direction === 'outgoing') {
      // User message: keep parts unchanged (empty array when none exist).
      rebuilt.push({
        id: generateId(),
        role: 'user',
        content: '',
        parts: parts || [],
      });
      continue;
    }
    if (historyMessage.direction === 'incoming' && parts?.length) {
      // Assistant message: strip the "additionalData" property from each part.
      rebuilt.push({
        id: generateId(),
        role: 'assistant',
        content: '',
        parts: parts.map((part) => ({ ...part, additionalData: undefined })),
      });
    }
  }
  return rebuilt;
}
/**
 * Main AI Assistant component for React applications.
 *
 * @param props - The props of the Assistant component. See {@link AiAssistantProps} for more details.
 * @returns The rendered AI Assistant component.
 */
export function AiAssistant(props: AiAssistantProps) {
const [messages, setMessages] = useState(
props.initialMessages && props.initialMessages.length > 0
? props.initialMessages
: []
);
const [isPrompting, setIsPrompting] = useState(false);
const {
stopChat,
restartChat,
sendTextMessage,
sendImageMessage,
audioToText,
getComponents,
initializeAssistant,
} = useAssistant({
chatEndpoint: props.chatEndpoint,
voiceEndpoint: props.voiceEndpoint,
modelProvider: props.modelProvider,
model: props.model,
apiKey: props.apiKey,
instructions: props.instructions,
tools: props.tools,
name: props.name,
description: props.description,
version: props.version,
baseUrl: props.baseUrl,
historyMessages: rebuildMessages(props.initialMessages || []),
});
// when instructions change, initialize the assistant
useEffect(() => {
initializeAssistant();
}, [initializeAssistant, props.instructions]);
const isScreenshotAvailable =
props.screenCapturedBase64?.startsWith('data:image');
/**
* Handles sending a message, either as text or image based on the presence of a screenshot.
* @param {string} message - The message to be sent.
*/
const onSendMessage = async (message: string) => {
const messageHandlerProps = {
newMessage: message,
messages,
setMessages,
setTypingIndicator: setIsPrompting,
onMessagesUpdated: props.onMessagesUpdated,
onToolFinished: props.onToolFinished ?? (() => {}),
};
if (isScreenshotAvailable) {
// Handle image message
await sendImageMessageHandler({
...messageHandlerProps,
imageBase64String: props.screenCapturedBase64!,
sendImageMessage,
});
// delete the screenshot
props.onRemoveScreenshot?.();
} else {
// Handle text message
await sendTextMessageHandler({
...messageHandlerProps,
sendTextMessage,
});
}
};
/**
* Handles voice messages by converting audio to text.
* @param {Blob} audioBlob - The audio blob to be converted to text.
* @returns {Promise} The transcribed text from the audio, or an empty string if transcription fails.
*/
const onVoiceMessage = async (audioBlob: Blob) => {
return (await audioToText(audioBlob)) || '';
};
/**
* Stops the currently running chat and updates the message list.
* This function is called when the user wants to interrupt the ongoing conversation.
*/
const onStopChat = () => {
// Set the prompting state to false to indicate that the chat has stopped
setIsPrompting(false);
// stop processing
stopChat();
};
const reportQuestion = (messageIndex: number) => {
// report the message
const question = `${messages[messageIndex]}`;
if (props.onFeedback) {
props.onFeedback(question || '');
}
};
/**
* Restart the current chat
*/
const onRestartChat = async () => {
// set the prompting state to false
setIsPrompting(false);
// reset the messages
setMessages([]);
// restart the assistant
await restartChat();
// call the onRestartChat callback
props.onRestartChat?.();
};
// scroll to bottom when new message is added
useEffect(() => {
// hack to scroll to bottom
const element = document.getElementById('chat-message-list');
if (element?.firstElementChild) {
element.scrollTop = element.firstElementChild.scrollHeight + 100;
}
}, [messages]);
const getAvatar = (direction: string | number) => {
return direction === 'incoming'
? props.assistantAvatar ||
: props.userAvatar;
};
return (