// Copyright 2025 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// NOTE(review): this file appears to have been whitespace-mangled and had
// angle-bracketed text stripped: the HTML markup inside the Lit `html`
// templates is missing, and TypeScript generic arguments (e.g.
// `Promise<void>`, `createRef<HTMLTextAreaElement>`, `Lit.Directives.Ref<...>`,
// `new Promise<string>`) are gone. All tokens below are kept exactly as
// found; restore the stripped markup/generics from the upstream source.

import '../../../ui/components/tooltips/tooltips.js';
import type * as Host from '../../../core/host/host.js';
import * as i18n from '../../../core/i18n/i18n.js';
import type * as Platform from '../../../core/platform/platform.js';
import * as SDK from '../../../core/sdk/sdk.js';
import * as Protocol from '../../../generated/protocol.js';
import * as AiAssistanceModel from '../../../models/ai_assistance/ai_assistance.js';
import * as PanelsCommon from '../../../panels/common/common.js';
import * as PanelUtils from '../../../panels/utils/utils.js';
import * as Buttons from '../../../ui/components/buttons/buttons.js';
import * as Input from '../../../ui/components/input/input.js';
import * as Snackbars from '../../../ui/components/snackbars/snackbars.js';
import * as UI from '../../../ui/legacy/legacy.js';
import * as Lit from '../../../ui/lit/lit.js';
import * as VisualLogging from '../../../ui/visual_logging/visual_logging.js';

import chatInputStyles from './chatInput.css.js';

const {html, Directives: {createRef, ref}} = Lit;

const UIStrings = {
  /**
   * @description Label added to the text input to describe the context for screen readers. Not shown visibly on screen.
   */
  inputTextAriaDescription: 'You can also use one of the suggested prompts above to start your conversation',
  /**
   * @description Label added to the button that reveals the selected context item in DevTools
   */
  revealContextDescription: 'Reveal the selected context item in DevTools',
  /**
   * @description The footer disclaimer that links to more information about the AI feature.
   */
  learnAbout: 'Learn about AI in DevTools',
} as const;

/*
 * Strings that don't need to be translated at this time.
 */
const UIStringsNotTranslate = {
  /**
   * @description Title for the send icon button.
   */
  sendButtonTitle: 'Send',
  /**
   * @description Title for the start new chat
   */
  startNewChat: 'Start new chat',
  /**
   * @description Title for the cancel icon button.
   */
  cancelButtonTitle: 'Cancel',
  /**
   * @description Label for the "select an element" button.
   */
  selectAnElement: 'Select an element',
  /**
   * @description Title for the take screenshot button.
   */
  takeScreenshotButtonTitle: 'Take screenshot',
  /**
   * @description Title for the remove image input button.
   */
  removeImageInputButtonTitle: 'Remove image input',
  /**
   * @description Title for the add image button.
   */
  addImageButtonTitle: 'Add image',
  /**
   * @description Text displayed when the chat input is disabled due to reading past conversation.
   */
  pastConversation: 'You\'re viewing a past conversation.',
  /**
   * @description Message displayed in toast in case of any failures while taking a screenshot of the page.
   */
  screenshotFailureMessage: 'Failed to take a screenshot. Please try again.',
  /**
   * @description Message displayed in toast in case of any failures while uploading an image file as input.
   */
  uploadImageFailureMessage: 'Failed to upload image. Please try again.',
  /**
   * @description Label added to the button that add selected context from the current panel in AI Assistance panel.
   */
  addContext: 'Add item for context',
  /**
   * @description Label added to the button that remove the currently selected element in AI Assistance panel.
   */
  removeContextElement: 'Remove element from context',
  /**
   * @description Label added to the button that remove the currently selected context in AI Assistance panel.
   */
  removeContextRequest: 'Remove request from context',
  /**
   * @description Label added to the button that remove the currently selected context in AI Assistance panel.
   */
  removeContextFile: 'Remove file from context',
  /**
   * @description Label added to the button that remove the currently selected context in AI Assistance panel.
   */
  removeContextPerfInsight: 'Remove performance insight from context',
  /**
   * @description Label added to the button that remove the currently selected context in AI Assistance panel.
   */
  removeContext: 'Remove from context',
} as const;

const str_ = i18n.i18n.registerUIStrings('panels/ai_assistance/components/ChatInput.ts', UIStrings);
const i18nString = i18n.i18n.getLocalizedString.bind(undefined, str_);
const lockedString = i18n.i18n.lockedString;

// JPEG quality used when capturing a viewport screenshot for multimodal input.
const SCREENSHOT_QUALITY = 80;
const JPEG_MIME_TYPE = 'image/jpeg';
// Delay (ms) before showing the image-loading state, so fast loads don't flicker.
const SHOW_LOADING_STATE_TIMEOUT = 100;
const RELEVANT_DATA_LINK_CHAT_ID = 'relevant-data-link-chat';
const RELEVANT_DATA_LINK_FOOTER_ID = 'relevant-data-link-footer';

// Discriminated union on `isLoading`: either an in-flight image load, or the
// loaded image payload (base64 `data` plus its MIME type and input kind).
export type ImageInputData = {
  isLoading: true,
}|{
  isLoading: false,
  data: string,
  mimeType: string,
  inputType: AiAssistanceModel.AiAgent.MultimodalInputType,
};

// All state and callbacks the view function needs to render the chat input.
export interface ViewInput {
  isLoading: boolean;
  isTextInputEmpty: boolean;
  blockedByCrossOrigin: boolean;
  isTextInputDisabled: boolean;
  inputPlaceholder: Platform.UIString.LocalizedString;
  selectedContext: AiAssistanceModel.AiAgent.ConversationContext|null;
  inspectElementToggled: boolean;
  disclaimerText: string;
  conversationType: AiAssistanceModel.AiHistoryStorage.ConversationType;
  multimodalInputEnabled: boolean;
  imageInput?: ImageInputData;
  uploadImageInputEnabled: boolean;
  isReadOnly: boolean;
  textAreaRef: Lit.Directives.Ref;
  onContextClick: () => void;
  onInspectElementClick: () => void;
  onSubmit: (ev: SubmitEvent) => void;
  onTextAreaKeyDown: (ev: KeyboardEvent) => void;
  onCancel: (ev: SubmitEvent) => void;
  onNewConversation: () => void;
  onTextInputChange: (input: string) => void;
  onTakeScreenshot: () => void;
  onRemoveImageInput: () => void;
  onImageUpload: (ev: Event) => void;
  onImagePaste: (event: ClipboardEvent) => void;
  onImageDragOver: (event: DragEvent) => void;
  onImageDrop: (event: DragEvent) => void;
  onContextRemoved: (() => void)|null;
  onContextAdd: (() => void)|null;
}

export type ViewOutput = undefined;

/**
 * Maps the selected conversation context to the matching "remove from
 * context" label, falling back to the generic `removeContext` string.
 */
function getContextRemoveLabel(context: AiAssistanceModel.AiAgent.ConversationContext): Platform.UIString.LocalizedString {
  if (context instanceof AiAssistanceModel.FileAgent.FileContext) {
    return lockedString(UIStringsNotTranslate.removeContextFile);
  }
  if (context instanceof AiAssistanceModel.StylingAgent.NodeContext) {
    return lockedString(UIStringsNotTranslate.removeContextElement);
  }
  if (context instanceof AiAssistanceModel.NetworkAgent.RequestContext) {
    return lockedString(UIStringsNotTranslate.removeContextRequest);
  }
  if (context instanceof AiAssistanceModel.PerformanceAgent.PerformanceTraceContext) {
    return lockedString(UIStringsNotTranslate.removeContextPerfInsight);
  }
  return lockedString(UIStringsNotTranslate.removeContext);
}

/**
 * Default view function: renders the chat input area into `target`.
 * NOTE(review): the HTML markup inside the templates below has been stripped
 * by the mangling; the interpolation structure is preserved verbatim.
 */
export const DEFAULT_VIEW = (input: ViewInput, _output: ViewOutput, target: HTMLElement): void => {
  const chatInputContainerCls = Lit.Directives.classMap({
    'chat-input-container': true,
    // Collapse to a single-line layout when there is no context row to show.
    'single-line-layout': !input.selectedContext && !input.onContextAdd,
    disabled: input.isTextInputDisabled,
  });

  // Renders the "relevant data is sent to Google" disclaimer for the given
  // tooltip id (used once in the chat area and once in the footer).
  const renderRelevantDataDisclaimer = (tooltipId: string): Lit.LitTemplate => {
    const classes = Lit.Directives.classMap({
      'chat-input-disclaimer': true,
      'hide-divider': !input.isLoading && input.blockedByCrossOrigin,
    });
    // clang-format off
    return html`
 ${lockedString('is sent to Google')}
${input.disclaimerText}
`;
    // clang-format on
  };

  // clang-format off
  Lit.render(html` ${input.isReadOnly ? html`
${lockedString(UIStringsNotTranslate.pastConversation)} ${lockedString(UIStringsNotTranslate.startNewChat)}
` : html`
${(input.multimodalInputEnabled && input.imageInput && !input.isTextInputDisabled) ? html`
${input.imageInput.isLoading ? html`
` : html` Image input` }
` : Lit.nothing}
${input.selectedContext ? html`
${input.conversationType === AiAssistanceModel.AiHistoryStorage.ConversationType.STYLING ? html` ` : Lit.nothing}
` : input.onContextAdd ? html` ` : Lit.nothing}
${renderRelevantDataDisclaimer(RELEVANT_DATA_LINK_CHAT_ID)}
${(input.multimodalInputEnabled && !input.blockedByCrossOrigin) ? html` ${input.uploadImageInputEnabled ? html` ` : Lit.nothing} ` : Lit.nothing} ${input.isLoading ? html` ` : input.blockedByCrossOrigin ? html` ${lockedString(UIStringsNotTranslate.startNewChat)}` : html` ` }
` }
${renderRelevantDataDisclaimer(RELEVANT_DATA_LINK_FOOTER_ID)}
`, target,);
  // clang-format on
};

/**
 * ChatInput is a presenter for the input area in the AI Assistance panel.
 */
export class ChatInput extends UI.Widget.Widget implements SDK.TargetManager.Observer {
  // Public view state set by the owning panel; performUpdate() snapshots these
  // into a ViewInput on every render.
  isLoading = false;
  blockedByCrossOrigin = false;
  isTextInputDisabled = false;
  inputPlaceholder = '' as Platform.UIString.LocalizedString;
  selectedContext: AiAssistanceModel.AiAgent.ConversationContext|null = null;
  inspectElementToggled = false;
  disclaimerText = '';
  conversationType = AiAssistanceModel.AiHistoryStorage.ConversationType.STYLING;
  multimodalInputEnabled = false;
  uploadImageInputEnabled = false;
  isReadOnly = false;
  // Ref to the text area element rendered by the view.
  #textAreaRef = createRef();
  // Pending multimodal image input (screenshot or upload), if any.
  #imageInput?: ImageInputData;

  // Replaces the text area content and re-renders (e.g. to clear after submit).
  setInputValue(text: string): void {
    if (this.#textAreaRef.value) {
      this.#textAreaRef.value.value = text;
    }
    this.performUpdate();
  }

  #isTextInputEmpty(): boolean {
    return !this.#textAreaRef.value?.value?.trim();
  }

  // Callbacks assigned by the owning panel; default to no-ops / null.
  onTextSubmit: (text: string, imageInput?: Host.AidaClient.Part, multimodalInputType?: AiAssistanceModel.AiAgent.MultimodalInputType) => void = () => {};
  onContextClick = (): void => {};
  onInspectElementClick = (): void => {};
  onCancelClick = (): void => {};
  onNewConversation = (): void => {};
  onContextRemoved: (() => void)|null = null;
  onContextAdd: (() => void)|null = null;

  /**
   * Captures a JPEG screenshot of the primary page viewport and stores it as
   * the pending image input; shows a snackbar on failure.
   * @throws Error when there is no primary page target or capture model.
   */
  async #handleTakeScreenshot(): Promise {
    const mainTarget = SDK.TargetManager.TargetManager.instance().primaryPageTarget();
    if (!mainTarget) {
      throw new Error('Could not find main target');
    }
    const model = mainTarget.model(SDK.ScreenCaptureModel.ScreenCaptureModel);
    if (!model) {
      throw new Error('Could not find model');
    }
    // Only show the loading state if capture takes noticeably long; the
    // timeout is cleared below once the screenshot resolves.
    const showLoadingTimeout = setTimeout(() => {
      this.#imageInput = {isLoading: true};
      this.performUpdate();
    }, SHOW_LOADING_STATE_TIMEOUT);
    const bytes = await model.captureScreenshot(
        Protocol.Page.CaptureScreenshotRequestFormat.Jpeg,
        SCREENSHOT_QUALITY,
        SDK.ScreenCaptureModel.ScreenshotMode.FROM_VIEWPORT,
    );
    clearTimeout(showLoadingTimeout);
    if (bytes) {
      // Success: the screenshot bytes become the pending image input.
      this.#imageInput = {
        isLoading: false,
        data: bytes,
        mimeType: JPEG_MIME_TYPE,
        inputType: AiAssistanceModel.AiAgent.MultimodalInputType.SCREENSHOT
      };
      this.performUpdate();
      void this.updateComplete.then(() => {
        this.focusTextInput();
      });
    } else {
      this.#imageInput = undefined;
      this.performUpdate();
      Snackbars.Snackbar.Snackbar.show({message: lockedString(UIStringsNotTranslate.screenshotFailureMessage)});
    }
  }

  // SDK.TargetManager.Observer interface; intentionally empty.
  targetAdded(_target: SDK.Target.Target): void {
  }

  targetRemoved(_target: SDK.Target.Target): void {
  }

  #handleRemoveImageInput(): void {
    this.#imageInput = undefined;
    this.performUpdate();
    void this.updateComplete.then(() => {
      this.focusTextInput();
    });
  }

  // Shared handler for paste and drop: loads the first image file found in
  // the transfer, but only for STYLING conversations.
  #handleImageDataTransferEvent(dataTransfer: DataTransfer|null, event: Event): void {
    if (this.conversationType !== AiAssistanceModel.AiHistoryStorage.ConversationType.STYLING) {
      return;
    }
    const files = dataTransfer?.files;
    if (!files || files.length === 0) {
      return;
    }
    const imageFile = Array.from(files).find(file => file.type.startsWith('image/'));
    if (!imageFile) {
      return;
    }
    event.preventDefault();
    void this.#handleLoadImage(imageFile);
  }

  #handleImagePaste = (event: ClipboardEvent): void => {
    this.#handleImageDataTransferEvent(event.clipboardData, event);
  };

  #handleImageDragOver = (event: DragEvent): void => {
    if (this.conversationType !== AiAssistanceModel.AiHistoryStorage.ConversationType.STYLING) {
      return;
    }
    event.preventDefault();
  };

  #handleImageDrop = (event: DragEvent): void => {
    this.#handleImageDataTransferEvent(event.dataTransfer, event);
  };

  /**
   * Reads `file` as a data URL and stores its base64 payload as the pending
   * image input; shows a snackbar on failure.
   */
  async #handleLoadImage(file: File): Promise {
    // Deferred loading state, same pattern as #handleTakeScreenshot.
    const showLoadingTimeout = setTimeout(() => {
      this.#imageInput = {isLoading: true};
      this.performUpdate();
    }, SHOW_LOADING_STATE_TIMEOUT);
    try {
      const reader = new FileReader();
      const dataUrl = await new Promise((resolve, reject) => {
        reader.onload = () => {
          if (typeof reader.result === 'string') {
            resolve(reader.result);
          } else {
            reject(new Error('FileReader result was not a string.'));
          }
        };
        reader.readAsDataURL(file);
      });
      // Strip the "data:<mime>;base64," prefix; keep only the base64 payload.
      const commaIndex = dataUrl.indexOf(',');
      const bytes = dataUrl.substring(commaIndex + 1);
      this.#imageInput = {
        isLoading: false,
        data: bytes,
        mimeType: file.type,
        inputType: AiAssistanceModel.AiAgent.MultimodalInputType.UPLOADED_IMAGE
      };
    } catch {
      this.#imageInput = undefined;
      Snackbars.Snackbar.Snackbar.show({message: lockedString(UIStringsNotTranslate.uploadImageFailureMessage)});
    }
    clearTimeout(showLoadingTimeout);
    this.performUpdate();
    void this.updateComplete.then(() => {
      this.focusTextInput();
    });
  }

  #view: typeof DEFAULT_VIEW;

  constructor(element?: HTMLElement, view?: typeof DEFAULT_VIEW) {
    super(element);
    this.#view = view ?? DEFAULT_VIEW;
  }

  override wasShown(): void {
    super.wasShown();
    SDK.TargetManager.TargetManager.instance().addModelListener(
        SDK.ResourceTreeModel.ResourceTreeModel, SDK.ResourceTreeModel.Events.PrimaryPageChanged,
        this.#onPrimaryPageChanged, this);
  }

  override willHide(): void {
    super.willHide();
    SDK.TargetManager.TargetManager.instance().removeModelListener(
        SDK.ResourceTreeModel.ResourceTreeModel, SDK.ResourceTreeModel.Events.PrimaryPageChanged,
        this.#onPrimaryPageChanged, this);
  }

  // A primary-page navigation invalidates any pending image input.
  #onPrimaryPageChanged(): void {
    this.#imageInput = undefined;
    this.performUpdate();
  }

  // Snapshots current state into a ViewInput and renders via the view function.
  override performUpdate(): void {
    this.#view(
        {
          inputPlaceholder: this.inputPlaceholder,
          isLoading: this.isLoading,
          blockedByCrossOrigin: this.blockedByCrossOrigin,
          isTextInputDisabled: this.isTextInputDisabled,
          selectedContext: this.selectedContext,
          inspectElementToggled: this.inspectElementToggled,
          isTextInputEmpty: this.#isTextInputEmpty(),
          disclaimerText: this.disclaimerText,
          conversationType: this.conversationType,
          multimodalInputEnabled: this.multimodalInputEnabled,
          imageInput: this.#imageInput,
          uploadImageInputEnabled: this.uploadImageInputEnabled,
          isReadOnly: this.isReadOnly,
          textAreaRef: this.#textAreaRef,
          onContextClick: this.onContextClick,
          onInspectElementClick: this.onInspectElementClick,
          onImagePaste: this.#handleImagePaste,
          onNewConversation: this.onNewConversation,
          // Text changes only need a re-render (refreshes send-button state).
          onTextInputChange: () => {
            this.requestUpdate();
          },
          onTakeScreenshot: this.#handleTakeScreenshot.bind(this),
          onRemoveImageInput: this.#handleRemoveImageInput.bind(this),
          onSubmit: this.onSubmit,
          onTextAreaKeyDown: this.onTextAreaKeyDown,
          onCancel: this.onCancel,
          onImageUpload: this.onImageUpload,
          onImageDragOver: this.#handleImageDragOver,
          onImageDrop: this.#handleImageDrop,
          onContextRemoved: this.onContextRemoved,
          onContextAdd: this.onContextAdd,
        },
        undefined, this.contentElement);
  }

  focusTextInput(): void {
    this.#textAreaRef.value?.focus();
  }

  // Form submit: forwards text (plus a fully-loaded image, if any) to
  // onTextSubmit, then clears both the image and the text input.
  onSubmit = (event: SubmitEvent): void => {
    event.preventDefault();
    if (this.#imageInput?.isLoading) {
      return;
    }
    const imageInput = !this.#imageInput?.isLoading && this.#imageInput?.data ?
        {inlineData: {data: this.#imageInput.data, mimeType: this.#imageInput.mimeType}} :
        undefined;
    this.onTextSubmit(this.#textAreaRef.value?.value ?? '', imageInput, this.#imageInput?.inputType);
    this.#imageInput = undefined;
    this.setInputValue('');
  };

  onTextAreaKeyDown = (event: KeyboardEvent): void => {
    if (!event.target || !(event.target instanceof HTMLTextAreaElement)) {
      return;
    }
    // Go to a new line on Shift+Enter. On Enter, submit unless the
    // user is in IME composition.
    if (event.key === 'Enter' && !event.shiftKey && !event.isComposing) {
      event.preventDefault();
      if (!event.target?.value || this.#imageInput?.isLoading) {
        return;
      }
      const imageInput = !this.#imageInput?.isLoading && this.#imageInput?.data ?
          {inlineData: {data: this.#imageInput.data, mimeType: this.#imageInput.mimeType}} :
          undefined;
      this.onTextSubmit(event.target.value, imageInput, this.#imageInput?.inputType);
      this.#imageInput = undefined;
      this.setInputValue('');
    }
  };

  // Cancel is only meaningful while a request is in flight.
  onCancel = (ev: SubmitEvent): void => {
    ev.preventDefault();
    if (!this.isLoading) {
      return;
    }
    this.onCancelClick();
  };

  // Opens a file picker restricted to JPEG/PNG and loads the chosen image.
  onImageUpload = (ev: Event): void => {
    ev.stopPropagation();
    const fileSelector = UI.UIUtils.createFileSelectorElement(this.#handleLoadImage.bind(this), '.jpeg,.jpg,.png');
    fileSelector.click();
  };
}