import { ClipboardEvent, useEffect, useMemo, useRef, useState } from 'react';
import { CallbackGeneratedChunk, useAppContext } from '../utils/app.context';
import ChatMessage from './ChatMessage';
import { CanvasType, Message, PendingMessage } from '../utils/types';
import { classNames, cleanCurrentUrl } from '../utils/misc';
import CanvasPyInterpreter from './CanvasPyInterpreter';
import StorageUtils from '../utils/storage';
import { useVSCodeContext } from '../utils/llama-vscode';
import { useChatTextarea, ChatTextareaApi } from './useChatTextarea.ts';
import {
  ArrowUpIcon,
  StopIcon,
  PaperClipIcon,
} from '@heroicons/react/24/solid';
import {
  ChatExtraContextApi,
  useChatExtraContext,
} from './useChatExtraContext.tsx';
import Dropzone from 'react-dropzone';
import toast from 'react-hot-toast';
import ChatInputExtraContextItem from './ChatInputExtraContextItem.tsx';
import { scrollToBottom, useChatScroll } from './useChatScroll.tsx';

/**
 * A message display is a message node with additional information for rendering.
 * For example, siblings of the message node are stored as their last node (aka leaf node).
 */
export interface MessageDisplay {
  msg: Message | PendingMessage;
  siblingLeafNodeIds: Message['id'][];
  siblingCurrIdx: number;
  isPending?: boolean;
}

/**
 * If the current URL contains "?m=...", prefill the message input with the value.
 * If the current URL contains "?q=...", prefill and SEND the message.
 */
const prefilledMsg = {
  content() {
    const url = new URL(window.location.href);
    return url.searchParams.get('m') ?? url.searchParams.get('q') ?? '';
  },
  shouldSend() {
    const url = new URL(window.location.href);
    return url.searchParams.has('q');
  },
  clear() {
    cleanCurrentUrl(['m', 'q']);
  },
};

function getListMessageDisplay(
  msgs: Readonly<Message[]>,
  leafNodeId: Message['id']
): MessageDisplay[] {
  const currNodes = StorageUtils.filterByLeafNodeId(msgs, leafNodeId, true);
  const res: MessageDisplay[] = [];
  const nodeMap = new Map<Message['id'], Message>();
  for (const msg of msgs) {
    nodeMap.set(msg.id, msg);
  }
  // find leaf node from a message node
  const findLeafNode = (msgId: Message['id']): Message['id'] => {
    let currNode: Message | undefined = nodeMap.get(msgId);
    while (currNode) {
      if (currNode.children.length === 0) break;
      currNode = nodeMap.get(currNode.children.at(-1) ?? -1);
    }
    return currNode?.id ?? -1;
  };
  // traverse the current nodes
  for (const msg of currNodes) {
    const parentNode = nodeMap.get(msg.parent ?? -1);
    if (!parentNode) continue;
    const siblings = parentNode.children;
    if (msg.type !== 'root') {
      res.push({
        msg,
        siblingLeafNodeIds: siblings.map(findLeafNode),
        siblingCurrIdx: siblings.indexOf(msg.id),
      });
    }
  }
  return res;
}

export default function ChatScreen() {
  const {
    viewingChat,
    sendMessage,
    isGenerating,
    stopGenerating,
    pendingMessages,
    canvasData,
    replaceMessageAndGenerate,
  } = useAppContext();

  const textarea: ChatTextareaApi = useChatTextarea(prefilledMsg.content());

  const extraContext = useChatExtraContext();
  useVSCodeContext(textarea, extraContext);

  const msgListRef = useRef<HTMLDivElement>(null);
  useChatScroll(msgListRef);

  // keep track of leaf node for rendering
  const [currNodeId, setCurrNodeId] = useState<Message['id']>(-1);
  const messages: MessageDisplay[] = useMemo(() => {
    if (!viewingChat) return [];
    else return getListMessageDisplay(viewingChat.messages, currNodeId);
  }, [currNodeId, viewingChat]);

  const currConvId = viewingChat?.conv.id ?? null;
  const pendingMsg: PendingMessage | undefined =
    pendingMessages[currConvId ?? ''];

  useEffect(() => {
    // reset to latest node when conversation changes
    setCurrNodeId(-1);
    // scroll to bottom when conversation changes
    scrollToBottom(false, 1);
  }, [currConvId]);

  const onChunk: CallbackGeneratedChunk = (currLeafNodeId?: Message['id']) => {
    if (currLeafNodeId) {
      setCurrNodeId(currLeafNodeId);
    }
    // useChatScroll will handle the auto scroll
  };

  const sendNewMessage = async () => {
    const lastInpMsg = textarea.value();
    if (lastInpMsg.trim().length === 0 || isGenerating(currConvId ?? '')) {
      toast.error('Please enter a message');
      return;
    }

    textarea.setValue('');
    scrollToBottom(false);
    setCurrNodeId(-1);
    // get the last message node
    const lastMsgNodeId = messages.at(-1)?.msg.id ?? null;
    if (
      !(await sendMessage(
        currConvId,
        lastMsgNodeId,
        lastInpMsg,
        extraContext.items,
        onChunk
      ))
    ) {
      // restore the input message if failed
      textarea.setValue(lastInpMsg);
    }
    // OK
    extraContext.clearItems();
  };

  // for vscode context
  textarea.refOnSubmit.current = sendNewMessage;

  const handleEditMessage = async (msg: Message, content: string) => {
    if (!viewingChat) return;
    setCurrNodeId(msg.id);
    scrollToBottom(false);
    await replaceMessageAndGenerate(
      viewingChat.conv.id,
      msg.parent,
      content,
      msg.extra,
      onChunk
    );
    setCurrNodeId(-1);
    scrollToBottom(false);
  };

  const handleRegenerateMessage = async (msg: Message) => {
    if (!viewingChat) return;
    setCurrNodeId(msg.parent);
    scrollToBottom(false);
    await replaceMessageAndGenerate(
      viewingChat.conv.id,
      msg.parent,
      null,
      msg.extra,
      onChunk
    );
    setCurrNodeId(-1);
    scrollToBottom(false);
  };

  const hasCanvas = !!canvasData;

  useEffect(() => {
    if (prefilledMsg.shouldSend()) {
      // send the prefilled message if needed
      sendNewMessage();
    } else {
      // otherwise, focus on the input
      textarea.focus();
    }
    prefilledMsg.clear();
    // no need to keep track of sendNewMessage
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [textarea.ref]);

  // due to some timing issues of StorageUtils.appendMsg(), we need to make sure
  // the pendingMsg is not duplicated upon rendering (i.e. appears once in the
  // saved conversation and once in the pendingMsg)
  const pendingMsgDisplay: MessageDisplay[] =
    pendingMsg && messages.at(-1)?.msg.id !== pendingMsg.id
      ? [
          {
            msg: pendingMsg,
            siblingLeafNodeIds: [],
            siblingCurrIdx: 0,
            isPending: true,
          },
        ]
      : [];

  return (
    <div
      className={classNames({
        'grid lg:gap-8 grow transition-[300ms]': true,
        'grid-cols-[1fr_0fr] lg:grid-cols-[1fr_1fr]': hasCanvas,
        'grid-cols-[1fr_0fr]': !hasCanvas,
      })}
    >
      <div
        className={classNames({
          'flex flex-col w-full max-w-[900px] mx-auto': true,
          'hidden lg:flex': hasCanvas,
          flex: !hasCanvas,
        })}
      >
        {/* chat messages */}
        <div
          id="messages-list"
          className="grow overflow-y-auto"
          ref={msgListRef}
        >
          <div className="mt-auto flex flex-col items-center justify-center">
            {/* placeholder to shift the message to the bottom */}
            {viewingChat ? (
              ''
            ) : (
              <>
                <b>Send a message to start</b>
                <ServerInfo />
              </>
            )}
          </div>
          {[...messages, ...pendingMsgDisplay].map((msg) => (
            <ChatMessage
              key={msg.msg.id}
              msg={msg.msg}
              siblingLeafNodeIds={msg.siblingLeafNodeIds}
              siblingCurrIdx={msg.siblingCurrIdx}
              isPending={msg.isPending}
              onRegenerateMessage={handleRegenerateMessage}
              onEditMessage={handleEditMessage}
              onChangeSibling={setCurrNodeId}
            />
          ))}
        </div>

        {/* chat input */}
        <ChatInput
          textarea={textarea}
          extraContext={extraContext}
          onSend={sendNewMessage}
          onStop={() => stopGenerating(currConvId ?? '')}
          isGenerating={isGenerating(currConvId ?? '')}
        />
      </div>

      {/* canvas area (only rendered when a canvas is open) */}
      <div className="w-full sticky top-[7em] h-[calc(100vh-9em)]">
        {canvasData?.type === CanvasType.PY_INTERPRETER && (
          <CanvasPyInterpreter />
        )}
      </div>
    </div>
  );
}

function ServerInfo() {
  const { serverProps } = useAppContext();
  return (
    <div className="card card-sm shadow-sm border-1 border-base-content/20 text-base-content/70 mb-6">
      <div className="card-body">
        <b>Server Info</b>
        <p>
          <b>Model</b>: {serverProps?.model_path?.split(/(\\|\/)/).pop()}
          <br />
          <b>Build</b>: {serverProps?.build_info}
          <br />
        </p>
      </div>
    </div>
  );
}

function ChatInput({
  textarea,
  extraContext,
  onSend,
  onStop,
  isGenerating,
}: {
  textarea: ChatTextareaApi;
  extraContext: ChatExtraContextApi;
  onSend: () => void;
  onStop: () => void;
  isGenerating: boolean;
}) {
  const { config } = useAppContext();
  const [isDrag, setIsDrag] = useState(false);

  return (
    <div
      className={classNames({
        'flex items-end pt-8 pb-6 sticky bottom-0 bg-base-100': true,
        'opacity-50': isDrag, // visual feedback that the dropped file will be accepted
      })}
    >
      <Dropzone
        noClick
        onDrop={(files: File[]) => {
          setIsDrag(false);
          extraContext.onFileAdded(files);
        }}
        onDragEnter={() => setIsDrag(true)}
        onDragLeave={() => setIsDrag(false)}
        multiple={true}
      >
        {({ getRootProps, getInputProps }) => (
          <div
            className="flex flex-col rounded-xl border-1 border-base-content/30 p-3 w-full"
            onPaste={(e: ClipboardEvent<HTMLDivElement>) => {
              const text = e.clipboardData.getData('text/plain');
              if (
                text.length > 0 &&
                config.pasteLongTextToFileLen > 0 &&
                text.length > config.pasteLongTextToFileLen
              ) {
                // if the text is too long, we will convert it to a file
                extraContext.addItems([
                  {
                    type: 'context',
                    name: 'Pasted Content',
                    content: text,
                  },
                ]);
                e.preventDefault();
                return;
              }
              // if a file is pasted, we will handle it here
              const files = Array.from(e.clipboardData.items)
                .filter((item) => item.kind === 'file')
                .map((item) => item.getAsFile())
                .filter((file) => file !== null);
              if (files.length > 0) {
                e.preventDefault();
                extraContext.onFileAdded(files);
              }
            }}
            {...getRootProps()}
          >
            {!isGenerating && (
              <ChatInputExtraContextItem
                items={extraContext.items}
                removeItem={extraContext.removeItem}
              />
            )}

            <div className="flex flex-row w-full items-end">
              <textarea
                className="text-md outline-none border-none w-full resize-none"
                placeholder="Type a message (Shift+Enter to add a new line)"
                ref={textarea.ref}
                onInput={textarea.onInput}
                id="msg-input"
                dir="auto"
              ></textarea>

              {/* buttons area */}
              <div className="flex flex-row gap-2 ml-2">
                <label
                  className="btn w-8 h-8 p-0 rounded-full"
                  aria-label="Attach file"
                  tabIndex={0}
                  role="button"
                >
                  <PaperClipIcon className="h-5 w-5" />
                  <input {...getInputProps()} hidden />
                </label>
                {isGenerating ? (
                  <button
                    className="btn btn-neutral w-8 h-8 p-0 rounded-full"
                    onClick={onStop}
                    aria-label="Stop generating"
                  >
                    <StopIcon className="h-5 w-5" />
                  </button>
                ) : (
                  <button
                    className="btn btn-primary w-8 h-8 p-0 rounded-full"
                    onClick={onSend}
                    aria-label="Send message"
                  >
                    <ArrowUpIcon className="h-5 w-5" />
                  </button>
                )}
              </div>
            </div>
          </div>
        )}
      </Dropzone>
    </div>
  );
}