<!--
Files
ai/web/src/components/ChatPanel.vue
2026-04-13 21:42:28 -04:00

745 lines
21 KiB
Vue
-->
<script setup lang="ts">
import { ref, onMounted, onUnmounted, computed, onBeforeUnmount, watch, nextTick } from 'vue'
import { register } from 'vue-advanced-chat'
import Badge from 'primevue/badge'
import Button from 'primevue/button'
import { wsManager } from '../composables/useWebSocket'
import type { WebSocketMessage } from '../composables/useWebSocket'
import { useChannelStore } from '../stores/channel'
register()
// Store that tracks workspace/channel readiness; gates rendering of the chat widget.
const channelStore = useChannelStore()
// Measure container height and feed a concrete pixel value to vue-advanced-chat,
// because height: 100% doesn't reliably resolve through flex chains.
const chatContainerRef = ref<HTMLElement | null>(null)
// Pixel height passed to <vue-advanced-chat>; kept in sync by the ResizeObserver below.
const chatHeight = ref('400px')
// Held at module scope so it can be disconnected on unmount.
let resizeObserver: ResizeObserver | null = null
// Keep chatHeight in sync with the rendered size of the chat container.
onMounted(() => {
  const container = chatContainerRef.value
  if (!container) return
  resizeObserver = new ResizeObserver((entries) => {
    const { height } = entries[0].contentRect
    if (height > 0) {
      chatHeight.value = `${height}px`
    }
  })
  resizeObserver.observe(container)
})
// Stop observing the container once the component is torn down.
onBeforeUnmount(() => {
  if (resizeObserver) {
    resizeObserver.disconnect()
  }
})
// Fixed identifiers for the single-room chat session.
const SESSION_ID = 'default'
const CURRENT_USER_ID = 'user-123'
const AGENT_ID = 'agent'
// REST endpoint used for file uploads and for resolving relative plot URLs.
const BACKEND_URL = 'http://localhost:8080'
// Chat state
const messages = ref<any[]>([])
const messagesLoaded = ref(false)
// Shared WebSocket connection status (reactive ref owned by wsManager).
const isConnected = wsManager.isConnected
// Single static room for the agent chat. typingUsers drives the widget's
// "typing…" indicator whenever the agent is processing (isAgentProcessing is
// declared further down; the computed only reads it lazily, after setup runs).
const rooms = computed(() => {
  const agentRoom = {
    roomId: SESSION_ID,
    roomName: 'AI Agent',
    avatar: null,
    users: [
      { _id: CURRENT_USER_ID, username: 'You' },
      { _id: AGENT_ID, username: 'AI Agent' }
    ],
    unreadCount: 0,
    typingUsers: isAgentProcessing.value ? [AGENT_ID] : []
  }
  return [agentRoom]
})
// Streaming state
// _id of the agent message currently receiving chunks (null when idle).
let currentStreamingMessageId: string | null = null
// _id of the ephemeral tool-call bubble, if one is currently shown.
let toolCallMessageId: string | null = null
// _id of the last sent user message, so it can be marked "seen" on first chunk.
let lastSentMessageId: string | null = null
// Accumulated text of the in-flight agent response.
let streamingBuffer = ''
// True from send until the final chunk; drives the typing indicator + Stop button.
const isAgentProcessing = ref(false)
// Show a transient agent bubble describing a tool call; any prior tool-call
// bubble is replaced first. Marked with `toolCall: true` so it can be styled
// and removed separately from real messages.
const addToolCallBubble = (label: string) => {
  removeToolCallBubble()
  toolCallMessageId = `tool-call-${Date.now()}`
  const now = new Date()
  const bubble = {
    _id: toolCallMessageId,
    content: `${label}`,
    senderId: AGENT_ID,
    timestamp: now.toTimeString().split(' ')[0].slice(0, 5),
    date: now.toLocaleDateString(),
    saved: false,
    distributed: false,
    seen: false,
    files: [],
    toolCall: true
  }
  messages.value = [...messages.value, bubble]
}
// Append a "↳ status" line to the current tool-call bubble, if one exists.
const appendToolCallStatus = (status: string) => {
  if (!toolCallMessageId) return
  const idx = messages.value.findIndex((m) => m._id === toolCallMessageId)
  if (idx === -1) return
  const bubble = messages.value[idx]
  messages.value[idx] = { ...bubble, content: `${bubble.content}\n↳ ${status}` }
  // Reassign the array so the web component notices the change.
  messages.value = [...messages.value]
}
// Drop the ephemeral tool-call bubble from the message list (no-op when none is shown).
const removeToolCallBubble = () => {
  if (!toolCallMessageId) return
  const staleId = toolCallMessageId
  toolCallMessageId = null
  messages.value = messages.value.filter((m) => m._id !== staleId)
}
// Generate a client-side unique message ID: "msg-<epoch ms>-<up to 9 base36 chars>".
// Uses slice() instead of the deprecated String.prototype.substr().
const generateMessageId = () => `msg-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`
// Storage for images received during streaming
// (cleared whenever a new streaming response starts and again when it completes).
const streamingImages = ref<any[]>([])
// Handle WebSocket messages
// Central dispatcher for every message type pushed by the backend:
//   conversation_history — replace the whole message list and mark it loaded
//   agent_tool_call      — show an ephemeral "tool call" bubble
//   subagent_tool_call   — append a status line to that bubble
//   subagent_chunk       — ignored (folded into the main agent response)
//   image                — attach to the streaming reply, or show standalone
//   agent_chunk          — stream text into the agent bubble; `done` finalizes
const handleMessage = (data: WebSocketMessage) => {
  console.log('[ChatPanel] Received message:', data)
  if (data.type === 'conversation_history') {
    messages.value = (data.messages as any[]).map((m: any) => {
      const ts = new Date(m.timestamp / 1000) // microseconds → ms
      // Inline stored binary attachments as data: URLs for display.
      const files = (m.files ?? []).map((b: any) => ({
        name: `image_${b.id}.png`,
        size: 0,
        type: b.mimeType.split('/')[1] ?? 'png',
        url: `data:${b.mimeType};base64,${b.data}`,
        preview: `data:${b.mimeType};base64,${b.data}`,
      }))
      return {
        _id: m.id,
        content: m.content,
        senderId: m.role === 'user' ? CURRENT_USER_ID : AGENT_ID,
        timestamp: ts.toTimeString().split(' ')[0].slice(0, 5), // "HH:MM"
        date: ts.toLocaleDateString(),
        saved: true,
        distributed: true,
        seen: true,
        files,
      }
    })
    messagesLoaded.value = true
    return
  }
  if (data.type === 'agent_tool_call') {
    addToolCallBubble(data.label ?? data.toolName ?? 'Tool call...')
    return
  }
  if (data.type === 'subagent_tool_call') {
    appendToolCallStatus(data.toolName ?? data.label ?? 'tool')
    return
  }
  if (data.type === 'subagent_chunk') {
    // Subagent final text — not shown separately; the main agent will incorporate it in its response
    return
  }
  if (data.type === 'image') {
    // Handle image message - attach to current streaming message or create standalone
    console.log('[ChatPanel] Processing image message')
    const imageFile = {
      name: `chart_${Date.now()}.png`,
      size: 0,
      type: 'png',
      url: `data:${data.mimeType};base64,${data.data}`,
      preview: `data:${data.mimeType};base64,${data.data}`
    }
    if (currentStreamingMessageId) {
      // Attach to current streaming message
      streamingImages.value.push(imageFile)
      const msgIndex = messages.value.findIndex(m => m._id === currentStreamingMessageId)
      if (msgIndex !== -1) {
        messages.value[msgIndex] = {
          ...messages.value[msgIndex],
          files: [...streamingImages.value]
        }
        // Reassign the array so the web component picks up the mutation.
        messages.value = [...messages.value]
      }
    } else {
      // No active streaming message - create a standalone image message
      const timestamp = new Date().toTimeString().split(' ')[0].slice(0, 5)
      messages.value = [...messages.value, {
        _id: generateMessageId(),
        content: data.caption || '',
        senderId: AGENT_ID,
        timestamp: timestamp,
        date: new Date().toLocaleDateString(),
        saved: true,
        distributed: true,
        seen: true,
        files: [imageFile]
      }]
    }
  } else if (data.type === 'agent_chunk') {
    console.log('[ChatPanel] Processing agent_chunk, content:', data.content, 'done:', data.done)
    const timestamp = new Date().toTimeString().split(' ')[0].slice(0, 5)
    if (!currentStreamingMessageId) {
      console.log('[ChatPanel] Starting new streaming message')
      // Remove any ephemeral tool-call bubble before starting the real response
      removeToolCallBubble()
      // Set up streaming state and mark user message as seen
      isAgentProcessing.value = true
      currentStreamingMessageId = generateMessageId()
      streamingBuffer = data.content
      streamingImages.value = []
      // Mark the last sent user message as seen (double-checkmark)
      if (lastSentMessageId) {
        const idx = messages.value.findIndex(m => m._id === lastSentMessageId)
        if (idx !== -1) {
          messages.value[idx] = { ...messages.value[idx], seen: true }
          messages.value = [...messages.value]
        }
        lastSentMessageId = null
      }
      // Only add the agent bubble once there is actual content to show
      if (data.content) {
        messages.value = [...messages.value, {
          _id: currentStreamingMessageId,
          content: streamingBuffer,
          senderId: AGENT_ID,
          timestamp: timestamp,
          date: new Date().toLocaleDateString(),
          saved: false,
          distributed: false,
          seen: false,
          files: []
        }]
      }
    } else {
      // Update (or lazily create) the streaming message
      streamingBuffer += data.content
      const msgIndex = messages.value.findIndex(m => m._id === currentStreamingMessageId)
      if (msgIndex !== -1) {
        const updatedMessage: any = {
          ...messages.value[msgIndex],
          content: streamingBuffer,
          files: [...streamingImages.value] // Include accumulated images
        }
        // Add plot images if present in metadata
        if (data.metadata && data.metadata.plot_urls && Array.isArray(data.metadata.plot_urls)) {
          const plotFiles = data.metadata.plot_urls.map((url: string, idx: number) => ({
            name: `plot_${idx + 1}.png`,
            size: 0,
            type: 'png',
            url: `${BACKEND_URL}${url}`,
            preview: `${BACKEND_URL}${url}`
          }))
          updatedMessage.files = [...updatedMessage.files, ...plotFiles]
        }
        messages.value[msgIndex] = updatedMessage
        messages.value = [...messages.value]
      } else if (streamingBuffer) {
        // First chunk with content after an empty ack — create the bubble now
        const timestamp2 = new Date().toTimeString().split(' ')[0].slice(0, 5)
        messages.value = [...messages.value, {
          _id: currentStreamingMessageId!,
          content: streamingBuffer,
          senderId: AGENT_ID,
          timestamp: timestamp2,
          date: new Date().toLocaleDateString(),
          saved: false,
          distributed: false,
          seen: false,
          files: [...streamingImages.value]
        }]
      }
    }
    if (data.done) {
      // Mark message as complete
      const msgIndex = messages.value.findIndex(m => m._id === currentStreamingMessageId)
      if (msgIndex !== -1) {
        const finalMessage: any = {
          ...messages.value[msgIndex],
          saved: true,
          distributed: true,
          seen: true,
          files: [...streamingImages.value] // Include all accumulated images
        }
        // Ensure plot images are included in final message
        if (data.metadata && data.metadata.plot_urls && Array.isArray(data.metadata.plot_urls)) {
          const plotFiles = data.metadata.plot_urls.map((url: string, idx: number) => ({
            name: `plot_${idx + 1}.png`,
            size: 0,
            type: 'png',
            url: `${BACKEND_URL}${url}`,
            preview: `${BACKEND_URL}${url}`
          }))
          finalMessage.files = [...finalMessage.files, ...plotFiles]
        }
        messages.value[msgIndex] = finalMessage
        messages.value = [...messages.value]
      }
      // Clear all streaming state so the next turn starts fresh.
      currentStreamingMessageId = null
      streamingBuffer = ''
      streamingImages.value = []
      isAgentProcessing.value = false
      removeToolCallBubble()
    }
  }
}
// Stop agent processing
const stopAgent = () => {
wsManager.send({ type: 'agent_stop', session_id: SESSION_ID })
isAgentProcessing.value = false
removeToolCallBubble()
lastSentMessageId = null
}
// Send message handler
// Invoked by vue-advanced-chat's `send-message` CustomEvent. Uploads any
// attached files to the backend, appends the user message to the UI, then
// forwards the message (with uploaded file URLs) over the WebSocket.
const sendMessage = async (event: any) => {
  // Extract data from CustomEvent.detail[0]
  const data = event.detail?.[0] || event
  const content = data.content || ''
  const files = data.files
  const roomId = data.roomId
  const timestamp = new Date().toTimeString().split(' ')[0].slice(0, 5)
  const messageId = generateMessageId()
  // Uploaded file URLs for the backend, and file descriptors for the UI.
  // (const: the arrays are mutated in place, never reassigned.)
  const attachments: string[] = []
  const messageFiles: any[] = []
  if (files && files.length > 0) {
    // Upload files to backend sequentially so attachment order is preserved.
    for (const file of files) {
      try {
        const formData = new FormData()
        // Convert blob URL to actual file if needed
        const response = await fetch(file.url)
        const blob = await response.blob()
        const actualFile = new File([blob], file.name, { type: file.type })
        formData.append('file', actualFile)
        const uploadResponse = await fetch(`${BACKEND_URL}/api/upload`, {
          method: 'POST',
          body: formData
        })
        if (!uploadResponse.ok) {
          throw new Error('Upload failed')
        }
        const uploadData = await uploadResponse.json()
        const fileUrl = `${BACKEND_URL}${uploadData.url}`
        attachments.push(fileUrl)
        messageFiles.push({
          name: file.name,
          size: uploadData.size,
          type: file.extension || file.type,
          url: fileUrl,
          preview: file.url // Keep the blob URL for preview
        })
      } catch (error) {
        // Best-effort: a failed upload is logged and skipped; the message
        // still goes out with whatever uploads succeeded.
        console.error('File upload error:', error)
      }
    }
  }
  // Add user message to UI
  const userMessage = {
    _id: messageId,
    content: content,
    senderId: CURRENT_USER_ID,
    timestamp: timestamp,
    date: new Date().toLocaleDateString(),
    saved: true,
    distributed: false,
    seen: false,
    files: messageFiles
  }
  messages.value = [...messages.value, userMessage]
  // Send to backend via WebSocket
  const wsMessage = {
    type: 'agent_user_message',
    session_id: roomId || SESSION_ID,
    content: content,
    attachments: attachments
  }
  wsManager.send(wsMessage)
  // Track this message so the agent_chunk handler can mark it seen
  lastSentMessageId = messageId
  // Show typing indicator immediately (before first chunk arrives)
  isAgentProcessing.value = true
  // Add thinking bubble in a macrotask so it runs in the same execution context
  // as the WebSocket handler (where tool bubbles work). nextTick / sync both fail
  // because vue-advanced-chat processes the send-message event asynchronously.
  setTimeout(() => addToolCallBubble('Thinking...'), 0)
  // Mark as distributed (single checkmark) after confirming WS send
  setTimeout(() => {
    const msgIndex = messages.value.findIndex(m => m._id === messageId)
    if (msgIndex !== -1) {
      messages.value[msgIndex] = {
        ...messages.value[msgIndex],
        distributed: true,
      }
      messages.value = [...messages.value]
    }
  }, 100)
}
// Fetch messages handler (for pagination, not needed for simple chat).
// The room/options payload is intentionally ignored.
const fetchMessages = (_payload: any) => {
  messagesLoaded.value = true
}
// Open a clicked attachment in a new browser tab.
const openFile = (payload: any) => {
  const { file } = payload
  window.open(file.url, '_blank')
}
// Styles to match TradingView dark theme
// Palette: #0f0f0f base background, #1c1c1c raised surfaces, #2e2e2e borders,
// #089981 accent green, #f23645 danger red, #dbdbdb primary / #8a8a8a muted text.
// Passed to the web component as a JSON string (see template).
const chatStyles = {
  // Global defaults (text, buttons, input, spinner).
  general: {
    color: '#dbdbdb',
    colorButtonClear: '#dbdbdb',
    colorButton: '#0f0f0f',
    backgroundColorButton: '#089981',
    backgroundInput: '#0f0f0f',
    colorPlaceholder: '#8a8a8a',
    colorCaret: '#dbdbdb',
    colorSpinner: '#089981',
    borderStyle: '1px solid #2e2e2e',
    backgroundScrollIcon: '#1c1c1c'
  },
  container: {
    border: 'none',
    borderRadius: '0',
    boxShadow: 'none'
  },
  header: {
    background: '#1c1c1c',
    colorRoomName: '#dbdbdb',
    colorRoomInfo: '#8a8a8a',
    position: 'absolute',
    width: '100%'
  },
  // Message input area at the bottom of the widget.
  footer: {
    background: '#1c1c1c',
    borderStyleInput: '1px solid #2e2e2e',
    borderInputSelected: '#089981',
    backgroundReply: '#1c1c1c',
    backgroundTagActive: '#2e2e2e',
    backgroundTag: '#141414'
  },
  content: {
    background: '#0f0f0f'
  },
  sidemenu: {
    background: '#0f0f0f',
    backgroundHover: '#141414',
    backgroundActive: '#1c1c1c',
    colorActive: '#dbdbdb',
    borderColorSearch: '#2e2e2e'
  },
  dropdown: {
    background: '#1c1c1c',
    backgroundHover: '#2e2e2e'
  },
  // Message bubbles ("Me" = current user, green; others dark gray).
  message: {
    background: '#141414',
    backgroundMe: '#089981',
    color: '#dbdbdb',
    colorStarted: '#8a8a8a',
    backgroundDeleted: '#0f0f0f',
    backgroundSelected: '#2e2e2e',
    colorDeleted: '#8a8a8a',
    colorUsername: '#8a8a8a',
    colorTimestamp: '#8a8a8a',
    backgroundDate: 'rgba(0, 0, 0, 0.4)',
    colorDate: '#8a8a8a',
    backgroundSystem: 'rgba(0, 0, 0, 0.4)',
    colorSystem: '#8a8a8a',
    backgroundMedia: 'rgba(0, 0, 0, 0.2)',
    backgroundReply: 'rgba(0, 0, 0, 0.2)',
    colorReplyUsername: '#dbdbdb',
    colorReply: '#b8b8b8',
    colorTag: '#089981',
    backgroundImage: '#1c1c1c',
    colorNewMessages: '#089981',
    backgroundScrollCounter: '#089981',
    colorScrollCounter: '#0f0f0f',
    backgroundReaction: 'none',
    borderStyleReaction: 'none',
    backgroundReactionHover: '#2e2e2e',
    borderStyleReactionHover: 'none',
    colorReactionCounter: '#dbdbdb',
    backgroundReactionMe: '#089981',
    borderStyleReactionMe: 'none',
    backgroundReactionHoverMe: '#089981',
    borderStyleReactionHoverMe: 'none',
    colorReactionCounterMe: '#0f0f0f',
    backgroundAudioRecord: '#f23645',
    backgroundAudioLine: 'rgba(255, 255, 255, 0.15)',
    backgroundAudioProgress: '#089981',
    backgroundAudioProgressSelector: '#089981',
    colorFileExtension: '#8a8a8a'
  },
  // Inline code / code-block rendering inside messages.
  markdown: {
    background: 'rgba(28, 28, 28, 0.9)',
    border: 'rgba(46, 46, 46, 0.9)',
    color: '#089981',
    colorMulti: '#dbdbdb'
  },
  room: {
    colorUsername: '#dbdbdb',
    colorMessage: '#8a8a8a',
    colorTimestamp: '#8a8a8a',
    colorStateOnline: '#089981',
    colorStateOffline: '#8a8a8a',
    backgroundCounterBadge: '#089981',
    colorCounterBadge: '#0f0f0f'
  },
  emoji: {
    background: '#1c1c1c'
  },
  // Individual icon tints.
  icons: {
    search: '#8a8a8a',
    add: '#dbdbdb',
    toggle: '#dbdbdb',
    menu: '#dbdbdb',
    close: '#8a8a8a',
    closeImage: '#dbdbdb',
    file: '#089981',
    paperclip: '#8a8a8a',
    closeOutline: '#dbdbdb',
    closePreview: '#dbdbdb',
    send: '#089981',
    sendDisabled: '#8a8a8a',
    emoji: '#8a8a8a',
    emojiReaction: '#8a8a8a',
    document: '#089981',
    pencil: '#8a8a8a',
    checkmark: '#8a8a8a',
    checkmarkSeen: '#089981',
    eye: '#dbdbdb',
    dropdownMessage: '#dbdbdb',
    dropdownMessageBackground: 'rgba(0, 0, 0, 0.4)',
    dropdownRoom: '#dbdbdb',
    dropdownScroll: '#1c1c1c',
    microphone: '#8a8a8a',
    audioPlay: '#089981',
    audioPause: '#089981',
    audioCancel: '#f23645',
    audioConfirm: '#089981'
  }
}
// Register the WebSocket handler, then — after the widget has had a moment to
// render — apply the shadow-DOM overrides and focus the input.
onMounted(() => {
  wsManager.addHandler(handleMessage)
  setTimeout(() => {
    // Inject styles into shadow DOM to widen message bubbles (fallback if already ready at mount)
    injectShadowStyles()
    const input = document.querySelector('.vac-textarea') as HTMLTextAreaElement
    input?.focus()
  }, 300)
})
// Widen message bubbles inside vue-advanced-chat's shadow DOM — its internal
// rules are not reachable from scoped component CSS, so a <style> element is
// appended to the shadow root directly.
const injectShadowStyles = () => {
  const shadow = document.querySelector('vue-advanced-chat')?.shadowRoot
  if (!shadow) return
  // Remove any previously injected style to avoid duplicates
  shadow.querySelector('#vac-width-override')?.remove()
  const styleEl = document.createElement('style')
  styleEl.id = 'vac-width-override'
  styleEl.textContent = `
.vac-message-wrapper .vac-message-box { max-width: 80%; }
.vac-message-wrapper .vac-offset-current { margin-left: 20%; }
`
  shadow.appendChild(styleEl)
}
// The chat widget only mounts once the workspace is ready (v-else in the
// template), so re-apply the shadow-DOM styles and focus the input shortly
// after it appears.
watch(() => channelStore.isReady, async (ready) => {
  if (!ready) return
  await nextTick()
  setTimeout(() => {
    injectShadowStyles()
    const input = document.querySelector('.vac-textarea') as HTMLTextAreaElement
    if (input) input.focus()
  }, 100)
})
// Deregister the WebSocket handler when the component goes away.
onUnmounted(() => {
  wsManager.removeHandler(handleMessage)
})
</script>
<template>
  <div class="chat-container" ref="chatContainerRef">
    <!--
    <div class="chat-header-custom">
      <span class="chat-title">AI Agent Chat</span>
      <Badge
        :value="isConnected ? 'Connected' : 'Disconnected'"
        :severity="isConnected ? 'success' : 'danger'"
      />
    </div>
    -->
    <!-- Workspace loading overlay -->
    <div v-if="!channelStore.isReady" class="workspace-loading">
      <i class="pi pi-spin pi-spinner workspace-loading-spinner" />
      <span class="workspace-loading-message">{{ channelStore.statusMessage || 'Connecting...' }}</span>
    </div>
    <!-- vue-advanced-chat is a web component, so object/array props
         (rooms, messages, styles, message-actions) are passed as JSON strings. -->
    <vue-advanced-chat
      v-else
      :height="chatHeight"
      :current-user-id="CURRENT_USER_ID"
      :rooms="JSON.stringify(rooms)"
      :messages="JSON.stringify(messages)"
      :messages-loaded="messagesLoaded"
      :room-id="SESSION_ID"
      :styles="JSON.stringify(chatStyles)"
      :single-room="true"
      :rooms-list-opened="false"
      :show-add-room="false"
      :show-audio="false"
      :show-files="false"
      :show-emojis="true"
      :show-reaction-emojis="false"
      :message-actions="JSON.stringify([])"
      :accepted-files="'image/*,video/*,application/pdf'"
      :message-images="true"
      @send-message="sendMessage"
      @fetch-messages="fetchMessages"
      @open-file="openFile"
    />
    <!-- Stop button overlay (visible only while the agent is processing) -->
    <div v-if="isAgentProcessing" class="stop-button-container">
      <Button
        icon="pi pi-stop-circle"
        label="Stop"
        severity="danger"
        @click="stopAgent"
        class="stop-button"
      />
    </div>
  </div>
</template>
<style scoped>
/* Fills its flex parent; 'relative' anchors the absolutely-positioned Stop button. */
.chat-container {
  height: 100% !important;
  display: flex;
  flex-direction: column;
  background: var(--p-surface-0) !important;
  overflow: hidden;
  position: relative;
}
/* Full-viewport overlay shown until the workspace/channel is ready. */
.workspace-loading {
  position: fixed;
  inset: 0;
  z-index: 9999;
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  gap: 1rem;
  background: #0f0f0f;
  color: #888;
}
.workspace-loading-spinner {
  font-size: 2rem;
  color: #089981;
}
.workspace-loading-message {
  font-size: 0.875rem;
}
.chat-container :deep(.vac-container) {
  height: 100% !important;
}
/* Preserve newlines in streamed agent text. */
.chat-container :deep(.vac-text-message) {
  white-space: pre-wrap;
}
/* Override the default 50% desktop width on the inner message box */
.chat-container :deep(.vac-message-wrapper .vac-message-box) {
  flex: 0 0 80% !important;
  max-width: 80% !important;
}
/* Header styles kept for the commented-out custom header in the template. */
.chat-header-custom {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 1rem 1.5rem;
  background: var(--p-surface-50);
  border-bottom: 1px solid var(--p-surface-200);
  flex-shrink: 0;
}
.chat-title {
  font-size: 1.25rem;
  font-weight: 600;
  color: var(--p-surface-900);
}
/* Floats the Stop button above the chat input. */
.stop-button-container {
  position: absolute;
  bottom: 80px;
  right: 20px;
  z-index: 1000;
}
.stop-button {
  box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
  animation: pulse 2s infinite;
}
/* Gentle attention pulse while the agent is running. */
@keyframes pulse {
  0%, 100% {
    opacity: 1;
  }
  50% {
    opacity: 0.8;
  }
}
</style>