- Add model-tags parser for @Tag syntax in chat messages
- Support Anthropic models (Sonnet, Haiku, Opus) via @tag
- Remove Qdrant vector database from infrastructure and configs
- Simplify license model config to use null fallbacks
- Add greeting stream after model switch via @tag
- Fix protobuf field names to camelCase for v7 compatibility
- Add 429 rate limit retry logic with exponential backoff
- Remove RAG references from agent harness documentation
980 lines
38 KiB
TypeScript
import type { FastifyInstance, FastifyRequest } from 'fastify';
|
|
import type { WebSocket } from '@fastify/websocket';
|
|
import type { Authenticator } from '../auth/authenticator.js';
|
|
import type { AgentHarness, HarnessFactory } from '../harness/agent-harness.js';
|
|
import type { HarnessEvent } from '../harness/harness-events.js';
|
|
import type { InboundMessage } from '../types/messages.js';
|
|
import { randomUUID } from 'crypto';
|
|
import { parseModelTag, MODEL_TAGS } from '../llm/model-tags.js';
|
|
import type { LLMProvider } from '../llm/provider.js';
|
|
import type { SessionRegistry, EventSubscriber, Session } from '../events/index.js';
|
|
import type { OHLCService, BarUpdateCallback } from '../services/ohlc-service.js';
|
|
import type { SymbolIndexService } from '../services/symbol-index-service.js';
|
|
import type { ContainerManager } from '../k8s/container-manager.js';
|
|
import type { ConversationService } from '../services/conversation-service.js';
|
|
import {
|
|
WorkspaceManager,
|
|
ContainerSync,
|
|
DEFAULT_STORES,
|
|
type ChannelAdapter,
|
|
type ChannelCapabilities,
|
|
type SnapshotMessage,
|
|
type PatchMessage,
|
|
} from '../workspace/index.js';
|
|
|
|
/**
|
|
* Safe JSON stringifier that handles BigInt values
|
|
* Converts BigInt to Number (safe for timestamps and other integer values)
|
|
*/
|
|
function jsonStringifySafe(obj: any): string {
|
|
return JSON.stringify(obj, (_key, value) =>
|
|
typeof value === 'bigint' ? Number(value) : value
|
|
);
|
|
}
|
|
|
|
function makeChunkDebouncer(send: (content: string) => void, delayMs = 200) {
|
|
let buffer = '';
|
|
let timer: ReturnType<typeof setTimeout> | null = null;
|
|
|
|
function flush() {
|
|
if (timer !== null) { clearTimeout(timer); timer = null; }
|
|
if (buffer.length > 0) { send(buffer); buffer = ''; }
|
|
}
|
|
|
|
function add(content: string) {
|
|
buffer += content;
|
|
if (timer !== null) clearTimeout(timer);
|
|
timer = setTimeout(flush, delayMs);
|
|
}
|
|
|
|
return { add, flush };
|
|
}
|
|
|
|
export type SessionStatus = 'authenticating' | 'spinning_up' | 'initializing' | 'ready' | 'error'
|
|
|
|
function sendStatus(socket: WebSocket, status: SessionStatus, message: string): void {
|
|
socket.send(JSON.stringify({ type: 'status', status, message }))
|
|
}
|
|
|
|
/** Dependencies injected into {@link WebSocketHandler}. */
export interface WebSocketHandlerConfig {
  /** Authenticates WebSocket upgrade requests (authenticateWebSocket). */
  authenticator: Authenticator;
  /** Used to wait for the user's sandbox container to become ready. */
  containerManager: ContainerManager;
  /** Tracks live sessions so container events can be routed to sockets. */
  sessionRegistry: SessionRegistry;
  /** Subscribes/unsubscribes container event streams on connect/disconnect. */
  eventSubscriber: EventSubscriber;
  /** Factory building one AgentHarness per connection. */
  createHarness: HarnessFactory;
  ohlcService?: OHLCService; // Optional for historical data support
  symbolIndexService?: SymbolIndexService; // Optional for symbol search
  conversationService?: ConversationService; // Optional for history replay on reconnect
}
|
|
|
|
/**
 * WebSocket channel handler (NOTE: this comment documents the
 * WebSocketHandler class declared below, not the BarSubscription
 * interface that immediately follows it).
 *
 * Handles WebSocket connections for chat and integrates with the event system
 * for container-to-client notifications.
 */
|
|
/** One realtime bar subscription held on behalf of a session. */
interface BarSubscription {
  /** Instrument ticker the subscription is for. */
  ticker: string;
  /** Bar period in seconds. */
  periodSeconds: number;
  /** Callback registered with the OHLC service; required again to unsubscribe. */
  callback: BarUpdateCallback;
  /** True when in-progress ("open") bar snapshots are delivered, not only closed bars. */
  openBars: boolean;
}
|
|
|
|
export class WebSocketHandler {
  private config: WebSocketHandlerConfig;
  /** sessionId → agent harness for that connection. */
  private harnesses = new Map<string, AgentHarness>();
  /** sessionId → workspace manager for that connection. */
  private workspaces = new Map<string, WorkspaceManager>();
  /** Per-session realtime bar subscriptions for cleanup on disconnect */
  private barSubscriptions = new Map<string, BarSubscription[]>();
  /** "sessionId:pandas_ta_name" → active request_id; supersedes stale requests on scroll */
  private activeEvaluations = new Map<string, string>();

  constructor(config: WebSocketHandlerConfig) {
    this.config = config;
  }
|
|
|
|
/**
|
|
* Register WebSocket routes
|
|
*/
|
|
register(app: FastifyInstance): void {
|
|
app.get(
|
|
'/ws/chat',
|
|
{ websocket: true },
|
|
async (socket: WebSocket, request: FastifyRequest) => {
|
|
await this.handleConnection(socket, request, app);
|
|
}
|
|
);
|
|
}
|
|
|
|
  /**
   * Handle a WebSocket connection for its entire lifetime.
   *
   * Sequence: authenticate the upgrade request → optionally wait for the
   * user's sandbox container to spin up → create the agent harness and
   * workspace → register the session with the event system → replay
   * conversation history (or stream a first-time greeting) → service
   * inbound messages until the socket closes, then tear everything down.
   */
  private async handleConnection(
    socket: WebSocket,
    request: FastifyRequest,
    app: FastifyInstance
  ): Promise<void> {
    const logger = app.log;

    // Send initial connecting message
    sendStatus(socket, 'authenticating', 'Authenticating...');

    // Authenticate (returns immediately if container is spinning up)
    const { authContext, isSpinningUp } = await this.config.authenticator.authenticateWebSocket(request);
    if (!authContext) {
      logger.warn('WebSocket authentication failed');
      socket.send(
        JSON.stringify({
          type: 'error',
          message: 'Authentication failed',
        })
      );
      // 1008 = policy-violation close code
      socket.close(1008, 'Authentication failed');
      return;
    }

    logger.info(
      { userId: authContext.userId, sessionId: authContext.sessionId, isSpinningUp },
      'WebSocket connection authenticated'
    );

    // If container is spinning up, wait for it to be ready before continuing
    if (isSpinningUp) {
      sendStatus(socket, 'spinning_up', 'Your personal agent is starting up, please wait...');

      // Keep the socket alive while we block on container startup.
      const startupPingInterval = setInterval(() => {
        if (socket.readyState === 1) socket.ping();
      }, 10000);

      const ready = await this.config.containerManager.waitForContainerReady(authContext.userId, 120000);
      clearInterval(startupPingInterval);
      if (!ready) {
        logger.warn({ userId: authContext.userId }, 'Sandbox failed to become ready within timeout');
        socket.send(JSON.stringify({ type: 'error', message: 'Agent workspace failed to start. Please try again later.' }));
        socket.close(1011, 'Container startup timeout');
        return;
      }

      logger.info({ userId: authContext.userId }, 'Sandbox is ready, proceeding with session setup');
    }

    sendStatus(socket, 'initializing', 'Starting your workspace...');

    // Create WebSocket channel adapter: translates channel-agnostic
    // harness/workspace output into this socket's JSON wire frames.
    const wsAdapter: ChannelAdapter = {
      sendSnapshot: (msg: SnapshotMessage) => {
        socket.send(JSON.stringify(msg));
      },
      sendPatch: (msg: PatchMessage) => {
        socket.send(JSON.stringify(msg));
      },
      sendText: (msg) => {
        socket.send(JSON.stringify({
          type: 'text',
          text: msg.text,
        }));
      },
      sendChunk: (content) => {
        socket.send(JSON.stringify({
          type: 'agent_chunk',
          content,
          done: false,
        }));
      },
      sendImage: (msg) => {
        socket.send(JSON.stringify({
          type: 'image',
          data: msg.data,
          mimeType: msg.mimeType,
          caption: msg.caption,
        }));
      },
      sendToolCall: (toolName, label) => {
        socket.send(JSON.stringify({
          type: 'agent_tool_call',
          toolName,
          label: label ?? toolName,
        }));
      },
      getCapabilities: (): ChannelCapabilities => ({
        supportsSync: true,
        supportsImages: true,
        supportsMarkdown: true,
        supportsStreaming: true,
        supportsTradingViewEmbed: true,
      }),
    };

    // Declare harness and workspace outside try block so they're available in catch
    let harness: AgentHarness | undefined;
    let workspace: WorkspaceManager | undefined;

    try {
      // Create and connect harness first so MCP client is available for ContainerSync
      harness = this.config.createHarness({
        userId: authContext.userId,
        sessionId: authContext.sessionId,
        license: authContext.license,
        mcpServerUrl: authContext.mcpServerUrl,
        logger,
        channelAdapter: wsAdapter,
        channelType: authContext.channelType,
        channelUserId: authContext.channelUserId,
      });

      await harness.initialize();

      // Wire ContainerSync now that MCP client is connected, then initialize workspace
      const containerSync = new ContainerSync(harness.getMcpClient(), logger);
      workspace = new WorkspaceManager({
        userId: authContext.userId,
        sessionId: authContext.sessionId,
        stores: DEFAULT_STORES,
        containerSync,
        logger,
      });

      await workspace.initialize();
      workspace.setAdapter(wsAdapter);
      harness.setWorkspaceManager(workspace);
      this.workspaces.set(authContext.sessionId, workspace);
      this.harnesses.set(authContext.sessionId, harness);

      // Push all store snapshots to the client now, before 'connected'.
      // Empty seqs force full snapshots for every store, so the browser's
      // message queue has the current workspace state (including persistent
      // stores loaded from the container) before TradingView initializes.
      await workspace.handleHello({});

      // Register session for event system
      // Container endpoint is derived from the MCP server URL (same container, different port)
      const containerEventEndpoint = this.getContainerEventEndpoint(authContext.mcpServerUrl);

      const session: Session = {
        userId: authContext.userId,
        sessionId: authContext.sessionId,
        socket,
        channelType: 'websocket',
        containerEndpoint: containerEventEndpoint,
        connectedAt: new Date(),
      };

      this.config.sessionRegistry.register(session);

      // Subscribe to informational events from user's container
      await this.config.eventSubscriber.onSessionConnect(session);

      logger.info(
        { userId: authContext.userId, containerEndpoint: containerEventEndpoint },
        'Session registered for events'
      );

      sendStatus(socket, 'ready', 'Your workspace is ready!');
      socket.send(
        JSON.stringify({
          type: 'connected',
          sessionId: authContext.sessionId,
          userId: authContext.userId,
          licenseType: authContext.license.licenseType,
          message: 'Connected to Dexorder AI',
          // Advertise the @tags the client may use to switch models.
          modelTags: MODEL_TAGS.map(m => m.tag),
        })
      );

      // Replay conversation history so the UI pre-populates on reconnect;
      // greet new users on their first conversation
      if (this.config.conversationService) {
        const history = await this.config.conversationService.getHistory(
          authContext.userId,
          authContext.sessionId
        );
        if (history.length > 0) {
          socket.send(JSON.stringify({ type: 'conversation_history', messages: history }));
        } else {
          // First conversation — auto-send greeting prompt and stream the response
          socket.send(JSON.stringify({ type: 'agent_chunk', content: '', done: false }));
          const greetingDebouncer = makeChunkDebouncer(content =>
            socket.send(JSON.stringify({ type: 'agent_chunk', content, done: false }))
          );
          for await (const event of harness!.streamGreeting()) {
            const e = event as HarnessEvent;
            switch (e.type) {
              case 'chunk':
                greetingDebouncer.add(e.content);
                break;
              case 'tool_call':
                // Flush buffered text first so client-side ordering is preserved.
                greetingDebouncer.flush();
                socket.send(JSON.stringify({ type: 'agent_tool_call', toolName: e.toolName, label: e.label }));
                break;
              case 'image':
                greetingDebouncer.flush();
                socket.send(JSON.stringify({ type: 'image', data: e.data, mimeType: e.mimeType, caption: e.caption }));
                break;
              case 'error':
                greetingDebouncer.flush();
                socket.send(JSON.stringify({ type: 'text', text: `An error occurred during greeting.` }));
                break;
              case 'done':
                break;
            }
          }
          greetingDebouncer.flush();
          // done:true marks the end of the streamed greeting.
          socket.send(JSON.stringify({ type: 'agent_chunk', content: '', done: true }));
        }
      }

      // Handle messages
      socket.on('message', async (data: Buffer) => {
        try {
          logger.info({ rawMessage: data.toString().substring(0, 500) }, 'WebSocket message received');
          const payload = JSON.parse(data.toString());
          logger.info({ type: payload.type, request_id: payload.request_id }, 'WebSocket message parsed');

          // Route based on message type
          if (payload.type === 'message' || payload.type === 'agent_user_message') {
            if (!harness) {
              logger.error('Harness not initialized');
              socket.send(JSON.stringify({ type: 'error', message: 'Session not ready' }));
              return;
            }

            // Check for @ModelTag at the start of the message
            const parsedTag = parseModelTag(payload.content ?? '');
            let messageContent: string = payload.content ?? '';
            let modelOverride: { modelId: string; provider?: LLMProvider } | undefined;

            if (parsedTag) {
              // A model switch resets the conversation before continuing.
              await harness.clearHistory();
              socket.send(JSON.stringify({ type: 'model_switched', tag: parsedTag.tag, modelId: parsedTag.modelId, rest: parsedTag.rest }));
              messageContent = parsedTag.rest;
              modelOverride = { modelId: parsedTag.modelId, provider: parsedTag.provider };
              logger.info({ tag: parsedTag.tag, modelId: parsedTag.modelId }, 'Model tag switch');
            }

            // Chat message - send to agent harness with streaming
            const inboundMessage: InboundMessage = {
              messageId: randomUUID(),
              userId: authContext.userId,
              sessionId: authContext.sessionId,
              content: messageContent,
              attachments: payload.attachments,
              timestamp: new Date(),
            };

            try {
              // Acknowledge receipt immediately so the client can show the seen indicator
              socket.send(JSON.stringify({ type: 'agent_chunk', content: '', done: false }));

              logger.info('Streaming harness response');
              let fatalError = false;
              const msgDebouncer = makeChunkDebouncer(content =>
                socket.send(JSON.stringify({ type: 'agent_chunk', content, done: false }))
              );
              // A bare "@tag" with no remaining text re-greets on the new model.
              const stream = (parsedTag && !messageContent)
                ? harness.streamGreeting(modelOverride)
                : harness.streamMessage(inboundMessage, { modelOverride });
              for await (const event of stream) {
                const e = event as HarnessEvent;
                switch (e.type) {
                  case 'chunk':
                    msgDebouncer.add(e.content);
                    break;
                  case 'tool_call':
                    msgDebouncer.flush();
                    socket.send(JSON.stringify({ type: 'agent_tool_call', toolName: e.toolName, label: e.label }));
                    break;
                  case 'subagent_tool_call':
                    msgDebouncer.flush();
                    socket.send(JSON.stringify({ type: 'subagent_tool_call', agentName: e.agentName, toolName: e.toolName, label: e.label }));
                    break;
                  case 'subagent_chunk':
                    msgDebouncer.flush();
                    socket.send(JSON.stringify({ type: 'subagent_chunk', agentName: e.agentName, content: e.content }));
                    break;
                  case 'subagent_thinking':
                    msgDebouncer.flush();
                    socket.send(JSON.stringify({ type: 'subagent_thinking', agentName: e.agentName, content: e.content }));
                    break;
                  case 'image':
                    msgDebouncer.flush();
                    socket.send(JSON.stringify({ type: 'image', data: e.data, mimeType: e.mimeType, caption: e.caption }));
                    break;
                  case 'error':
                    msgDebouncer.flush();
                    socket.send(JSON.stringify({ type: 'text', text: `An unrecoverable error occurred in the ${e.source}.` }));
                    if (e.fatal) fatalError = true;
                    break;
                  case 'done':
                    break;
                }
              }
              msgDebouncer.flush();

              if (fatalError) {
                socket.close(1011, 'Fatal error');
                return;
              }

              // Send done marker after all events have been streamed
              logger.debug('Sending done marker to client');
              socket.send(
                JSON.stringify({
                  type: 'agent_chunk',
                  content: '',
                  done: true,
                })
              );
            } catch (error) {
              logger.error({ error }, 'Error streaming response');
              socket.send(
                JSON.stringify({
                  type: 'error',
                  message: 'Failed to generate response',
                })
              );
            }
          } else if (payload.type === 'hello') {
            // Workspace sync: hello message
            logger.debug({ seqs: payload.seqs }, 'Handling workspace hello');
            await workspace!.handleHello(payload.seqs || {});
          } else if (payload.type === 'patch') {
            // Workspace sync: patch message
            logger.debug({ store: payload.store, seq: payload.seq }, 'Handling workspace patch');
            await workspace!.handlePatch(payload.store, payload.seq, payload.patch || []);
          } else if (payload.type === 'client_log') {
            // Browser-side log forwarded to the server logger at the requested level.
            const level: string = payload.level ?? 'log';
            const msg = `[client:${authContext.sessionId}] ${payload.message ?? ''}`;
            const logMeta = { source: 'client', sessionId: authContext.sessionId };
            if (level === 'error') logger.error(logMeta, msg);
            else if (level === 'warn') logger.warn(logMeta, msg);
            else if (level === 'debug') logger.debug(logMeta, msg);
            else logger.info(logMeta, msg);
          } else if (payload.type === 'agent_stop') {
            logger.info('Agent stop requested');
            harness?.interrupt();
          } else if (payload.type === 'read_details') {
            // Read the details field for a category item
            const { category, name } = payload;
            if (!harness) {
              socket.send(JSON.stringify({ type: 'details_error', category, name, error: 'Session not ready' }));
            } else {
              try {
                const details = await harness.readDetails(category, name);
                if (details === null) {
                  socket.send(JSON.stringify({ type: 'details_error', category, name, error: 'Item not found or has no details' }));
                } else {
                  socket.send(JSON.stringify({ type: 'details_data', category, name, details }));
                }
              } catch (error) {
                logger.error({ error, category, name }, 'Error reading details');
                socket.send(JSON.stringify({ type: 'details_error', category, name, error: 'Failed to read details' }));
              }
            }
          } else if (payload.type === 'read_output') {
            // Read persisted output (analysis + images) for a research item
            const { category, name } = payload;
            if (!harness) {
              socket.send(JSON.stringify({ type: 'output_error', category, name, error: 'Session not ready' }));
            } else {
              try {
                const output = await harness.readOutput(category, name);
                if (!output) {
                  socket.send(JSON.stringify({ type: 'output_error', category, name, error: 'No output found — run the script first' }));
                } else {
                  // Output may contain BigInt timestamps — use the safe stringifier.
                  socket.send(jsonStringifySafe({ type: 'output_data', category, name, ...output }));
                }
              } catch (error) {
                logger.error({ error, category, name }, 'Error reading output');
                socket.send(JSON.stringify({ type: 'output_error', category, name, error: 'Failed to read output' }));
              }
            }
          } else if (payload.type === 'update_details') {
            // User submitted a revised details string — diff and invoke the appropriate subagent
            const { category, name, details: newDetails } = payload;
            if (!harness) {
              socket.send(JSON.stringify({ type: 'details_updated', category, name, success: false, error: 'Session not ready' }));
            } else {
              try {
                let hadError = false;
                for await (const event of harness.streamDetailsUpdate(category, name, newDetails)) {
                  const e = event as HarnessEvent;
                  switch (e.type) {
                    case 'chunk':
                      // Top-level chunks are surfaced under the category's subagent name.
                      socket.send(JSON.stringify({ type: 'subagent_chunk', agentName: category, content: e.content }));
                      break;
                    case 'subagent_chunk':
                      socket.send(JSON.stringify({ type: 'subagent_chunk', agentName: e.agentName, content: e.content }));
                      break;
                    case 'subagent_tool_call':
                      socket.send(JSON.stringify({ type: 'subagent_tool_call', agentName: e.agentName, toolName: e.toolName, label: e.label }));
                      break;
                    case 'subagent_thinking':
                      socket.send(JSON.stringify({ type: 'subagent_thinking', agentName: e.agentName, content: e.content }));
                      break;
                    case 'tool_call':
                      socket.send(JSON.stringify({ type: 'agent_tool_call', toolName: e.toolName, label: e.label }));
                      break;
                    case 'image':
                      socket.send(JSON.stringify({ type: 'image', data: e.data, mimeType: e.mimeType, caption: e.caption }));
                      break;
                    case 'error':
                      hadError = true;
                      socket.send(JSON.stringify({ type: 'subagent_chunk', agentName: category, content: `Error in ${e.source}` }));
                      break;
                    case 'done':
                      break;
                  }
                }
                socket.send(JSON.stringify({ type: 'details_updated', category, name, success: !hadError }));
              } catch (error) {
                logger.error({ error, category, name }, 'Error updating details');
                socket.send(JSON.stringify({ type: 'details_updated', category, name, success: false, error: 'Failed to update details' }));
              }
            }
          } else if (this.isDatafeedMessage(payload)) {
            // Historical data request - send to OHLC service
            logger.info({ type: payload.type }, 'Routing to datafeed handler');
            await this.handleDatafeedMessage(socket, payload, logger, authContext);
          } else {
            logger.warn({ type: payload.type }, 'Unknown message type received');
          }
        } catch (error) {
          logger.error({ error }, 'Error handling WebSocket message');
          socket.send(
            JSON.stringify({
              type: 'error',
              message: 'Failed to process message',
            })
          );
        }
      });

      // Handle disconnection
      socket.on('close', async (code: number, reason: Buffer) => {
        // pingInterval is declared below; by the time 'close' can fire the
        // synchronous setup has completed, so the binding is initialized.
        clearInterval(pingInterval);
        logger.info({ sessionId: authContext.sessionId, code, reason: reason?.toString() }, 'WebSocket disconnected');

        // Unregister from event system
        const removedSession = this.config.sessionRegistry.unregister(authContext.sessionId);
        if (removedSession) {
          await this.config.eventSubscriber.onSessionDisconnect(removedSession);
        }

        // Cleanup realtime bar subscriptions
        const sessionId = authContext.sessionId;
        const subs = this.barSubscriptions.get(sessionId);
        if (subs && this.config.ohlcService) {
          for (const { ticker, periodSeconds, callback, openBars } of subs) {
            this.config.ohlcService.unsubscribeFromTicker(ticker, periodSeconds, callback, openBars);
          }
          this.barSubscriptions.delete(sessionId);
          logger.info({ sessionId, count: subs.length }, 'Cleaned up realtime bar subscriptions');
        }

        // Cleanup active indicator evaluations for this session
        const evalPrefix = `${sessionId}:`;
        for (const key of this.activeEvaluations.keys()) {
          if (key.startsWith(evalPrefix)) this.activeEvaluations.delete(key);
        }

        // Cleanup workspace. The non-null assertion is safe because this
        // handler is only registered after workspace.initialize() succeeds.
        await workspace!.shutdown();
        this.workspaces.delete(authContext.sessionId);

        // Cleanup harness
        if (harness) {
          await harness.cleanup();
          this.harnesses.delete(authContext.sessionId);
        }
      });

      socket.on('error', (error: any) => {
        logger.error({ error, sessionId: authContext.sessionId }, 'WebSocket error');
      });

      // Ping every 30 seconds to keep the connection alive through CloudFlare proxy.
      // CloudFlare drops idle WebSocket connections after ~100 seconds.
      const pingInterval = setInterval(() => {
        if (socket.readyState === 1) { // OPEN
          socket.ping();
        }
      }, 30000);
    } catch (error) {
      // Initialization failed: notify the client, close, and roll back any
      // partially-created per-session state.
      logger.error({ error }, 'Failed to initialize session');
      socket.send(JSON.stringify({ type: 'text', text: 'An unrecoverable error occurred in the agent harness.' }));
      socket.close(1011, 'Internal server error');
      if (workspace) {
        await workspace.shutdown();
        this.workspaces.delete(authContext.sessionId);
      }
      if (harness) {
        await harness.cleanup();
      }
    }
  }
|
|
|
|
/**
|
|
* Derive the container's XPUB event endpoint from the MCP server URL.
|
|
*
|
|
* MCP URL format: http://sandbox-user-abc123.sandbox.svc.cluster.local:3000
|
|
* Event endpoint: tcp://sandbox-user-abc123.sandbox.svc.cluster.local:5570
|
|
*/
|
|
private getContainerEventEndpoint(mcpServerUrl: string): string {
|
|
try {
|
|
const url = new URL(mcpServerUrl);
|
|
// Replace protocol and port
|
|
return `tcp://${url.hostname}:5570`;
|
|
} catch {
|
|
// Fallback if URL parsing fails
|
|
return mcpServerUrl.replace('http://', 'tcp://').replace(':3000', ':5570');
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Check if message is a datafeed message (TradingView protocol)
|
|
*/
|
|
private isDatafeedMessage(payload: any): boolean {
|
|
const datafeedTypes = [
|
|
'get_config',
|
|
'search_symbols',
|
|
'resolve_symbol',
|
|
'get_bars',
|
|
'subscribe_bars',
|
|
'unsubscribe_bars',
|
|
'evaluate_indicator',
|
|
];
|
|
return datafeedTypes.includes(payload.type);
|
|
}
|
|
|
|
  /**
   * Handle datafeed messages (TradingView protocol).
   *
   * Every request carries a request_id that is echoed back on the response
   * so the client can correlate replies; one is generated when missing.
   */
  private async handleDatafeedMessage(
    socket: WebSocket,
    payload: any,
    logger: any,
    authContext?: any
  ): Promise<void> {
    logger.info({ type: payload.type, payload }, 'handleDatafeedMessage called');
    const ohlcService = this.config.ohlcService;
    const symbolIndexService = this.config.symbolIndexService;

    logger.info({
      hasOhlcService: !!ohlcService,
      hasSymbolIndexService: !!symbolIndexService
    }, 'Service availability');

    const requestId = payload.request_id || randomUUID();

    // Neither optional service configured: tell the client to retry later.
    if (!ohlcService && !symbolIndexService) {
      logger.warn({ requestId }, 'No datafeed services available yet');
      socket.send(JSON.stringify({
        type: 'error',
        request_id: requestId,
        error_message: 'Services initializing, please retry',
      }));
      return;
    }

    try {
      switch (payload.type) {
        case 'get_config': {
          // Fall back to a static resolution list when the OHLC service is absent.
          const config = ohlcService ? await ohlcService.getConfig() : { supported_resolutions: ['1', '5', '15', '60', '1D'] };
          socket.send(
            JSON.stringify({
              type: 'get_config_response',
              request_id: requestId,
              config,
            })
          );
          break;
        }

        case 'search_symbols': {
          logger.info({ query: payload.query, limit: payload.limit }, 'Handling search_symbols');
          const symbolIndexService = this.config.symbolIndexService;

          const results = symbolIndexService
            ? await symbolIndexService.search(payload.query, payload.limit || 30)
            : [];

          logger.info({ resultsCount: results.length }, 'Search complete');
          socket.send(
            JSON.stringify({
              type: 'search_symbols_response',
              request_id: requestId,
              results,
            })
          );
          break;
        }

        case 'resolve_symbol': {
          logger.info({ symbol: payload.symbol }, 'Handling resolve_symbol');
          const symbolIndexService = this.config.symbolIndexService;

          const symbolInfo = symbolIndexService
            ? await symbolIndexService.resolveSymbol(payload.symbol)
            : null;

          logger.info({ found: !!symbolInfo }, 'Symbol resolution complete');

          if (!symbolInfo) {
            logger.warn({ symbol: payload.symbol }, 'Symbol not found');
            socket.send(
              JSON.stringify({
                type: 'error',
                request_id: requestId,
                error_message: `Symbol not found: ${payload.symbol}`,
              })
            );
          } else {
            logger.info({ symbolInfo }, 'Sending symbol_info response');
            socket.send(
              JSON.stringify({
                type: 'resolve_symbol_response',
                request_id: requestId,
                symbol_info: symbolInfo,
              })
            );
          }
          break;
        }

        case 'get_bars': {
          if (!ohlcService) {
            socket.send(JSON.stringify({
              type: 'get_bars_response',
              request_id: requestId,
              error: 'OHLC service not available',
            }));
            break;
          }
          try {
            const history = await ohlcService.fetchOHLC(
              payload.symbol,
              payload.period_seconds,
              payload.from_time,
              payload.to_time,
              payload.countback
            );
            logger.info({ requestId, barCount: history.bars?.length ?? 0, noData: history.noData, socketState: socket.readyState }, 'Sending get_bars_response');
            // History may contain BigInt timestamps — use the safe stringifier.
            socket.send(jsonStringifySafe({ type: 'get_bars_response', request_id: requestId, history }));
            logger.info({ requestId }, 'get_bars_response sent');
          } catch (err: any) {
            const errorMessage = err?.message ?? String(err);
            logger.error({ requestId, ticker: payload.symbol, errorMessage }, 'get_bars failed');
            socket.send(JSON.stringify({ type: 'get_bars_response', request_id: requestId, error: errorMessage }));
          }
          break;
        }

        case 'subscribe_bars': {
          if (!ohlcService || !authContext) {
            socket.send(JSON.stringify({
              type: 'subscribe_bars_response',
              request_id: requestId,
              subscription_id: payload.subscription_id,
              success: false,
              message: 'Realtime service not available',
            }));
            break;
          }

          const subTicker: string = payload.symbol;
          const subPeriod: number = payload.period_seconds ?? payload.resolution ?? 60;
          // 'open' = in-progress bar snapshots every tick (chart); 'closed' = completed bars only (strategies)
          const openBars: boolean = (payload.bar_type ?? 'open') === 'open';
          const sessionId = authContext.sessionId;

          // Create a per-subscription callback that forwards bars to this socket
          const barCallback: BarUpdateCallback = (bar) => {
            if (socket.readyState !== 1 /* OPEN */) return;
            // Scale fixed-point prices/sizes back to decimals using the
            // symbol's precision metadata (divisor 1 when unavailable).
            const symbolMeta = symbolIndexService?.getSymbolByTicker(bar.ticker);
            const priceDivisor = (symbolMeta?.price_precision ?? 0) > 0
              ? Math.pow(10, symbolMeta!.price_precision!)
              : 1;
            const sizeDivisor = (symbolMeta?.size_precision ?? 0) > 0
              ? Math.pow(10, symbolMeta!.size_precision!)
              : 1;
            socket.send(JSON.stringify({
              type: 'bar_update',
              subscription_id: payload.subscription_id,
              ticker: bar.ticker,
              period_seconds: bar.periodSeconds,
              is_closed: bar.isClosed,
              bar: {
                // Convert nanoseconds → seconds for client compatibility
                time: Number(bar.timestamp / 1_000_000_000n),
                open: bar.open / priceDivisor,
                high: bar.high / priceDivisor,
                low: bar.low / priceDivisor,
                close: bar.close / priceDivisor,
                volume: bar.volume / sizeDivisor,
              },
            }));
          };

          ohlcService.subscribeToTicker(subTicker, subPeriod, barCallback, openBars);

          // Track for cleanup on disconnect
          if (!this.barSubscriptions.has(sessionId)) {
            this.barSubscriptions.set(sessionId, []);
          }
          this.barSubscriptions.get(sessionId)!.push({
            ticker: subTicker,
            periodSeconds: subPeriod,
            callback: barCallback,
            openBars,
          });

          logger.info({ sessionId, ticker: subTicker, period: subPeriod }, 'Subscribed to realtime bars');

          socket.send(JSON.stringify({
            type: 'subscribe_bars_response',
            request_id: requestId,
            subscription_id: payload.subscription_id,
            success: true,
          }));
          break;
        }

        case 'unsubscribe_bars': {
          if (!ohlcService || !authContext) break;

          const unsubTicker: string = payload.symbol;
          const unsubPeriod: number = payload.period_seconds ?? payload.resolution ?? 60;
          const sessionId = authContext.sessionId;

          // Remove the first matching tracked subscription and detach its callback.
          const subs = this.barSubscriptions.get(sessionId);
          if (subs) {
            const idx = subs.findIndex(
              s => s.ticker === unsubTicker && s.periodSeconds === unsubPeriod
            );
            if (idx >= 0) {
              const [removed] = subs.splice(idx, 1);
              ohlcService.unsubscribeFromTicker(unsubTicker, unsubPeriod, removed.callback, removed.openBars);
              logger.info({ sessionId, ticker: unsubTicker, period: unsubPeriod }, 'Unsubscribed from realtime bars');
            }
          }

          socket.send(JSON.stringify({
            type: 'unsubscribe_bars_response',
            request_id: requestId,
            subscription_id: payload.subscription_id,
            success: true,
          }));
          break;
        }

        case 'evaluate_indicator': {
          // Direct MCP call — bypasses the agent/LLM for performance
          const harness = this.harnesses.get(authContext.sessionId);
          if (!harness) {
            socket.send(JSON.stringify({
              type: 'evaluate_indicator_result',
              request_id: requestId,
              error: 'Session not initialized',
            }));
            break;
          }

          // Supersede any in-flight request for the same indicator (e.g. rapid scrolling)
          const evalKey = `${authContext.sessionId}:${payload.pandas_ta_name}`;
          const prevRequestId = this.activeEvaluations.get(evalKey);
          if (prevRequestId) {
            socket.send(JSON.stringify({
              type: 'evaluate_indicator_result',
              request_id: prevRequestId,
              error: 'superseded',
            }));
          }
          this.activeEvaluations.set(evalKey, requestId);

          try {
            const mcpResult = await harness.callMcpTool('EvaluateIndicator', {
              symbol: payload.symbol,
              from_time: payload.from_time,
              to_time: payload.to_time,
              period_seconds: payload.period_seconds,
              pandas_ta_name: payload.pandas_ta_name,
              parameters: payload.parameters ?? {},
            }) as any;

            // Discard result if a newer request arrived while we were awaiting
            if (this.activeEvaluations.get(evalKey) !== requestId) break;
            this.activeEvaluations.delete(evalKey);

            // MCP returns { content: [{type: 'text', text: '...json...'}] }
            // When the tool raises an exception, the MCP framework sets isError: true
            // and puts the raw exception text in content[0].text (not JSON-wrapped).
            const rawText = mcpResult?.content?.[0]?.text ?? mcpResult?.[0]?.text;
            if (mcpResult?.isError || rawText == null) {
              const errMsg = rawText ?? 'evaluate_indicator returned no content';
              logger.error({ pandas_ta_name: payload.pandas_ta_name, rawText }, 'evaluate_indicator sandbox error');
              socket.send(JSON.stringify({
                type: 'evaluate_indicator_result',
                request_id: requestId,
                error: errMsg,
              }));
              break;
            }
            let data: any;
            try {
              data = JSON.parse(rawText);
            } catch {
              // Sandbox returned non-JSON (e.g. bare exception text)
              logger.error({ pandas_ta_name: payload.pandas_ta_name, rawText }, 'evaluate_indicator returned non-JSON');
              socket.send(JSON.stringify({
                type: 'evaluate_indicator_result',
                request_id: requestId,
                error: rawText,
              }));
              break;
            }
            socket.send(JSON.stringify({
              type: 'evaluate_indicator_result',
              request_id: requestId,
              ...data,
            }));
          } catch (err: any) {
            // Only clear the supersede slot if it still belongs to this request.
            if (this.activeEvaluations.get(evalKey) === requestId) {
              this.activeEvaluations.delete(evalKey);
            }
            logger.error({ err: err?.message, pandas_ta_name: payload.pandas_ta_name }, 'evaluate_indicator handler error');
            socket.send(JSON.stringify({
              type: 'evaluate_indicator_result',
              request_id: requestId,
              error: err?.message ?? String(err),
            }));
          }
          break;
        }

        default:
          logger.warn({ type: payload.type }, 'Unknown datafeed message type');
      }
    } catch (error: any) {
      logger.error({ error, type: payload.type }, 'Error handling datafeed message');
      socket.send(
        jsonStringifySafe({
          type: 'error',
          request_id: requestId,
          error_code: 'INTERNAL_ERROR',
          error_message: error.message || 'Internal server error',
        })
      );
    }
  }
|
|
|
|
/**
|
|
* Flush and clean up all active sessions.
|
|
* Called during graceful shutdown to ensure conversations are persisted.
|
|
*/
|
|
async endAllSessions(): Promise<void> {
|
|
const cleanups = Array.from(this.harnesses.values()).map(h => h.cleanup());
|
|
await Promise.allSettled(cleanups);
|
|
this.harnesses.clear();
|
|
}
|
|
}
|