From dacfa9c1a336c031efbe977078990821133f1d25 Mon Sep 17 00:00:00 2001 From: Tim Olson Date: Mon, 13 Apr 2026 22:24:12 -0400 Subject: [PATCH] workspace out of cache; welcome.md --- gateway/src/channels/websocket-handler.ts | 26 ++++++- gateway/src/harness/agent-harness.ts | 76 +++++++++---------- .../src/harness/memory/conversation-store.ts | 2 +- gateway/src/harness/prompts/welcome.md | 1 + gateway/src/llm/middleware.ts | 9 ++- 5 files changed, 70 insertions(+), 44 deletions(-) create mode 100644 gateway/src/harness/prompts/welcome.md diff --git a/gateway/src/channels/websocket-handler.ts b/gateway/src/channels/websocket-handler.ts index fa891900..195825dd 100644 --- a/gateway/src/channels/websocket-handler.ts +++ b/gateway/src/channels/websocket-handler.ts @@ -257,7 +257,8 @@ export class WebSocketHandler { }) ); - // Replay conversation history so the UI pre-populates on reconnect + // Replay conversation history so the UI pre-populates on reconnect; + // greet new users on their first conversation if (this.config.conversationService) { const history = await this.config.conversationService.getHistory( authContext.userId, @@ -265,6 +266,29 @@ export class WebSocketHandler { ); if (history.length > 0) { socket.send(JSON.stringify({ type: 'conversation_history', messages: history })); + } else { + // First conversation — auto-send greeting prompt and stream the response + socket.send(JSON.stringify({ type: 'agent_chunk', content: '', done: false })); + for await (const event of harness!.streamGreeting()) { + const e = event as HarnessEvent; + switch (e.type) { + case 'chunk': + socket.send(JSON.stringify({ type: 'agent_chunk', content: e.content, done: false })); + break; + case 'tool_call': + socket.send(JSON.stringify({ type: 'agent_tool_call', toolName: e.toolName, label: e.label })); + break; + case 'image': + socket.send(JSON.stringify({ type: 'image', data: e.data, mimeType: e.mimeType, caption: e.caption })); + break; + case 'error': + 
socket.send(JSON.stringify({ type: 'text', text: `An error occurred during greeting.` })); + break; + case 'done': + break; + } + } + socket.send(JSON.stringify({ type: 'agent_chunk', content: '', done: true })); } } diff --git a/gateway/src/harness/agent-harness.ts b/gateway/src/harness/agent-harness.ts index ff9ab22d..26ae19d9 100644 --- a/gateway/src/harness/agent-harness.ts +++ b/gateway/src/harness/agent-harness.ts @@ -11,7 +11,7 @@ import { LLMProviderFactory, type ProviderConfig } from '../llm/provider.js'; import { ModelRouter, RoutingStrategy } from '../llm/router.js'; import type { ModelMiddleware } from '../llm/middleware.js'; import type { WorkspaceManager } from '../workspace/workspace-manager.js'; -import type { ChannelAdapter, PathTriggerContext } from '../workspace/index.js'; +import type { ChannelAdapter } from '../workspace/index.js'; import type { ResearchSubagent } from './subagents/research/index.js'; import type { IndicatorSubagent } from './subagents/indicator/index.js'; import type { WebExploreSubagent } from './subagents/web-explore/index.js'; @@ -79,6 +79,7 @@ export interface AgentHarnessConfig extends HarnessSessionConfig { */ export class AgentHarness { private static systemPromptTemplate: string | null = null; + private static welcomePrompt: string | null = null; private config: AgentHarnessConfig; private modelFactory: LLMProviderFactory; @@ -117,7 +118,6 @@ export class AgentHarness { logger: config.logger, }); - this.registerWorkspaceTriggers(); } /** @@ -131,6 +131,17 @@ export class AgentHarness { return AgentHarness.systemPromptTemplate; } + /** + * Load welcome prompt from file (cached) + */ + private static async loadWelcomePrompt(): Promise { + if (!AgentHarness.welcomePrompt) { + const promptPath = join(__dirname, 'prompts', 'welcome.md'); + AgentHarness.welcomePrompt = (await readFile(promptPath, 'utf-8')).trim(); + } + return AgentHarness.welcomePrompt; + } + /** * Set the channel adapter (can be called after construction) 
*/ @@ -716,7 +727,6 @@ export class AgentHarness { */ setWorkspaceManager(workspace: WorkspaceManager): void { this.workspaceManager = workspace; - this.registerWorkspaceTriggers(); } /** @@ -738,29 +748,21 @@ export class AgentHarness { // 2. Load recent conversation history const channelKey = this.config.channelType ?? ChannelType.WEBSOCKET; let storedMessages = this.conversationStore - ? await this.conversationStore.getRecentMessages( + ? await this.conversationStore.getFullHistory( this.config.userId, this.config.sessionId, this.config.historyLimit, channelKey ) : []; - // First turn: seed conversation history with current workspace state - if (storedMessages.length === 0 && this.workspaceManager && this.conversationStore) { - const workspaceJSON = this.workspaceManager.serializeState(); - const content = `[Workspace State]\n\`\`\`json\n${workspaceJSON}\n\`\`\``; - await this.conversationStore.saveMessage( - this.config.userId, this.config.sessionId, - 'workspace', content, { isWorkspaceContext: true }, channelKey - ); - storedMessages = await this.conversationStore.getRecentMessages( - this.config.userId, this.config.sessionId, this.config.historyLimit, channelKey - ); - } - const history = this.conversationStore ? this.conversationStore.toLangChainMessages(storedMessages) : []; this.config.logger.debug({ historyLength: history.length }, 'Conversation history loaded'); + // Inject current workspace state fresh on every turn — not persisted to conversation history + const workspaceContext = this.workspaceManager + ? `[Workspace State]\n\`\`\`json\n${this.workspaceManager.serializeState()}\n\`\`\`` + : undefined; + // 4. Get the configured model this.config.logger.debug('Routing to model'); const { model, middleware } = await this.modelRouter.route( @@ -773,7 +775,7 @@ export class AgentHarness { this.config.logger.info({ modelName: model.constructor.name }, 'Model selected'); // 5. 
Build LangChain messages - const langchainMessages = this.buildLangChainMessages(systemPrompt, history, message.content); + const langchainMessages = this.buildLangChainMessages(systemPrompt, history, workspaceContext, message.content); this.config.logger.debug({ messageCount: langchainMessages.length }, 'LangChain messages built'); // 6. Get tools for main agent from registry @@ -899,6 +901,22 @@ } } + /** + * Stream a greeting response for first-time users. + * Sends the welcome prompt (prompts/welcome.md) through the normal message pipeline. + */ + async *streamGreeting(): AsyncGenerator { + const content = await AgentHarness.loadWelcomePrompt(); + const greetingMessage: InboundMessage = { + messageId: `greeting_${Date.now()}`, + userId: this.config.userId, + sessionId: this.config.sessionId, + content, + timestamp: new Date(), + }; + yield* this.streamMessage(greetingMessage); + } + /** * Handle incoming message from user. * Consumes streamMessage and dispatches events to the channel adapter for @@ -944,11 +962,13 @@ private buildLangChainMessages( systemPrompt: string, history: BaseMessage[], + workspaceContext: string | undefined, currentUserMessage: string ): BaseMessage[] { return [ new SystemMessage(systemPrompt), ...history, + ...(workspaceContext ? [new HumanMessage(workspaceContext)] : []), new HumanMessage(currentUserMessage), ]; } @@ -1077,26 +1097,6 @@ return { cleanedResult: result, images: [] }; } - /** - * Register workspace path triggers to record state changes into conversation history. - */ - private registerWorkspaceTriggers(): void { - if (!this.workspaceManager || !this.conversationStore) return; - const channelKey = this.config.channelType ?? 
ChannelType.WEBSOCKET; - - for (const store of ['shapes', 'indicators', 'chartState']) { - this.workspaceManager.onPathChange(`/${store}/*`, async (_old: unknown, newVal: unknown, ctx: PathTriggerContext) => { - const content = `[Workspace Update] ${ctx.store}${ctx.path}\n${JSON.stringify(newVal, null, 2)}`; - await this.conversationStore!.saveMessage( - this.config.userId, this.config.sessionId, - 'workspace', content, - { isWorkspaceUpdate: true, store: ctx.store, seq: ctx.seq }, - channelKey - ); - }); - } - } - /** * End the session: flush conversation to cold storage, then release resources. * Called by channel handlers on disconnect, session expiry, or graceful shutdown. diff --git a/gateway/src/harness/memory/conversation-store.ts b/gateway/src/harness/memory/conversation-store.ts index 93b10848..1e04fd4f 100644 --- a/gateway/src/harness/memory/conversation-store.ts +++ b/gateway/src/harness/memory/conversation-store.ts @@ -163,7 +163,7 @@ export class ConversationStore { * Convert stored messages to LangChain message format */ toLangChainMessages(messages: StoredMessage[]): BaseMessage[] { - return messages.map((msg) => { + return messages.filter(msg => msg.role !== 'workspace').map((msg) => { switch (msg.role) { case 'user': return new HumanMessage(msg.content); diff --git a/gateway/src/harness/prompts/welcome.md b/gateway/src/harness/prompts/welcome.md new file mode 100644 index 00000000..673d1c84 --- /dev/null +++ b/gateway/src/harness/prompts/welcome.md @@ -0,0 +1 @@ +This is your first chat with a new user. Welcome them to Dexorder and describe who you are and what you can do. diff --git a/gateway/src/llm/middleware.ts b/gateway/src/llm/middleware.ts index f3507ec4..4ac4f567 100644 --- a/gateway/src/llm/middleware.ts +++ b/gateway/src/llm/middleware.ts @@ -42,13 +42,14 @@ export class AnthropicCachingMiddleware implements ModelMiddleware { addCacheControl(systemMsg); } - // 2. Tag the last cacheable message that isn't the current user input. 
- // The current user message is always the last element; we want the one before it. - // We look backwards for the last AIMessage or HumanMessage (excluding the final message). + // 2. Tag the last AIMessage (assistant turn) before the current user input. + // Workspace state is injected as a HumanMessage just before the current user input; + // by matching only 'ai' we ensure the cache boundary is the last assistant response, + // leaving workspace context and the new user input uncached (always fresh). const candidates = result.slice(0, -1); for (let i = candidates.length - 1; i >= 0; i--) { const t = candidates[i]._getType(); - if (t === 'ai' || t === 'human') { + if (t === 'ai') { addCacheControl(candidates[i]); break; }