Inject workspace state fresh each turn instead of caching it in conversation history; add welcome.md greeting for first-time users

This commit is contained in:
2026-04-13 22:24:12 -04:00
parent 45a21ac933
commit dacfa9c1a3
5 changed files with 70 additions and 44 deletions

View File

@@ -257,7 +257,8 @@ export class WebSocketHandler {
})
);
// Replay conversation history so the UI pre-populates on reconnect
// Replay conversation history so the UI pre-populates on reconnect;
// greet new users on their first conversation
if (this.config.conversationService) {
const history = await this.config.conversationService.getHistory(
authContext.userId,
@@ -265,6 +266,29 @@ export class WebSocketHandler {
);
if (history.length > 0) {
socket.send(JSON.stringify({ type: 'conversation_history', messages: history }));
} else {
// First conversation — auto-send greeting prompt and stream the response
socket.send(JSON.stringify({ type: 'agent_chunk', content: '', done: false }));
for await (const event of harness!.streamGreeting()) {
const e = event as HarnessEvent;
switch (e.type) {
case 'chunk':
socket.send(JSON.stringify({ type: 'agent_chunk', content: e.content, done: false }));
break;
case 'tool_call':
socket.send(JSON.stringify({ type: 'agent_tool_call', toolName: e.toolName, label: e.label }));
break;
case 'image':
socket.send(JSON.stringify({ type: 'image', data: e.data, mimeType: e.mimeType, caption: e.caption }));
break;
case 'error':
socket.send(JSON.stringify({ type: 'text', text: `An error occurred during greeting.` }));
break;
case 'done':
break;
}
}
socket.send(JSON.stringify({ type: 'agent_chunk', content: '', done: true }));
}
}

View File

@@ -11,7 +11,7 @@ import { LLMProviderFactory, type ProviderConfig } from '../llm/provider.js';
import { ModelRouter, RoutingStrategy } from '../llm/router.js';
import type { ModelMiddleware } from '../llm/middleware.js';
import type { WorkspaceManager } from '../workspace/workspace-manager.js';
import type { ChannelAdapter, PathTriggerContext } from '../workspace/index.js';
import type { ChannelAdapter } from '../workspace/index.js';
import type { ResearchSubagent } from './subagents/research/index.js';
import type { IndicatorSubagent } from './subagents/indicator/index.js';
import type { WebExploreSubagent } from './subagents/web-explore/index.js';
@@ -79,6 +79,7 @@ export interface AgentHarnessConfig extends HarnessSessionConfig {
*/
export class AgentHarness {
private static systemPromptTemplate: string | null = null;
private static welcomePrompt: string | null = null;
private config: AgentHarnessConfig;
private modelFactory: LLMProviderFactory;
@@ -117,7 +118,6 @@ export class AgentHarness {
logger: config.logger,
});
this.registerWorkspaceTriggers();
}
/**
@@ -131,6 +131,17 @@ export class AgentHarness {
return AgentHarness.systemPromptTemplate;
}
/**
 * Load the welcome prompt from prompts/welcome.md (cached on the class).
 *
 * Uses an explicit null check rather than truthiness: if the prompt file
 * is empty, the trimmed result is '' (falsy), and a truthiness check
 * would wrongly re-read the file on every call instead of caching it.
 *
 * @returns the trimmed contents of prompts/welcome.md
 */
private static async loadWelcomePrompt(): Promise<string> {
  if (AgentHarness.welcomePrompt === null) {
    const promptPath = join(__dirname, 'prompts', 'welcome.md');
    AgentHarness.welcomePrompt = (await readFile(promptPath, 'utf-8')).trim();
  }
  return AgentHarness.welcomePrompt;
}
/**
* Set the channel adapter (can be called after construction)
*/
@@ -716,7 +727,6 @@ export class AgentHarness {
*/
/**
 * Attach or replace the workspace manager after construction.
 * Workspace state is injected fresh into each turn rather than recorded
 * via path triggers, so no trigger registration happens here.
 */
setWorkspaceManager(workspace: WorkspaceManager): void {
  this.workspaceManager = workspace;
}
/**
@@ -738,29 +748,21 @@ export class AgentHarness {
// 2. Load recent conversation history
const channelKey = this.config.channelType ?? ChannelType.WEBSOCKET;
let storedMessages = this.conversationStore
? await this.conversationStore.getRecentMessages(
? await this.conversationStore.getFullHistory(
this.config.userId, this.config.sessionId, this.config.historyLimit, channelKey
)
: [];
// First turn: seed conversation history with current workspace state
if (storedMessages.length === 0 && this.workspaceManager && this.conversationStore) {
const workspaceJSON = this.workspaceManager.serializeState();
const content = `[Workspace State]\n\`\`\`json\n${workspaceJSON}\n\`\`\``;
await this.conversationStore.saveMessage(
this.config.userId, this.config.sessionId,
'workspace', content, { isWorkspaceContext: true }, channelKey
);
storedMessages = await this.conversationStore.getRecentMessages(
this.config.userId, this.config.sessionId, this.config.historyLimit, channelKey
);
}
const history = this.conversationStore
? this.conversationStore.toLangChainMessages(storedMessages)
: [];
this.config.logger.debug({ historyLength: history.length }, 'Conversation history loaded');
// Inject current workspace state fresh on every turn — not persisted to conversation history
const workspaceContext = this.workspaceManager
? `[Workspace State]\n\`\`\`json\n${this.workspaceManager.serializeState()}\n\`\`\``
: undefined;
// 4. Get the configured model
this.config.logger.debug('Routing to model');
const { model, middleware } = await this.modelRouter.route(
@@ -773,7 +775,7 @@ export class AgentHarness {
this.config.logger.info({ modelName: model.constructor.name }, 'Model selected');
// 5. Build LangChain messages
const langchainMessages = this.buildLangChainMessages(systemPrompt, history, message.content);
const langchainMessages = this.buildLangChainMessages(systemPrompt, history, workspaceContext, message.content);
this.config.logger.debug({ messageCount: langchainMessages.length }, 'LangChain messages built');
// 6. Get tools for main agent from registry
@@ -899,6 +901,22 @@ export class AgentHarness {
}
}
/**
 * Stream a greeting response for first-time users.
 * Loads the welcome prompt from prompts/welcome.md and pushes it through
 * the normal streamMessage pipeline, re-yielding every harness event so
 * channel handlers can render the greeting exactly like a normal reply.
 */
async *streamGreeting(): AsyncGenerator<HarnessEvent> {
  const welcomeText = await AgentHarness.loadWelcomePrompt();
  const now = new Date();
  const greeting: InboundMessage = {
    messageId: `greeting_${now.getTime()}`,
    userId: this.config.userId,
    sessionId: this.config.sessionId,
    content: welcomeText,
    timestamp: now,
  };
  yield* this.streamMessage(greeting);
}
/**
* Handle incoming message from user.
* Consumes streamMessage and dispatches events to the channel adapter for
@@ -944,11 +962,13 @@ export class AgentHarness {
/**
 * Assemble the ordered LangChain message list for a model call:
 * system prompt, prior conversation history, the freshly-serialized
 * workspace state (when present, as a HumanMessage just before the new
 * input), and finally the current user message.
 */
private buildLangChainMessages(
  systemPrompt: string,
  history: BaseMessage[],
  workspaceContext: string | undefined,
  currentUserMessage: string
): BaseMessage[] {
  const messages: BaseMessage[] = [new SystemMessage(systemPrompt)];
  messages.push(...history);
  if (workspaceContext) {
    messages.push(new HumanMessage(workspaceContext));
  }
  messages.push(new HumanMessage(currentUserMessage));
  return messages;
}
@@ -1077,26 +1097,6 @@ export class AgentHarness {
return { cleanedResult: result, images: [] };
}
/**
* Register workspace path triggers to record state changes into conversation history.
*/
private registerWorkspaceTriggers(): void {
if (!this.workspaceManager || !this.conversationStore) return;
const channelKey = this.config.channelType ?? ChannelType.WEBSOCKET;
for (const store of ['shapes', 'indicators', 'chartState']) {
this.workspaceManager.onPathChange(`/${store}/*`, async (_old: unknown, newVal: unknown, ctx: PathTriggerContext) => {
const content = `[Workspace Update] ${ctx.store}${ctx.path}\n${JSON.stringify(newVal, null, 2)}`;
await this.conversationStore!.saveMessage(
this.config.userId, this.config.sessionId,
'workspace', content,
{ isWorkspaceUpdate: true, store: ctx.store, seq: ctx.seq },
channelKey
);
});
}
}
/**
* End the session: flush conversation to cold storage, then release resources.
* Called by channel handlers on disconnect, session expiry, or graceful shutdown.

View File

@@ -163,7 +163,7 @@ export class ConversationStore {
* Convert stored messages to LangChain message format
*/
toLangChainMessages(messages: StoredMessage[]): BaseMessage[] {
return messages.map((msg) => {
return messages.filter(msg => msg.role !== 'workspace').map((msg) => {
switch (msg.role) {
case 'user':
return new HumanMessage(msg.content);

View File

@@ -0,0 +1 @@
This is your first chat with a new user. Welcome them to Dexorder and describe who you are and what you can do.

View File

@@ -42,13 +42,14 @@ export class AnthropicCachingMiddleware implements ModelMiddleware {
addCacheControl(systemMsg);
}
// 2. Tag the last cacheable message that isn't the current user input.
// The current user message is always the last element; we want the one before it.
// We look backwards for the last AIMessage or HumanMessage (excluding the final message).
// 2. Tag the last AIMessage (assistant turn) before the current user input.
// Workspace state is injected as a HumanMessage just before the current user input;
// by matching only 'ai' we ensure the cache boundary is the last assistant response,
// leaving workspace context and the new user input uncached (always fresh).
const candidates = result.slice(0, -1);
for (let i = candidates.length - 1; i >= 0; i--) {
const t = candidates[i]._getType();
if (t === 'ai' || t === 'human') {
if (t === 'ai') {
addCacheControl(candidates[i]);
break;
}