diff --git a/.idea/ai.iml b/.idea/ai.iml
index bb280a92..f0cd7df8 100644
--- a/.idea/ai.iml
+++ b/.idea/ai.iml
@@ -5,6 +5,7 @@
+
@@ -17,6 +18,8 @@
+
+
diff --git a/deploy/k8s/dev/kustomization.yaml b/deploy/k8s/dev/kustomization.yaml
index 550b77f5..538a8586 100644
--- a/deploy/k8s/dev/kustomization.yaml
+++ b/deploy/k8s/dev/kustomization.yaml
@@ -212,6 +212,27 @@ generatorOptions:
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/flink/src/main/java/com/dexorder/flink/iceberg/SchemaInitializer.java b/flink/src/main/java/com/dexorder/flink/iceberg/SchemaInitializer.java
index b6f385ee..bf600c38 100644
--- a/flink/src/main/java/com/dexorder/flink/iceberg/SchemaInitializer.java
+++ b/flink/src/main/java/com/dexorder/flink/iceberg/SchemaInitializer.java
@@ -80,7 +80,8 @@ public class SchemaInitializer {
*/
// Bump this when the schema changes. Tables with a different (or missing) version
// will be dropped and recreated. Increment by 1 for each incompatible change.
- private static final String OHLC_SCHEMA_VERSION = "1";
+ // v2: open/high/low/close changed from required to optional to support null gap bars
+ private static final String OHLC_SCHEMA_VERSION = "2";
private static final String SCHEMA_VERSION_PROP = "app.schema.version";
private void initializeOhlcTable() {
@@ -109,7 +110,7 @@ public class SchemaInitializer {
Table existing = catalog.loadTable(tableId);
String existingVersion = existing.properties().get(SCHEMA_VERSION_PROP);
if (!OHLC_SCHEMA_VERSION.equals(existingVersion)) {
- LOG.warn("Table {} has schema version '{}', expected '{}' — manual migration required",
+ LOG.warn("Table {} has schema version '{}', expected '{}' — skipping (manual migration required if needed)",
tableId, existingVersion, OHLC_SCHEMA_VERSION);
}
LOG.info("Table {} already exists at schema version {} — skipping creation", tableId, existingVersion);
@@ -127,11 +128,11 @@ public class SchemaInitializer {
required(2, "period_seconds", Types.IntegerType.get(), "OHLC period in seconds"),
required(3, "timestamp", Types.LongType.get(), "Candle timestamp in microseconds since epoch"),
- // OHLC price data
- required(4, "open", Types.LongType.get(), "Opening price"),
- required(5, "high", Types.LongType.get(), "Highest price"),
- required(6, "low", Types.LongType.get(), "Lowest price"),
- required(7, "close", Types.LongType.get(), "Closing price"),
+ // OHLC price data — optional to support gap bars (null = no trades that period)
+ optional(4, "open", Types.LongType.get(), "Opening price"),
+ optional(5, "high", Types.LongType.get(), "Highest price"),
+ optional(6, "low", Types.LongType.get(), "Lowest price"),
+ optional(7, "close", Types.LongType.get(), "Closing price"),
// Volume data
optional(8, "volume", Types.LongType.get(), "Total volume"),
diff --git a/flink/src/main/java/com/dexorder/flink/publisher/OHLCBatchDeserializer.java b/flink/src/main/java/com/dexorder/flink/publisher/OHLCBatchDeserializer.java
index bce8a74b..10972e31 100644
--- a/flink/src/main/java/com/dexorder/flink/publisher/OHLCBatchDeserializer.java
+++ b/flink/src/main/java/com/dexorder/flink/publisher/OHLCBatchDeserializer.java
@@ -65,11 +65,11 @@ public class OHLCBatchDeserializer implements DeserializationSchema<OHLCBatch> {
+            if (gapCount > 0) {
+ LOG.info("Committed {} rows ({} gap bars) to Iceberg for request_id={}",
+ batch.getRowCount(), gapCount, batch.getRequestId());
+ } else {
+ LOG.info("Committed {} rows to Iceberg for request_id={}", batch.getRowCount(), batch.getRequestId());
+ }
// Emit batch downstream only after successful commit
out.collect(batch);
diff --git a/gateway/src/channels/websocket-handler.ts b/gateway/src/channels/websocket-handler.ts
index 3fd969ad..461a8796 100644
--- a/gateway/src/channels/websocket-handler.ts
+++ b/gateway/src/channels/websocket-handler.ts
@@ -16,6 +16,7 @@ import {
type SnapshotMessage,
type PatchMessage,
} from '../workspace/index.js';
+import { resolutionToSeconds } from '../types/ohlc.js';
/**
* Safe JSON stringifier that handles BigInt values
@@ -486,7 +487,7 @@ export class WebSocketHandler {
}
const history = await ohlcService.fetchOHLC(
payload.symbol,
- payload.resolution,
+ resolutionToSeconds(payload.resolution),
payload.from_time,
payload.to_time,
payload.countback
diff --git a/gateway/src/clients/duckdb-client.ts b/gateway/src/clients/duckdb-client.ts
index d0f486e4..4194cd43 100644
--- a/gateway/src/clients/duckdb-client.ts
+++ b/gateway/src/clients/duckdb-client.ts
@@ -420,7 +420,7 @@ export class DuckDBClient {
WHERE ticker = ?
AND period_seconds = ?
AND timestamp >= ?
- AND timestamp <= ?
+ AND timestamp < ?
ORDER BY timestamp ASC
`;
@@ -441,10 +441,11 @@ export class DuckDBClient {
count: rows.length
}, 'Loaded OHLC data from Iceberg');
- // Convert timestamp strings to numbers (microseconds as Number is fine for display)
+ // Keep timestamp as bigint to preserve full microsecond precision.
+ // Convert to seconds (divide first) only when producing TradingView bars.
return rows.map((row: any) => ({
...row,
- timestamp: Number(row.timestamp)
+ timestamp: BigInt(row.timestamp)
}));
} catch (error: any) {
this.logger.error({
@@ -484,7 +485,7 @@ export class DuckDBClient {
WHERE ticker = ?
AND period_seconds = ?
AND timestamp >= ?
- AND timestamp <= ?
+ AND timestamp < ?
`;
const params = [
@@ -525,6 +526,7 @@ export class DuckDBClient {
// For now, simple check: if we have any data, assume complete
// TODO: Implement proper gap detection by checking for missing periods
const periodMicros = BigInt(period_seconds) * 1000000n;
+ // end_time is exclusive, so expected count = (end - start) / period (no +1)
const expectedBars = Number((end_time - start_time) / periodMicros);
if (data.length < expectedBars * 0.95) { // Allow 5% tolerance
diff --git a/gateway/src/clients/iceberg-client.ts b/gateway/src/clients/iceberg-client.ts
index 4a6c62b5..4a87eb68 100644
--- a/gateway/src/clients/iceberg-client.ts
+++ b/gateway/src/clients/iceberg-client.ts
@@ -39,7 +39,7 @@ export interface IcebergMessage {
id: string;
user_id: string;
session_id: string;
- role: 'user' | 'assistant' | 'system';
+ role: 'user' | 'assistant' | 'system' | 'workspace';
content: string;
metadata: string; // JSON string
timestamp: number; // microseconds
diff --git a/gateway/src/harness/agent-harness.ts b/gateway/src/harness/agent-harness.ts
index dbc63ce2..bb284377 100644
--- a/gateway/src/harness/agent-harness.ts
+++ b/gateway/src/harness/agent-harness.ts
@@ -8,8 +8,9 @@ import type { InboundMessage, OutboundMessage } from '../types/messages.js';
import { MCPClientConnector } from './mcp-client.js';
import { LLMProviderFactory, type ProviderConfig } from '../llm/provider.js';
import { ModelRouter, RoutingStrategy } from '../llm/router.js';
+import type { ModelMiddleware } from '../llm/middleware.js';
import type { WorkspaceManager } from '../workspace/workspace-manager.js';
-import type { ChannelAdapter } from '../workspace/index.js';
+import type { ChannelAdapter, PathTriggerContext } from '../workspace/index.js';
import type { ResearchSubagent } from './subagents/research/index.js';
import type { DynamicStructuredTool } from '@langchain/core/tools';
import { getToolRegistry } from '../tools/tool-registry.js';
@@ -70,10 +71,10 @@ export class AgentHarness {
private config: AgentHarnessConfig;
private modelFactory: LLMProviderFactory;
private modelRouter: ModelRouter;
+ private middleware: ModelMiddleware | undefined;
private mcpClient: MCPClientConnector;
private workspaceManager?: WorkspaceManager;
private channelAdapter?: ChannelAdapter;
- private isFirstMessage: boolean = true;
private researchSubagent?: ResearchSubagent;
private availableMCPTools: MCPToolInfo[] = [];
private researchImageCapture: Array<{ data: string; mimeType: string }> = [];
@@ -94,6 +95,8 @@ export class AgentHarness {
mcpServerUrl: config.mcpServerUrl,
logger: config.logger,
});
+
+ this.registerWorkspaceTriggers();
}
/**
@@ -193,7 +196,7 @@ export class AgentHarness {
const { createResearchSubagent } = await import('./subagents/research/index.js');
// Create a model for the research subagent
- const model = await this.modelRouter.route(
+ const { model } = await this.modelRouter.route(
'research analysis', // dummy query
this.config.license,
RoutingStrategy.COMPLEXITY,
@@ -429,11 +432,25 @@ export class AgentHarness {
// 2. Load recent conversation history
const channelKey = this.config.channelType ?? ChannelType.WEBSOCKET;
- const storedMessages = this.conversationStore
+ let storedMessages = this.conversationStore
? await this.conversationStore.getRecentMessages(
this.config.userId, this.config.sessionId, this.config.historyLimit, channelKey
)
: [];
+
+ // First turn: seed conversation history with current workspace state
+ if (storedMessages.length === 0 && this.workspaceManager && this.conversationStore) {
+ const workspaceJSON = this.workspaceManager.serializeState();
+ const content = `[Workspace State]\n\`\`\`json\n${workspaceJSON}\n\`\`\``;
+ await this.conversationStore.saveMessage(
+ this.config.userId, this.config.sessionId,
+ 'workspace', content, { isWorkspaceContext: true }, channelKey
+ );
+ storedMessages = await this.conversationStore.getRecentMessages(
+ this.config.userId, this.config.sessionId, this.config.historyLimit, channelKey
+ );
+ }
+
const history = this.conversationStore
? this.conversationStore.toLangChainMessages(storedMessages)
: [];
@@ -441,12 +458,13 @@ export class AgentHarness {
// 4. Get the configured model
this.config.logger.debug('Routing to model');
- const model = await this.modelRouter.route(
+ const { model, middleware } = await this.modelRouter.route(
message.content,
this.config.license,
RoutingStrategy.COMPLEXITY,
this.config.userId
);
+ this.middleware = middleware;
this.config.logger.info({ modelName: model.constructor.name }, 'Model selected');
// 5. Build LangChain messages
@@ -489,6 +507,11 @@ export class AgentHarness {
'Tools loaded for main agent'
);
+ // Apply middleware (e.g. Anthropic prompt caching)
+ const processedMessages = this.middleware
+ ? this.middleware.processMessages(langchainMessages, tools)
+ : langchainMessages;
+
// 7. Bind tools to model
const modelWithTools = tools.length > 0 && model.bindTools ? model.bindTools(tools) : model;
@@ -501,7 +524,7 @@ export class AgentHarness {
// 8. Call LLM with tool calling loop
this.config.logger.info('Invoking LLM with tool support');
- const assistantMessage = await this.executeWithToolCalling(modelWithTools, langchainMessages, tools);
+ const assistantMessage = await this.executeWithToolCalling(modelWithTools, processedMessages, tools, 10);
this.config.logger.info(
{ responseLength: assistantMessage.length },
@@ -518,11 +541,6 @@ export class AgentHarness {
);
}
- // Mark first message as processed
- if (this.isFirstMessage) {
- this.isFirstMessage = false;
- }
-
return {
messageId: `msg_${Date.now()}`,
sessionId: message.sessionId,
@@ -556,16 +574,10 @@ export class AgentHarness {
  private async buildSystemPrompt(): Promise<string> {
// Load template and populate with license info
const template = await AgentHarness.loadSystemPromptTemplate();
- let prompt = template
+ const prompt = template
.replace('{{licenseType}}', this.config.license.licenseType)
.replace('{{features}}', JSON.stringify(this.config.license.features, null, 2));
- // Add full workspace state from WorkspaceManager (first message only)
- if (this.isFirstMessage && this.workspaceManager) {
- const workspaceJSON = this.workspaceManager.serializeState();
- prompt += `\n\n# Current Workspace State\n\`\`\`json\n${workspaceJSON}\n\`\`\``;
- }
-
return prompt;
}
@@ -574,9 +586,14 @@ export class AgentHarness {
*/
private getToolLabel(toolName: string): string {
    const labels: Record<string, string> = {
- research_agent: 'Researching...',
+ research: 'Researching...',
get_chart_data: 'Fetching chart data...',
symbol_lookup: 'Looking up symbol...',
+ category_list: 'Seeing what we have...',
+ category_edit: 'Coding...',
+ category_write: 'Coding...',
+ category_read: 'Inspecting...',
+ execute_research: 'Running script...',
};
return labels[toolName] ?? `Running ${toolName}...`;
}
@@ -685,6 +702,26 @@ export class AgentHarness {
return result;
}
+ /**
+ * Register workspace path triggers to record state changes into conversation history.
+ */
+ private registerWorkspaceTriggers(): void {
+ if (!this.workspaceManager || !this.conversationStore) return;
+ const channelKey = this.config.channelType ?? ChannelType.WEBSOCKET;
+
+ for (const store of ['shapes', 'indicators', 'chartState']) {
+ this.workspaceManager.onPathChange(`/${store}/*`, async (_old: unknown, newVal: unknown, ctx: PathTriggerContext) => {
+ const content = `[Workspace Update] ${ctx.store}${ctx.path}\n${JSON.stringify(newVal, null, 2)}`;
+ await this.conversationStore!.saveMessage(
+ this.config.userId, this.config.sessionId,
+ 'workspace', content,
+ { isWorkspaceUpdate: true, store: ctx.store, seq: ctx.seq },
+ channelKey
+ );
+ });
+ }
+ }
+
/**
* End the session: flush conversation to cold storage, then release resources.
* Called by channel handlers on disconnect, session expiry, or graceful shutdown.
diff --git a/gateway/src/harness/memory/conversation-store.ts b/gateway/src/harness/memory/conversation-store.ts
index afbf81bc..93b10848 100644
--- a/gateway/src/harness/memory/conversation-store.ts
+++ b/gateway/src/harness/memory/conversation-store.ts
@@ -11,7 +11,7 @@ export interface StoredMessage {
id: string;
userId: string;
sessionId: string;
- role: 'user' | 'assistant' | 'system';
+ role: 'user' | 'assistant' | 'system' | 'workspace';
content: string;
timestamp: number; // microseconds (Iceberg convention)
  metadata?: Record<string, unknown>;
@@ -44,7 +44,7 @@ export class ConversationStore {
async saveMessage(
userId: string,
sessionId: string,
- role: 'user' | 'assistant' | 'system',
+ role: 'user' | 'assistant' | 'system' | 'workspace',
content: string,
    metadata?: Record<string, unknown>,
channelType?: string
@@ -171,6 +171,8 @@ export class ConversationStore {
return new AIMessage(msg.content);
case 'system':
return new SystemMessage(msg.content);
+ case 'workspace':
+ return new HumanMessage(msg.content);
default:
throw new Error(`Unknown role: ${msg.role}`);
}
diff --git a/gateway/src/harness/prompts/system-prompt.md b/gateway/src/harness/prompts/system-prompt.md
index 8ef95240..371f2e15 100644
--- a/gateway/src/harness/prompts/system-prompt.md
+++ b/gateway/src/harness/prompts/system-prompt.md
@@ -59,6 +59,11 @@ Example usage:
- User: "Does Friday price action correlate with Monday?"
- You: Call research tool with instruction="Analyze correlation between Friday and Monday price action during NY trading hours (9:30-4:00 ET)", name="Friday-Monday Correlation"
+### category_list
+List existing research scripts (category="research").
+Use this before calling the research tool to check whether a relevant script already exists.
+If one does, pass its exact name to the research tool so the subagent updates it rather than creating a new one.
+
### symbol-lookup
Look up trading symbols and get metadata.
Use this when users mention tickers or need symbol information.
diff --git a/gateway/src/harness/subagents/research/config.yaml b/gateway/src/harness/subagents/research/config.yaml
index 6dc6f7dc..4dcb3fe3 100644
--- a/gateway/src/harness/subagents/research/config.yaml
+++ b/gateway/src/harness/subagents/research/config.yaml
@@ -12,6 +12,7 @@ maxTokens: 8192
memoryFiles:
- api-reference.md
- usage-examples.md
+ - pandas-ta-reference.md
# System prompt file
systemPromptFile: system-prompt.md
diff --git a/gateway/src/harness/subagents/research/memory/pandas-ta-reference.md b/gateway/src/harness/subagents/research/memory/pandas-ta-reference.md
new file mode 100644
index 00000000..c38d0eeb
--- /dev/null
+++ b/gateway/src/harness/subagents/research/memory/pandas-ta-reference.md
@@ -0,0 +1,227 @@
+# pandas-ta Reference for Research Scripts
+
+The sandbox environment uses **pandas-ta** as the standard indicator library. Always use it for technical indicator calculations; do not write manual rolling/ewm implementations.
+
+```python
+import pandas_ta as ta
+```
+
+## Calling Convention
+
+pandas-ta functions accept a Series (or OHLCV columns) plus keyword parameters that match pandas-ta's documented argument names:
+
+```python
+# Single-series indicator
+rsi = ta.rsi(df['close'], length=14) # returns Series
+
+# OHLCV indicator
+atr = ta.atr(df['high'], df['low'], df['close'], length=14)
+
+# Multi-output indicator (returns DataFrame)
+macd_df = ta.macd(df['close'], fast=12, slow=26, signal=9)
+# columns: MACD_12_26_9, MACDh_12_26_9, MACDs_12_26_9
+
+bbands_df = ta.bbands(df['close'], length=20, std=2.0)
+# columns: BBL_20_2.0, BBM_20_2.0, BBU_20_2.0, BBB_20_2.0, BBP_20_2.0
+```
+
+## Default Parameters
+
+Key defaults to keep in mind:
+- Most period/length indicators: `length=14` (use `length=` not `timeperiod=`)
+- `bbands`: `length=20, std=2.0` (note: single `std`, not separate upper/lower)
+- `macd`: `fast=12, slow=26, signal=9`
+- `stoch`: `k=14, d=3, smooth_k=3`
+- `psar`: `af0=0.02, af=0.02, max_af=0.2`
+- `vwap`: `anchor='D'` (requires DatetimeIndex)
+- `ichimoku`: `tenkan=9, kijun=26, senkou=52`
+
+## Available Indicators
+
+These match the indicators supported by the TradingView web client. Use the pandas-ta function name shown here (lowercase):
+
+### Overlap / Moving Averages — plotted on the price pane
+
+| Function | Description |
+|----------|-------------|
+| `sma` | Simple Moving Average — plain arithmetic mean over `length` periods |
+| `ema` | Exponential Moving Average — more weight on recent prices |
+| `wma` | Weighted Moving Average — linearly increasing weights |
+| `dema` | Double EMA — two layers of EMA to reduce lag |
+| `tema` | Triple EMA — three layers of EMA, even less lag than DEMA |
+| `trima` | Triangular MA — double-smoothed SMA, very smooth |
+| `kama` | Kaufman Adaptive MA — adapts speed to market noise/trending conditions |
+| `t3` | T3 Moving Average — Tillson's smooth, low-lag MA using six EMAs |
+| `hma` | Hull MA — very low-lag MA using WMAs |
+| `alma` | Arnaud Legoux MA — Gaussian-weighted MA with reduced lag and noise |
+| `midpoint` | Midpoint of close over `length` periods: (highest + lowest) / 2 |
+| `midprice` | Midpoint of high/low over `length` periods |
+| `supertrend` | Trend-following band (ATR-based) that flips above/below price |
+| `ichimoku` | Ichimoku Cloud — multi-line Japanese trend/support/resistance system |
+| `vwap` | Volume-Weighted Average Price — average price weighted by volume, resets on `anchor` |
+| `vwma` | Volume-Weighted MA — like SMA but candles weighted by volume |
+| `bbands` | Bollinger Bands — SMA ± N standard deviations; returns upper, mid, lower bands |
+
+### Momentum — typically plotted in a separate pane
+
+| Function | Description |
+|----------|-------------|
+| `rsi` | Relative Strength Index — 0–100 oscillator measuring speed of price changes |
+| `macd` | MACD — difference of two EMAs plus signal line and histogram |
+| `stoch` | Stochastic Oscillator — %K/%D, measures close vs recent high/low range |
+| `stochrsi` | Stochastic RSI — applies stochastic formula to RSI values |
+| `cci` | Commodity Channel Index — deviation of price from its statistical mean |
+| `willr` | Williams %R — inverse stochastic, −100 to 0 oscillator |
+| `mom` | Momentum — raw price change over `length` periods |
+| `roc` | Rate of Change — percentage price change over `length` periods |
+| `trix` | TRIX — 1-period % change of a triple-smoothed EMA |
+| `cmo` | Chande Momentum Oscillator — ratio of up/down momentum, −100 to 100 |
+| `adx` | Average Directional Index — strength of trend (0–100, direction-agnostic) |
+| `aroon` | Aroon — measures how recently the highest/lowest price occurred; returns Up, Down, Oscillator |
+| `ao` | Awesome Oscillator — difference of 5- and 34-period simple MAs of midprice |
+| `bop` | Balance of Power — measures buying vs selling pressure: (close−open)/(high−low) |
+| `uo` | Ultimate Oscillator — weighted combination of buying-pressure ratios over three lookback periods (fast/medium/slow) |
+| `apo` | Absolute Price Oscillator — difference between two EMAs (like MACD without signal line) |
+| `mfi` | Money Flow Index — RSI-like oscillator using price × volume |
+| `coppock` | Coppock Curve — long-term momentum oscillator based on rate-of-change |
+| `dpo` | Detrended Price Oscillator — removes trend to show cycle oscillations |
+| `fisher` | Fisher Transform — converts price into a Gaussian normal distribution |
+| `rvgi` | Relative Vigor Index — compares close−open to high−low to measure trend vigor |
+| `kst` | Know Sure Thing — momentum oscillator from four ROC periods, smoothed |
+
+### Volatility — plotted on price pane or separate
+
+| Function | Description |
+|----------|-------------|
+| `atr` | Average True Range — average of true range (greatest of H−L, H−prevC, L−prevC) |
+| `kc` | Keltner Channels — EMA ± N × ATR bands around price |
+| `donchian` | Donchian Channels — highest high / lowest low over `length` periods |
+
+### Volume — plotted in separate pane
+
+| Function | Description |
+|----------|-------------|
+| `obv` | On Balance Volume — cumulative volume, added on up days, subtracted on down days |
+| `ad` | Accumulation/Distribution — running total of the money flow multiplier × volume |
+| `adosc` | Chaikin Oscillator — EMA difference of the A/D line |
+| `cmf` | Chaikin Money Flow — sum of (money flow volume) / sum of volume over `length` |
+| `eom` | Ease of Movement — relates price change to volume; high = price moves easily |
+| `efi` | Elder's Force Index — combines price change direction with volume magnitude |
+| `kvo` | Klinger Volume Oscillator — EMA difference of volume force |
+| `pvt` | Price Volume Trend — cumulative: volume × percentage price change |
+
+### Statistics / Price Transforms
+
+| Function | Description |
+|----------|-------------|
+| `stdev` | Standard Deviation of close over `length` periods |
+| `linreg` | Linear Regression Curve — least-squares line endpoint value over `length` periods |
+| `slope` | Linear Regression Slope — gradient of the regression line |
+| `hl2` | Median Price — (high + low) / 2 |
+| `hlc3` | Typical Price — (high + low + close) / 3 |
+| `ohlc4` | Average Price — (open + high + low + close) / 4 |
+
+### Trend
+
+| Function | Description |
+|----------|-------------|
+| `psar` | Parabolic SAR — trailing stop-and-reverse dots that follow price |
+| `vortex` | Vortex Indicator — VI+ / VI− lines measuring upward vs downward trend movement |
+| `chop` | Choppiness Index — 0–100, high = choppy/sideways, low = strong trend |
+
+## Usage Examples
+
+### Single-output indicators
+
+```python
+import pandas_ta as ta
+
+df['rsi'] = ta.rsi(df['close'], length=14)
+df['ema_20'] = ta.ema(df['close'], length=20)
+df['sma_50'] = ta.sma(df['close'], length=50)
+df['atr'] = ta.atr(df['high'], df['low'], df['close'], length=14)
+df['obv'] = ta.obv(df['close'], df['volume'])
+df['adx'] = ta.adx(df['high'], df['low'], df['close'], length=14)['ADX_14']
+```
+
+### Multi-output indicators — extract columns by position
+
+```python
+# MACD → MACD_12_26_9, MACDh_12_26_9, MACDs_12_26_9
+macd_df = ta.macd(df['close'], fast=12, slow=26, signal=9)
+df['macd'] = macd_df.iloc[:, 0] # MACD line
+df['macd_hist'] = macd_df.iloc[:, 1] # Histogram
+df['macd_signal'] = macd_df.iloc[:, 2] # Signal line
+
+# Bollinger Bands → BBL, BBM, BBU, BBB, BBP
+bb_df = ta.bbands(df['close'], length=20, std=2.0)
+df['bb_lower'] = bb_df.iloc[:, 0] # BBL
+df['bb_mid'] = bb_df.iloc[:, 1] # BBM
+df['bb_upper'] = bb_df.iloc[:, 2] # BBU
+
+# Stochastic → STOCHk, STOCHd
+stoch_df = ta.stoch(df['high'], df['low'], df['close'], k=14, d=3, smooth_k=3)
+df['stoch_k'] = stoch_df.iloc[:, 0]
+df['stoch_d'] = stoch_df.iloc[:, 1]
+
+# Keltner Channels → KCLe, KCBe, KCUe
+kc_df = ta.kc(df['high'], df['low'], df['close'], length=20)
+df['kc_lower'] = kc_df.iloc[:, 0]
+df['kc_mid'] = kc_df.iloc[:, 1]
+df['kc_upper'] = kc_df.iloc[:, 2]
+
+# ADX → ADX_14, DMP_14, DMN_14
+adx_df = ta.adx(df['high'], df['low'], df['close'], length=14)
+df['adx'] = adx_df.iloc[:, 0] # ADX strength
+df['dmp'] = adx_df.iloc[:, 1] # +DI
+df['dmn'] = adx_df.iloc[:, 2] # -DI
+
+# Aroon → AROOND_14, AROONU_14, AROONOSC_14
+aroon_df = ta.aroon(df['high'], df['low'], length=14)
+df['aroon_down'] = aroon_df.iloc[:, 0]
+df['aroon_up'] = aroon_df.iloc[:, 1]
+
+# Donchian Channels → DCL, DCM, DCU
+dc_df = ta.donchian(df['high'], df['low'], lower_length=20, upper_length=20)
+df['dc_lower'] = dc_df.iloc[:, 0]
+df['dc_mid'] = dc_df.iloc[:, 1]
+df['dc_upper'] = dc_df.iloc[:, 2]
+```
+
+### Charting with indicators
+
+```python
+import pandas_ta as ta
+from dexorder.api import get_api
+import asyncio
+
+api = get_api()
+
+df = asyncio.run(api.data.historical_ohlc(
+ ticker="BINANCE:BTC/USDT",
+ period_seconds=3600,
+ start_time="2024-01-01",
+ end_time="2024-01-08",
+ extra_columns=["volume"]
+))
+
+# Compute indicators
+df['ema_20'] = ta.ema(df['close'], length=20)
+df['rsi'] = ta.rsi(df['close'], length=14)
+macd_df = ta.macd(df['close'])
+df['macd'] = macd_df.iloc[:, 0]
+df['macd_signal'] = macd_df.iloc[:, 2]
+
+# Main price chart with EMA overlay
+fig, ax = api.charting.plot_ohlc(df, title="BTC/USDT 1H", volume=True)
+ax.plot(df.index, df['ema_20'], label="EMA 20", color="orange", linewidth=1.5)
+ax.legend()
+
+# RSI panel
+rsi_ax = api.charting.add_indicator_panel(fig, df, columns=["rsi"], ylabel="RSI", ylim=(0, 100))
+rsi_ax.axhline(70, color='red', linestyle='--', alpha=0.5)
+rsi_ax.axhline(30, color='green', linestyle='--', alpha=0.5)
+
+# MACD panel
+api.charting.add_indicator_panel(fig, df, columns=["macd", "macd_signal"], ylabel="MACD")
+```
diff --git a/gateway/src/harness/subagents/research/memory/usage-examples.md b/gateway/src/harness/subagents/research/memory/usage-examples.md
index 1bf9d063..45a6e86d 100644
--- a/gateway/src/harness/subagents/research/memory/usage-examples.md
+++ b/gateway/src/harness/subagents/research/memory/usage-examples.md
@@ -112,10 +112,12 @@ fig, ax = api.charting.plot_ohlc(
### Adding Indicator Panels
+Use **pandas-ta** for all indicator calculations. Do not write manual rolling/ewm implementations.
+
```python
from dexorder.api import get_api
import asyncio
-import pandas as pd
+import pandas_ta as ta
api = get_api()
@@ -127,8 +129,9 @@ df = asyncio.run(api.data.historical_ohlc(
end_time="2021-12-21"
))
-# Calculate a simple moving average
-df['sma_20'] = df['close'].rolling(window=20).mean()
+# Calculate indicators using pandas-ta
+df['sma_20'] = ta.sma(df['close'], length=20)
+df['rsi'] = ta.rsi(df['close'], length=14)
# Create chart
fig, ax = api.charting.plot_ohlc(df, title="BTC/USDT with SMA")
@@ -138,7 +141,6 @@ ax.plot(df.index, df['sma_20'], label="SMA 20", color="blue", linewidth=2)
ax.legend()
# Add RSI indicator panel below
-df['rsi'] = calculate_rsi(df['close'], 14) # Your RSI calculation
rsi_ax = api.charting.add_indicator_panel(
fig, df,
columns=["rsi"],
@@ -149,12 +151,40 @@ rsi_ax.axhline(70, color='red', linestyle='--', alpha=0.5)
rsi_ax.axhline(30, color='green', linestyle='--', alpha=0.5)
```
+### Multi-Output Indicators
+
+Some pandas-ta indicators return a DataFrame. Extract the columns you need:
+
+```python
+import pandas_ta as ta
+
+# MACD returns: MACD_12_26_9, MACDh_12_26_9, MACDs_12_26_9
+macd_df = ta.macd(df['close'], fast=12, slow=26, signal=9)
+df['macd'] = macd_df.iloc[:, 0] # MACD line
+df['macd_hist'] = macd_df.iloc[:, 1] # Histogram
+df['macd_signal'] = macd_df.iloc[:, 2] # Signal line
+
+# Bollinger Bands returns: BBL, BBM, BBU, BBB, BBP
+bb_df = ta.bbands(df['close'], length=20, std=2.0)
+df['bb_upper'] = bb_df.iloc[:, 2] # BBU
+df['bb_mid'] = bb_df.iloc[:, 1] # BBM
+df['bb_lower'] = bb_df.iloc[:, 0] # BBL
+
+# Stochastic returns: STOCHk, STOCHd
+stoch_df = ta.stoch(df['high'], df['low'], df['close'], k=14, d=3, smooth_k=3)
+df['stoch_k'] = stoch_df.iloc[:, 0]
+df['stoch_d'] = stoch_df.iloc[:, 1]
+
+# ATR (uses high, low, close)
+df['atr'] = ta.atr(df['high'], df['low'], df['close'], length=14)
+```
+
## Complete Example
```python
from dexorder.api import get_api
import asyncio
-import pandas as pd
+import pandas_ta as ta
# Get API instance
api = get_api()
@@ -168,9 +198,9 @@ df = asyncio.run(api.data.historical_ohlc(
extra_columns=["volume"]
))
-# Add some analysis
-df['sma_20'] = df['close'].rolling(window=20).mean()
-df['sma_50'] = df['close'].rolling(window=50).mean()
+# Add moving averages using pandas-ta
+df['sma_20'] = ta.sma(df['close'], length=20)
+df['ema_50'] = ta.ema(df['close'], length=50)
# Create chart with volume
fig, ax = api.charting.plot_ohlc(
@@ -182,7 +212,7 @@ fig, ax = api.charting.plot_ohlc(
# Overlay moving averages
ax.plot(df.index, df['sma_20'], label="SMA 20", color="blue", linewidth=1.5)
-ax.plot(df.index, df['sma_50'], label="SMA 50", color="red", linewidth=1.5)
+ax.plot(df.index, df['ema_50'], label="EMA 50", color="red", linewidth=1.5)
ax.legend()
# Print summary statistics
diff --git a/gateway/src/harness/subagents/research/system-prompt.md b/gateway/src/harness/subagents/research/system-prompt.md
index 51e0c904..d660be45 100644
--- a/gateway/src/harness/subagents/research/system-prompt.md
+++ b/gateway/src/harness/subagents/research/system-prompt.md
@@ -51,7 +51,140 @@ The API provides two main components:
- `api.data` - DataAPI for fetching OHLC market data
- `api.charting` - ChartingAPI for creating financial charts
-See your knowledge base for complete API documentation and examples.
+See your knowledge base for complete API documentation, examples, and the full pandas-ta indicator reference (see `pandas-ta-reference.md`).
+
+## Technical Indicators — pandas-ta
+
+The sandbox environment uses **pandas-ta** as the standard indicator library. Always use it for technical indicator calculations; do not write manual rolling/ewm implementations.
+
+```python
+import pandas_ta as ta
+```
+
+### Calling Convention
+
+pandas-ta functions accept a Series (or OHLCV columns) plus keyword parameters that match pandas-ta's documented argument names:
+
+```python
+# Single-series indicator
+rsi = ta.rsi(df['close'], length=14) # returns Series
+
+# OHLCV indicator
+atr = ta.atr(df['high'], df['low'], df['close'], length=14)
+
+# Multi-output indicator (returns DataFrame)
+macd_df = ta.macd(df['close'], fast=12, slow=26, signal=9)
+# columns: MACD_12_26_9, MACDh_12_26_9, MACDs_12_26_9
+
+bbands_df = ta.bbands(df['close'], length=20, std=2.0)
+# columns: BBL_20_2.0, BBM_20_2.0, BBU_20_2.0, BBB_20_2.0, BBP_20_2.0
+```
+
+### Available Indicators (canonical list)
+
+These match the indicators supported by the TradingView web client. Use the pandas-ta function name shown here (lowercase):
+
+**Overlap / Moving Averages** — plotted on the price pane
+
+| Function | Description |
+|----------|-------------|
+| `sma` | Simple Moving Average — plain arithmetic mean over `length` periods |
+| `ema` | Exponential Moving Average — more weight on recent prices |
+| `wma` | Weighted Moving Average — linearly increasing weights |
+| `dema` | Double EMA — two layers of EMA to reduce lag |
+| `tema` | Triple EMA — three layers of EMA, even less lag than DEMA |
+| `trima` | Triangular MA — double-smoothed SMA, very smooth |
+| `kama` | Kaufman Adaptive MA — adapts speed to market noise/trending conditions |
+| `t3` | T3 Moving Average — Tillson's smooth, low-lag MA using six EMAs |
+| `hma` | Hull MA — very low-lag MA using WMAs |
+| `alma` | Arnaud Legoux MA — Gaussian-weighted MA with reduced lag and noise |
+| `midpoint` | Midpoint of close over `length` periods: (highest + lowest) / 2 |
+| `midprice` | Midpoint of high/low over `length` periods |
+| `supertrend` | Trend-following band (ATR-based) that flips above/below price |
+| `ichimoku` | Ichimoku Cloud — multi-line Japanese trend/support/resistance system |
+| `vwap` | Volume-Weighted Average Price — average price weighted by volume, resets on `anchor` |
+| `vwma` | Volume-Weighted MA — like SMA but candles weighted by volume |
+| `bbands` | Bollinger Bands — SMA ± N standard deviations; returns upper, mid, lower bands |
+
+**Momentum** — typically plotted in a separate pane
+
+| Function | Description |
+|----------|-------------|
+| `rsi` | Relative Strength Index — 0–100 oscillator measuring speed of price changes |
+| `macd` | MACD — difference of two EMAs plus signal line and histogram |
+| `stoch` | Stochastic Oscillator — %K/%D, measures close vs recent high/low range |
+| `stochrsi` | Stochastic RSI — applies stochastic formula to RSI values |
+| `cci` | Commodity Channel Index — deviation of price from its statistical mean |
+| `willr` | Williams %R — inverse stochastic, −100 to 0 oscillator |
+| `mom` | Momentum — raw price change over `length` periods |
+| `roc` | Rate of Change — percentage price change over `length` periods |
+| `trix` | TRIX — 1-period % change of a triple-smoothed EMA |
+| `cmo` | Chande Momentum Oscillator — ratio of up/down momentum, −100 to 100 |
+| `adx` | Average Directional Index — strength of trend (0–100, direction-agnostic) |
+| `aroon` | Aroon — measures how recently the highest/lowest price occurred; returns Up, Down, Oscillator |
+| `ao` | Awesome Oscillator — difference of 5- and 34-period simple MAs of midprice |
+| `bop` | Balance of Power — measures buying vs selling pressure: (close−open)/(high−low) |
+| `uo` | Ultimate Oscillator — weighted combo of three-period (fast/medium/slow) buying-pressure ratios |
+| `apo` | Absolute Price Oscillator — difference between two EMAs (like MACD without signal line) |
+| `mfi` | Money Flow Index — RSI-like oscillator using price × volume |
+| `coppock` | Coppock Curve — long-term momentum oscillator based on rate-of-change |
+| `dpo` | Detrended Price Oscillator — removes trend to show cycle oscillations |
+| `fisher` | Fisher Transform — converts price into a Gaussian normal distribution |
+| `rvgi` | Relative Vigor Index — compares close−open to high−low to measure trend vigor |
+| `kst` | Know Sure Thing — momentum oscillator from four ROC periods, smoothed |
+
+**Volatility** — plotted on price pane or separate
+
+| Function | Description |
+|----------|-------------|
+| `atr` | Average True Range — average of true range (greatest of H−L, H−prevC, L−prevC) |
+| `kc` | Keltner Channels — EMA ± N × ATR bands around price |
+| `donchian` | Donchian Channels — highest high / lowest low over `length` periods |
+
+**Volume** — plotted in separate pane
+
+| Function | Description |
+|----------|-------------|
+| `obv` | On Balance Volume — cumulative volume, added on up days, subtracted on down days |
+| `ad` | Accumulation/Distribution — running total of the money flow multiplier × volume |
+| `adosc` | Chaikin Oscillator — EMA difference of the A/D line |
+| `cmf` | Chaikin Money Flow — sum of (money flow volume) / sum of volume over `length` |
+| `eom` | Ease of Movement — relates price change to volume; high = price moves easily |
+| `efi` | Elder's Force Index — combines price change direction with volume magnitude |
+| `kvo` | Klinger Volume Oscillator — EMA difference of volume force |
+| `pvt` | Price Volume Trend — cumulative: volume × percentage price change |
+
+**Statistics / Price Transforms**
+
+| Function | Description |
+|----------|-------------|
+| `stdev` | Standard Deviation of close over `length` periods |
+| `linreg` | Linear Regression Curve — least-squares line endpoint value over `length` periods |
+| `slope` | Linear Regression Slope — gradient of the regression line |
+| `hl2` | Median Price — (high + low) / 2 |
+| `hlc3` | Typical Price — (high + low + close) / 3 |
+| `ohlc4` | Average Price — (open + high + low + close) / 4 |
+
+**Trend**
+
+| Function | Description |
+|----------|-------------|
+| `psar` | Parabolic SAR — trailing stop-and-reverse dots that follow price |
+| `vortex` | Vortex Indicator — VI+ / VI− lines measuring upward vs downward trend movement |
+| `chop` | Choppiness Index — 0–100, high = choppy/sideways, low = strong trend |
+
+### Default Parameters
+
+Key defaults to keep in mind:
+- Most period/length indicators: `length=14` (use `length=` not `timeperiod=`)
+- `bbands`: `length=20, std=2.0` (note: single `std`, not separate upper/lower)
+- `macd`: `fast=12, slow=26, signal=9`
+- `stoch`: `k=14, d=3, smooth_k=3`
+- `psar`: `af0=0.02, af=0.02, max_af=0.2`
+- `vwap`: `anchor='D'` (requires DatetimeIndex)
+- `ichimoku`: `tenkan=9, kijun=26, senkou=52`
+
+For multi-output indicator column extraction patterns and complete charting examples, fetch `pandas-ta-reference.md` from your knowledge base.
## Coding Loop Pattern
@@ -59,11 +192,9 @@ When a user requests analysis:
1. **Understand the request**: What data is needed? What analysis? What visualization?
-2. **Check for existing scripts**: Use `category_list` to see if a similar script exists
- - If exists and suitable: use `category_read` to review it
- - Consider editing existing script vs creating new one
+2. **Use the provided name**: The instruction will begin with `Research script name: "<name>"`. Always use that exact name when calling `category_write` or `category_edit`. Check first with `category_read` — if the script already exists, use `category_edit` to update it rather than creating a new one with `category_write`.
-3. **Write the script**: Use `category_write` (or `category_edit`)
+3. **Write the script**: Use `category_write` (new) or `category_edit` (existing)
- Write clean, well-commented Python code
- Include proper error handling
- Use appropriate ticker symbols, time ranges, and periods
@@ -106,10 +237,7 @@ When a user requests analysis:
- Add `conda_packages: ["package-name"]` to metadata
- Packages are auto-installed during validation
-- **Script naming**: Choose descriptive, unique names. Examples:
- - "BTC Weekly Analysis"
- - "ETH Volume Profile"
- - "Market Correlation Heatmap"
+- **Script naming**: Always use the name provided in the instruction (`Research script name: "<name>"`). Do not invent a different name.
- **Error handling**: Wrap data fetching in try/except to provide helpful error messages
diff --git a/gateway/src/llm/middleware.ts b/gateway/src/llm/middleware.ts
new file mode 100644
index 00000000..f3507ec4
--- /dev/null
+++ b/gateway/src/llm/middleware.ts
@@ -0,0 +1,89 @@
+import type { BaseMessage } from '@langchain/core/messages';
+import { SystemMessage, HumanMessage, AIMessage } from '@langchain/core/messages';
+import type { StructuredTool } from '@langchain/core/tools';
+
+/**
+ * Provider-agnostic hook to preprocess messages before an LLM call.
+ * Applied transparently by the harness; implementations are provider-specific.
+ */
+export interface ModelMiddleware {
+ processMessages(messages: BaseMessage[], tools: StructuredTool[]): BaseMessage[];
+}
+
+/**
+ * No-op implementation for providers that don't support prompt caching.
+ */
+export class NoopMiddleware implements ModelMiddleware {
+ processMessages(messages: BaseMessage[]): BaseMessage[] {
+ return messages;
+ }
+}
+
+/**
+ * Mirrors Python's AnthropicPromptCachingMiddleware logic.
+ *
+ * Tags with cache_control: { type: 'ephemeral' }:
+ * 1. The system message's last content block (stable prompt prefix — always a cache hit after turn 1)
+ * 2. The last non-current cacheable message (AIMessage or HumanMessage before the final user message)
+ * so the full conversation prefix is cached on the next turn.
+ *
+ * Requires ChatAnthropic to be configured with:
+ * clientOptions: { defaultHeaders: { 'anthropic-beta': 'prompt-caching-2024-07-31' } }
+ */
+export class AnthropicCachingMiddleware implements ModelMiddleware {
+ processMessages(messages: BaseMessage[], _tools: StructuredTool[]): BaseMessage[] {
+ if (messages.length === 0) return messages;
+
+ const result = messages.map(msg => cloneMessage(msg));
+
+ // 1. Tag system message
+ const systemMsg = result.find(m => m._getType() === 'system');
+ if (systemMsg) {
+ addCacheControl(systemMsg);
+ }
+
+ // 2. Tag the last cacheable message that isn't the current user input.
+ // The current user message is always the last element; we want the one before it.
+ // We look backwards for the last AIMessage or HumanMessage (excluding the final message).
+ const candidates = result.slice(0, -1);
+ for (let i = candidates.length - 1; i >= 0; i--) {
+ const t = candidates[i]._getType();
+ if (t === 'ai' || t === 'human') {
+ addCacheControl(candidates[i]);
+ break;
+ }
+ }
+
+ return result;
+ }
+}
+
+/**
+ * Shallow-clone a message so we don't mutate history objects.
+ */
+function cloneMessage(msg: BaseMessage): BaseMessage {
+ const type = msg._getType();
+ const content = typeof msg.content === 'string'
+ ? msg.content
+ : JSON.parse(JSON.stringify(msg.content));
+
+ if (type === 'system') return new SystemMessage({ content, additional_kwargs: { ...msg.additional_kwargs } });
+ if (type === 'human') return new HumanMessage({ content, additional_kwargs: { ...msg.additional_kwargs } });
+ if (type === 'ai') return new AIMessage({ content, additional_kwargs: { ...msg.additional_kwargs }, tool_calls: (msg as AIMessage).tool_calls });
+ // For other types (tool messages etc.), return as-is — we don't tag them
+ return msg;
+}
+
+/**
+ * Add cache_control to the last content block of a message.
+ * Converts string content to a block array if needed.
+ */
+function addCacheControl(msg: BaseMessage): void {
+ if (typeof msg.content === 'string') {
+ // Convert to block array
+ (msg as any).content = [{ type: 'text', text: msg.content, cache_control: { type: 'ephemeral' } }];
+ } else if (Array.isArray(msg.content) && msg.content.length > 0) {
+ const last = msg.content[msg.content.length - 1] as any;
+ last.cache_control = { type: 'ephemeral' };
+ }
+}
diff --git a/gateway/src/llm/provider.ts b/gateway/src/llm/provider.ts
index 20f98049..1859959b 100644
--- a/gateway/src/llm/provider.ts
+++ b/gateway/src/llm/provider.ts
@@ -1,6 +1,10 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatAnthropic } from '@langchain/anthropic';
import type { FastifyBaseLogger } from 'fastify';
+import { type ModelMiddleware, NoopMiddleware, AnthropicCachingMiddleware } from './middleware.js';
+
+export type { ModelMiddleware };
+export { NoopMiddleware, AnthropicCachingMiddleware };
/**
* Supported LLM providers
@@ -64,7 +68,7 @@ export class LLMProviderFactory {
/**
* Create a chat model instance
*/
- createModel(modelConfig: ModelConfig): BaseChatModel {
+ createModel(modelConfig: ModelConfig): { model: BaseChatModel; middleware: ModelMiddleware } {
this.logger.debug(
{ provider: modelConfig.provider, model: modelConfig.model },
'Creating LLM model'
@@ -82,17 +86,20 @@ export class LLMProviderFactory {
/**
* Create Anthropic Claude model
*/
- private createAnthropicModel(config: ModelConfig): ChatAnthropic {
+ private createAnthropicModel(config: ModelConfig): { model: ChatAnthropic; middleware: AnthropicCachingMiddleware } {
if (!this.config.anthropicApiKey) {
throw new Error('Anthropic API key not configured');
}
- return new ChatAnthropic({
+ const model = new ChatAnthropic({
model: config.model,
temperature: config.temperature ?? 0.7,
maxTokens: config.maxTokens ?? 4096,
anthropicApiKey: this.config.anthropicApiKey,
+ clientOptions: { defaultHeaders: { 'anthropic-beta': 'prompt-caching-2024-07-31' } },
});
+
+ return { model, middleware: new AnthropicCachingMiddleware() };
}
/**
diff --git a/gateway/src/llm/router.ts b/gateway/src/llm/router.ts
index eb2651c1..6b5f66f7 100644
--- a/gateway/src/llm/router.ts
+++ b/gateway/src/llm/router.ts
@@ -1,6 +1,7 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { FastifyBaseLogger } from 'fastify';
import { LLMProviderFactory, type ModelConfig, LLMProvider, type LicenseModelsConfig } from './provider.js';
+import type { ModelMiddleware } from './middleware.js';
import type { License } from '../types/user.js';
/**
@@ -42,7 +43,7 @@ export class ModelRouter {
license: License,
strategy: RoutingStrategy = RoutingStrategy.USER_PREFERENCE,
userId?: string
- ): Promise {
+ ): Promise<{ model: BaseChatModel; middleware: ModelMiddleware }> {
let modelConfig: ModelConfig;
switch (strategy) {
diff --git a/gateway/src/main.ts b/gateway/src/main.ts
index d4cf4573..fc744838 100644
--- a/gateway/src/main.ts
+++ b/gateway/src/main.ts
@@ -586,7 +586,7 @@ try {
toolRegistry.registerAgentTools({
agentName: 'main',
platformTools: ['symbol_lookup', 'get_chart_data'],
- mcpTools: [], // No MCP tools for main agent by default (can be extended later)
+ mcpTools: ['category_list'], // category_list lets the main agent see existing research scripts
});
// Research subagent: only MCP tools for script creation/execution
diff --git a/gateway/src/services/ohlc-service.ts b/gateway/src/services/ohlc-service.ts
index e996cd14..da5e6630 100644
--- a/gateway/src/services/ohlc-service.ts
+++ b/gateway/src/services/ohlc-service.ts
@@ -27,7 +27,6 @@ import type {
import {
secondsToMicros,
backendToTradingView,
- resolutionToSeconds,
DEFAULT_SUPPORTED_RESOLUTIONS,
} from '../types/ohlc.js';
@@ -67,25 +66,32 @@ export class OHLCService {
*/
async fetchOHLC(
ticker: string,
- resolution: string,
+ period_seconds: number,
from_time: number, // Unix timestamp in SECONDS
to_time: number, // Unix timestamp in SECONDS
countback?: number
): Promise {
this.logger.debug({
ticker,
- resolution,
+ period_seconds,
from_time,
to_time,
countback,
}, 'Fetching OHLC data');
- // Convert resolution to period_seconds
- const period_seconds = resolutionToSeconds(resolution);
-
- // Convert times to microseconds
- const start_time = secondsToMicros(from_time);
- const end_time = secondsToMicros(to_time);
+ // Convert times to microseconds, then align to period boundaries using
+ // [ceil(start), ceil(end)) semantics:
+ // - start: ceil to next period boundary — excludes any in-progress candle whose
+ // official timestamp is before from_time.
+ // - end: ceil to next period boundary, used as EXCLUSIVE upper bound — includes
+ // the last candle whose timestamp < to_time, excludes one sitting exactly on
+ // to_time (which would be the next candle, not yet started).
+ const periodMicros = BigInt(period_seconds) * 1_000_000n;
+ const raw_start = secondsToMicros(from_time);
+ const raw_end = secondsToMicros(to_time);
+ // bigint ceiling: ceil(a/b)*b = ((a + b - 1) / b) * b
+ const start_time = ((raw_start + periodMicros - 1n) / periodMicros) * periodMicros;
+ const end_time = ((raw_end + periodMicros - 1n) / periodMicros) * periodMicros; // exclusive
// Step 1: Check Iceberg for existing data
let data = await this.icebergClient.queryOHLC(ticker, period_seconds, start_time, end_time);
@@ -100,25 +106,26 @@ export class OHLCService {
if (missingRanges.length === 0 && data.length > 0) {
// All data exists in Iceberg
- this.logger.debug({ ticker, resolution, cached: true }, 'OHLC data found in cache');
- return this.formatHistoryResult(data, countback);
+ this.logger.debug({ ticker, period_seconds, cached: true }, 'OHLC data found in cache');
+ return this.formatHistoryResult(data, start_time, end_time, period_seconds, countback);
}
// Step 3: Request missing data via relay
- this.logger.debug({ ticker, resolution, missingRanges: missingRanges.length }, 'Requesting missing OHLC data');
+ this.logger.debug({ ticker, period_seconds, missingRanges: missingRanges.length }, 'Requesting missing OHLC data');
try {
const notification = await this.relayClient.requestHistoricalOHLC(
ticker,
period_seconds,
start_time,
- end_time,
- countback
+ end_time
+ // countback is NOT passed as a limit — the ingestor must fetch the full range.
+ // Countback is applied below after we have the complete dataset.
);
this.logger.info({
ticker,
- resolution,
+ period_seconds,
row_count: notification.row_count,
status: notification.status,
}, 'Historical data request completed');
@@ -126,13 +133,13 @@ export class OHLCService {
// Step 4: Query Iceberg again for complete dataset
data = await this.icebergClient.queryOHLC(ticker, period_seconds, start_time, end_time);
- return this.formatHistoryResult(data, countback);
+ return this.formatHistoryResult(data, start_time, end_time, period_seconds, countback);
} catch (error: any) {
this.logger.error({
error,
ticker,
- resolution,
+ period_seconds,
}, 'Failed to fetch historical data');
// Return empty result on error
@@ -144,9 +151,22 @@ export class OHLCService {
}
/**
- * Format OHLC data as TradingView history result
+ * Format OHLC data as TradingView history result.
+ *
+ * Interior gaps (confirmed trading periods with no trades) arrive as null-OHLC
+ * rows from Iceberg. Edge gaps (data not yet ingested, in-progress candles) are
+ * simply absent rows. Both are returned as-is; clients fill as appropriate.
*/
- private formatHistoryResult(data: any[], countback?: number): HistoryResult {
+ private formatHistoryResult(
+ data: any[],
+ // @ts-ignore
+ start_time: bigint,
+ // @ts-ignore
+ end_time: bigint,
+ // @ts-ignore
+ period_seconds: number,
+ countback?: number
+ ): HistoryResult {
if (data.length === 0) {
return {
bars: [],
@@ -154,13 +174,11 @@ export class OHLCService {
};
}
- // Convert to TradingView format
+ // Convert to TradingView format without null-filling missing slots.
let bars: TradingViewBar[] = data.map(backendToTradingView);
- // Sort by time
bars.sort((a, b) => a.time - b.time);
- // Apply countback limit if specified
if (countback && bars.length > countback) {
bars = bars.slice(-countback);
}
diff --git a/gateway/src/tools/platform/get-chart-data.tool.ts b/gateway/src/tools/platform/get-chart-data.tool.ts
index f4efa55b..ae155f63 100644
--- a/gateway/src/tools/platform/get-chart-data.tool.ts
+++ b/gateway/src/tools/platform/get-chart-data.tool.ts
@@ -52,7 +52,7 @@ Parameters:
// Build request with workspace defaults
const finalTicker = ticker ?? chartState.symbol;
- const finalPeriod = period ?? parsePeriod(chartState.period);
+ const finalPeriod = period ?? chartState.period;
const finalFromTime = await parseTime(from_time, chartState.start_time, logger);
const finalToTime = await parseTime(to_time, chartState.end_time, logger);
const requestedColumns = columns ?? [];
@@ -83,7 +83,7 @@ Parameters:
// Fetch data from OHLCService
const historyResult = await ohlcService.fetchOHLC(
finalTicker,
- finalPeriod.toString(),
+ finalPeriod,
finalFromTime,
finalToTime,
countback
@@ -167,7 +167,7 @@ async function getChartState(workspaceManager: WorkspaceManager, logger: Fastify
symbol: 'BINANCE:BTC/USDT',
start_time: null,
end_time: null,
- period: '15',
+ period: 900,
selected_shapes: [],
};
}
@@ -180,35 +180,12 @@ async function getChartState(workspaceManager: WorkspaceManager, logger: Fastify
symbol: 'BINANCE:BTC/USDT',
start_time: null,
end_time: null,
- period: '15',
+ period: 900,
selected_shapes: [],
};
}
}
-/**
- * Parse period string to seconds
- * Handles period as either a number (already in seconds) or string (minutes)
- */
-function parsePeriod(period: string | number | null): number | null {
- if (period === null) {
- return null;
- }
-
- if (typeof period === 'number') {
- return period;
- }
-
- // Period in workspace is stored as string representing minutes
- // Convert to seconds
- const minutes = parseInt(period, 10);
- if (isNaN(minutes)) {
- return null;
- }
-
- return minutes * 60;
-}
-
/**
* Parse time parameter (Unix seconds, date string, or null)
* Returns Unix timestamp in seconds
diff --git a/gateway/src/tools/platform/research-agent.tool.ts b/gateway/src/tools/platform/research-agent.tool.ts
index 60e61458..27726dd4 100644
--- a/gateway/src/tools/platform/research-agent.tool.ts
+++ b/gateway/src/tools/platform/research-agent.tool.ts
@@ -30,13 +30,16 @@ Use this tool for:
The research subagent will write and execute Python scripts, capture output and charts, and return results.`,
schema: z.object({
+ name: z.string().describe('The name of the research script to create or update (e.g. "btc_ema_analysis"). Use the same name across calls to revise the same script rather than creating a new one.'),
instruction: z.string().describe('The research task or analysis to perform. Be specific about what data, indicators, timeframes, and output you want.'),
}),
- func: async ({ instruction }: { instruction: string }): Promise => {
- logger.info({ instruction: instruction.substring(0, 100) }, 'Delegating to research subagent');
+ func: async ({ name, instruction }: { name: string; instruction: string }): Promise => {
+ logger.info({ name, instruction: instruction.substring(0, 100) }, 'Delegating to research subagent');
+
+ const prompt = `Research script name: "${name}"\n\n${instruction}`;
try {
- const result = await researchSubagent.executeWithImages(context, instruction);
+ const result = await researchSubagent.executeWithImages(context, prompt);
// Return in the format that AgentHarness.processToolResult() knows how to handle
// (extracts images and passes them to channelAdapter)
diff --git a/gateway/src/types/ohlc.ts b/gateway/src/types/ohlc.ts
index 1508a70f..269e42a6 100644
--- a/gateway/src/types/ohlc.ts
+++ b/gateway/src/types/ohlc.ts
@@ -11,12 +11,12 @@
* TradingView bar format (used by web frontend)
*/
export interface TradingViewBar {
- time: number; // Unix timestamp in SECONDS
- open: number;
- high: number;
- low: number;
- close: number;
- volume?: number;
+ time: number; // Unix timestamp in SECONDS
+ open: number | null; // null for gap bars (no trades that period)
+ high: number | null;
+ low: number | null;
+ close: number | null;
+ volume?: number | null;
// Optional extra columns from ohlc.proto
buy_vol?: number;
sell_vol?: number;
@@ -31,14 +31,14 @@ export interface TradingViewBar {
* Backend OHLC format (from Iceberg)
*/
export interface BackendOHLC {
- timestamp: number; // Unix timestamp in MICROSECONDS
+ timestamp: bigint; // Unix timestamp in MICROSECONDS — kept as bigint to preserve precision
ticker: string;
period_seconds: number;
- open: number;
- high: number;
- low: number;
- close: number;
- volume: number;
+ open: number | null; // null for gap bars (no trades that period)
+ high: number | null;
+ low: number | null;
+ close: number | null;
+ volume: number | null;
}
/**
@@ -171,7 +171,7 @@ export function backendToTradingView(backend: BackendOHLC): TradingViewBar {
high: backend.high,
low: backend.low,
close: backend.close,
- volume: backend.volume,
+ volume: backend.volume ?? undefined,
};
}
diff --git a/gateway/src/workspace/sync-registry.ts b/gateway/src/workspace/sync-registry.ts
index c7a872dd..825717dc 100644
--- a/gateway/src/workspace/sync-registry.ts
+++ b/gateway/src/workspace/sync-registry.ts
@@ -110,11 +110,33 @@ class SyncEntry {
return this.history.filter((entry) => entry.seq > sinceSeq);
}
+ /**
+ * Ensure intermediate objects exist for all patch add/replace operations.
+ * Called before applying patches to gracefully handle paths into previously-absent sub-objects.
+ */
+ private ensureIntermediatePaths(doc: any, patch: JsonPatchOp[]): void {
+ for (const op of patch) {
+ if (op.op !== 'add' && op.op !== 'replace') continue;
+ const parts = op.path.split('/').filter(Boolean);
+ if (parts.length <= 1) continue;
+ let current = doc;
+ for (let i = 0; i < parts.length - 1; i++) {
+ const part = parts[i];
+ if (current[part] === undefined || current[part] === null) {
+ current[part] = {};
+ }
+ current = current[part];
+ }
+ }
+ }
+
/**
* Apply a patch to state (used when applying local changes).
*/
applyPatch(patch: JsonPatchOp[]): void {
- const result = applyPatch(deepClone(this.state), patch, false, false);
+ const doc = deepClone(this.state) as any;
+ this.ensureIntermediatePaths(doc, patch);
+ const result = applyPatch(doc, patch, false, false);
this.state = result.newDocument;
}
@@ -130,7 +152,8 @@ class SyncEntry {
try {
if (clientBaseSeq === this.seq) {
// No conflict - apply directly
- const currentState = deepClone(this.state);
+ const currentState = deepClone(this.state) as any;
+ this.ensureIntermediatePaths(currentState, patch);
const result = applyPatch(currentState, patch, false, false);
this.state = result.newDocument;
this.commitPatch(patch);
@@ -160,14 +183,15 @@ class SyncEntry {
const frontendPaths = new Set(patch.map((op) => op.path));
// Apply frontend patch first
- const currentState = deepClone(this.state);
+ const currentState = deepClone(this.state) as any;
+ this.ensureIntermediatePaths(currentState, patch);
let newState: unknown;
try {
const result = applyPatch(currentState, patch, false, false);
newState = result.newDocument;
} catch (e) {
logger?.warn(
- { store: this.storeName, error: e },
+ { store: this.storeName, err: e },
'Failed to apply client patch during conflict resolution'
);
return { needsSnapshot: true, resolvedState: this.state };
@@ -182,7 +206,7 @@ class SyncEntry {
newState = result.newDocument;
} catch (e) {
logger?.debug(
- { store: this.storeName, error: e },
+ { store: this.storeName, err: e },
'Skipping backend patch during conflict resolution'
);
}
@@ -209,7 +233,7 @@ class SyncEntry {
return { needsSnapshot: true, resolvedState: this.state };
} catch (e) {
logger?.error(
- { store: this.storeName, error: e },
+ { store: this.storeName, err: e },
'Unexpected error applying client patch'
);
return { needsSnapshot: true, resolvedState: this.state };
diff --git a/gateway/src/workspace/types.ts b/gateway/src/workspace/types.ts
index 126c629f..a0cf631f 100644
--- a/gateway/src/workspace/types.ts
+++ b/gateway/src/workspace/types.ts
@@ -237,7 +237,7 @@ export interface ChartState {
symbol: string;
start_time: number | null; // unix timestamp
end_time: number | null; // unix timestamp
- period: string; // OHLC duration (e.g., '15' for 15 minutes)
+ period: number; // OHLC period in seconds (e.g., 900 for 15 minutes)
selected_shapes: string[]; // list of shape ID's
}
diff --git a/ingestor/src/ccxt-fetcher.js b/ingestor/src/ccxt-fetcher.js
index dfce5557..7e59ea0d 100644
--- a/ingestor/src/ccxt-fetcher.js
+++ b/ingestor/src/ccxt-fetcher.js
@@ -113,11 +113,12 @@ export class CCXTFetcher {
'Fetching historical OHLC'
);
- const allCandles = [];
+ const fetchedCandles = [];
let since = startMs;
- // CCXT typically limits to 1000 candles per request
- const batchSize = limit || 1000;
+ // Always page in fixed batches of 1000 regardless of any limit hint.
+ // The caller's limit/countback is irrelevant to how much we need to fetch from the exchange.
+ const PAGE_SIZE = 1000;
while (since < endMs) {
try {
@@ -125,27 +126,26 @@ export class CCXTFetcher {
symbol,
timeframe,
since,
- batchSize
+ PAGE_SIZE
);
if (candles.length === 0) {
break;
}
- // Filter candles within the time range
+ // Filter candles within the requested time range
const filteredCandles = candles.filter(c => {
const timestamp = c[0];
- return timestamp >= startMs && timestamp <= endMs;
+ return timestamp >= startMs && timestamp < endMs; // endMs is exclusive
});
- allCandles.push(...filteredCandles);
+ fetchedCandles.push(...filteredCandles);
- // Move to next batch
+ // Advance to next batch start
const lastTimestamp = candles[candles.length - 1][0];
since = lastTimestamp + (periodSeconds * 1000);
- // Break if we've reached the end time or limit
- if (since >= endMs || (limit && allCandles.length >= limit)) {
+ if (since >= endMs) {
break;
}
@@ -163,8 +163,46 @@ export class CCXTFetcher {
// Get metadata for proper denomination
const metadata = await this.getMetadata(ticker);
- // Convert to our OHLC format
- return allCandles.map(candle => this.convertToOHLC(candle, ticker, periodSeconds, metadata));
+ // Build a map of fetched candles by timestamp (ms)
+ const fetchedByTs = new Map(fetchedCandles.map(c => [c[0], c]));
+
+ if (fetchedCandles.length === 0) {
+ // No data from exchange — return empty so caller writes a NOT_FOUND marker.
+ return [];
+ }
+
+ const periodMs = periodSeconds * 1000;
+
+ // Only create null gap bars for interior gaps — periods where real data exists
+ // on BOTH sides (i.e., between the first and last real bar). Do NOT append
+ // null bars before the first real bar or after the last real bar: those edge
+ // positions may be in-progress candles or simply outside the exchange's history,
+ // and we have no positive signal that a gap exists there.
+ const realTimestamps = [...fetchedByTs.keys()].sort((a, b) => a - b);
+ const firstRealTs = realTimestamps[0];
+ const lastRealTs = realTimestamps[realTimestamps.length - 1];
+
+ const allCandles = [];
+ let gapCount = 0;
+
+ for (let ts = firstRealTs; ts <= lastRealTs; ts += periodMs) {
+ if (fetchedByTs.has(ts)) {
+ allCandles.push(this.convertToOHLC(fetchedByTs.get(ts), ticker, periodSeconds, metadata));
+ } else {
+ // Interior gap — confirmed by real bars on both sides
+ gapCount++;
+ allCandles.push(this.createGapBar(ts, ticker, periodSeconds, metadata));
+ }
+ }
+
+ if (gapCount > 0) {
+ this.logger.info(
+ { ticker, gapCount, total: allCandles.length },
+ 'Filled interior gap bars for missing periods in source data'
+ );
+ }
+
+ return allCandles;
}
/**
@@ -227,6 +265,24 @@ export class CCXTFetcher {
};
}
+ /**
+ * Create a gap bar for a period with no trade data.
+ * All OHLC/volume fields are null — the timestamp slot is reserved but unpopulated.
+ */
+ createGapBar(timestampMs, ticker, periodSeconds, metadata) {
+ return {
+ ticker,
+ timestamp: (timestampMs * 1000).toString(), // Convert ms to microseconds
+ open: null,
+ high: null,
+ low: null,
+ close: null,
+ volume: null,
+ open_time: (timestampMs * 1000).toString(),
+ close_time: ((timestampMs + periodSeconds * 1000) * 1000).toString()
+ };
+ }
+
/**
* Convert CCXT trade to our Tick format
* Uses denominators from market metadata for proper integer representation
diff --git a/ingestor/src/kafka-producer.js b/ingestor/src/kafka-producer.js
index dfbf366f..f534a937 100644
--- a/ingestor/src/kafka-producer.js
+++ b/ingestor/src/kafka-producer.js
@@ -180,15 +180,20 @@ export class KafkaProducer {
status: metadata.status || 'OK',
errorMessage: metadata.error_message
},
- rows: ohlcData.map(candle => ({
- timestamp: candle.timestamp,
- ticker: candle.ticker,
- open: candle.open,
- high: candle.high,
- low: candle.low,
- close: candle.close,
- volume: candle.volume
- }))
+ rows: ohlcData.map(candle => {
+ // null open/high/low/close signals a gap bar (no trades that period).
+ // Omit fields from the protobuf message when null so hasOpen() etc. return false.
+ const row = {
+ timestamp: candle.timestamp,
+ ticker: candle.ticker,
+ };
+ if (candle.open != null) row.open = candle.open;
+ if (candle.high != null) row.high = candle.high;
+ if (candle.low != null) row.low = candle.low;
+ if (candle.close != null) row.close = candle.close;
+ if (candle.volume != null) row.volume = candle.volume;
+ return row;
+ })
};
// Encode as protobuf OHLCBatch with ZMQ envelope
diff --git a/protobuf/ohlc.proto b/protobuf/ohlc.proto
index 3093fe24..46062976 100644
--- a/protobuf/ohlc.proto
+++ b/protobuf/ohlc.proto
@@ -9,11 +9,11 @@ message OHLC {
uint64 timestamp = 1;
// The prices and volumes must be adjusted by the rational denominator provided
- // by the market metadata
- int64 open = 2;
- int64 high = 3;
- int64 low = 4;
- int64 close = 5;
+ // by the market metadata. Optional to support null bars for periods with no trades.
+ optional int64 open = 2;
+ optional int64 high = 3;
+ optional int64 low = 4;
+ optional int64 close = 5;
optional int64 volume = 6;
optional int64 buy_vol = 7;
optional int64 sell_vol = 8;
diff --git a/sandbox/dexorder/iceberg_client.py b/sandbox/dexorder/iceberg_client.py
index 6e223eaa..966e3299 100644
--- a/sandbox/dexorder/iceberg_client.py
+++ b/sandbox/dexorder/iceberg_client.py
@@ -10,7 +10,7 @@ from pyiceberg.expressions import (
And,
EqualTo,
GreaterThanOrEqual,
- LessThanOrEqual
+ LessThan,
)
log = logging.getLogger(__name__)
@@ -98,7 +98,7 @@ class IcebergClient:
EqualTo("ticker", ticker),
EqualTo("period_seconds", period_seconds),
GreaterThanOrEqual("timestamp", start_time),
- LessThanOrEqual("timestamp", end_time)
+ LessThan("timestamp", end_time) # end_time is exclusive
)
)
@@ -110,6 +110,10 @@ class IcebergClient:
if not df.empty:
df = df.sort_values("timestamp")
+ # Convert integer microsecond timestamps to DatetimeIndex
+ df.index = pd.to_datetime(df["timestamp"], unit="us", utc=True)
+ df.index.name = "datetime"
+ df = df.drop(columns=["timestamp"])
# Apply price/volume conversion if metadata client available
if self.metadata_client is not None:
df = self._apply_denominators(df, ticker)
@@ -186,9 +190,9 @@ class IcebergClient:
# Convert period to microseconds
period_micros = period_seconds * 1_000_000
- # Generate expected timestamps
- expected_timestamps = list(range(start_time, end_time + 1, period_micros))
- actual_timestamps = set(df['timestamp'].values)
+ # Generate expected timestamps — end_time is exclusive
+ expected_timestamps = list(range(start_time, end_time, period_micros))
+ actual_timestamps = set(df.index.view('int64') // 1000)
# Find gaps
missing = sorted(set(expected_timestamps) - actual_timestamps)
diff --git a/sandbox/dexorder/ohlc_client.py b/sandbox/dexorder/ohlc_client.py
index c7c6bdba..2dfadcd0 100644
--- a/sandbox/dexorder/ohlc_client.py
+++ b/sandbox/dexorder/ohlc_client.py
@@ -12,6 +12,8 @@ from .symbol_metadata_client import SymbolMetadataClient
log = logging.getLogger(__name__)
+log = logging.getLogger(__name__)
+
class OHLCClient:
"""
@@ -118,6 +120,11 @@ class OHLCClient:
TimeoutError: If historical data request times out
ValueError: If request fails
"""
+ # Align times to period boundaries: [ceil(start), ceil(end)) exclusive
+ period_micros = period_seconds * 1_000_000
+ start_time = ((start_time + period_micros - 1) // period_micros) * period_micros
+ end_time = ((end_time + period_micros - 1) // period_micros) * period_micros # exclusive
+
# Step 1: Check Iceberg for existing data
df = self.iceberg.query_ohlc(ticker, period_seconds, start_time, end_time)
@@ -128,7 +135,7 @@ class OHLCClient:
if not missing_ranges:
# All data exists in Iceberg
- return df
+ return self._forward_fill_gaps(df, period_seconds)
# Step 3: Request missing data for each range
# For simplicity, request entire range (relay can merge adjacent requests)
@@ -147,6 +154,39 @@ class OHLCClient:
# Step 5: Query Iceberg again for complete dataset
df = self.iceberg.query_ohlc(ticker, period_seconds, start_time, end_time)
+ return self._forward_fill_gaps(df, period_seconds)
+
+ def _forward_fill_gaps(self, df: pd.DataFrame, period_seconds: int) -> pd.DataFrame:
+ """
+ Forward-fill interior missing bars by carrying the last known close into
+ open, high, low, and close of any gap bar.
+
+    Only gap rows already present in the DataFrame (null-OHLC bars written by
+    the ingestor) are filled; missing timestamp slots are not inserted. Rows
+    before the first real bar stay null, as there is no prior close to carry.
+ """
+ if df.empty:
+ return df
+
+ df = df.sort_index()
+
+ # Identify rows that are gap bars (null close)
+ is_gap = df['close'].isna()
+
+ if not is_gap.any():
+ return df
+
+ # Forward-fill close across gap rows, then copy into open/high/low
+ df['close'] = df['close'].ffill()
+ price_cols = ['open', 'high', 'low']
+ for col in price_cols:
+ if col in df.columns:
+ df[col] = df[col].where(~is_gap, df['close'])
+
+ # Zero out volume for filled gap rows
+ if 'volume' in df.columns:
+ df['volume'] = df['volume'].where(~is_gap, 0.0)
+
return df
async def __aenter__(self):
diff --git a/sandbox/protobuf/ingestor.proto b/sandbox/protobuf/ingestor.proto
deleted file mode 100644
index 43e82c32..00000000
--- a/sandbox/protobuf/ingestor.proto
+++ /dev/null
@@ -1,329 +0,0 @@
-syntax = "proto3";
-
-option java_multiple_files = true;
-option java_package = "com.dexorder.proto";
-
-// Request for data ingestion (used in Relay → Ingestor work queue)
-message DataRequest {
- // Unique request ID for tracking
- string request_id = 1;
-
- // Type of request
- RequestType type = 2;
-
- // Market identifier
- string ticker = 3;
-
- // For historical requests
- optional HistoricalParams historical = 4;
-
- // For realtime requests
- optional RealtimeParams realtime = 5;
-
- // Optional client ID for notification routing (async architecture)
- // Flink uses this to determine notification topic
- optional string client_id = 6;
-
- enum RequestType {
- HISTORICAL_OHLC = 0;
- REALTIME_TICKS = 1;
- }
-}
-
-message HistoricalParams {
- // Start time (microseconds since epoch)
- uint64 start_time = 1;
-
- // End time (microseconds since epoch)
- uint64 end_time = 2;
-
- // OHLC period in seconds (e.g., 60 = 1m, 300 = 5m, 3600 = 1h, 86400 = 1d)
- uint32 period_seconds = 3;
-
- // Maximum number of candles to return (optional limit)
- optional uint32 limit = 4;
-}
-
-message RealtimeParams {
- // Whether to include tick data
- bool include_ticks = 1;
-
- // Whether to include aggregated OHLC
- bool include_ohlc = 2;
-
- // OHLC periods to generate in seconds (e.g., [60, 300, 900] for 1m, 5m, 15m)
- repeated uint32 ohlc_period_seconds = 3;
-}
-
-// Control messages for ingestors (Flink → Ingestor control channel)
-message IngestorControl {
- // Control action type
- ControlAction action = 1;
-
- // Request ID to cancel (for CANCEL action)
- optional string request_id = 2;
-
- // Configuration updates (for CONFIG_UPDATE action)
- optional IngestorConfig config = 3;
-
- enum ControlAction {
- CANCEL = 0; // Cancel a specific request
- SHUTDOWN = 1; // Graceful shutdown signal
- CONFIG_UPDATE = 2; // Update ingestor configuration
- HEARTBEAT = 3; // Keep-alive signal
- }
-}
-
-message IngestorConfig {
- // Maximum concurrent requests per ingestor
- optional uint32 max_concurrent = 1;
-
- // Request timeout in seconds
- optional uint32 timeout_seconds = 2;
-
- // Kafka topic for output
- optional string kafka_topic = 3;
-}
-
-// Historical data response from ingestor to Flink (Ingestor → Flink response channel)
-message DataResponse {
- // Request ID this is responding to
- string request_id = 1;
-
- // Status of the request
- ResponseStatus status = 2;
-
- // Error message if status is not OK
- optional string error_message = 3;
-
- // Serialized OHLC data (repeated OHLCV protobuf messages)
- repeated bytes ohlc_data = 4;
-
- // Total number of candles returned
- uint32 total_records = 5;
-
- enum ResponseStatus {
- OK = 0;
- NOT_FOUND = 1;
- ERROR = 2;
- }
-}
-
-// Client request submission for historical OHLC data (Client → Relay)
-// Relay immediately responds with SubmitResponse containing request_id
-message SubmitHistoricalRequest {
- // Client-generated request ID for tracking
- string request_id = 1;
-
- // Market identifier (e.g., "BINANCE:BTC/USDT")
- string ticker = 2;
-
- // Start time (microseconds since epoch)
- uint64 start_time = 3;
-
- // End time (microseconds since epoch)
- uint64 end_time = 4;
-
- // OHLC period in seconds (e.g., 60 = 1m, 300 = 5m, 3600 = 1h)
- uint32 period_seconds = 5;
-
- // Optional limit on number of candles
- optional uint32 limit = 6;
-
- // Optional client ID for notification routing (e.g., "client-abc-123")
- // Notifications will be published to topic: "RESPONSE:{client_id}"
- optional string client_id = 7;
-}
-
-// Immediate response to SubmitHistoricalRequest (Relay → Client)
-message SubmitResponse {
- // Request ID (echoed from request)
- string request_id = 1;
-
- // Status of submission
- SubmitStatus status = 2;
-
- // Error message if status is not QUEUED
- optional string error_message = 3;
-
- // Topic to subscribe to for result notification
- // e.g., "RESPONSE:client-abc-123" or "HISTORY_READY:{request_id}"
- string notification_topic = 4;
-
- enum SubmitStatus {
- QUEUED = 0; // Request queued successfully
- DUPLICATE = 1; // Request ID already exists
- INVALID = 2; // Invalid parameters
- ERROR = 3; // Internal error
- }
-}
-
-// Historical data ready notification (Flink → Relay → Client via pub/sub)
-// Published after Flink writes data to Iceberg
-message HistoryReadyNotification {
- // Request ID
- string request_id = 1;
-
- // Market identifier
- string ticker = 2;
-
- // OHLC period in seconds
- uint32 period_seconds = 3;
-
- // Start time (microseconds since epoch)
- uint64 start_time = 4;
-
- // End time (microseconds since epoch)
- uint64 end_time = 5;
-
- // Status of the data fetch
- NotificationStatus status = 6;
-
- // Error message if status is not OK
- optional string error_message = 7;
-
- // Iceberg table information for client queries
- string iceberg_namespace = 10;
- string iceberg_table = 11;
-
- // Number of records written
- uint32 row_count = 12;
-
- // Timestamp when data was written (microseconds since epoch)
- uint64 completed_at = 13;
-
- enum NotificationStatus {
- OK = 0; // Data successfully written to Iceberg
- NOT_FOUND = 1; // No data found for the requested period
- ERROR = 2; // Error during fetch or processing
- TIMEOUT = 3; // Request timed out
- }
-}
-
-// Legacy message for backward compatibility (Client → Relay)
-message OHLCRequest {
- // Request ID for tracking
- string request_id = 1;
-
- // Market identifier
- string ticker = 2;
-
- // Start time (microseconds since epoch)
- uint64 start_time = 3;
-
- // End time (microseconds since epoch)
- uint64 end_time = 4;
-
- // OHLC period in seconds (e.g., 60 = 1m, 300 = 5m, 3600 = 1h)
- uint32 period_seconds = 5;
-
- // Optional limit on number of candles
- optional uint32 limit = 6;
-}
-
-// Generic response for any request (Flink → Client)
-message Response {
- // Request ID this is responding to
- string request_id = 1;
-
- // Status of the request
- ResponseStatus status = 2;
-
- // Error message if status is not OK
- optional string error_message = 3;
-
- // Generic payload data (serialized protobuf messages)
- repeated bytes data = 4;
-
- // Total number of records
- optional uint32 total_records = 5;
-
- // Whether this is the final response (for paginated results)
- bool is_final = 6;
-
- enum ResponseStatus {
- OK = 0;
- NOT_FOUND = 1;
- ERROR = 2;
- }
-}
-
-// CEP trigger registration (Client → Flink)
-message CEPTriggerRequest {
- // Unique trigger ID
- string trigger_id = 1;
-
- // Flink SQL CEP pattern/condition
- string sql_pattern = 2;
-
- // Markets to monitor
- repeated string tickers = 3;
-
- // Callback endpoint (for DEALER/ROUTER routing)
- optional string callback_id = 4;
-
- // Optional parameters for the CEP query
- map<string, string> parameters = 5;
-}
-
-// CEP trigger acknowledgment (Flink → Client)
-message CEPTriggerAck {
- // Trigger ID being acknowledged
- string trigger_id = 1;
-
- // Status of registration
- TriggerStatus status = 2;
-
- // Error message if status is not OK
- optional string error_message = 3;
-
- enum TriggerStatus {
- REGISTERED = 0;
- ALREADY_REGISTERED = 1;
- INVALID_SQL = 2;
- ERROR = 3;
- }
-}
-
-// CEP trigger event callback (Flink → Client)
-message CEPTriggerEvent {
- // Trigger ID that fired
- string trigger_id = 1;
-
- // Timestamp when trigger fired (microseconds since epoch)
- uint64 timestamp = 2;
-
- // Schema information for the result rows
- ResultSchema schema = 3;
-
- // Result rows from the Flink SQL query
- repeated ResultRow rows = 4;
-
- // Additional context from the CEP pattern
- map<string, string> context = 5;
-}
-
-message ResultSchema {
- // Column names in order
- repeated string column_names = 1;
-
- // Column types (using Flink SQL type names)
- repeated string column_types = 2;
-}
-
-message ResultRow {
- // Encoded row data (one bytes field per column, in schema order)
- // Each value is encoded as a protobuf-serialized FieldValue
- repeated bytes values = 1;
-}
-
-message FieldValue {
- oneof value {
- string string_val = 1;
- int64 int_val = 2;
- double double_val = 3;
- bool bool_val = 4;
- bytes bytes_val = 5;
- uint64 timestamp_val = 6;
- }
-}
\ No newline at end of file
diff --git a/sandbox/protobuf/market.proto b/sandbox/protobuf/market.proto
deleted file mode 100644
index 0d2f6155..00000000
--- a/sandbox/protobuf/market.proto
+++ /dev/null
@@ -1,22 +0,0 @@
-syntax = "proto3";
-
-option java_multiple_files = true;
-option java_package = "com.dexorder.proto";
-
-message Market {
- // The prices and volumes must be adjusted by the rational denominator provided
- // by the market metadata
- string exchange_id = 2; // e.g., BINANCE
- string market_id = 3; // e.g., BTC/USDT
- string market_type = 4; // e.g., Spot
- string description = 5; // e.g., Bitcoin/Tether on Binance
- repeated string column_names = 6; // e.g., ['open', 'high', 'low', 'close', 'volume', 'taker_vol', 'maker_vol']
- string base_asset = 9;
- string quote_asset = 10;
- uint64 earliest_time = 11;
- uint64 tick_denom = 12; // denominator applied to all OHLC price data
- uint64 base_denom = 13; // denominator applied to base asset units
- uint64 quote_denom = 14; // denominator applied to quote asset units
- repeated uint32 supported_period_seconds = 15;
-
-}
diff --git a/sandbox/protobuf/ohlc.proto b/sandbox/protobuf/ohlc.proto
deleted file mode 100644
index 3093fe24..00000000
--- a/sandbox/protobuf/ohlc.proto
+++ /dev/null
@@ -1,61 +0,0 @@
-syntax = "proto3";
-
-option java_multiple_files = true;
-option java_package = "com.dexorder.proto";
-
-// Single OHLC row
-message OHLC {
- // Timestamp in microseconds since epoch
- uint64 timestamp = 1;
-
- // The prices and volumes must be adjusted by the rational denominator provided
- // by the market metadata
- int64 open = 2;
- int64 high = 3;
- int64 low = 4;
- int64 close = 5;
- optional int64 volume = 6;
- optional int64 buy_vol = 7;
- optional int64 sell_vol = 8;
- optional int64 open_time = 9;
- optional int64 high_time = 10;
- optional int64 low_time = 11;
- optional int64 close_time = 12;
- optional int64 open_interest = 13;
- string ticker = 14;
-}
-
-// Batch of OHLC rows with metadata for historical request tracking
-// Used for Kafka messages from ingestor → Flink
-message OHLCBatch {
- // Metadata for tracking this request through the pipeline
- OHLCBatchMetadata metadata = 1;
-
- // OHLC rows in this batch
- repeated OHLC rows = 2;
-}
-
-// Metadata for tracking historical data requests through the pipeline
-message OHLCBatchMetadata {
- // Request ID from client
- string request_id = 1;
-
- // Optional client ID for notification routing
- optional string client_id = 2;
-
- // Market identifier
- string ticker = 3;
-
- // OHLC period in seconds
- uint32 period_seconds = 4;
-
- // Time range requested (microseconds since epoch)
- uint64 start_time = 5;
- uint64 end_time = 6;
-
- // Status for marker messages (OK, NOT_FOUND, ERROR)
- string status = 7;
-
- // Error message if status is ERROR
- optional string error_message = 8;
-}
diff --git a/sandbox/protobuf/tick.proto b/sandbox/protobuf/tick.proto
deleted file mode 100644
index 5efb40bd..00000000
--- a/sandbox/protobuf/tick.proto
+++ /dev/null
@@ -1,51 +0,0 @@
-syntax = "proto3";
-
-option java_multiple_files = true;
-option java_package = "com.dexorder.proto";
-
-message Tick {
- // Unique identifier for the trade
- string trade_id = 1;
-
- // Market identifier (matches Market.market_id)
- string ticker = 2;
-
- // Timestamp in microseconds since epoch
- uint64 timestamp = 3;
-
- // Price (must be adjusted by tick_denom from Market metadata)
- int64 price = 4;
-
- // Base asset amount (must be adjusted by base_denom from Market metadata)
- int64 amount = 5;
-
- // Quote asset amount (must be adjusted by quote_denom from Market metadata)
- int64 quote_amount = 6;
-
- // Side: true = taker buy (market buy), false = taker sell (market sell)
- bool taker_buy = 7;
-
- // Position effect: true = close position, false = open position
- // Only relevant for derivatives/futures markets
- optional bool to_close = 8;
-
- // Sequence number for ordering (if provided by exchange)
- optional uint64 sequence = 9;
-
- // Additional flags for special trade types
- optional TradeFlags flags = 10;
-}
-
-message TradeFlags {
- // Liquidation trade
- bool is_liquidation = 1;
-
- // Block trade (large OTC trade)
- bool is_block_trade = 2;
-
- // Maker side was a post-only order
- bool maker_post_only = 3;
-
- // Trade occurred during auction
- bool is_auction = 4;
-}
diff --git a/sandbox/protobuf/user_events.proto b/sandbox/protobuf/user_events.proto
deleted file mode 100644
index cd9ff847..00000000
--- a/sandbox/protobuf/user_events.proto
+++ /dev/null
@@ -1,258 +0,0 @@
-syntax = "proto3";
-
-option java_multiple_files = true;
-option java_package = "com.dexorder.proto";
-
-// User container event system for delivering notifications to users
-// via active sessions or external channels (Telegram, email, push).
-//
-// Two ZMQ patterns:
-// - XPUB/SUB (port 5570): Fast path for informational events to active sessions
-// - DEALER/ROUTER (port 5571): Guaranteed delivery for critical events with ack
-//
-// See doc/protocol.md and doc/user_container_events.md for details.
-
-// =============================================================================
-// User Event (Container → Gateway)
-// Message Type ID: 0x20
-// =============================================================================
-
-message UserEvent {
- // User ID this event belongs to
- string user_id = 1;
-
- // Unique event ID for deduplication and ack tracking (UUID)
- string event_id = 2;
-
- // Timestamp when event was generated (Unix milliseconds)
- int64 timestamp = 3;
-
- // Type of event
- EventType event_type = 4;
-
- // Event payload (JSON or nested protobuf, depending on event_type)
- bytes payload = 5;
-
- // Delivery specification (priority and channel preferences)
- DeliverySpec delivery = 6;
-}
-
-enum EventType {
- // Trading events
- ORDER_PLACED = 0;
- ORDER_FILLED = 1;
- ORDER_CANCELLED = 2;
- ORDER_REJECTED = 3;
- ORDER_EXPIRED = 4;
-
- // Alert events
- ALERT_TRIGGERED = 10;
- ALERT_CREATED = 11;
- ALERT_DELETED = 12;
-
- // Position events
- POSITION_OPENED = 20;
- POSITION_CLOSED = 21;
- POSITION_UPDATED = 22;
- POSITION_LIQUIDATED = 23;
-
- // Workspace/chart events
- WORKSPACE_CHANGED = 30;
- CHART_ANNOTATION_ADDED = 31;
- CHART_ANNOTATION_REMOVED = 32;
- INDICATOR_UPDATED = 33;
-
- // Strategy events
- STRATEGY_STARTED = 40;
- STRATEGY_STOPPED = 41;
- STRATEGY_LOG = 42;
- STRATEGY_ERROR = 43;
- BACKTEST_COMPLETED = 44;
-
- // System events
- CONTAINER_STARTING = 50;
- CONTAINER_READY = 51;
- CONTAINER_SHUTTING_DOWN = 52;
- EVENT_ERROR = 53;
-}
-
-// =============================================================================
-// Delivery Specification
-// =============================================================================
-
-message DeliverySpec {
- // Priority determines routing behavior
- Priority priority = 1;
-
- // Ordered list of channel preferences (try first, then second, etc.)
- repeated ChannelPreference channels = 2;
-}
-
-enum Priority {
- // Drop if no active session (fire-and-forget via XPUB)
- // Use for: indicator updates, chart syncs, strategy logs when watching
- INFORMATIONAL = 0;
-
- // Best effort delivery - queue briefly, deliver when possible
- // Uses XPUB if subscribed, otherwise DEALER
- // Use for: alerts, position updates
- NORMAL = 1;
-
- // Must deliver - retry until acked, escalate channels
- // Always uses DEALER for guaranteed delivery
- // Use for: order fills, liquidations, critical errors
- CRITICAL = 2;
-}
-
-message ChannelPreference {
- // Channel to deliver to
- ChannelType channel = 1;
-
- // If true, skip this channel if user is not connected to it
- // If false, deliver even if user is not actively connected
- // (e.g., send Telegram message even if user isn't in Telegram chat)
- bool only_if_active = 2;
-}
-
-enum ChannelType {
- // Whatever channel the user currently has open (WebSocket, Telegram session)
- ACTIVE_SESSION = 0;
-
- // Specific channels
- WEB = 1; // WebSocket to web UI
- TELEGRAM = 2; // Telegram bot message
- EMAIL = 3; // Email notification
- PUSH = 4; // Mobile push notification (iOS/Android)
- DISCORD = 5; // Discord webhook (future)
- SLACK = 6; // Slack webhook (future)
-}
-
-// =============================================================================
-// Event Acknowledgment (Gateway → Container)
-// Message Type ID: 0x21
-// =============================================================================
-
-message EventAck {
- // Event ID being acknowledged
- string event_id = 1;
-
- // Delivery status
- AckStatus status = 2;
-
- // Error message if status is ERROR
- string error_message = 3;
-
- // Channel that successfully delivered (for logging/debugging)
- ChannelType delivered_via = 4;
-}
-
-enum AckStatus {
- // Successfully delivered to at least one channel
- DELIVERED = 0;
-
- // Accepted and queued for delivery (e.g., rate limited, will retry)
- QUEUED = 1;
-
- // Permanent failure - all channels failed
- ACK_ERROR = 2;
-}
-
-// =============================================================================
-// Event Payloads
-// These are JSON-encoded in the UserEvent.payload field.
-// Defined here for documentation; actual encoding is JSON for flexibility.
-// =============================================================================
-
-// Payload for ORDER_PLACED, ORDER_FILLED, ORDER_CANCELLED, etc.
-message OrderEventPayload {
- string order_id = 1;
- string symbol = 2;
- string side = 3; // "buy" or "sell"
- string order_type = 4; // "market", "limit", "stop_limit", etc.
- string quantity = 5; // Decimal string
- string price = 6; // Decimal string (for limit orders)
- string fill_price = 7; // Decimal string (for fills)
- string fill_quantity = 8; // Decimal string (for partial fills)
- string status = 9; // "open", "filled", "cancelled", etc.
- string exchange = 10;
- int64 timestamp = 11; // Unix milliseconds
- string strategy_id = 12; // If order was placed by a strategy
- string error_message = 13; // If rejected/failed
-}
-
-// Payload for ALERT_TRIGGERED
-message AlertEventPayload {
- string alert_id = 1;
- string symbol = 2;
- string condition = 3; // Human-readable condition (e.g., "BTC > 50000")
- string triggered_price = 4; // Decimal string
- int64 timestamp = 5;
-}
-
-// Payload for POSITION_OPENED, POSITION_CLOSED, POSITION_UPDATED
-message PositionEventPayload {
- string position_id = 1;
- string symbol = 2;
- string side = 3; // "long" or "short"
- string size = 4; // Decimal string
- string entry_price = 5; // Decimal string
- string current_price = 6; // Decimal string
- string unrealized_pnl = 7; // Decimal string
- string realized_pnl = 8; // Decimal string (for closed positions)
- string leverage = 9; // Decimal string (for margin)
- string liquidation_price = 10;
- string exchange = 11;
- int64 timestamp = 12;
-}
-
-// Payload for WORKSPACE_CHANGED, CHART_ANNOTATION_*, INDICATOR_UPDATED
-message WorkspaceEventPayload {
- string workspace_id = 1;
- string change_type = 2; // "symbol_changed", "timeframe_changed", "annotation_added", etc.
- string symbol = 3;
- string timeframe = 4;
-
- // For annotations
- string annotation_id = 5;
- string annotation_type = 6; // "trendline", "horizontal", "rectangle", "text", etc.
- string annotation_data = 7; // JSON string with coordinates, style, etc.
-
- // For indicators
- string indicator_name = 8;
- string indicator_params = 9; // JSON string with indicator parameters
-
- int64 timestamp = 10;
-}
-
-// Payload for STRATEGY_LOG, STRATEGY_ERROR
-message StrategyEventPayload {
- string strategy_id = 1;
- string strategy_name = 2;
- string log_level = 3; // "debug", "info", "warn", "error"
- string message = 4;
- string details = 5; // JSON string with additional context
- int64 timestamp = 6;
-}
-
-// Payload for BACKTEST_COMPLETED
-message BacktestEventPayload {
- string backtest_id = 1;
- string strategy_id = 2;
- string strategy_name = 3;
- string symbol = 4;
- string timeframe = 5;
- int64 start_time = 6;
- int64 end_time = 7;
-
- // Results summary
- int32 total_trades = 8;
- int32 winning_trades = 9;
- int32 losing_trades = 10;
- string total_pnl = 11; // Decimal string
- string win_rate = 12; // Decimal string (0-1)
- string sharpe_ratio = 13; // Decimal string
- string max_drawdown = 14; // Decimal string (0-1)
-
- string results_path = 15; // Path to full results file
- int64 completed_at = 16;
-}
diff --git a/web/src/assets/theme.css b/web/src/assets/theme.css
index bd12e43e..821ce6a6 100644
--- a/web/src/assets/theme.css
+++ b/web/src/assets/theme.css
@@ -1,21 +1,21 @@
/* web/src/assets/theme.css */
:root {
- --p-primary-color: #00d4aa; /* teal accent */
- --p-primary-contrast-color: #0a0e1a;
- --p-surface-0: #0a0e1a; /* deepest background */
- --p-surface-50: #0f1629;
- --p-surface-100: #161e35;
- --p-surface-200: #1e2a45;
- --p-surface-300: #263452;
- --p-surface-400: #34446a;
- --p-surface-700: #8892a4;
- --p-surface-800: #aab4c5;
- --p-surface-900: #cdd6e8;
-
+ --p-primary-color: #26A69A; /* TV green */
+ --p-primary-contrast-color: #131722;
+ --p-surface-0: #131722; /* TV bg */
+ --p-surface-50: #1a1e2b;
+ --p-surface-100: #1e222d;
+ --p-surface-200: #2A2E39; /* TV grid */
+ --p-surface-300: #363b4a;
+ --p-surface-400: #434857;
+ --p-surface-700: #787B86; /* TV subtext */
+ --p-surface-800: #B2B5BE;
+ --p-surface-900: #D3D4DC; /* TV text */
+
/* Semantic trading colors */
- --color-bull: #26a69a;
- --color-bear: #ef5350;
- --color-neutral: #8892a4;
+ --color-bull: #26A69A;
+ --color-bear: #EF5350;
+ --color-neutral: #787B86;
}
html, body, #app {
diff --git a/web/src/components/ChartView.vue b/web/src/components/ChartView.vue
index 032f6b18..98b46b61 100644
--- a/web/src/components/ChartView.vue
+++ b/web/src/components/ChartView.vue
@@ -6,6 +6,14 @@ import { useTradingViewShapes } from '../composables/useTradingViewShapes'
import { useTradingViewIndicators } from '../composables/useTradingViewIndicators'
import { useChartStore } from '../stores/chart'
import type { IChartingLibraryWidget } from '../types/tradingview'
+import { intervalToSeconds } from '../utils'
+
+// Convert seconds to TradingView interval string
+function secondsToInterval(seconds: number): string {
+ if (seconds % 86400 === 0) return `${seconds / 86400}D`
+ if (seconds % 3600 === 0) return `${seconds / 3600}H`
+ return `${seconds / 60}` // plain number = minutes
+}
const chartContainer = ref(null)
const chartStore = useChartStore()
@@ -31,7 +39,7 @@ onMounted(() => {
tvWidget = new window.TradingView.widget({
symbol: chartStore.symbol, // Use symbol from store
datafeed: datafeed,
- interval: chartStore.period as any,
+ interval: secondsToInterval(chartStore.period) as any,
container: chartContainer.value!,
library_path: '/charting_library/',
locale: 'en',
@@ -190,9 +198,10 @@ function setupChartListeners() {
// Listen for period changes
chart.onIntervalChanged().subscribe(null, (interval: string) => {
- console.log('[ChartView] Period changed to:', interval)
+ const seconds = intervalToSeconds(interval)
+ console.log('[ChartView] Period changed to:', interval, `(${seconds}s)`)
isUpdatingFromChart = true
- chartStore.period = interval
+ chartStore.period = seconds
isUpdatingFromChart = false
})
@@ -244,10 +253,11 @@ function setupStoreWatchers() {
(newPeriod) => {
if (isUpdatingFromChart) return
- console.log('[ChartView] Store period changed externally to:', newPeriod)
- if (chart.resolution() !== newPeriod) {
- chart.setResolution(newPeriod, () => {
- console.log('[ChartView] Chart period updated to:', newPeriod)
+ const tvInterval = secondsToInterval(newPeriod)
+ console.log('[ChartView] Store period changed externally to:', newPeriod, `-> ${tvInterval}`)
+ if (chart.resolution() !== tvInterval) {
+ chart.setResolution(tvInterval, () => {
+ console.log('[ChartView] Chart period updated to:', tvInterval)
})
}
}
diff --git a/web/src/components/ChatPanel.vue b/web/src/components/ChatPanel.vue
index 460477d6..507d8ed0 100644
--- a/web/src/components/ChatPanel.vue
+++ b/web/src/components/ChatPanel.vue
@@ -36,10 +36,35 @@ const rooms = computed(() => [{
// Streaming state
let currentStreamingMessageId: string | null = null
+let toolCallMessageId: string | null = null
let lastSentMessageId: string | null = null
let streamingBuffer = ''
const isAgentProcessing = ref(false)
-const toolCallStatus = ref(null)
+
+const addToolCallBubble = (label: string) => {
+ removeToolCallBubble()
+ toolCallMessageId = `tool-call-${Date.now()}`
+ const timestamp = new Date().toTimeString().split(' ')[0].slice(0, 5)
+ messages.value = [...messages.value, {
+ _id: toolCallMessageId,
+ content: `⚙ ${label}`,
+ senderId: AGENT_ID,
+ timestamp,
+ date: new Date().toLocaleDateString(),
+ saved: false,
+ distributed: false,
+ seen: false,
+ files: [],
+ toolCall: true
+ }]
+}
+
+const removeToolCallBubble = () => {
+ if (toolCallMessageId) {
+ messages.value = messages.value.filter(m => m._id !== toolCallMessageId)
+ toolCallMessageId = null
+ }
+}
// Generate message ID
const generateMessageId = () => `msg-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`
@@ -52,7 +77,7 @@ const handleMessage = (data: WebSocketMessage) => {
console.log('[ChatPanel] Received message:', data)
if (data.type === 'agent_tool_call') {
- toolCallStatus.value = data.label ?? data.toolName ?? null
+ addToolCallBubble(data.label ?? data.toolName ?? 'Tool call...')
return
}
@@ -99,12 +124,13 @@ const handleMessage = (data: WebSocketMessage) => {
if (!currentStreamingMessageId) {
console.log('[ChatPanel] Starting new streaming message')
+ // Remove any ephemeral tool-call bubble before starting the real response
+ removeToolCallBubble()
// Set up streaming state and mark user message as seen
isAgentProcessing.value = true
currentStreamingMessageId = generateMessageId()
streamingBuffer = data.content
streamingImages.value = []
- toolCallStatus.value = null
// Mark the last sent user message as seen (double-checkmark)
if (lastSentMessageId) {
@@ -205,7 +231,7 @@ const handleMessage = (data: WebSocketMessage) => {
streamingBuffer = ''
streamingImages.value = []
isAgentProcessing.value = false
- toolCallStatus.value = null
+ removeToolCallBubble()
}
}
}
@@ -221,7 +247,7 @@ const stopAgent = () => {
}
wsManager.send(wsMessage)
isAgentProcessing.value = false
- toolCallStatus.value = null
+ removeToolCallBubble()
lastSentMessageId = null
}
@@ -336,34 +362,137 @@ const chatTheme = 'dark'
// Styles to match TradingView dark theme
const chatStyles = computed(() => JSON.stringify({
general: {
- color: '#d1d4dc',
- colorSpinner: '#2962ff',
- borderStyle: '1px solid #2a2e39'
+ color: '#D3D4DC',
+ colorButtonClear: '#D3D4DC',
+ colorButton: '#131722',
+ backgroundColorButton: '#26A69A',
+ backgroundInput: '#131722',
+ colorPlaceholder: '#787B86',
+ colorCaret: '#D3D4DC',
+ colorSpinner: '#26A69A',
+ borderStyle: '1px solid #2A2E39',
+ backgroundScrollIcon: '#2A2E39'
},
container: {
- background: '#131722'
+ border: 'none',
+ borderRadius: '0',
+ boxShadow: 'none'
},
header: {
- background: '#1e222d',
- colorRoomName: '#d1d4dc',
- colorRoomInfo: '#787b86'
+ background: '#2A2E39',
+ colorRoomName: '#D3D4DC',
+ colorRoomInfo: '#787B86',
+ position: 'absolute',
+ width: '100%'
},
footer: {
- background: '#1e222d',
- borderStyleInput: '1px solid #2a2e39',
- backgroundInput: '#1e222d',
- colorInput: '#d1d4dc',
- colorPlaceholder: '#787b86',
- colorIcons: '#787b86'
+ background: '#2A2E39',
+ borderStyleInput: '1px solid #2A2E39',
+ borderInputSelected: '#26A69A',
+ backgroundReply: '#2A2E39',
+ backgroundTagActive: '#2A2E39',
+ backgroundTag: '#1E222D'
},
content: {
background: '#131722'
},
+ sidemenu: {
+ background: '#131722',
+ backgroundHover: '#1E222D',
+ backgroundActive: '#2A2E39',
+ colorActive: '#D3D4DC',
+ borderColorSearch: '#2A2E39'
+ },
+ dropdown: {
+ background: '#2A2E39',
+ backgroundHover: '#363B4A'
+ },
message: {
- background: '#1e222d',
- backgroundMe: '#2962ff',
- color: '#d1d4dc',
- colorMe: '#ffffff'
+ background: '#1E222D',
+ backgroundMe: '#26A69A',
+ color: '#D3D4DC',
+ colorStarted: '#787B86',
+ backgroundDeleted: '#131722',
+ backgroundSelected: '#2A2E39',
+ colorDeleted: '#787B86',
+ colorUsername: '#787B86',
+ colorTimestamp: '#787B86',
+ backgroundDate: 'rgba(0, 0, 0, 0.3)',
+ colorDate: '#787B86',
+ backgroundSystem: 'rgba(0, 0, 0, 0.3)',
+ colorSystem: '#787B86',
+ backgroundMedia: 'rgba(0, 0, 0, 0.18)',
+ backgroundReply: 'rgba(0, 0, 0, 0.18)',
+ colorReplyUsername: '#D3D4DC',
+ colorReply: '#B2B5BE',
+ colorTag: '#26A69A',
+ backgroundImage: '#2A2E39',
+ colorNewMessages: '#26A69A',
+ backgroundScrollCounter: '#26A69A',
+ colorScrollCounter: '#131722',
+ backgroundReaction: 'none',
+ borderStyleReaction: 'none',
+ backgroundReactionHover: '#2A2E39',
+ borderStyleReactionHover: 'none',
+ colorReactionCounter: '#D3D4DC',
+ backgroundReactionMe: '#26A69A',
+ borderStyleReactionMe: 'none',
+ backgroundReactionHoverMe: '#26A69A',
+ borderStyleReactionHoverMe: 'none',
+ colorReactionCounterMe: '#131722',
+ backgroundAudioRecord: '#EF5350',
+ backgroundAudioLine: 'rgba(255, 255, 255, 0.15)',
+ backgroundAudioProgress: '#26A69A',
+ backgroundAudioProgressSelector: '#26A69A',
+ colorFileExtension: '#787B86'
+ },
+ markdown: {
+ background: 'rgba(42, 46, 57, 0.8)',
+ border: 'rgba(55, 60, 74, 0.9)',
+ color: '#26A69A',
+ colorMulti: '#D3D4DC'
+ },
+ room: {
+ colorUsername: '#D3D4DC',
+ colorMessage: '#787B86',
+ colorTimestamp: '#787B86',
+ colorStateOnline: '#26A69A',
+ colorStateOffline: '#787B86',
+ backgroundCounterBadge: '#26A69A',
+ colorCounterBadge: '#131722'
+ },
+ emoji: {
+ background: '#2A2E39'
+ },
+ icons: {
+ search: '#787B86',
+ add: '#D3D4DC',
+ toggle: '#D3D4DC',
+ menu: '#D3D4DC',
+ close: '#787B86',
+ closeImage: '#D3D4DC',
+ file: '#26A69A',
+ paperclip: '#787B86',
+ closeOutline: '#D3D4DC',
+ closePreview: '#D3D4DC',
+ send: '#26A69A',
+ sendDisabled: '#787B86',
+ emoji: '#787B86',
+ emojiReaction: '#787B86',
+ document: '#26A69A',
+ pencil: '#787B86',
+ checkmark: '#787B86',
+ checkmarkSeen: '#26A69A',
+ eye: '#D3D4DC',
+ dropdownMessage: '#D3D4DC',
+ dropdownMessageBackground: 'rgba(0, 0, 0, 0.3)',
+ dropdownRoom: '#D3D4DC',
+ dropdownScroll: '#2A2E39',
+ microphone: '#787B86',
+ audioPlay: '#26A69A',
+ audioPause: '#26A69A',
+ audioCancel: '#EF5350',
+ audioConfirm: '#26A69A'
}
}))
@@ -429,7 +558,6 @@ onUnmounted(() => {