data fixes; indicator=>workspace sync

This commit is contained in:
2026-03-31 20:29:12 -04:00
parent 998f69fa1a
commit cd28e18e52
45 changed files with 1324 additions and 1239 deletions

3
.idea/ai.iml generated
View File

@@ -5,6 +5,7 @@
<sourceFolder url="file://$MODULE_DIR$/backend.old/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/backend.old/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/client-py" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/sandbox" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/.venv" />
<excludeFolder url="file://$MODULE_DIR$/backend.old/data" />
<excludeFolder url="file://$MODULE_DIR$/doc.old" />
@@ -17,6 +18,8 @@
<excludeFolder url="file://$MODULE_DIR$/relay/protobuf" />
<excludeFolder url="file://$MODULE_DIR$/relay/target" />
<excludeFolder url="file://$MODULE_DIR$/doc/competition" />
<excludeFolder url="file://$MODULE_DIR$/sandbox/dexorder_sandbox.egg-info" />
<excludeFolder url="file://$MODULE_DIR$/sandbox/protobuf" />
</content>
<orderEntry type="jdk" jdkName="Python 3.12 (ai)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />

View File

@@ -212,6 +212,27 @@ generatorOptions:

View File

@@ -80,7 +80,8 @@ public class SchemaInitializer {
*/
// Bump this when the schema changes. Tables with a different (or missing) version
// will be dropped and recreated. Increment by 1 for each incompatible change.
private static final String OHLC_SCHEMA_VERSION = "1";
// v2: open/high/low/close changed from required to optional to support null gap bars
private static final String OHLC_SCHEMA_VERSION = "2";
private static final String SCHEMA_VERSION_PROP = "app.schema.version";
private void initializeOhlcTable() {
@@ -109,7 +110,7 @@ public class SchemaInitializer {
Table existing = catalog.loadTable(tableId);
String existingVersion = existing.properties().get(SCHEMA_VERSION_PROP);
if (!OHLC_SCHEMA_VERSION.equals(existingVersion)) {
LOG.warn("Table {} has schema version '{}', expected '{}' — manual migration required",
LOG.warn("Table {} has schema version '{}', expected '{}' — skipping (manual migration required if needed)",
tableId, existingVersion, OHLC_SCHEMA_VERSION);
}
LOG.info("Table {} already exists at schema version {} — skipping creation", tableId, existingVersion);
@@ -127,11 +128,11 @@ public class SchemaInitializer {
required(2, "period_seconds", Types.IntegerType.get(), "OHLC period in seconds"),
required(3, "timestamp", Types.LongType.get(), "Candle timestamp in microseconds since epoch"),
// OHLC price data
required(4, "open", Types.LongType.get(), "Opening price"),
required(5, "high", Types.LongType.get(), "Highest price"),
required(6, "low", Types.LongType.get(), "Lowest price"),
required(7, "close", Types.LongType.get(), "Closing price"),
// OHLC price data — optional to support gap bars (null = no trades that period)
optional(4, "open", Types.LongType.get(), "Opening price"),
optional(5, "high", Types.LongType.get(), "Highest price"),
optional(6, "low", Types.LongType.get(), "Lowest price"),
optional(7, "close", Types.LongType.get(), "Closing price"),
// Volume data
optional(8, "volume", Types.LongType.get(), "Total volume"),

View File

@@ -65,11 +65,11 @@ public class OHLCBatchDeserializer implements DeserializationSchema<OHLCBatchWra
rows.add(new OHLCBatchWrapper.OHLCRow(
row.getTimestamp(),
row.getTicker(),
row.getOpen(),
row.getHigh(),
row.getLow(),
row.getClose(),
row.hasVolume() ? row.getVolume() : 0
row.hasOpen() ? row.getOpen() : null,
row.hasHigh() ? row.getHigh() : null,
row.hasLow() ? row.getLow() : null,
row.hasClose() ? row.getClose() : null,
row.hasVolume() ? row.getVolume() : null
));
}

View File

@@ -107,21 +107,22 @@ public class OHLCBatchWrapper implements Serializable {
}
/**
* Single OHLC row
* Single OHLC row. open/high/low/close/volume are nullable to support gap bars
* (periods where no trades occurred).
*/
public static class OHLCRow implements Serializable {
private static final long serialVersionUID = 1L;
private final long timestamp;
private final String ticker;
private final long open;
private final long high;
private final long low;
private final long close;
private final long volume;
private final Long open; // null for gap bars
private final Long high; // null for gap bars
private final Long low; // null for gap bars
private final Long close; // null for gap bars
private final Long volume; // null when no volume data
public OHLCRow(long timestamp, String ticker, long open, long high,
long low, long close, long volume) {
public OHLCRow(long timestamp, String ticker, Long open, Long high,
Long low, Long close, Long volume) {
this.timestamp = timestamp;
this.ticker = ticker;
this.open = open;
@@ -139,35 +140,37 @@ public class OHLCBatchWrapper implements Serializable {
return ticker;
}
public long getOpen() {
public Long getOpen() {
return open;
}
public long getHigh() {
public Long getHigh() {
return high;
}
public long getLow() {
public Long getLow() {
return low;
}
public long getClose() {
public Long getClose() {
return close;
}
public long getVolume() {
public Long getVolume() {
return volume;
}
public boolean isGapBar() {
return open == null && high == null && low == null && close == null;
}
@Override
public String toString() {
return "OHLCRow{" +
"timestamp=" + timestamp +
", ticker='" + ticker + '\'' +
", open=" + open +
", high=" + high +
", low=" + low +
", close=" + close +
(isGapBar() ? ", gap=true" :
", open=" + open + ", high=" + high + ", low=" + low + ", close=" + close) +
", volume=" + volume +
'}';
}

View File

@@ -77,11 +77,11 @@ public class HistoricalBatchWriter extends RichFlatMapFunction<OHLCBatchWrapper,
record.setField("ticker", batch.getTicker());
record.setField("period_seconds", batch.getPeriodSeconds());
record.setField("timestamp", row.getTimestamp());
record.setField("open", row.getOpen());
record.setField("high", row.getHigh());
record.setField("low", row.getLow());
record.setField("close", row.getClose());
record.setField("volume", row.getVolume() != 0 ? row.getVolume() : null);
record.setField("open", row.getOpen()); // null for gap bars
record.setField("high", row.getHigh()); // null for gap bars
record.setField("low", row.getLow()); // null for gap bars
record.setField("close", row.getClose()); // null for gap bars
record.setField("volume", row.getVolume());
record.setField("buy_vol", null);
record.setField("sell_vol", null);
record.setField("open_time", null);
@@ -102,7 +102,13 @@ public class HistoricalBatchWriter extends RichFlatMapFunction<OHLCBatchWrapper,
.appendFile(writer.toDataFile())
.commit();
LOG.info("Committed {} rows to Iceberg for request_id={}", batch.getRowCount(), batch.getRequestId());
long gapCount = batch.getRows().stream().filter(OHLCBatchWrapper.OHLCRow::isGapBar).count();
if (gapCount > 0) {
LOG.info("Committed {} rows ({} gap bars) to Iceberg for request_id={}",
batch.getRowCount(), gapCount, batch.getRequestId());
} else {
LOG.info("Committed {} rows to Iceberg for request_id={}", batch.getRowCount(), batch.getRequestId());
}
// Emit batch downstream only after successful commit
out.collect(batch);

View File

@@ -16,6 +16,7 @@ import {
type SnapshotMessage,
type PatchMessage,
} from '../workspace/index.js';
import { resolutionToSeconds } from '../types/ohlc.js';
/**
* Safe JSON stringifier that handles BigInt values
@@ -486,7 +487,7 @@ export class WebSocketHandler {
}
const history = await ohlcService.fetchOHLC(
payload.symbol,
payload.resolution,
resolutionToSeconds(payload.resolution),
payload.from_time,
payload.to_time,
payload.countback

View File

@@ -420,7 +420,7 @@ export class DuckDBClient {
WHERE ticker = ?
AND period_seconds = ?
AND timestamp >= ?
AND timestamp <= ?
AND timestamp < ?
ORDER BY timestamp ASC
`;
@@ -441,10 +441,11 @@ export class DuckDBClient {
count: rows.length
}, 'Loaded OHLC data from Iceberg');
// Convert timestamp strings to numbers (microseconds as Number is fine for display)
// Keep timestamp as bigint to preserve full microsecond precision.
// Convert to seconds (divide first) only when producing TradingView bars.
return rows.map((row: any) => ({
...row,
timestamp: Number(row.timestamp)
timestamp: BigInt(row.timestamp)
}));
} catch (error: any) {
this.logger.error({
@@ -484,7 +485,7 @@ export class DuckDBClient {
WHERE ticker = ?
AND period_seconds = ?
AND timestamp >= ?
AND timestamp <= ?
AND timestamp < ?
`;
const params = [
@@ -525,6 +526,7 @@ export class DuckDBClient {
// For now, simple check: if we have any data, assume complete
// TODO: Implement proper gap detection by checking for missing periods
const periodMicros = BigInt(period_seconds) * 1000000n;
// end_time is exclusive, so expected count = (end - start) / period (no +1)
const expectedBars = Number((end_time - start_time) / periodMicros);
if (data.length < expectedBars * 0.95) { // Allow 5% tolerance

View File

@@ -39,7 +39,7 @@ export interface IcebergMessage {
id: string;
user_id: string;
session_id: string;
role: 'user' | 'assistant' | 'system';
role: 'user' | 'assistant' | 'system' | 'workspace';
content: string;
metadata: string; // JSON string
timestamp: number; // microseconds

View File

@@ -8,8 +8,9 @@ import type { InboundMessage, OutboundMessage } from '../types/messages.js';
import { MCPClientConnector } from './mcp-client.js';
import { LLMProviderFactory, type ProviderConfig } from '../llm/provider.js';
import { ModelRouter, RoutingStrategy } from '../llm/router.js';
import type { ModelMiddleware } from '../llm/middleware.js';
import type { WorkspaceManager } from '../workspace/workspace-manager.js';
import type { ChannelAdapter } from '../workspace/index.js';
import type { ChannelAdapter, PathTriggerContext } from '../workspace/index.js';
import type { ResearchSubagent } from './subagents/research/index.js';
import type { DynamicStructuredTool } from '@langchain/core/tools';
import { getToolRegistry } from '../tools/tool-registry.js';
@@ -70,10 +71,10 @@ export class AgentHarness {
private config: AgentHarnessConfig;
private modelFactory: LLMProviderFactory;
private modelRouter: ModelRouter;
private middleware: ModelMiddleware | undefined;
private mcpClient: MCPClientConnector;
private workspaceManager?: WorkspaceManager;
private channelAdapter?: ChannelAdapter;
private isFirstMessage: boolean = true;
private researchSubagent?: ResearchSubagent;
private availableMCPTools: MCPToolInfo[] = [];
private researchImageCapture: Array<{ data: string; mimeType: string }> = [];
@@ -94,6 +95,8 @@ export class AgentHarness {
mcpServerUrl: config.mcpServerUrl,
logger: config.logger,
});
this.registerWorkspaceTriggers();
}
/**
@@ -193,7 +196,7 @@ export class AgentHarness {
const { createResearchSubagent } = await import('./subagents/research/index.js');
// Create a model for the research subagent
const model = await this.modelRouter.route(
const { model } = await this.modelRouter.route(
'research analysis', // dummy query
this.config.license,
RoutingStrategy.COMPLEXITY,
@@ -429,11 +432,25 @@ export class AgentHarness {
// 2. Load recent conversation history
const channelKey = this.config.channelType ?? ChannelType.WEBSOCKET;
const storedMessages = this.conversationStore
let storedMessages = this.conversationStore
? await this.conversationStore.getRecentMessages(
this.config.userId, this.config.sessionId, this.config.historyLimit, channelKey
)
: [];
// First turn: seed conversation history with current workspace state
if (storedMessages.length === 0 && this.workspaceManager && this.conversationStore) {
const workspaceJSON = this.workspaceManager.serializeState();
const content = `[Workspace State]\n\`\`\`json\n${workspaceJSON}\n\`\`\``;
await this.conversationStore.saveMessage(
this.config.userId, this.config.sessionId,
'workspace', content, { isWorkspaceContext: true }, channelKey
);
storedMessages = await this.conversationStore.getRecentMessages(
this.config.userId, this.config.sessionId, this.config.historyLimit, channelKey
);
}
const history = this.conversationStore
? this.conversationStore.toLangChainMessages(storedMessages)
: [];
@@ -441,12 +458,13 @@ export class AgentHarness {
// 4. Get the configured model
this.config.logger.debug('Routing to model');
const model = await this.modelRouter.route(
const { model, middleware } = await this.modelRouter.route(
message.content,
this.config.license,
RoutingStrategy.COMPLEXITY,
this.config.userId
);
this.middleware = middleware;
this.config.logger.info({ modelName: model.constructor.name }, 'Model selected');
// 5. Build LangChain messages
@@ -489,6 +507,11 @@ export class AgentHarness {
'Tools loaded for main agent'
);
// Apply middleware (e.g. Anthropic prompt caching)
const processedMessages = this.middleware
? this.middleware.processMessages(langchainMessages, tools)
: langchainMessages;
// 7. Bind tools to model
const modelWithTools = tools.length > 0 && model.bindTools ? model.bindTools(tools) : model;
@@ -501,7 +524,7 @@ export class AgentHarness {
// 8. Call LLM with tool calling loop
this.config.logger.info('Invoking LLM with tool support');
const assistantMessage = await this.executeWithToolCalling(modelWithTools, langchainMessages, tools);
const assistantMessage = await this.executeWithToolCalling(modelWithTools, processedMessages, tools, 10);
this.config.logger.info(
{ responseLength: assistantMessage.length },
@@ -518,11 +541,6 @@ export class AgentHarness {
);
}
// Mark first message as processed
if (this.isFirstMessage) {
this.isFirstMessage = false;
}
return {
messageId: `msg_${Date.now()}`,
sessionId: message.sessionId,
@@ -556,16 +574,10 @@ export class AgentHarness {
private async buildSystemPrompt(): Promise<string> {
// Load template and populate with license info
const template = await AgentHarness.loadSystemPromptTemplate();
let prompt = template
const prompt = template
.replace('{{licenseType}}', this.config.license.licenseType)
.replace('{{features}}', JSON.stringify(this.config.license.features, null, 2));
// Add full workspace state from WorkspaceManager (first message only)
if (this.isFirstMessage && this.workspaceManager) {
const workspaceJSON = this.workspaceManager.serializeState();
prompt += `\n\n# Current Workspace State\n\`\`\`json\n${workspaceJSON}\n\`\`\``;
}
return prompt;
}
@@ -574,9 +586,14 @@ export class AgentHarness {
*/
private getToolLabel(toolName: string): string {
const labels: Record<string, string> = {
research_agent: 'Researching...',
research: 'Researching...',
get_chart_data: 'Fetching chart data...',
symbol_lookup: 'Looking up symbol...',
category_list: 'Seeing what we have...',
category_edit: 'Coding...',
category_write: 'Coding...',
category_read: 'Inspecting...',
execute_research: 'Running script...',
};
return labels[toolName] ?? `Running ${toolName}...`;
}
@@ -685,6 +702,26 @@ export class AgentHarness {
return result;
}
/**
* Register workspace path triggers to record state changes into conversation history.
*/
private registerWorkspaceTriggers(): void {
if (!this.workspaceManager || !this.conversationStore) return;
const channelKey = this.config.channelType ?? ChannelType.WEBSOCKET;
for (const store of ['shapes', 'indicators', 'chartState']) {
this.workspaceManager.onPathChange(`/${store}/*`, async (_old: unknown, newVal: unknown, ctx: PathTriggerContext) => {
const content = `[Workspace Update] ${ctx.store}${ctx.path}\n${JSON.stringify(newVal, null, 2)}`;
await this.conversationStore!.saveMessage(
this.config.userId, this.config.sessionId,
'workspace', content,
{ isWorkspaceUpdate: true, store: ctx.store, seq: ctx.seq },
channelKey
);
});
}
}
/**
* End the session: flush conversation to cold storage, then release resources.
* Called by channel handlers on disconnect, session expiry, or graceful shutdown.

View File

@@ -11,7 +11,7 @@ export interface StoredMessage {
id: string;
userId: string;
sessionId: string;
role: 'user' | 'assistant' | 'system';
role: 'user' | 'assistant' | 'system' | 'workspace';
content: string;
timestamp: number; // microseconds (Iceberg convention)
metadata?: Record<string, unknown>;
@@ -44,7 +44,7 @@ export class ConversationStore {
async saveMessage(
userId: string,
sessionId: string,
role: 'user' | 'assistant' | 'system',
role: 'user' | 'assistant' | 'system' | 'workspace',
content: string,
metadata?: Record<string, unknown>,
channelType?: string
@@ -171,6 +171,8 @@ export class ConversationStore {
return new AIMessage(msg.content);
case 'system':
return new SystemMessage(msg.content);
case 'workspace':
return new HumanMessage(msg.content);
default:
throw new Error(`Unknown role: ${msg.role}`);
}

View File

@@ -59,6 +59,11 @@ Example usage:
- User: "Does Friday price action correlate with Monday?"
- You: Call research tool with instruction="Analyze correlation between Friday and Monday price action during NY trading hours (9:30-4:00 ET)", name="Friday-Monday Correlation"
### category_list
List existing research scripts (category="research").
Use this before calling the research tool to check whether a relevant script already exists.
If one does, pass its exact name to the research tool so the subagent updates it rather than creating a new one.
### symbol-lookup
Look up trading symbols and get metadata.
Use this when users mention tickers or need symbol information.

View File

@@ -12,6 +12,7 @@ maxTokens: 8192
memoryFiles:
- api-reference.md
- usage-examples.md
- pandas-ta-reference.md
# System prompt file
systemPromptFile: system-prompt.md

View File

@@ -0,0 +1,227 @@
# pandas-ta Reference for Research Scripts
The sandbox environment uses **pandas-ta** as the standard indicator library. Always use it for technical indicator calculations; do not write manual rolling/ewm implementations.
```python
import pandas_ta as ta
```
## Calling Convention
pandas-ta functions accept a Series (or OHLCV columns) plus keyword parameters that match pandas-ta's documented argument names:
```python
# Single-series indicator
rsi = ta.rsi(df['close'], length=14) # returns Series
# OHLCV indicator
atr = ta.atr(df['high'], df['low'], df['close'], length=14)
# Multi-output indicator (returns DataFrame)
macd_df = ta.macd(df['close'], fast=12, slow=26, signal=9)
# columns: MACD_12_26_9, MACDh_12_26_9, MACDs_12_26_9
bbands_df = ta.bbands(df['close'], length=20, std=2.0)
# columns: BBL_20_2.0, BBM_20_2.0, BBU_20_2.0, BBB_20_2.0, BBP_20_2.0
```
## Default Parameters
Key defaults to keep in mind:
- Most period/length indicators: `length=14` (use `length=` not `timeperiod=`)
- `bbands`: `length=20, std=2.0` (note: single `std`, not separate upper/lower)
- `macd`: `fast=12, slow=26, signal=9`
- `stoch`: `k=14, d=3, smooth_k=3`
- `psar`: `af0=0.02, af=0.02, max_af=0.2`
- `vwap`: `anchor='D'` (requires DatetimeIndex)
- `ichimoku`: `tenkan=9, kijun=26, senkou=52`
## Available Indicators
These match the indicators supported by the TradingView web client. Use the pandas-ta function name shown here (lowercase):
### Overlap / Moving Averages — plotted on the price pane
| Function | Description |
|----------|-------------|
| `sma` | Simple Moving Average — plain arithmetic mean over `length` periods |
| `ema` | Exponential Moving Average — more weight on recent prices |
| `wma` | Weighted Moving Average — linearly increasing weights |
| `dema` | Double EMA — two layers of EMA to reduce lag |
| `tema` | Triple EMA — three layers of EMA, even less lag than DEMA |
| `trima` | Triangular MA — double-smoothed SMA, very smooth |
| `kama` | Kaufman Adaptive MA — adapts speed to market noise/trending conditions |
| `t3` | T3 Moving Average — Tillson's smooth, low-lag MA using six EMAs |
| `hma` | Hull MA — very low-lag MA using WMAs |
| `alma` | Arnaud Legoux MA — Gaussian-weighted MA with reduced lag and noise |
| `midpoint` | Midpoint of close over `length` periods: (highest + lowest) / 2 |
| `midprice` | Midpoint of high/low over `length` periods |
| `supertrend` | Trend-following band (ATR-based) that flips above/below price |
| `ichimoku` | Ichimoku Cloud — multi-line Japanese trend/support/resistance system |
| `vwap` | Volume-Weighted Average Price — average price weighted by volume, resets on `anchor` |
| `vwma` | Volume-Weighted MA — like SMA but candles weighted by volume |
| `bbands` | Bollinger Bands — SMA ± N standard deviations; returns upper, mid, lower bands |
### Momentum — typically plotted in a separate pane
| Function | Description |
|----------|-------------|
| `rsi` | Relative Strength Index — 0–100 oscillator measuring speed of price changes |
| `macd` | MACD — difference of two EMAs plus signal line and histogram |
| `stoch` | Stochastic Oscillator — %K/%D, measures close vs recent high/low range |
| `stochrsi` | Stochastic RSI — applies stochastic formula to RSI values |
| `cci` | Commodity Channel Index — deviation of price from its statistical mean |
| `willr` | Williams %R — inverse stochastic, −100 to 0 oscillator |
| `mom` | Momentum — raw price change over `length` periods |
| `roc` | Rate of Change — percentage price change over `length` periods |
| `trix` | TRIX — 1-period % change of a triple-smoothed EMA |
| `cmo` | Chande Momentum Oscillator — ratio of up/down momentum, −100 to +100 |
| `adx` | Average Directional Index — strength of trend (0–100, direction-agnostic) |
| `aroon` | Aroon — measures how recently the highest/lowest price occurred; returns Up, Down, Oscillator |
| `ao` | Awesome Oscillator — difference of 5- and 34-period simple MAs of midprice |
| `bop` | Balance of Power — measures buying vs selling pressure: (close − open) / (high − low) |
| `uo` | Ultimate Oscillator — weighted combo of buying pressure ratios over three periods (fast/medium/slow) |
| `apo` | Absolute Price Oscillator — difference between two EMAs (like MACD without signal line) |
| `mfi` | Money Flow Index — RSI-like oscillator using price × volume |
| `coppock` | Coppock Curve — long-term momentum oscillator based on rate-of-change |
| `dpo` | Detrended Price Oscillator — removes trend to show cycle oscillations |
| `fisher` | Fisher Transform — converts price into a Gaussian normal distribution |
| `rvgi` | Relative Vigor Index — compares close − open to high − low to measure trend vigor |
| `kst` | Know Sure Thing — momentum oscillator from four ROC periods, smoothed |
### Volatility — plotted on price pane or separate
| Function | Description |
|----------|-------------|
| `atr` | Average True Range — average of true range (greatest of H−L, |H−prevC|, |L−prevC|) |
| `kc` | Keltner Channels — EMA ± N × ATR bands around price |
| `donchian` | Donchian Channels — highest high / lowest low over `length` periods |
### Volume — plotted in separate pane
| Function | Description |
|----------|-------------|
| `obv` | On Balance Volume — cumulative volume, added on up days, subtracted on down days |
| `ad` | Accumulation/Distribution — running total of the money flow multiplier × volume |
| `adosc` | Chaikin Oscillator — EMA difference of the A/D line |
| `cmf` | Chaikin Money Flow — sum of (money flow volume) / sum of volume over `length` |
| `eom` | Ease of Movement — relates price change to volume; high = price moves easily |
| `efi` | Elder's Force Index — combines price change direction with volume magnitude |
| `kvo` | Klinger Volume Oscillator — EMA difference of volume force |
| `pvt` | Price Volume Trend — cumulative: volume × percentage price change |
### Statistics / Price Transforms
| Function | Description |
|----------|-------------|
| `stdev` | Standard Deviation of close over `length` periods |
| `linreg` | Linear Regression Curve — least-squares line endpoint value over `length` periods |
| `slope` | Linear Regression Slope — gradient of the regression line |
| `hl2` | Median Price — (high + low) / 2 |
| `hlc3` | Typical Price — (high + low + close) / 3 |
| `ohlc4` | Average Price — (open + high + low + close) / 4 |
### Trend
| Function | Description |
|----------|-------------|
| `psar` | Parabolic SAR — trailing stop-and-reverse dots that follow price |
| `vortex` | Vortex Indicator — VI+ / VI− lines measuring upward vs downward trend movement |
| `chop` | Choppiness Index — 0–100, high = choppy/sideways, low = strong trend |
## Usage Examples
### Single-output indicators
```python
import pandas_ta as ta
df['rsi'] = ta.rsi(df['close'], length=14)
df['ema_20'] = ta.ema(df['close'], length=20)
df['sma_50'] = ta.sma(df['close'], length=50)
df['atr'] = ta.atr(df['high'], df['low'], df['close'], length=14)
df['obv'] = ta.obv(df['close'], df['volume'])
df['adx'] = ta.adx(df['high'], df['low'], df['close'], length=14)['ADX_14']
```
### Multi-output indicators — extract columns by position
```python
# MACD → MACD_12_26_9, MACDh_12_26_9, MACDs_12_26_9
macd_df = ta.macd(df['close'], fast=12, slow=26, signal=9)
df['macd'] = macd_df.iloc[:, 0] # MACD line
df['macd_hist'] = macd_df.iloc[:, 1] # Histogram
df['macd_signal'] = macd_df.iloc[:, 2] # Signal line
# Bollinger Bands → BBL, BBM, BBU, BBB, BBP
bb_df = ta.bbands(df['close'], length=20, std=2.0)
df['bb_lower'] = bb_df.iloc[:, 0] # BBL
df['bb_mid'] = bb_df.iloc[:, 1] # BBM
df['bb_upper'] = bb_df.iloc[:, 2] # BBU
# Stochastic → STOCHk, STOCHd
stoch_df = ta.stoch(df['high'], df['low'], df['close'], k=14, d=3, smooth_k=3)
df['stoch_k'] = stoch_df.iloc[:, 0]
df['stoch_d'] = stoch_df.iloc[:, 1]
# Keltner Channels → KCLe, KCBe, KCUe
kc_df = ta.kc(df['high'], df['low'], df['close'], length=20)
df['kc_lower'] = kc_df.iloc[:, 0]
df['kc_mid'] = kc_df.iloc[:, 1]
df['kc_upper'] = kc_df.iloc[:, 2]
# ADX → ADX_14, DMP_14, DMN_14
adx_df = ta.adx(df['high'], df['low'], df['close'], length=14)
df['adx'] = adx_df.iloc[:, 0] # ADX strength
df['dmp'] = adx_df.iloc[:, 1] # +DI
df['dmn'] = adx_df.iloc[:, 2] # -DI
# Aroon → AROOND_14, AROONU_14, AROONOSC_14
aroon_df = ta.aroon(df['high'], df['low'], length=14)
df['aroon_down'] = aroon_df.iloc[:, 0]
df['aroon_up'] = aroon_df.iloc[:, 1]
# Donchian Channels → DCL, DCM, DCU
dc_df = ta.donchian(df['high'], df['low'], lower_length=20, upper_length=20)
df['dc_lower'] = dc_df.iloc[:, 0]
df['dc_mid'] = dc_df.iloc[:, 1]
df['dc_upper'] = dc_df.iloc[:, 2]
```
### Charting with indicators
```python
import pandas_ta as ta
from dexorder.api import get_api
import asyncio
api = get_api()
df = asyncio.run(api.data.historical_ohlc(
ticker="BINANCE:BTC/USDT",
period_seconds=3600,
start_time="2024-01-01",
end_time="2024-01-08",
extra_columns=["volume"]
))
# Compute indicators
df['ema_20'] = ta.ema(df['close'], length=20)
df['rsi'] = ta.rsi(df['close'], length=14)
macd_df = ta.macd(df['close'])
df['macd'] = macd_df.iloc[:, 0]
df['macd_signal'] = macd_df.iloc[:, 2]
# Main price chart with EMA overlay
fig, ax = api.charting.plot_ohlc(df, title="BTC/USDT 1H", volume=True)
ax.plot(df.index, df['ema_20'], label="EMA 20", color="orange", linewidth=1.5)
ax.legend()
# RSI panel
rsi_ax = api.charting.add_indicator_panel(fig, df, columns=["rsi"], ylabel="RSI", ylim=(0, 100))
rsi_ax.axhline(70, color='red', linestyle='--', alpha=0.5)
rsi_ax.axhline(30, color='green', linestyle='--', alpha=0.5)
# MACD panel
api.charting.add_indicator_panel(fig, df, columns=["macd", "macd_signal"], ylabel="MACD")
```

View File

@@ -112,10 +112,12 @@ fig, ax = api.charting.plot_ohlc(
### Adding Indicator Panels
Use **pandas-ta** for all indicator calculations. Do not write manual rolling/ewm implementations.
```python
from dexorder.api import get_api
import asyncio
import pandas as pd
import pandas_ta as ta
api = get_api()
@@ -127,8 +129,9 @@ df = asyncio.run(api.data.historical_ohlc(
end_time="2021-12-21"
))
# Calculate a simple moving average
df['sma_20'] = df['close'].rolling(window=20).mean()
# Calculate indicators using pandas-ta
df['sma_20'] = ta.sma(df['close'], length=20)
df['rsi'] = ta.rsi(df['close'], length=14)
# Create chart
fig, ax = api.charting.plot_ohlc(df, title="BTC/USDT with SMA")
@@ -138,7 +141,6 @@ ax.plot(df.index, df['sma_20'], label="SMA 20", color="blue", linewidth=2)
ax.legend()
# Add RSI indicator panel below
df['rsi'] = calculate_rsi(df['close'], 14) # Your RSI calculation
rsi_ax = api.charting.add_indicator_panel(
fig, df,
columns=["rsi"],
@@ -149,12 +151,40 @@ rsi_ax.axhline(70, color='red', linestyle='--', alpha=0.5)
rsi_ax.axhline(30, color='green', linestyle='--', alpha=0.5)
```
### Multi-Output Indicators
Some pandas-ta indicators return a DataFrame. Extract the columns you need:
```python
import pandas_ta as ta
# MACD returns: MACD_12_26_9, MACDh_12_26_9, MACDs_12_26_9
macd_df = ta.macd(df['close'], fast=12, slow=26, signal=9)
df['macd'] = macd_df.iloc[:, 0] # MACD line
df['macd_hist'] = macd_df.iloc[:, 1] # Histogram
df['macd_signal'] = macd_df.iloc[:, 2] # Signal line
# Bollinger Bands returns: BBL, BBM, BBU, BBB, BBP
bb_df = ta.bbands(df['close'], length=20, std=2.0)
df['bb_upper'] = bb_df.iloc[:, 2] # BBU
df['bb_mid'] = bb_df.iloc[:, 1] # BBM
df['bb_lower'] = bb_df.iloc[:, 0] # BBL
# Stochastic returns: STOCHk, STOCHd
stoch_df = ta.stoch(df['high'], df['low'], df['close'], k=14, d=3, smooth_k=3)
df['stoch_k'] = stoch_df.iloc[:, 0]
df['stoch_d'] = stoch_df.iloc[:, 1]
# ATR (uses high, low, close)
df['atr'] = ta.atr(df['high'], df['low'], df['close'], length=14)
```
## Complete Example
```python
from dexorder.api import get_api
import asyncio
import pandas as pd
import pandas_ta as ta
# Get API instance
api = get_api()
@@ -168,9 +198,9 @@ df = asyncio.run(api.data.historical_ohlc(
extra_columns=["volume"]
))
# Add some analysis
df['sma_20'] = df['close'].rolling(window=20).mean()
df['sma_50'] = df['close'].rolling(window=50).mean()
# Add moving averages using pandas-ta
df['sma_20'] = ta.sma(df['close'], length=20)
df['ema_50'] = ta.ema(df['close'], length=50)
# Create chart with volume
fig, ax = api.charting.plot_ohlc(
@@ -182,7 +212,7 @@ fig, ax = api.charting.plot_ohlc(
# Overlay moving averages
ax.plot(df.index, df['sma_20'], label="SMA 20", color="blue", linewidth=1.5)
ax.plot(df.index, df['sma_50'], label="SMA 50", color="red", linewidth=1.5)
ax.plot(df.index, df['ema_50'], label="EMA 50", color="red", linewidth=1.5)
ax.legend()
# Print summary statistics

View File

@@ -51,7 +51,140 @@ The API provides two main components:
- `api.data` - DataAPI for fetching OHLC market data
- `api.charting` - ChartingAPI for creating financial charts
See your knowledge base for complete API documentation and examples.
See your knowledge base for complete API documentation, examples, and the full pandas-ta indicator reference (see `pandas-ta-reference.md`).
## Technical Indicators — pandas-ta
The sandbox environment uses **pandas-ta** as the standard indicator library. Always use it for technical indicator calculations; do not write manual rolling/ewm implementations.
```python
import pandas_ta as ta
```
### Calling Convention
pandas-ta functions accept a Series (or OHLCV columns) plus keyword parameters that match pandas-ta's documented argument names:
```python
# Single-series indicator
rsi = ta.rsi(df['close'], length=14) # returns Series
# OHLCV indicator
atr = ta.atr(df['high'], df['low'], df['close'], length=14)
# Multi-output indicator (returns DataFrame)
macd_df = ta.macd(df['close'], fast=12, slow=26, signal=9)
# columns: MACD_12_26_9, MACDh_12_26_9, MACDs_12_26_9
bbands_df = ta.bbands(df['close'], length=20, std=2.0)
# columns: BBL_20_2.0, BBM_20_2.0, BBU_20_2.0, BBB_20_2.0, BBP_20_2.0
```
### Available Indicators (canonical list)
These match the indicators supported by the TradingView web client. Use the pandas-ta function name shown here (lowercase):
**Overlap / Moving Averages** — plotted on the price pane
| Function | Description |
|----------|-------------|
| `sma` | Simple Moving Average — plain arithmetic mean over `length` periods |
| `ema` | Exponential Moving Average — more weight on recent prices |
| `wma` | Weighted Moving Average — linearly increasing weights |
| `dema` | Double EMA — two layers of EMA to reduce lag |
| `tema` | Triple EMA — three layers of EMA, even less lag than DEMA |
| `trima` | Triangular MA — double-smoothed SMA, very smooth |
| `kama` | Kaufman Adaptive MA — adapts speed to market noise/trending conditions |
| `t3` | T3 Moving Average — Tillson's smooth, low-lag MA using six EMAs |
| `hma` | Hull MA — very low-lag MA using WMAs |
| `alma` | Arnaud Legoux MA — Gaussian-weighted MA with reduced lag and noise |
| `midpoint` | Midpoint of close over `length` periods: (highest + lowest) / 2 |
| `midprice` | Midpoint of high/low over `length` periods |
| `supertrend` | Trend-following band (ATR-based) that flips above/below price |
| `ichimoku` | Ichimoku Cloud — multi-line Japanese trend/support/resistance system |
| `vwap` | Volume-Weighted Average Price — average price weighted by volume, resets on `anchor` |
| `vwma` | Volume-Weighted MA — like SMA but candles weighted by volume |
| `bbands` | Bollinger Bands — SMA ± N standard deviations; returns upper, mid, lower bands |
**Momentum** — typically plotted in a separate pane
| Function | Description |
|----------|-------------|
| `rsi` | Relative Strength Index — 0–100 oscillator measuring speed of price changes |
| `macd` | MACD — difference of two EMAs plus signal line and histogram |
| `stoch` | Stochastic Oscillator — %K/%D, measures close vs recent high/low range |
| `stochrsi` | Stochastic RSI — applies stochastic formula to RSI values |
| `cci` | Commodity Channel Index — deviation of price from its statistical mean |
| `willr` | Williams %R — inverse stochastic, −100 to 0 oscillator |
| `mom` | Momentum — raw price change over `length` periods |
| `roc` | Rate of Change — percentage price change over `length` periods |
| `trix` | TRIX — 1-period % change of a triple-smoothed EMA |
| `cmo` | Chande Momentum Oscillator — ratio of up/down momentum, −100 to +100 |
| `adx` | Average Directional Index — strength of trend (0–100, direction-agnostic) |
| `aroon` | Aroon — measures how recently the highest/lowest price occurred; returns Up, Down, Oscillator |
| `ao` | Awesome Oscillator — difference of 5- and 34-period simple MAs of midprice |
| `bop` | Balance of Power — measures buying vs selling pressure: (close − open)/(high − low) |
| `uo` | Ultimate Oscillator — weighted combo of three period (fast/medium/slow) buying pressure ratios |
| `apo` | Absolute Price Oscillator — difference between two EMAs (like MACD without signal line) |
| `mfi` | Money Flow Index — RSI-like oscillator using price × volume |
| `coppock` | Coppock Curve — long-term momentum oscillator based on rate-of-change |
| `dpo` | Detrended Price Oscillator — removes trend to show cycle oscillations |
| `fisher` | Fisher Transform — converts price into a Gaussian normal distribution |
| `rvgi` | Relative Vigor Index — compares close − open to high − low to measure trend vigor |
| `kst` | Know Sure Thing — momentum oscillator from four ROC periods, smoothed |
**Volatility** — plotted on price pane or separate
| Function | Description |
|----------|-------------|
| `atr` | Average True Range — average of true range (greatest of H−L, H−prevC, L−prevC) |
| `kc` | Keltner Channels — EMA ± N × ATR bands around price |
| `donchian` | Donchian Channels — highest high / lowest low over `length` periods |
**Volume** — plotted in separate pane
| Function | Description |
|----------|-------------|
| `obv` | On Balance Volume — cumulative volume, added on up days, subtracted on down days |
| `ad` | Accumulation/Distribution — running total of the money flow multiplier × volume |
| `adosc` | Chaikin Oscillator — EMA difference of the A/D line |
| `cmf` | Chaikin Money Flow — sum of (money flow volume) / sum of volume over `length` |
| `eom` | Ease of Movement — relates price change to volume; high = price moves easily |
| `efi` | Elder's Force Index — combines price change direction with volume magnitude |
| `kvo` | Klinger Volume Oscillator — EMA difference of volume force |
| `pvt` | Price Volume Trend — cumulative: volume × percentage price change |
**Statistics / Price Transforms**
| Function | Description |
|----------|-------------|
| `stdev` | Standard Deviation of close over `length` periods |
| `linreg` | Linear Regression Curve — least-squares line endpoint value over `length` periods |
| `slope` | Linear Regression Slope — gradient of the regression line |
| `hl2` | Median Price — (high + low) / 2 |
| `hlc3` | Typical Price — (high + low + close) / 3 |
| `ohlc4` | Average Price — (open + high + low + close) / 4 |
**Trend**
| Function | Description |
|----------|-------------|
| `psar` | Parabolic SAR — trailing stop-and-reverse dots that follow price |
| `vortex` | Vortex Indicator — VI+ / VI− lines measuring upward vs downward trend movement |
| `chop` | Choppiness Index — 0–100, high = choppy/sideways, low = strong trend |
### Default Parameters
Key defaults to keep in mind:
- Most period/length indicators: `length=14` (use `length=` not `timeperiod=`)
- `bbands`: `length=20, std=2.0` (note: single `std`, not separate upper/lower)
- `macd`: `fast=12, slow=26, signal=9`
- `stoch`: `k=14, d=3, smooth_k=3`
- `psar`: `af0=0.02, af=0.02, max_af=0.2`
- `vwap`: `anchor='D'` (requires DatetimeIndex)
- `ichimoku`: `tenkan=9, kijun=26, senkou=52`
For multi-output indicator column extraction patterns and complete charting examples, fetch `pandas-ta-reference.md` from your knowledge base.
## Coding Loop Pattern
@@ -59,11 +192,9 @@ When a user requests analysis:
1. **Understand the request**: What data is needed? What analysis? What visualization?
2. **Check for existing scripts**: Use `category_list` to see if a similar script exists
- If exists and suitable: use `category_read` to review it
- Consider editing existing script vs creating new one
2. **Use the provided name**: The instruction will begin with `Research script name: "<name>"`. Always use that exact name when calling `category_write` or `category_edit`. Check first with `category_read` — if the script already exists, use `category_edit` to update it rather than creating a new one with `category_write`.
3. **Write the script**: Use `category_write` (or `category_edit`)
3. **Write the script**: Use `category_write` (new) or `category_edit` (existing)
- Write clean, well-commented Python code
- Include proper error handling
- Use appropriate ticker symbols, time ranges, and periods
@@ -106,10 +237,7 @@ When a user requests analysis:
- Add `conda_packages: ["package-name"]` to metadata
- Packages are auto-installed during validation
- **Script naming**: Choose descriptive, unique names. Examples:
- "BTC Weekly Analysis"
- "ETH Volume Profile"
- "Market Correlation Heatmap"
- **Script naming**: Always use the name provided in the instruction (`Research script name: "<name>"`). Do not invent a different name.
- **Error handling**: Wrap data fetching in try/except to provide helpful error messages

View File

@@ -0,0 +1,89 @@
import type { BaseMessage } from '@langchain/core/messages';
import { SystemMessage, HumanMessage, AIMessage } from '@langchain/core/messages';
import type { StructuredTool } from '@langchain/core/tools';
/**
* Provider-agnostic hook to preprocess messages before an LLM call.
* Applied transparently by the harness; implementations are provider-specific.
*/
export interface ModelMiddleware {
processMessages(messages: BaseMessage[], tools: StructuredTool[]): BaseMessage[];
}
/**
 * No-op implementation for providers that don't support prompt caching.
 * Hands the conversation back untouched — no cache hints are attached.
 */
export class NoopMiddleware implements ModelMiddleware {
  processMessages(history: BaseMessage[]): BaseMessage[] {
    // Nothing provider-specific to do here.
    return history;
  }
}
/**
 * Mirrors Python's AnthropicPromptCachingMiddleware logic.
 *
 * Tags with cache_control: { type: 'ephemeral' }:
 * 1. The system message last content block (stable prompt prefix — always a cache hit after turn 1)
 * 2. The last non-current cacheable message (AIMessage or HumanMessage before the final user message)
 *    so the full conversation prefix is cached on the next turn.
 *
 * Requires ChatAnthropic to be configured with:
 *   clientOptions: { defaultHeaders: { 'anthropic-beta': 'prompt-caching-2024-07-31' } }
 */
export class AnthropicCachingMiddleware implements ModelMiddleware {
  processMessages(messages: BaseMessage[], _tools: StructuredTool[]): BaseMessage[] {
    if (messages.length === 0) return messages;

    // Clone everything up front so the caller's history objects are never mutated.
    const tagged = messages.map((m) => cloneMessage(m));

    // 1. Tag the (first) system message — the stable prompt prefix.
    for (const msg of tagged) {
      if (msg._getType() === 'system') {
        addCacheControl(msg);
        break;
      }
    }

    // 2. Tag the most recent AI/human message that precedes the current user
    //    input (always the final element), so the whole conversation prefix
    //    becomes a cache hit on the next turn.
    for (let i = tagged.length - 2; i >= 0; i--) {
      const kind = tagged[i]._getType();
      if (kind === 'ai' || kind === 'human') {
        addCacheControl(tagged[i]);
        break;
      }
    }

    return tagged;
  }
}
/**
 * Clone a message so cache_control tagging never mutates history objects.
 * String content is kept as-is; block-array content is deep-copied via JSON.
 * Message types we never tag (tool results etc.) are returned unchanged.
 */
function cloneMessage(msg: BaseMessage): BaseMessage {
  const kind = msg._getType();
  const content = typeof msg.content === 'string'
    ? msg.content
    : JSON.parse(JSON.stringify(msg.content));
  const kwargs = { ...msg.additional_kwargs };
  switch (kind) {
    case 'system':
      return new SystemMessage({ content, additional_kwargs: kwargs });
    case 'human':
      return new HumanMessage({ content, additional_kwargs: kwargs });
    case 'ai':
      return new AIMessage({ content, additional_kwargs: kwargs, tool_calls: (msg as AIMessage).tool_calls });
    default:
      return msg;
  }
}
/**
 * Add cache_control to the last content block of a message.
 * String content is first promoted to a single-text-block array so there is
 * a block to tag; an empty block array is left untouched.
 */
function addCacheControl(msg: BaseMessage): void {
  const content = msg.content;
  if (typeof content === 'string') {
    // Promote plain text to a block array carrying the cache marker.
    (msg as any).content = [
      { type: 'text', text: content, cache_control: { type: 'ephemeral' } },
    ];
    return;
  }
  if (Array.isArray(content) && content.length > 0) {
    (content[content.length - 1] as any).cache_control = { type: 'ephemeral' };
  }
}

View File

@@ -1,6 +1,10 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatAnthropic } from '@langchain/anthropic';
import type { FastifyBaseLogger } from 'fastify';
import { type ModelMiddleware, NoopMiddleware, AnthropicCachingMiddleware } from './middleware.js';
export type { ModelMiddleware };
export { NoopMiddleware, AnthropicCachingMiddleware };
/**
* Supported LLM providers
@@ -64,7 +68,7 @@ export class LLMProviderFactory {
/**
* Create a chat model instance
*/
createModel(modelConfig: ModelConfig): BaseChatModel {
createModel(modelConfig: ModelConfig): { model: BaseChatModel; middleware: ModelMiddleware } {
this.logger.debug(
{ provider: modelConfig.provider, model: modelConfig.model },
'Creating LLM model'
@@ -82,17 +86,20 @@ export class LLMProviderFactory {
/**
* Create Anthropic Claude model
*/
private createAnthropicModel(config: ModelConfig): ChatAnthropic {
private createAnthropicModel(config: ModelConfig): { model: ChatAnthropic; middleware: AnthropicCachingMiddleware } {
if (!this.config.anthropicApiKey) {
throw new Error('Anthropic API key not configured');
}
return new ChatAnthropic({
const model = new ChatAnthropic({
model: config.model,
temperature: config.temperature ?? 0.7,
maxTokens: config.maxTokens ?? 4096,
anthropicApiKey: this.config.anthropicApiKey,
clientOptions: { defaultHeaders: { 'anthropic-beta': 'prompt-caching-2024-07-31' } },
});
return { model, middleware: new AnthropicCachingMiddleware() };
}
/**

View File

@@ -1,6 +1,7 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { FastifyBaseLogger } from 'fastify';
import { LLMProviderFactory, type ModelConfig, LLMProvider, type LicenseModelsConfig } from './provider.js';
import type { ModelMiddleware } from './middleware.js';
import type { License } from '../types/user.js';
/**
@@ -42,7 +43,7 @@ export class ModelRouter {
license: License,
strategy: RoutingStrategy = RoutingStrategy.USER_PREFERENCE,
userId?: string
): Promise<BaseChatModel> {
): Promise<{ model: BaseChatModel; middleware: ModelMiddleware }> {
let modelConfig: ModelConfig;
switch (strategy) {

View File

@@ -586,7 +586,7 @@ try {
toolRegistry.registerAgentTools({
agentName: 'main',
platformTools: ['symbol_lookup', 'get_chart_data'],
mcpTools: [], // No MCP tools for main agent by default (can be extended later)
mcpTools: ['category_list'], // category_list lets the main agent see existing research scripts
});
// Research subagent: only MCP tools for script creation/execution

View File

@@ -27,7 +27,6 @@ import type {
import {
secondsToMicros,
backendToTradingView,
resolutionToSeconds,
DEFAULT_SUPPORTED_RESOLUTIONS,
} from '../types/ohlc.js';
@@ -67,25 +66,32 @@ export class OHLCService {
*/
async fetchOHLC(
ticker: string,
resolution: string,
period_seconds: number,
from_time: number, // Unix timestamp in SECONDS
to_time: number, // Unix timestamp in SECONDS
countback?: number
): Promise<HistoryResult> {
this.logger.debug({
ticker,
resolution,
period_seconds,
from_time,
to_time,
countback,
}, 'Fetching OHLC data');
// Convert resolution to period_seconds
const period_seconds = resolutionToSeconds(resolution);
// Convert times to microseconds
const start_time = secondsToMicros(from_time);
const end_time = secondsToMicros(to_time);
// Convert times to microseconds, then align to period boundaries using
// [ceil(start), ceil(end)) semantics:
// - start: ceil to next period boundary — excludes any in-progress candle whose
// official timestamp is before from_time.
// - end: ceil to next period boundary, used as EXCLUSIVE upper bound — includes
// the last candle whose timestamp < to_time, excludes one sitting exactly on
// to_time (which would be the next candle, not yet started).
const periodMicros = BigInt(period_seconds) * 1_000_000n;
const raw_start = secondsToMicros(from_time);
const raw_end = secondsToMicros(to_time);
// bigint ceiling: ceil(a/b)*b = ((a + b - 1) / b) * b
const start_time = ((raw_start + periodMicros - 1n) / periodMicros) * periodMicros;
const end_time = ((raw_end + periodMicros - 1n) / periodMicros) * periodMicros; // exclusive
// Step 1: Check Iceberg for existing data
let data = await this.icebergClient.queryOHLC(ticker, period_seconds, start_time, end_time);
@@ -100,25 +106,26 @@ export class OHLCService {
if (missingRanges.length === 0 && data.length > 0) {
// All data exists in Iceberg
this.logger.debug({ ticker, resolution, cached: true }, 'OHLC data found in cache');
return this.formatHistoryResult(data, countback);
this.logger.debug({ ticker, period_seconds, cached: true }, 'OHLC data found in cache');
return this.formatHistoryResult(data, start_time, end_time, period_seconds, countback);
}
// Step 3: Request missing data via relay
this.logger.debug({ ticker, resolution, missingRanges: missingRanges.length }, 'Requesting missing OHLC data');
this.logger.debug({ ticker, period_seconds, missingRanges: missingRanges.length }, 'Requesting missing OHLC data');
try {
const notification = await this.relayClient.requestHistoricalOHLC(
ticker,
period_seconds,
start_time,
end_time,
countback
end_time
// countback is NOT passed as a limit — the ingestor must fetch the full range.
// Countback is applied below after we have the complete dataset.
);
this.logger.info({
ticker,
resolution,
period_seconds,
row_count: notification.row_count,
status: notification.status,
}, 'Historical data request completed');
@@ -126,13 +133,13 @@ export class OHLCService {
// Step 4: Query Iceberg again for complete dataset
data = await this.icebergClient.queryOHLC(ticker, period_seconds, start_time, end_time);
return this.formatHistoryResult(data, countback);
return this.formatHistoryResult(data, start_time, end_time, period_seconds, countback);
} catch (error: any) {
this.logger.error({
error,
ticker,
resolution,
period_seconds,
}, 'Failed to fetch historical data');
// Return empty result on error
@@ -144,9 +151,22 @@ export class OHLCService {
}
/**
* Format OHLC data as TradingView history result
* Format OHLC data as TradingView history result.
*
* Interior gaps (confirmed trading periods with no trades) arrive as null-OHLC
* rows from Iceberg. Edge gaps (data not yet ingested, in-progress candles) are
* simply absent rows. Both are returned as-is; clients fill as appropriate.
*/
private formatHistoryResult(data: any[], countback?: number): HistoryResult {
private formatHistoryResult(
data: any[],
// @ts-ignore
start_time: bigint,
// @ts-ignore
end_time: bigint,
// @ts-ignore
period_seconds: number,
countback?: number
): HistoryResult {
if (data.length === 0) {
return {
bars: [],
@@ -154,13 +174,11 @@ export class OHLCService {
};
}
// Convert to TradingView format
// Convert to TradingView format without null-filling missing slots.
let bars: TradingViewBar[] = data.map(backendToTradingView);
// Sort by time
bars.sort((a, b) => a.time - b.time);
// Apply countback limit if specified
if (countback && bars.length > countback) {
bars = bars.slice(-countback);
}

View File

@@ -52,7 +52,7 @@ Parameters:
// Build request with workspace defaults
const finalTicker = ticker ?? chartState.symbol;
const finalPeriod = period ?? parsePeriod(chartState.period);
const finalPeriod = period ?? chartState.period;
const finalFromTime = await parseTime(from_time, chartState.start_time, logger);
const finalToTime = await parseTime(to_time, chartState.end_time, logger);
const requestedColumns = columns ?? [];
@@ -83,7 +83,7 @@ Parameters:
// Fetch data from OHLCService
const historyResult = await ohlcService.fetchOHLC(
finalTicker,
finalPeriod.toString(),
finalPeriod,
finalFromTime,
finalToTime,
countback
@@ -167,7 +167,7 @@ async function getChartState(workspaceManager: WorkspaceManager, logger: Fastify
symbol: 'BINANCE:BTC/USDT',
start_time: null,
end_time: null,
period: '15',
period: 900,
selected_shapes: [],
};
}
@@ -180,35 +180,12 @@ async function getChartState(workspaceManager: WorkspaceManager, logger: Fastify
symbol: 'BINANCE:BTC/USDT',
start_time: null,
end_time: null,
period: '15',
period: 900,
selected_shapes: [],
};
}
}
/**
* Parse period string to seconds
* Handles period as either a number (already in seconds) or string (minutes)
*/
function parsePeriod(period: string | number | null): number | null {
if (period === null) {
return null;
}
if (typeof period === 'number') {
return period;
}
// Period in workspace is stored as string representing minutes
// Convert to seconds
const minutes = parseInt(period, 10);
if (isNaN(minutes)) {
return null;
}
return minutes * 60;
}
/**
* Parse time parameter (Unix seconds, date string, or null)
* Returns Unix timestamp in seconds

View File

@@ -30,13 +30,16 @@ Use this tool for:
The research subagent will write and execute Python scripts, capture output and charts, and return results.`,
schema: z.object({
name: z.string().describe('The name of the research script to create or update (e.g. "btc_ema_analysis"). Use the same name across calls to revise the same script rather than creating a new one.'),
instruction: z.string().describe('The research task or analysis to perform. Be specific about what data, indicators, timeframes, and output you want.'),
}),
func: async ({ instruction }: { instruction: string }): Promise<string> => {
logger.info({ instruction: instruction.substring(0, 100) }, 'Delegating to research subagent');
func: async ({ name, instruction }: { name: string; instruction: string }): Promise<string> => {
logger.info({ name, instruction: instruction.substring(0, 100) }, 'Delegating to research subagent');
const prompt = `Research script name: "${name}"\n\n${instruction}`;
try {
const result = await researchSubagent.executeWithImages(context, instruction);
const result = await researchSubagent.executeWithImages(context, prompt);
// Return in the format that AgentHarness.processToolResult() knows how to handle
// (extracts images and passes them to channelAdapter)

View File

@@ -11,12 +11,12 @@
* TradingView bar format (used by web frontend)
*/
export interface TradingViewBar {
time: number; // Unix timestamp in SECONDS
open: number;
high: number;
low: number;
close: number;
volume?: number;
time: number; // Unix timestamp in SECONDS
open: number | null; // null for gap bars (no trades that period)
high: number | null;
low: number | null;
close: number | null;
volume?: number | null;
// Optional extra columns from ohlc.proto
buy_vol?: number;
sell_vol?: number;
@@ -31,14 +31,14 @@ export interface TradingViewBar {
* Backend OHLC format (from Iceberg)
*/
export interface BackendOHLC {
timestamp: number; // Unix timestamp in MICROSECONDS
timestamp: bigint; // Unix timestamp in MICROSECONDS — kept as bigint to preserve precision
ticker: string;
period_seconds: number;
open: number;
high: number;
low: number;
close: number;
volume: number;
open: number | null; // null for gap bars (no trades that period)
high: number | null;
low: number | null;
close: number | null;
volume: number | null;
}
/**
@@ -171,7 +171,7 @@ export function backendToTradingView(backend: BackendOHLC): TradingViewBar {
high: backend.high,
low: backend.low,
close: backend.close,
volume: backend.volume,
volume: backend.volume ?? undefined,
};
}

View File

@@ -110,11 +110,33 @@ class SyncEntry {
return this.history.filter((entry) => entry.seq > sinceSeq);
}
/**
 * Ensure intermediate objects exist for all patch add/replace operations.
 * Called before applying patches to gracefully handle paths into
 * previously-absent sub-objects.
 *
 * Reference tokens are unescaped per RFC 6901 ("~1" -> "/", "~0" -> "~") so
 * escaped keys create the correct intermediate property, and only the leading
 * empty token from the initial "/" is dropped — an empty string is a legal
 * JSON Pointer key and must not be filtered out.
 */
private ensureIntermediatePaths(doc: any, patch: JsonPatchOp[]): void {
  for (const op of patch) {
    // Only add/replace need their parent containers to exist.
    if (op.op !== 'add' && op.op !== 'replace') continue;
    const parts = op.path
      .split('/')
      .slice(1) // drop the empty token preceding the leading '/'
      .map((token) => token.replace(/~1/g, '/').replace(/~0/g, '~'));
    if (parts.length <= 1) continue;
    let current = doc;
    // Walk every segment except the last (the target key itself), creating
    // plain objects wherever the path dives into an absent/null container.
    for (let i = 0; i < parts.length - 1; i++) {
      const part = parts[i];
      if (current[part] === undefined || current[part] === null) {
        current[part] = {};
      }
      current = current[part];
    }
  }
}
/**
* Apply a patch to state (used when applying local changes).
*/
applyPatch(patch: JsonPatchOp[]): void {
const result = applyPatch(deepClone(this.state), patch, false, false);
const doc = deepClone(this.state) as any;
this.ensureIntermediatePaths(doc, patch);
const result = applyPatch(doc, patch, false, false);
this.state = result.newDocument;
}
@@ -130,7 +152,8 @@ class SyncEntry {
try {
if (clientBaseSeq === this.seq) {
// No conflict - apply directly
const currentState = deepClone(this.state);
const currentState = deepClone(this.state) as any;
this.ensureIntermediatePaths(currentState, patch);
const result = applyPatch(currentState, patch, false, false);
this.state = result.newDocument;
this.commitPatch(patch);
@@ -160,14 +183,15 @@ class SyncEntry {
const frontendPaths = new Set(patch.map((op) => op.path));
// Apply frontend patch first
const currentState = deepClone(this.state);
const currentState = deepClone(this.state) as any;
this.ensureIntermediatePaths(currentState, patch);
let newState: unknown;
try {
const result = applyPatch(currentState, patch, false, false);
newState = result.newDocument;
} catch (e) {
logger?.warn(
{ store: this.storeName, error: e },
{ store: this.storeName, err: e },
'Failed to apply client patch during conflict resolution'
);
return { needsSnapshot: true, resolvedState: this.state };
@@ -182,7 +206,7 @@ class SyncEntry {
newState = result.newDocument;
} catch (e) {
logger?.debug(
{ store: this.storeName, error: e },
{ store: this.storeName, err: e },
'Skipping backend patch during conflict resolution'
);
}
@@ -209,7 +233,7 @@ class SyncEntry {
return { needsSnapshot: true, resolvedState: this.state };
} catch (e) {
logger?.error(
{ store: this.storeName, error: e },
{ store: this.storeName, err: e },
'Unexpected error applying client patch'
);
return { needsSnapshot: true, resolvedState: this.state };

View File

@@ -237,7 +237,7 @@ export interface ChartState {
symbol: string;
start_time: number | null; // unix timestamp
end_time: number | null; // unix timestamp
period: string; // OHLC duration (e.g., '15' for 15 minutes)
period: number; // OHLC period in seconds (e.g., 900 for 15 minutes)
selected_shapes: string[]; // list of shape ID's
}

View File

@@ -113,11 +113,12 @@ export class CCXTFetcher {
'Fetching historical OHLC'
);
const allCandles = [];
const fetchedCandles = [];
let since = startMs;
// CCXT typically limits to 1000 candles per request
const batchSize = limit || 1000;
// Always page in fixed batches of 1000 regardless of any limit hint.
// The caller's limit/countback is irrelevant to how much we need to fetch from the exchange.
const PAGE_SIZE = 1000;
while (since < endMs) {
try {
@@ -125,27 +126,26 @@ export class CCXTFetcher {
symbol,
timeframe,
since,
batchSize
PAGE_SIZE
);
if (candles.length === 0) {
break;
}
// Filter candles within the time range
// Filter candles within the requested time range
const filteredCandles = candles.filter(c => {
const timestamp = c[0];
return timestamp >= startMs && timestamp <= endMs;
return timestamp >= startMs && timestamp < endMs; // endMs is exclusive
});
allCandles.push(...filteredCandles);
fetchedCandles.push(...filteredCandles);
// Move to next batch
// Advance to next batch start
const lastTimestamp = candles[candles.length - 1][0];
since = lastTimestamp + (periodSeconds * 1000);
// Break if we've reached the end time or limit
if (since >= endMs || (limit && allCandles.length >= limit)) {
if (since >= endMs) {
break;
}
@@ -163,8 +163,46 @@ export class CCXTFetcher {
// Get metadata for proper denomination
const metadata = await this.getMetadata(ticker);
// Convert to our OHLC format
return allCandles.map(candle => this.convertToOHLC(candle, ticker, periodSeconds, metadata));
// Build a map of fetched candles by timestamp (ms)
const fetchedByTs = new Map(fetchedCandles.map(c => [c[0], c]));
if (fetchedCandles.length === 0) {
// No data from exchange — return empty so caller writes a NOT_FOUND marker.
return [];
}
const periodMs = periodSeconds * 1000;
// Only create null gap bars for interior gaps — periods where real data exists
// on BOTH sides (i.e., between the first and last real bar). Do NOT append
// null bars before the first real bar or after the last real bar: those edge
// positions may be in-progress candles or simply outside the exchange's history,
// and we have no positive signal that a gap exists there.
const realTimestamps = [...fetchedByTs.keys()].sort((a, b) => a - b);
const firstRealTs = realTimestamps[0];
const lastRealTs = realTimestamps[realTimestamps.length - 1];
const allCandles = [];
let gapCount = 0;
for (let ts = firstRealTs; ts <= lastRealTs; ts += periodMs) {
if (fetchedByTs.has(ts)) {
allCandles.push(this.convertToOHLC(fetchedByTs.get(ts), ticker, periodSeconds, metadata));
} else {
// Interior gap — confirmed by real bars on both sides
gapCount++;
allCandles.push(this.createGapBar(ts, ticker, periodSeconds, metadata));
}
}
if (gapCount > 0) {
this.logger.info(
{ ticker, gapCount, total: allCandles.length },
'Filled interior gap bars for missing periods in source data'
);
}
return allCandles;
}
/**
@@ -227,6 +265,24 @@ export class CCXTFetcher {
};
}
/**
* Create a gap bar for a period with no trade data.
* All OHLC/volume fields are null — the timestamp slot is reserved but unpopulated.
*/
createGapBar(timestampMs, ticker, periodSeconds, metadata) {
return {
ticker,
timestamp: (timestampMs * 1000).toString(), // Convert ms to microseconds
open: null,
high: null,
low: null,
close: null,
volume: null,
open_time: (timestampMs * 1000).toString(),
close_time: ((timestampMs + periodSeconds * 1000) * 1000).toString()
};
}
/**
* Convert CCXT trade to our Tick format
* Uses denominators from market metadata for proper integer representation

View File

@@ -180,15 +180,20 @@ export class KafkaProducer {
status: metadata.status || 'OK',
errorMessage: metadata.error_message
},
rows: ohlcData.map(candle => ({
timestamp: candle.timestamp,
ticker: candle.ticker,
open: candle.open,
high: candle.high,
low: candle.low,
close: candle.close,
volume: candle.volume
}))
rows: ohlcData.map(candle => {
// null open/high/low/close signals a gap bar (no trades that period).
// Omit fields from the protobuf message when null so hasOpen() etc. return false.
const row = {
timestamp: candle.timestamp,
ticker: candle.ticker,
};
if (candle.open != null) row.open = candle.open;
if (candle.high != null) row.high = candle.high;
if (candle.low != null) row.low = candle.low;
if (candle.close != null) row.close = candle.close;
if (candle.volume != null) row.volume = candle.volume;
return row;
})
};
// Encode as protobuf OHLCBatch with ZMQ envelope

View File

@@ -9,11 +9,11 @@ message OHLC {
uint64 timestamp = 1;
// The prices and volumes must be adjusted by the rational denominator provided
// by the market metadata
int64 open = 2;
int64 high = 3;
int64 low = 4;
int64 close = 5;
// by the market metadata. Optional to support null bars for periods with no trades.
optional int64 open = 2;
optional int64 high = 3;
optional int64 low = 4;
optional int64 close = 5;
optional int64 volume = 6;
optional int64 buy_vol = 7;
optional int64 sell_vol = 8;

View File

@@ -10,7 +10,7 @@ from pyiceberg.expressions import (
And,
EqualTo,
GreaterThanOrEqual,
LessThanOrEqual
LessThan,
)
log = logging.getLogger(__name__)
@@ -98,7 +98,7 @@ class IcebergClient:
EqualTo("ticker", ticker),
EqualTo("period_seconds", period_seconds),
GreaterThanOrEqual("timestamp", start_time),
LessThanOrEqual("timestamp", end_time)
LessThan("timestamp", end_time) # end_time is exclusive
)
)
@@ -110,6 +110,10 @@ class IcebergClient:
if not df.empty:
df = df.sort_values("timestamp")
# Convert integer microsecond timestamps to DatetimeIndex
df.index = pd.to_datetime(df["timestamp"], unit="us", utc=True)
df.index.name = "datetime"
df = df.drop(columns=["timestamp"])
# Apply price/volume conversion if metadata client available
if self.metadata_client is not None:
df = self._apply_denominators(df, ticker)
@@ -186,9 +190,9 @@ class IcebergClient:
# Convert period to microseconds
period_micros = period_seconds * 1_000_000
# Generate expected timestamps
expected_timestamps = list(range(start_time, end_time + 1, period_micros))
actual_timestamps = set(df['timestamp'].values)
# Generate expected timestamps — end_time is exclusive
expected_timestamps = list(range(start_time, end_time, period_micros))
actual_timestamps = set(df.index.view('int64') // 1000)
# Find gaps
missing = sorted(set(expected_timestamps) - actual_timestamps)

View File

@@ -12,6 +12,8 @@ from .symbol_metadata_client import SymbolMetadataClient
log = logging.getLogger(__name__)
log = logging.getLogger(__name__)
class OHLCClient:
"""
@@ -118,6 +120,11 @@ class OHLCClient:
TimeoutError: If historical data request times out
ValueError: If request fails
"""
# Align times to period boundaries: [ceil(start), ceil(end)) exclusive
period_micros = period_seconds * 1_000_000
start_time = ((start_time + period_micros - 1) // period_micros) * period_micros
end_time = ((end_time + period_micros - 1) // period_micros) * period_micros # exclusive
# Step 1: Check Iceberg for existing data
df = self.iceberg.query_ohlc(ticker, period_seconds, start_time, end_time)
@@ -128,7 +135,7 @@ class OHLCClient:
if not missing_ranges:
# All data exists in Iceberg
return df
return self._forward_fill_gaps(df, period_seconds)
# Step 3: Request missing data for each range
# For simplicity, request entire range (relay can merge adjacent requests)
@@ -147,6 +154,39 @@ class OHLCClient:
# Step 5: Query Iceberg again for complete dataset
df = self.iceberg.query_ohlc(ticker, period_seconds, start_time, end_time)
return self._forward_fill_gaps(df, period_seconds)
def _forward_fill_gaps(self, df: pd.DataFrame, period_seconds: int) -> pd.DataFrame:
"""
Forward-fill interior missing bars by carrying the last known close into
open, high, low, and close of any gap bar.
Only interior gaps (rows already present with null OHLC from the ingestor,
or timestamp slots missing between real bars) are filled. Edge gaps (before
the first real bar or after the last real bar) are left as-is.
"""
if df.empty:
return df
df = df.sort_index()
# Identify rows that are gap bars (null close)
is_gap = df['close'].isna()
if not is_gap.any():
return df
# Forward-fill close across gap rows, then copy into open/high/low
df['close'] = df['close'].ffill()
price_cols = ['open', 'high', 'low']
for col in price_cols:
if col in df.columns:
df[col] = df[col].where(~is_gap, df['close'])
# Zero out volume for filled gap rows
if 'volume' in df.columns:
df['volume'] = df['volume'].where(~is_gap, 0.0)
return df
async def __aenter__(self):

View File

@@ -1,329 +0,0 @@
syntax = "proto3";
option java_multiple_files = true;
option java_package = "com.dexorder.proto";
// Request for data ingestion (used in Relay → Ingestor work queue)
message DataRequest {
// Unique request ID for tracking
string request_id = 1;
// Type of request
RequestType type = 2;
// Market identifier
string ticker = 3;
// For historical requests
optional HistoricalParams historical = 4;
// For realtime requests
optional RealtimeParams realtime = 5;
// Optional client ID for notification routing (async architecture)
// Flink uses this to determine notification topic
optional string client_id = 6;
enum RequestType {
HISTORICAL_OHLC = 0;
REALTIME_TICKS = 1;
}
}
message HistoricalParams {
// Start time (microseconds since epoch)
uint64 start_time = 1;
// End time (microseconds since epoch)
uint64 end_time = 2;
// OHLC period in seconds (e.g., 60 = 1m, 300 = 5m, 3600 = 1h, 86400 = 1d)
uint32 period_seconds = 3;
// Maximum number of candles to return (optional limit)
optional uint32 limit = 4;
}
message RealtimeParams {
// Whether to include tick data
bool include_ticks = 1;
// Whether to include aggregated OHLC
bool include_ohlc = 2;
// OHLC periods to generate in seconds (e.g., [60, 300, 900] for 1m, 5m, 15m)
repeated uint32 ohlc_period_seconds = 3;
}
// Control messages for ingestors (Flink → Ingestor control channel)
message IngestorControl {
// Control action type
ControlAction action = 1;
// Request ID to cancel (for CANCEL action)
optional string request_id = 2;
// Configuration updates (for CONFIG_UPDATE action)
optional IngestorConfig config = 3;
enum ControlAction {
CANCEL = 0; // Cancel a specific request
SHUTDOWN = 1; // Graceful shutdown signal
CONFIG_UPDATE = 2; // Update ingestor configuration
HEARTBEAT = 3; // Keep-alive signal
}
}
message IngestorConfig {
// Maximum concurrent requests per ingestor
optional uint32 max_concurrent = 1;
// Request timeout in seconds
optional uint32 timeout_seconds = 2;
// Kafka topic for output
optional string kafka_topic = 3;
}
// Historical data response from ingestor to Flink (Ingestor → Flink response channel)
message DataResponse {
// Request ID this is responding to
string request_id = 1;
// Status of the request
ResponseStatus status = 2;
// Error message if status is not OK
optional string error_message = 3;
// Serialized OHLC data (repeated OHLCV protobuf messages)
repeated bytes ohlc_data = 4;
// Total number of candles returned
uint32 total_records = 5;
enum ResponseStatus {
OK = 0;
NOT_FOUND = 1;
ERROR = 2;
}
}
// Client request submission for historical OHLC data (Client → Relay)
// Relay immediately responds with SubmitResponse containing request_id
message SubmitHistoricalRequest {
// Client-generated request ID for tracking
string request_id = 1;
// Market identifier (e.g., "BINANCE:BTC/USDT")
string ticker = 2;
// Start time (microseconds since epoch)
uint64 start_time = 3;
// End time (microseconds since epoch)
uint64 end_time = 4;
// OHLC period in seconds (e.g., 60 = 1m, 300 = 5m, 3600 = 1h)
uint32 period_seconds = 5;
// Optional limit on number of candles
optional uint32 limit = 6;
// Optional client ID for notification routing (e.g., "client-abc-123")
// Notifications will be published to topic: "RESPONSE:{client_id}"
optional string client_id = 7;
}
// Immediate response to SubmitHistoricalRequest (Relay → Client)
message SubmitResponse {
// Request ID (echoed from request)
string request_id = 1;
// Status of submission
SubmitStatus status = 2;
// Error message if status is not QUEUED
optional string error_message = 3;
// Topic to subscribe to for result notification
// e.g., "RESPONSE:client-abc-123" or "HISTORY_READY:{request_id}"
string notification_topic = 4;
enum SubmitStatus {
QUEUED = 0; // Request queued successfully
DUPLICATE = 1; // Request ID already exists
INVALID = 2; // Invalid parameters
ERROR = 3; // Internal error
}
}
// Historical data ready notification (Flink → Relay → Client via pub/sub)
// Published after Flink writes data to Iceberg
message HistoryReadyNotification {
// Request ID
string request_id = 1;
// Market identifier
string ticker = 2;
// OHLC period in seconds
uint32 period_seconds = 3;
// Start time (microseconds since epoch)
uint64 start_time = 4;
// End time (microseconds since epoch)
uint64 end_time = 5;
// Status of the data fetch
NotificationStatus status = 6;
// Error message if status is not OK
optional string error_message = 7;
// Iceberg table information for client queries
string iceberg_namespace = 10;
string iceberg_table = 11;
// Number of records written
uint32 row_count = 12;
// Timestamp when data was written (microseconds since epoch)
uint64 completed_at = 13;
enum NotificationStatus {
OK = 0; // Data successfully written to Iceberg
NOT_FOUND = 1; // No data found for the requested period
ERROR = 2; // Error during fetch or processing
TIMEOUT = 3; // Request timed out
}
}
// Legacy message for backward compatibility (Client → Relay)
message OHLCRequest {
// Request ID for tracking
string request_id = 1;
// Market identifier
string ticker = 2;
// Start time (microseconds since epoch)
uint64 start_time = 3;
// End time (microseconds since epoch)
uint64 end_time = 4;
// OHLC period in seconds (e.g., 60 = 1m, 300 = 5m, 3600 = 1h)
uint32 period_seconds = 5;
// Optional limit on number of candles
optional uint32 limit = 6;
}
// Generic response for any request (Flink → Client)
message Response {
// Request ID this is responding to
string request_id = 1;
// Status of the request
ResponseStatus status = 2;
// Error message if status is not OK
optional string error_message = 3;
// Generic payload data (serialized protobuf messages)
repeated bytes data = 4;
// Total number of records
optional uint32 total_records = 5;
// Whether this is the final response (for paginated results)
bool is_final = 6;
enum ResponseStatus {
OK = 0;
NOT_FOUND = 1;
ERROR = 2;
}
}
// CEP trigger registration (Client → Flink)
message CEPTriggerRequest {
// Unique trigger ID
string trigger_id = 1;
// Flink SQL CEP pattern/condition
string sql_pattern = 2;
// Markets to monitor
repeated string tickers = 3;
// Callback endpoint (for DEALER/ROUTER routing)
optional string callback_id = 4;
// Optional parameters for the CEP query
map<string, string> parameters = 5;
}
// CEP trigger acknowledgment (Flink → Client)
message CEPTriggerAck {
// Trigger ID being acknowledged
string trigger_id = 1;
// Status of registration
TriggerStatus status = 2;
// Error message if status is not OK
optional string error_message = 3;
enum TriggerStatus {
REGISTERED = 0;
ALREADY_REGISTERED = 1;
INVALID_SQL = 2;
ERROR = 3;
}
}
// CEP trigger event callback (Flink → Client)
message CEPTriggerEvent {
// Trigger ID that fired
string trigger_id = 1;
// Timestamp when trigger fired (microseconds since epoch)
uint64 timestamp = 2;
// Schema information for the result rows
ResultSchema schema = 3;
// Result rows from the Flink SQL query
repeated ResultRow rows = 4;
// Additional context from the CEP pattern
map<string, string> context = 5;
}
message ResultSchema {
// Column names in order
repeated string column_names = 1;
// Column types (using Flink SQL type names)
repeated string column_types = 2;
}
message ResultRow {
// Encoded row data (one bytes field per column, in schema order)
// Each value is encoded as a protobuf-serialized FieldValue
repeated bytes values = 1;
}
message FieldValue {
oneof value {
string string_val = 1;
int64 int_val = 2;
double double_val = 3;
bool bool_val = 4;
bytes bytes_val = 5;
uint64 timestamp_val = 6;
}
}

View File

@@ -1,22 +0,0 @@
syntax = "proto3";
option java_multiple_files = true;
option java_package = "com.dexorder.proto";
message Market {
// The prices and volumes must be adjusted by the rational denominator provided
// by the market metadata
string exchange_id = 2; // e.g., BINANCE
string market_id = 3; // e.g., BTC/USDT
string market_type = 4; // e.g., Spot
string description = 5; // e.g., Bitcoin/Tether on Binance
repeated string column_names = 6; // e.g., ['open', 'high', 'low', 'close', 'volume', 'taker_vol', 'maker_vol']
string base_asset = 9;
string quote_asset = 10;
uint64 earliest_time = 11;
uint64 tick_denom = 12; // denominator applied to all OHLC price data
uint64 base_denom = 13; // denominator applied to base asset units
uint64 quote_denom = 14; // denominator applied to quote asset units
repeated uint32 supported_period_seconds = 15;
}

View File

@@ -1,61 +0,0 @@
syntax = "proto3";
option java_multiple_files = true;
option java_package = "com.dexorder.proto";
// Single OHLC row
message OHLC {
// Timestamp in microseconds since epoch
uint64 timestamp = 1;
// The prices and volumes must be adjusted by the rational denominator provided
// by the market metadata
int64 open = 2;
int64 high = 3;
int64 low = 4;
int64 close = 5;
optional int64 volume = 6;
optional int64 buy_vol = 7;
optional int64 sell_vol = 8;
optional int64 open_time = 9;
optional int64 high_time = 10;
optional int64 low_time = 11;
optional int64 close_time = 12;
optional int64 open_interest = 13;
string ticker = 14;
}
// Batch of OHLC rows with metadata for historical request tracking
// Used for Kafka messages from ingestor → Flink
message OHLCBatch {
// Metadata for tracking this request through the pipeline
OHLCBatchMetadata metadata = 1;
// OHLC rows in this batch
repeated OHLC rows = 2;
}
// Metadata for tracking historical data requests through the pipeline
message OHLCBatchMetadata {
// Request ID from client
string request_id = 1;
// Optional client ID for notification routing
optional string client_id = 2;
// Market identifier
string ticker = 3;
// OHLC period in seconds
uint32 period_seconds = 4;
// Time range requested (microseconds since epoch)
uint64 start_time = 5;
uint64 end_time = 6;
// Status for marker messages (OK, NOT_FOUND, ERROR)
string status = 7;
// Error message if status is ERROR
optional string error_message = 8;
}

View File

@@ -1,51 +0,0 @@
syntax = "proto3";
option java_multiple_files = true;
option java_package = "com.dexorder.proto";
message Tick {
// Unique identifier for the trade
string trade_id = 1;
// Market identifier (matches Market.market_id)
string ticker = 2;
// Timestamp in microseconds since epoch
uint64 timestamp = 3;
// Price (must be adjusted by tick_denom from Market metadata)
int64 price = 4;
// Base asset amount (must be adjusted by base_denom from Market metadata)
int64 amount = 5;
// Quote asset amount (must be adjusted by quote_denom from Market metadata)
int64 quote_amount = 6;
// Side: true = taker buy (market buy), false = taker sell (market sell)
bool taker_buy = 7;
// Position effect: true = close position, false = open position
// Only relevant for derivatives/futures markets
optional bool to_close = 8;
// Sequence number for ordering (if provided by exchange)
optional uint64 sequence = 9;
// Additional flags for special trade types
optional TradeFlags flags = 10;
}
message TradeFlags {
// Liquidation trade
bool is_liquidation = 1;
// Block trade (large OTC trade)
bool is_block_trade = 2;
// Maker side was a post-only order
bool maker_post_only = 3;
// Trade occurred during auction
bool is_auction = 4;
}

View File

@@ -1,258 +0,0 @@
syntax = "proto3";
option java_multiple_files = true;
option java_package = "com.dexorder.proto";
// User container event system for delivering notifications to users
// via active sessions or external channels (Telegram, email, push).
//
// Two ZMQ patterns:
// - XPUB/SUB (port 5570): Fast path for informational events to active sessions
// - DEALER/ROUTER (port 5571): Guaranteed delivery for critical events with ack
//
// See doc/protocol.md and doc/user_container_events.md for details.
// =============================================================================
// User Event (Container → Gateway)
// Message Type ID: 0x20
// =============================================================================
message UserEvent {
// User ID this event belongs to
string user_id = 1;
// Unique event ID for deduplication and ack tracking (UUID)
string event_id = 2;
// Timestamp when event was generated (Unix milliseconds)
int64 timestamp = 3;
// Type of event
EventType event_type = 4;
// Event payload (JSON or nested protobuf, depending on event_type)
bytes payload = 5;
// Delivery specification (priority and channel preferences)
DeliverySpec delivery = 6;
}
enum EventType {
// Trading events
ORDER_PLACED = 0;
ORDER_FILLED = 1;
ORDER_CANCELLED = 2;
ORDER_REJECTED = 3;
ORDER_EXPIRED = 4;
// Alert events
ALERT_TRIGGERED = 10;
ALERT_CREATED = 11;
ALERT_DELETED = 12;
// Position events
POSITION_OPENED = 20;
POSITION_CLOSED = 21;
POSITION_UPDATED = 22;
POSITION_LIQUIDATED = 23;
// Workspace/chart events
WORKSPACE_CHANGED = 30;
CHART_ANNOTATION_ADDED = 31;
CHART_ANNOTATION_REMOVED = 32;
INDICATOR_UPDATED = 33;
// Strategy events
STRATEGY_STARTED = 40;
STRATEGY_STOPPED = 41;
STRATEGY_LOG = 42;
STRATEGY_ERROR = 43;
BACKTEST_COMPLETED = 44;
// System events
CONTAINER_STARTING = 50;
CONTAINER_READY = 51;
CONTAINER_SHUTTING_DOWN = 52;
EVENT_ERROR = 53;
}
// =============================================================================
// Delivery Specification
// =============================================================================
message DeliverySpec {
// Priority determines routing behavior
Priority priority = 1;
// Ordered list of channel preferences (try first, then second, etc.)
repeated ChannelPreference channels = 2;
}
enum Priority {
// Drop if no active session (fire-and-forget via XPUB)
// Use for: indicator updates, chart syncs, strategy logs when watching
INFORMATIONAL = 0;
// Best effort delivery - queue briefly, deliver when possible
// Uses XPUB if subscribed, otherwise DEALER
// Use for: alerts, position updates
NORMAL = 1;
// Must deliver - retry until acked, escalate channels
// Always uses DEALER for guaranteed delivery
// Use for: order fills, liquidations, critical errors
CRITICAL = 2;
}
message ChannelPreference {
// Channel to deliver to
ChannelType channel = 1;
// If true, skip this channel if user is not connected to it
// If false, deliver even if user is not actively connected
// (e.g., send Telegram message even if user isn't in Telegram chat)
bool only_if_active = 2;
}
enum ChannelType {
// Whatever channel the user currently has open (WebSocket, Telegram session)
ACTIVE_SESSION = 0;
// Specific channels
WEB = 1; // WebSocket to web UI
TELEGRAM = 2; // Telegram bot message
EMAIL = 3; // Email notification
PUSH = 4; // Mobile push notification (iOS/Android)
DISCORD = 5; // Discord webhook (future)
SLACK = 6; // Slack webhook (future)
}
// =============================================================================
// Event Acknowledgment (Gateway → Container)
// Message Type ID: 0x21
// =============================================================================
message EventAck {
// Event ID being acknowledged
string event_id = 1;
// Delivery status
AckStatus status = 2;
// Error message if status is ERROR
string error_message = 3;
// Channel that successfully delivered (for logging/debugging)
ChannelType delivered_via = 4;
}
enum AckStatus {
// Successfully delivered to at least one channel
DELIVERED = 0;
// Accepted and queued for delivery (e.g., rate limited, will retry)
QUEUED = 1;
// Permanent failure - all channels failed
ACK_ERROR = 2;
}
// =============================================================================
// Event Payloads
// These are JSON-encoded in the UserEvent.payload field.
// Defined here for documentation; actual encoding is JSON for flexibility.
// =============================================================================
// Payload for ORDER_PLACED, ORDER_FILLED, ORDER_CANCELLED, etc.
message OrderEventPayload {
string order_id = 1;
string symbol = 2;
string side = 3; // "buy" or "sell"
string order_type = 4; // "market", "limit", "stop_limit", etc.
string quantity = 5; // Decimal string
string price = 6; // Decimal string (for limit orders)
string fill_price = 7; // Decimal string (for fills)
string fill_quantity = 8; // Decimal string (for partial fills)
string status = 9; // "open", "filled", "cancelled", etc.
string exchange = 10;
int64 timestamp = 11; // Unix milliseconds
string strategy_id = 12; // If order was placed by a strategy
string error_message = 13; // If rejected/failed
}
// Payload for ALERT_TRIGGERED
message AlertEventPayload {
string alert_id = 1;
string symbol = 2;
string condition = 3; // Human-readable condition (e.g., "BTC > 50000")
string triggered_price = 4; // Decimal string
int64 timestamp = 5;
}
// Payload for POSITION_OPENED, POSITION_CLOSED, POSITION_UPDATED
message PositionEventPayload {
string position_id = 1;
string symbol = 2;
string side = 3; // "long" or "short"
string size = 4; // Decimal string
string entry_price = 5; // Decimal string
string current_price = 6; // Decimal string
string unrealized_pnl = 7; // Decimal string
string realized_pnl = 8; // Decimal string (for closed positions)
string leverage = 9; // Decimal string (for margin)
string liquidation_price = 10;
string exchange = 11;
int64 timestamp = 12;
}
// Payload for WORKSPACE_CHANGED, CHART_ANNOTATION_*, INDICATOR_UPDATED
message WorkspaceEventPayload {
string workspace_id = 1;
string change_type = 2; // "symbol_changed", "timeframe_changed", "annotation_added", etc.
string symbol = 3;
string timeframe = 4;
// For annotations
string annotation_id = 5;
string annotation_type = 6; // "trendline", "horizontal", "rectangle", "text", etc.
string annotation_data = 7; // JSON string with coordinates, style, etc.
// For indicators
string indicator_name = 8;
string indicator_params = 9; // JSON string with indicator parameters
int64 timestamp = 10;
}
// Payload for STRATEGY_LOG, STRATEGY_ERROR
message StrategyEventPayload {
string strategy_id = 1;
string strategy_name = 2;
string log_level = 3; // "debug", "info", "warn", "error"
string message = 4;
string details = 5; // JSON string with additional context
int64 timestamp = 6;
}
// Payload for BACKTEST_COMPLETED
message BacktestEventPayload {
string backtest_id = 1;
string strategy_id = 2;
string strategy_name = 3;
string symbol = 4;
string timeframe = 5;
int64 start_time = 6;
int64 end_time = 7;
// Results summary
int32 total_trades = 8;
int32 winning_trades = 9;
int32 losing_trades = 10;
string total_pnl = 11; // Decimal string
string win_rate = 12; // Decimal string (0-1)
string sharpe_ratio = 13; // Decimal string
string max_drawdown = 14; // Decimal string (0-1)
string results_path = 15; // Path to full results file
int64 completed_at = 16;
}

View File

@@ -1,21 +1,21 @@
/* web/src/assets/theme.css */
:root {
--p-primary-color: #00d4aa; /* teal accent */
--p-primary-contrast-color: #0a0e1a;
--p-surface-0: #0a0e1a; /* deepest background */
--p-surface-50: #0f1629;
--p-surface-100: #161e35;
--p-surface-200: #1e2a45;
--p-surface-300: #263452;
--p-surface-400: #34446a;
--p-surface-700: #8892a4;
--p-surface-800: #aab4c5;
--p-surface-900: #cdd6e8;
--p-primary-color: #26A69A; /* TV green */
--p-primary-contrast-color: #131722;
--p-surface-0: #131722; /* TV bg */
--p-surface-50: #1a1e2b;
--p-surface-100: #1e222d;
--p-surface-200: #2A2E39; /* TV grid */
--p-surface-300: #363b4a;
--p-surface-400: #434857;
--p-surface-700: #787B86; /* TV subtext */
--p-surface-800: #B2B5BE;
--p-surface-900: #D3D4DC; /* TV text */
/* Semantic trading colors */
--color-bull: #26a69a;
--color-bear: #ef5350;
--color-neutral: #8892a4;
--color-bull: #26A69A;
--color-bear: #EF5350;
--color-neutral: #787B86;
}
html, body, #app {

View File

@@ -6,6 +6,14 @@ import { useTradingViewShapes } from '../composables/useTradingViewShapes'
import { useTradingViewIndicators } from '../composables/useTradingViewIndicators'
import { useChartStore } from '../stores/chart'
import type { IChartingLibraryWidget } from '../types/tradingview'
import { intervalToSeconds } from '../utils'
// Convert seconds to TradingView interval string
function secondsToInterval(seconds: number): string {
if (seconds % 86400 === 0) return `${seconds / 86400}D`
if (seconds % 3600 === 0) return `${seconds / 3600}H`
return `${seconds / 60}` // plain number = minutes
}
const chartContainer = ref<HTMLDivElement | null>(null)
const chartStore = useChartStore()
@@ -31,7 +39,7 @@ onMounted(() => {
tvWidget = new window.TradingView.widget({
symbol: chartStore.symbol, // Use symbol from store
datafeed: datafeed,
interval: chartStore.period as any,
interval: secondsToInterval(chartStore.period) as any,
container: chartContainer.value!,
library_path: '/charting_library/',
locale: 'en',
@@ -190,9 +198,10 @@ function setupChartListeners() {
// Listen for period changes
chart.onIntervalChanged().subscribe(null, (interval: string) => {
console.log('[ChartView] Period changed to:', interval)
const seconds = intervalToSeconds(interval)
console.log('[ChartView] Period changed to:', interval, `(${seconds}s)`)
isUpdatingFromChart = true
chartStore.period = interval
chartStore.period = seconds
isUpdatingFromChart = false
})
@@ -244,10 +253,11 @@ function setupStoreWatchers() {
(newPeriod) => {
if (isUpdatingFromChart) return
console.log('[ChartView] Store period changed externally to:', newPeriod)
if (chart.resolution() !== newPeriod) {
chart.setResolution(newPeriod, () => {
console.log('[ChartView] Chart period updated to:', newPeriod)
const tvInterval = secondsToInterval(newPeriod)
console.log('[ChartView] Store period changed externally to:', newPeriod, `-> ${tvInterval}`)
if (chart.resolution() !== tvInterval) {
chart.setResolution(tvInterval, () => {
console.log('[ChartView] Chart period updated to:', tvInterval)
})
}
}

View File

@@ -36,10 +36,35 @@ const rooms = computed(() => [{
// Streaming state
let currentStreamingMessageId: string | null = null
let toolCallMessageId: string | null = null
let lastSentMessageId: string | null = null
let streamingBuffer = ''
const isAgentProcessing = ref(false)
const toolCallStatus = ref<string | null>(null)
const addToolCallBubble = (label: string) => {
removeToolCallBubble()
toolCallMessageId = `tool-call-${Date.now()}`
const timestamp = new Date().toTimeString().split(' ')[0].slice(0, 5)
messages.value = [...messages.value, {
_id: toolCallMessageId,
content: `${label}`,
senderId: AGENT_ID,
timestamp,
date: new Date().toLocaleDateString(),
saved: false,
distributed: false,
seen: false,
files: [],
toolCall: true
}]
}
const removeToolCallBubble = () => {
if (toolCallMessageId) {
messages.value = messages.value.filter(m => m._id !== toolCallMessageId)
toolCallMessageId = null
}
}
// Generate message ID
const generateMessageId = () => `msg-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`
@@ -52,7 +77,7 @@ const handleMessage = (data: WebSocketMessage) => {
console.log('[ChatPanel] Received message:', data)
if (data.type === 'agent_tool_call') {
toolCallStatus.value = data.label ?? data.toolName ?? null
addToolCallBubble(data.label ?? data.toolName ?? 'Tool call...')
return
}
@@ -99,12 +124,13 @@ const handleMessage = (data: WebSocketMessage) => {
if (!currentStreamingMessageId) {
console.log('[ChatPanel] Starting new streaming message')
// Remove any ephemeral tool-call bubble before starting the real response
removeToolCallBubble()
// Set up streaming state and mark user message as seen
isAgentProcessing.value = true
currentStreamingMessageId = generateMessageId()
streamingBuffer = data.content
streamingImages.value = []
toolCallStatus.value = null
// Mark the last sent user message as seen (double-checkmark)
if (lastSentMessageId) {
@@ -205,7 +231,7 @@ const handleMessage = (data: WebSocketMessage) => {
streamingBuffer = ''
streamingImages.value = []
isAgentProcessing.value = false
toolCallStatus.value = null
removeToolCallBubble()
}
}
}
@@ -221,7 +247,7 @@ const stopAgent = () => {
}
wsManager.send(wsMessage)
isAgentProcessing.value = false
toolCallStatus.value = null
removeToolCallBubble()
lastSentMessageId = null
}
@@ -336,34 +362,137 @@ const chatTheme = 'dark'
// Styles to match TradingView dark theme
const chatStyles = computed(() => JSON.stringify({
general: {
color: '#d1d4dc',
colorSpinner: '#2962ff',
borderStyle: '1px solid #2a2e39'
color: '#D3D4DC',
colorButtonClear: '#D3D4DC',
colorButton: '#131722',
backgroundColorButton: '#26A69A',
backgroundInput: '#131722',
colorPlaceholder: '#787B86',
colorCaret: '#D3D4DC',
colorSpinner: '#26A69A',
borderStyle: '1px solid #2A2E39',
backgroundScrollIcon: '#2A2E39'
},
container: {
background: '#131722'
border: 'none',
borderRadius: '0',
boxShadow: 'none'
},
header: {
background: '#1e222d',
colorRoomName: '#d1d4dc',
colorRoomInfo: '#787b86'
background: '#2A2E39',
colorRoomName: '#D3D4DC',
colorRoomInfo: '#787B86',
position: 'absolute',
width: '100%'
},
footer: {
background: '#1e222d',
borderStyleInput: '1px solid #2a2e39',
backgroundInput: '#1e222d',
colorInput: '#d1d4dc',
colorPlaceholder: '#787b86',
colorIcons: '#787b86'
background: '#2A2E39',
borderStyleInput: '1px solid #2A2E39',
borderInputSelected: '#26A69A',
backgroundReply: '#2A2E39',
backgroundTagActive: '#2A2E39',
backgroundTag: '#1E222D'
},
content: {
background: '#131722'
},
sidemenu: {
background: '#131722',
backgroundHover: '#1E222D',
backgroundActive: '#2A2E39',
colorActive: '#D3D4DC',
borderColorSearch: '#2A2E39'
},
dropdown: {
background: '#2A2E39',
backgroundHover: '#363B4A'
},
message: {
background: '#1e222d',
backgroundMe: '#2962ff',
color: '#d1d4dc',
colorMe: '#ffffff'
background: '#1E222D',
backgroundMe: '#26A69A',
color: '#D3D4DC',
colorStarted: '#787B86',
backgroundDeleted: '#131722',
backgroundSelected: '#2A2E39',
colorDeleted: '#787B86',
colorUsername: '#787B86',
colorTimestamp: '#787B86',
backgroundDate: 'rgba(0, 0, 0, 0.3)',
colorDate: '#787B86',
backgroundSystem: 'rgba(0, 0, 0, 0.3)',
colorSystem: '#787B86',
backgroundMedia: 'rgba(0, 0, 0, 0.18)',
backgroundReply: 'rgba(0, 0, 0, 0.18)',
colorReplyUsername: '#D3D4DC',
colorReply: '#B2B5BE',
colorTag: '#26A69A',
backgroundImage: '#2A2E39',
colorNewMessages: '#26A69A',
backgroundScrollCounter: '#26A69A',
colorScrollCounter: '#131722',
backgroundReaction: 'none',
borderStyleReaction: 'none',
backgroundReactionHover: '#2A2E39',
borderStyleReactionHover: 'none',
colorReactionCounter: '#D3D4DC',
backgroundReactionMe: '#26A69A',
borderStyleReactionMe: 'none',
backgroundReactionHoverMe: '#26A69A',
borderStyleReactionHoverMe: 'none',
colorReactionCounterMe: '#131722',
backgroundAudioRecord: '#EF5350',
backgroundAudioLine: 'rgba(255, 255, 255, 0.15)',
backgroundAudioProgress: '#26A69A',
backgroundAudioProgressSelector: '#26A69A',
colorFileExtension: '#787B86'
},
markdown: {
background: 'rgba(42, 46, 57, 0.8)',
border: 'rgba(55, 60, 74, 0.9)',
color: '#26A69A',
colorMulti: '#D3D4DC'
},
room: {
colorUsername: '#D3D4DC',
colorMessage: '#787B86',
colorTimestamp: '#787B86',
colorStateOnline: '#26A69A',
colorStateOffline: '#787B86',
backgroundCounterBadge: '#26A69A',
colorCounterBadge: '#131722'
},
emoji: {
background: '#2A2E39'
},
icons: {
search: '#787B86',
add: '#D3D4DC',
toggle: '#D3D4DC',
menu: '#D3D4DC',
close: '#787B86',
closeImage: '#D3D4DC',
file: '#26A69A',
paperclip: '#787B86',
closeOutline: '#D3D4DC',
closePreview: '#D3D4DC',
send: '#26A69A',
sendDisabled: '#787B86',
emoji: '#787B86',
emojiReaction: '#787B86',
document: '#26A69A',
pencil: '#787B86',
checkmark: '#787B86',
checkmarkSeen: '#26A69A',
eye: '#D3D4DC',
dropdownMessage: '#D3D4DC',
dropdownMessageBackground: 'rgba(0, 0, 0, 0.3)',
dropdownRoom: '#D3D4DC',
dropdownScroll: '#2A2E39',
microphone: '#787B86',
audioPlay: '#26A69A',
audioPause: '#26A69A',
audioCancel: '#EF5350',
audioConfirm: '#26A69A'
}
}))
@@ -429,7 +558,6 @@ onUnmounted(() => {
<!-- Stop button overlay -->
<div v-if="isAgentProcessing" class="stop-button-container">
<div v-if="toolCallStatus" class="tool-call-status">{{ toolCallStatus }}</div>
<Button
icon="pi pi-stop-circle"
label="Stop"
@@ -459,12 +587,12 @@ onUnmounted(() => {
justify-content: center;
gap: 1rem;
background: #131722;
color: #787b86;
color: #787B86;
}
.workspace-loading-spinner {
font-size: 2rem;
color: #787b86;
color: #787B86;
}
.workspace-loading-message {
@@ -485,6 +613,7 @@ onUnmounted(() => {
max-width: 80% !important;
}
.chat-header-custom {
display: flex;
justify-content: space-between;
@@ -504,21 +633,10 @@ onUnmounted(() => {
.stop-button-container {
position: absolute;
bottom: 80px;
right: 20px;
left: 20px;
z-index: 1000;
}
.tool-call-status {
background: rgba(30, 34, 45, 0.92);
color: #787b86;
font-size: 0.75rem;
padding: 4px 10px;
border-radius: 4px;
margin-bottom: 6px;
text-align: center;
border: 1px solid #2a2e39;
}
.stop-button {
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
animation: pulse 2s infinite;

View File

@@ -9,6 +9,7 @@
*/
import { wsManager, type MessageHandler } from './useWebSocket'
import { intervalToSeconds } from '../utils'
import type {
IBasicDataFeed,
DatafeedConfiguration,
@@ -241,14 +242,39 @@ export class WebSocketDatafeed implements IBasicDataFeed {
console.log('[TradingView Datafeed] Raw bar sample:', response.history.bars?.[0])
console.log('[TradingView Datafeed] Denominators:', denoms)
const bars: Bar[] = (response.history.bars || []).map((bar: any) => ({
time: bar.time * 1000, // Convert to milliseconds
open: parseFloat(bar.open) / denoms.tick,
high: parseFloat(bar.high) / denoms.tick,
low: parseFloat(bar.low) / denoms.tick,
close: parseFloat(bar.close) / denoms.tick,
volume: parseFloat(bar.volume) / denoms.base
}))
const rawBars: any[] = response.history.bars || []
// Parse bars, preserving null OHLC for gap bars (no trades that period)
const parsedBars: Bar[] = rawBars.map((bar: any) => {
if (bar.open === null || bar.close === null) {
return { time: bar.time * 1000, open: null, high: null, low: null, close: null }
}
return {
time: bar.time * 1000,
open: parseFloat(bar.open) / denoms.tick,
high: parseFloat(bar.high) / denoms.tick,
low: parseFloat(bar.low) / denoms.tick,
close: parseFloat(bar.close) / denoms.tick,
volume: parseFloat(bar.volume) / denoms.base
}
})
parsedBars.sort((a, b) => a.time - b.time)
// Fill any gaps between returned bars with null bars so TradingView
// receives a contiguous array of the correct length.
const periodMs = intervalToSeconds(resolution) * 1000
const bars: Bar[] = []
for (let i = 0; i < parsedBars.length; i++) {
if (i > 0) {
const prev = parsedBars[i - 1].time
const curr = parsedBars[i].time
for (let t = prev + periodMs; t < curr; t += periodMs) {
bars.push({ time: t, open: null, high: null, low: null, close: null })
}
}
bars.push(parsedBars[i])
}
console.log('[TradingView Datafeed] Scaled bar sample:', bars[0])

View File

@@ -5,180 +5,146 @@ import { useChartStore } from '../stores/chart'
import type { IndicatorInstance } from '../stores/indicators'
/**
* Mapping between TA-Lib indicator names and TradingView indicator names
* Only includes indicators that are present in BOTH systems (inner join)
* Mapping between pandas-ta indicator function names and TradingView indicator display names.
* Only includes indicators present in BOTH systems (inner join).
* Keys are lowercase pandas-ta function names; values are TradingView display strings.
*/
const TALIB_TO_TV_NAMES: Record<string, string> = {
// Overlap Studies (14)
'SMA': 'Moving Average',
'EMA': 'Moving Average Exponential',
'WMA': 'Weighted Moving Average',
'DEMA': 'DEMA',
'TEMA': 'TEMA',
'TRIMA': 'Triangular Moving Average',
'KAMA': 'KAMA',
'MAMA': 'MESA Adaptive Moving Average',
'T3': 'T3',
'BBANDS': 'Bollinger Bands',
'MIDPOINT': 'Midpoint',
'MIDPRICE': 'Midprice',
'SAR': 'Parabolic SAR',
'HT_TRENDLINE': 'Hilbert Transform - Instantaneous Trendline',
const PANDAS_TA_TO_TV_NAMES: Record<string, string> = {
// Overlap / Moving Averages (17)
'sma': 'Moving Average',
'ema': 'Moving Average Exponential',
'wma': 'Moving Average Weighted',
'dema': 'Double EMA',
'tema': 'Triple EMA',
'trima': 'Triangular Moving Average',
'kama': 'Moving Average Adaptive',
't3': 'T3',
'hma': 'Hull Moving Average',
'alma': 'Arnaud Legoux Moving Average',
'midpoint': 'Midpoint',
'midprice': 'Midprice',
'supertrend': 'SuperTrend',
'ichimoku': 'Ichimoku Cloud',
'vwap': 'VWAP',
'vwma': 'VWMA',
'bbands': 'Bollinger Bands',
// Momentum Indicators (21)
'RSI': 'Relative Strength Index',
'MOM': 'Momentum',
'ROC': 'Rate of Change',
'TRIX': 'TRIX',
'CMO': 'Chande Momentum Oscillator',
'DX': 'Directional Movement Index',
'ADX': 'Average Directional Movement Index',
'ADXR': 'Average Directional Movement Index Rating',
'APO': 'Absolute Price Oscillator',
'PPO': 'Percentage Price Oscillator',
'MACD': 'MACD',
'MFI': 'Money Flow Index',
'STOCH': 'Stochastic',
'STOCHF': 'Stochastic Fast',
'STOCHRSI': 'Stochastic RSI',
'WILLR': 'Williams %R',
'CCI': 'Commodity Channel Index',
'AROON': 'Aroon',
'AROONOSC': 'Aroon Oscillator',
'BOP': 'Balance Of Power',
'ULTOSC': 'Ultimate Oscillator',
// Momentum (22)
'rsi': 'Relative Strength Index',
'macd': 'MACD',
'stoch': 'Stochastic',
'stochrsi':'Stochastic RSI',
'cci': 'Commodity Channel Index',
'willr': 'Williams %R',
'mom': 'Momentum',
'roc': 'Rate Of Change',
'trix': 'TRIX',
'cmo': 'Chande Momentum Oscillator',
'adx': 'Average Directional Index',
'aroon': 'Aroon',
'ao': 'Awesome Oscillator',
'bop': 'Balance of Power',
'uo': 'Ultimate Oscillator',
'apo': 'Price Oscillator',
'mfi': 'Money Flow Index',
'coppock': 'Coppock Curve',
'dpo': 'Detrended Price Oscillator',
'fisher': 'Fisher Transform',
'rvgi': 'Relative Vigor Index',
'kst': 'Know Sure Thing',
// Volume Indicators (3)
'AD': 'Chaikin A/D Line',
'ADOSC': 'Chaikin A/D Oscillator',
'OBV': 'On Balance Volume',
// Volatility (3)
'atr': 'Average True Range',
'kc': 'Keltner Channels',
'donchian': 'Donchian Channels',
// Volatility Indicators (3)
'ATR': 'Average True Range',
'NATR': 'Normalized Average True Range',
'TRANGE': 'True Range',
// Volume (8)
'obv': 'On Balance Volume',
'ad': 'Accumulation/Distribution',
'adosc': 'Chaikin Oscillator',
'cmf': 'Chaikin Money Flow',
'eom': 'Ease of Movement',
'efi': "Elder's Force Index",
'kvo': 'Klinger Oscillator',
'pvt': 'Price Volume Trend',
// Price Transform (4)
'AVGPRICE': 'Average Price',
'MEDPRICE': 'Median Price',
'TYPPRICE': 'Typical Price',
'WCLPRICE': 'Weighted Close Price',
// Statistics / Price Transforms (6)
'stdev': 'Standard Deviation',
'linreg': 'Linear Regression Curve',
'slope': 'Linear Regression Slope',
'hl2': 'Median Price',
'hlc3': 'Typical Price',
'ohlc4': 'Average Price',
// Cycle Indicators (5)
'HT_DCPERIOD': 'Hilbert Transform - Dominant Cycle Period',
'HT_DCPHASE': 'Hilbert Transform - Dominant Cycle Phase',
'HT_PHASOR': 'Hilbert Transform - Phasor Components',
'HT_SINE': 'Hilbert Transform - SineWave',
'HT_TRENDMODE': 'Hilbert Transform - Trend vs Cycle Mode',
// Statistic Functions (9)
'BETA': 'Beta',
'CORREL': 'Pearson\'s Correlation Coefficient',
'LINEARREG': 'Linear Regression',
'LINEARREG_ANGLE': 'Linear Regression Angle',
'LINEARREG_INTERCEPT': 'Linear Regression Intercept',
'LINEARREG_SLOPE': 'Linear Regression Slope',
'STDDEV': 'Standard Deviation',
'TSF': 'Time Series Forecast',
'VAR': 'Variance',
// Trend (3)
'psar': 'Parabolic SAR',
'vortex': 'Vortex Indicator',
'chop': 'Choppiness Index',
}
// Total: 60 TA-Lib indicators
// Total: 59 indicators
/**
* Custom indicators (implemented in backend, not in TA-Lib)
* Reverse mapping from TradingView display name to pandas-ta function name
*/
const CUSTOM_TO_TV_NAMES: Record<string, string> = {
'VWAP': 'VWAP',
'VWMA': 'VWMA',
'HMA': 'Hull Moving Average',
'SUPERTREND': 'SuperTrend',
'DONCHIAN': 'Donchian Channels',
'KELTNER': 'Keltner Channels',
'CMF': 'Chaikin Money Flow',
'VORTEX': 'Vortex Indicator',
'AO': 'Awesome Oscillator',
'AC': 'Accelerator Oscillator',
'CHOP': 'Choppiness Index',
'MASS': 'Mass Index',
}
// Combined mapping (TA-Lib + Custom)
const ALL_BACKEND_TO_TV_NAMES: Record<string, string> = {
...TALIB_TO_TV_NAMES,
...CUSTOM_TO_TV_NAMES
}
// Total: 72 indicators (60 TA-Lib + 12 Custom)
/**
* Reverse mapping from TradingView to backend
*/
const TV_TO_TALIB_NAMES: Record<string, string> = Object.fromEntries(
Object.entries(TALIB_TO_TV_NAMES).map(([k, v]) => [v, k])
)
const TV_TO_CUSTOM_NAMES: Record<string, string> = Object.fromEntries(
Object.entries(CUSTOM_TO_TV_NAMES).map(([k, v]) => [v, k])
)
const TV_TO_BACKEND_NAMES: Record<string, string> = Object.fromEntries(
Object.entries(ALL_BACKEND_TO_TV_NAMES).map(([k, v]) => [v, k])
const TV_TO_PANDAS_TA_NAMES: Record<string, string> = Object.fromEntries(
Object.entries(PANDAS_TA_TO_TV_NAMES).map(([k, v]) => [v, k])
)
/**
* Convert TA-Lib parameters to TradingView inputs
* Convert pandas-ta parameters to TradingView inputs
*/
function convertTALibParamsToTVInputs(talibName: string, talibParams: Record<string, any>): Record<string, any> {
function convertPandasTaParamsToTVInputs(pandasTaName: string, params: Record<string, any>): Record<string, any> {
const tvInputs: Record<string, any> = {}
// Common parameter mappings
const paramMapping: Record<string, string> = {
'timeperiod': 'length',
'fastperiod': 'fastLength',
'slowperiod': 'slowLength',
'signalperiod': 'signalLength',
'nbdevup': 'mult',
'nbdevdn': 'mult',
'fastlimit': 'fastLimit',
'slowlimit': 'slowLimit',
'acceleration': 'start',
'maximum': 'increment',
'fastk_period': 'kPeriod',
'slowk_period': 'kPeriod',
'slowd_period': 'dPeriod',
'fastd_period': 'dPeriod',
}
// Special handling for specific indicators
if (talibName === 'BBANDS') {
tvInputs.length = talibParams.timeperiod || 20
tvInputs.mult = talibParams.nbdevup || 2
if (pandasTaName === 'bbands') {
tvInputs.length = params.length || 20
tvInputs.mult = params.upper_std ?? params.std ?? 2
tvInputs.source = 'close'
} else if (talibName === 'MACD') {
tvInputs.fastLength = talibParams.fastperiod || 12
tvInputs.slowLength = talibParams.slowperiod || 26
tvInputs.signalLength = talibParams.signalperiod || 9
} else if (pandasTaName === 'macd') {
tvInputs.fastLength = params.fast || 12
tvInputs.slowLength = params.slow || 26
tvInputs.signalLength = params.signal || 9
tvInputs.source = 'close'
} else if (talibName === 'RSI') {
tvInputs.length = talibParams.timeperiod || 14
} else if (pandasTaName === 'stoch') {
tvInputs.kPeriod = params.k || 14
tvInputs.dPeriod = params.d || 3
tvInputs.smoothK = params.smooth_k || 3
} else if (pandasTaName === 'stochrsi') {
tvInputs.length = params.length || 14
tvInputs.rsiLength = params.rsi_length || 14
tvInputs.kPeriod = params.k || 3
tvInputs.dPeriod = params.d || 3
} else if (pandasTaName === 'psar') {
tvInputs.start = params.af0 || 0.02
tvInputs.increment = params.af || 0.02
tvInputs.max = params.max_af || 0.2
} else if (pandasTaName === 'ichimoku') {
tvInputs.conversionPeriod = params.tenkan || 9
tvInputs.basePeriod = params.kijun || 26
tvInputs.laggingSpanPeriod = params.senkou || 52
} else if (pandasTaName === 'vwap') {
tvInputs.anchor = params.anchor || 'D'
} else if (['apo', 'ppo'].includes(pandasTaName)) {
tvInputs.fastLength = params.fast || 12
tvInputs.slowLength = params.slow || 26
tvInputs.source = 'close'
} else if (['SMA', 'EMA', 'WMA', 'DEMA', 'TEMA', 'TRIMA'].includes(talibName)) {
tvInputs.length = talibParams.timeperiod || 14
tvInputs.source = 'close'
} else if (talibName === 'STOCH') {
tvInputs.kPeriod = talibParams.fastk_period || 14
tvInputs.dPeriod = talibParams.slowd_period || 3
tvInputs.smoothK = talibParams.slowk_period || 3
} else if (talibName === 'ATR') {
tvInputs.length = talibParams.timeperiod || 14
} else if (talibName === 'CCI') {
tvInputs.length = talibParams.timeperiod || 20
} else if (pandasTaName === 'uo') {
tvInputs.fast = params.fast || 7
tvInputs.medium = params.medium || 14
tvInputs.slow = params.slow || 28
} else if (pandasTaName === 'adosc') {
tvInputs.fastLength = params.fast || 3
tvInputs.slowLength = params.slow || 10
} else if (pandasTaName === 'kc') {
tvInputs.length = params.length || 20
tvInputs.mult = params.scalar || 2
} else {
// Generic parameter conversion
for (const [talibParam, value] of Object.entries(talibParams)) {
const tvParam = paramMapping[talibParam] || talibParam
tvInputs[tvParam] = value
// Generic: pass length through; skip source
if (params.length !== undefined) tvInputs.length = params.length
for (const [k, v] of Object.entries(params)) {
if (k === 'length' || k === 'source') continue
tvInputs[k] = v
}
}
@@ -186,68 +152,72 @@ function convertTALibParamsToTVInputs(talibName: string, talibParams: Record<str
}
/**
* Convert TradingView inputs to TA-Lib parameters
* Convert TradingView inputs to pandas-ta parameters
*/
function convertTVInputsToTALibParams(tvName: string, tvInputs: Record<string, any>): { talibName: string | null, talibParams: Record<string, any> } {
const talibName = TV_TO_BACKEND_NAMES[tvName] || null
if (!talibName) {
console.warn('[Indicators] No backend mapping for TradingView indicator:', tvName)
return { talibName: null, talibParams: {} }
function convertTVInputsToPandasTaParams(tvName: string, tvInputs: Record<string, any>): { pandasTaName: string | null, pandasTaParams: Record<string, any> } {
const pandasTaName = TV_TO_PANDAS_TA_NAMES[tvName] || null
if (!pandasTaName) {
console.warn('[Indicators] No pandas-ta mapping for TradingView indicator:', tvName)
return { pandasTaName: null, pandasTaParams: {} }
}
const talibParams: Record<string, any> = {}
const pandasTaParams: Record<string, any> = {}
// Reverse parameter mappings
const reverseMapping: Record<string, string> = {
'length': 'timeperiod',
'fastLength': 'fastperiod',
'slowLength': 'slowperiod',
'signalLength': 'signalperiod',
'mult': 'nbdevup',
'fastLimit': 'fastlimit',
'slowLimit': 'slowlimit',
'start': 'acceleration',
'increment': 'maximum',
'kPeriod': 'fastk_period',
'dPeriod': 'slowd_period',
'smoothK': 'slowk_period',
}
// Special handling for specific indicators
if (talibName === 'BBANDS') {
// TradingView uses in_0 for length, in_1 for multiplier
talibParams.timeperiod = tvInputs.in_0 || tvInputs.length || 20
talibParams.nbdevup = tvInputs.in_1 || tvInputs.mult || 2
talibParams.nbdevdn = tvInputs.in_1 || tvInputs.mult || 2
talibParams.matype = 0 // SMA
} else if (talibName === 'MACD') {
talibParams.fastperiod = tvInputs.fastLength || 12
talibParams.slowperiod = tvInputs.slowLength || 26
talibParams.signalperiod = tvInputs.signalLength || 9
} else if (talibName === 'RSI') {
talibParams.timeperiod = tvInputs.in_0 || tvInputs.length || 14
} else if (['SMA', 'EMA', 'WMA', 'DEMA', 'TEMA', 'TRIMA'].includes(talibName)) {
talibParams.timeperiod = tvInputs.in_0 || tvInputs.length || 14
} else if (talibName === 'STOCH') {
talibParams.fastk_period = tvInputs.kPeriod || 14
talibParams.slowd_period = tvInputs.dPeriod || 3
talibParams.slowk_period = tvInputs.smoothK || 3
talibParams.slowk_matype = 0 // SMA
talibParams.slowd_matype = 0 // SMA
} else if (talibName === 'ATR') {
talibParams.timeperiod = tvInputs.length || 14
} else if (talibName === 'CCI') {
talibParams.timeperiod = tvInputs.length || 20
if (pandasTaName === 'bbands') {
pandasTaParams.length = tvInputs.in_0 ?? tvInputs.length ?? 20
const std = tvInputs.in_1 ?? tvInputs.mult ?? 2
pandasTaParams.upper_std = std
pandasTaParams.lower_std = std
} else if (pandasTaName === 'macd') {
pandasTaParams.fast = tvInputs.fastLength ?? 12
pandasTaParams.slow = tvInputs.slowLength ?? 26
pandasTaParams.signal = tvInputs.signalLength ?? 9
} else if (pandasTaName === 'stoch') {
pandasTaParams.k = tvInputs.kPeriod ?? 14
pandasTaParams.d = tvInputs.dPeriod ?? 3
pandasTaParams.smooth_k = tvInputs.smoothK ?? 3
} else if (pandasTaName === 'stochrsi') {
pandasTaParams.length = tvInputs.length ?? 14
pandasTaParams.rsi_length = tvInputs.rsiLength ?? 14
pandasTaParams.k = tvInputs.kPeriod ?? 3
pandasTaParams.d = tvInputs.dPeriod ?? 3
} else if (pandasTaName === 'psar') {
pandasTaParams.af0 = tvInputs.start ?? 0.02
pandasTaParams.af = tvInputs.increment ?? 0.02
pandasTaParams.max_af = tvInputs.max ?? 0.2
} else if (pandasTaName === 'ichimoku') {
pandasTaParams.tenkan = tvInputs.conversionPeriod ?? 9
pandasTaParams.kijun = tvInputs.basePeriod ?? 26
pandasTaParams.senkou = tvInputs.laggingSpanPeriod ?? 52
} else if (pandasTaName === 'vwap') {
pandasTaParams.anchor = tvInputs.anchor ?? 'D'
} else if (['apo', 'ppo'].includes(pandasTaName)) {
pandasTaParams.fast = tvInputs.fastLength ?? 12
pandasTaParams.slow = tvInputs.slowLength ?? 26
} else if (pandasTaName === 'uo') {
pandasTaParams.fast = tvInputs.fast ?? 7
pandasTaParams.medium = tvInputs.medium ?? 14
pandasTaParams.slow = tvInputs.slow ?? 28
} else if (pandasTaName === 'adosc') {
pandasTaParams.fast = tvInputs.fastLength ?? 3
pandasTaParams.slow = tvInputs.slowLength ?? 10
} else if (pandasTaName === 'kc') {
pandasTaParams.length = tvInputs.length ?? 20
pandasTaParams.scalar = tvInputs.mult ?? 2
} else {
// Generic parameter conversion
for (const [tvParam, value] of Object.entries(tvInputs)) {
if (tvParam === 'source') continue // Skip source parameter
const talibParam = reverseMapping[tvParam] || tvParam
talibParams[talibParam] = value
// Generic: extract length; skip source
for (const [k, v] of Object.entries(tvInputs)) {
if (k === 'source') continue
pandasTaParams[k] = v
}
// Normalise TV in_0 → length for simple single-period indicators
if (tvInputs.in_0 !== undefined && pandasTaParams.length === undefined) {
pandasTaParams.length = tvInputs.in_0
delete pandasTaParams.in_0
}
}
return { talibName, talibParams }
return { pandasTaName, pandasTaParams }
}
/**
@@ -319,12 +289,12 @@ export function useTradingViewIndicators(tvWidget: IChartingLibraryWidget) {
console.log('[Indicators] Study name extracted:', studyName)
const talibName = TV_TO_BACKEND_NAMES[studyName]
console.log('[Indicators] Backend mapping:', studyName, '->', talibName)
const pandasTaName = TV_TO_PANDAS_TA_NAMES[studyName]
console.log('[Indicators] pandas-ta mapping:', studyName, '->', pandasTaName)
if (!talibName) {
console.log('[Indicators] TradingView study not mapped to backend:', studyName)
console.log('[Indicators] Available mappings:', Object.keys(TV_TO_BACKEND_NAMES).slice(0, 10))
if (!pandasTaName) {
console.log('[Indicators] TradingView study not mapped to pandas-ta:', studyName)
console.log('[Indicators] Available mappings:', Object.keys(TV_TO_PANDAS_TA_NAMES).slice(0, 10))
return null
}
@@ -357,16 +327,16 @@ export function useTradingViewIndicators(tvWidget: IChartingLibraryWidget) {
console.log('[Indicators] TV inputs:', tvInputs)
const { talibParams } = convertTVInputsToTALibParams(studyName, tvInputs)
console.log('[Indicators] Converted TA-Lib params:', talibParams)
const { pandasTaParams } = convertTVInputsToPandasTaParams(studyName, tvInputs)
console.log('[Indicators] Converted pandas-ta params:', pandasTaParams)
const now = Math.floor(Date.now() / 1000)
const indicator = {
id: studyId || tvStudy.id || `ind_${Date.now()}`,
talib_name: talibName,
instance_name: `${talibName}_${Date.now()}`,
parameters: talibParams,
pandas_ta_name: pandasTaName,
instance_name: `${pandasTaName}_${Date.now()}`,
parameters: pandasTaParams,
tv_study_id: studyId || tvStudy.id,
tv_indicator_name: studyName,
tv_inputs: tvInputs,
@@ -591,17 +561,17 @@ export function useTradingViewIndicators(tvWidget: IChartingLibraryWidget) {
console.log('[Indicators] TV inputs:', tvInputs)
const { talibParams } = convertTVInputsToTALibParams(studyName, tvInputs)
const { pandasTaParams } = convertTVInputsToPandasTaParams(studyName, tvInputs)
console.log('[Indicators] Old params:', existingIndicator.parameters)
console.log('[Indicators] New params:', talibParams)
console.log('[Indicators] New params:', pandasTaParams)
// Update the store with new parameters
if (JSON.stringify(existingIndicator.parameters) !== JSON.stringify(talibParams)) {
if (JSON.stringify(existingIndicator.parameters) !== JSON.stringify(pandasTaParams)) {
console.log('[Indicators] Parameters changed, updating store...')
isUpdatingStore = true
indicatorStore.updateIndicator(existingIndicator.id, {
parameters: talibParams,
parameters: pandasTaParams,
tv_inputs: tvInputs
})
console.log('[Indicators] Indicator updated in store!')
@@ -816,14 +786,14 @@ export function useTradingViewIndicators(tvWidget: IChartingLibraryWidget) {
if (!chart) return
const currentSymbol = chartStore.symbol
const tvName = ALL_BACKEND_TO_TV_NAMES[indicator.talib_name]
const tvName = PANDAS_TA_TO_TV_NAMES[indicator.pandas_ta_name]
if (!tvName) {
console.warn('[Indicators] No TradingView mapping for backend indicator:', indicator.talib_name)
console.warn('[Indicators] No TradingView mapping for pandas-ta indicator:', indicator.pandas_ta_name)
return
}
const tvInputs = convertTALibParamsToTVInputs(indicator.talib_name, indicator.parameters)
const tvInputs = convertPandasTaParamsToTVInputs(indicator.pandas_ta_name, indicator.parameters)
console.log(`[Indicators] Creating TradingView study: ${tvName} with inputs:`, tvInputs)
@@ -866,7 +836,7 @@ export function useTradingViewIndicators(tvWidget: IChartingLibraryWidget) {
return
}
const tvInputs = convertTALibParamsToTVInputs(indicator.talib_name, indicator.parameters)
const tvInputs = convertPandasTaParamsToTVInputs(indicator.pandas_ta_name, indicator.parameters)
// Update study inputs
chart.getStudyById(indicator.tv_study_id).applyOverrides(tvInputs)

View File

@@ -144,37 +144,6 @@ function convertTVShapeToShape(tvShape: any, symbol: string, shapeId?: string, s
}
}
/**
* Convert interval string to seconds
*/
function intervalToSeconds(interval: string): number {
// Handle plain numbers (TradingView uses integers for sub-hour intervals in minutes)
const numericInterval = parseInt(interval)
if (!isNaN(numericInterval) && interval === numericInterval.toString()) {
return numericInterval * 60 // Plain number means minutes
}
// Handle formatted intervals like "15M", "1H", "1D"
const match = interval.match(/^(\d+)([SMHDW])$/)
if (!match) {
console.warn('[TradingView Shapes] Unknown interval format:', interval)
return 60 // Default to 1 minute
}
const value = parseInt(match[1])
const unit = match[2]
const multipliers: Record<string, number> = {
'S': 1,
'M': 60,
'H': 3600,
'D': 86400,
'W': 604800
}
return value * (multipliers[unit] || 60)
}
/**
* Canonicalize timestamp to candle boundary
* TradingView requires timestamps to align exactly with candle start times
@@ -532,9 +501,8 @@ export function useTradingViewShapes(tvWidget: IChartingLibraryWidget) {
const currentSymbol = chartStore.symbol
// Get current chart period and convert to seconds for timestamp canonicalization
const period = chartStore.period
const intervalSeconds = intervalToSeconds(period)
// period is already stored in seconds
const intervalSeconds = chartStore.period
// Convert points to TradingView format and canonicalize timestamps to candle boundaries
const tvPoints = shape.points.map(p => {

View File

@@ -5,7 +5,7 @@ export interface ChartState {
symbol: string
start_time: number | null
end_time: number | null
period: string
period: number
selected_shapes: string[]
}
@@ -13,7 +13,7 @@ export const useChartStore = defineStore('chartState', () => {
const symbol = ref<string>('BINANCE:BTC/USDT')
const start_time = ref<number | null>(null)
const end_time = ref<number | null>(null)
const period = ref<string>('15')
const period = ref<number>(900) // seconds; default 15 minutes
const selected_shapes = ref<string[]>([])
return { symbol, start_time, end_time, period, selected_shapes }

View File

@@ -3,7 +3,7 @@ import { ref } from 'vue'
export interface IndicatorInstance {
id: string
talib_name: string
pandas_ta_name: string
instance_name: string
parameters: Record<string, any>
tv_study_id?: string

23
web/src/utils.ts Normal file
View File

@@ -0,0 +1,23 @@
/**
 * Convert a TradingView interval (resolution) string to seconds.
 *
 * TradingView conventions handled here:
 *  - Plain numbers are minutes: "15" → 900, "240" → 14400.
 *  - Suffixed forms use the suffix unit: "30S" → 30, "4H" → 14400,
 *    "1D" → 86400, "1W" → 604800.
 *  - Bare suffixes (TradingView emits "D"/"W" without a count) are
 *    treated as a count of 1: "D" → 86400, "W" → 604800.
 *
 * NOTE(review): "M" is treated as minutes here (e.g. "15M" → 900), matching
 * the previous in-file implementation; TradingView also uses "M" for months
 * at monthly resolution — confirm no monthly resolutions reach this helper.
 *
 * Unrecognized input falls back to 60 (1 minute).
 */
export function intervalToSeconds(interval: string): number {
  const numericInterval = parseInt(interval)
  if (!isNaN(numericInterval) && interval === numericInterval.toString()) {
    return numericInterval * 60 // plain number = minutes
  }
  // \d* (not \d+) so bare "D" / "W" parse with an implicit count of 1
  const match = interval.match(/^(\d*)([SMHDW])$/)
  if (match) {
    const value = match[1] === '' ? 1 : parseInt(match[1])
    switch (match[2]) {
      case 'S': return value
      case 'M': return value * 60
      case 'H': return value * 3600
      case 'D': return value * 86400
      case 'W': return value * 604800
    }
  }
  return 60 // fallback: 1 minute
}