// Kafka producer for writing market data
import { Kafka } from 'kafkajs';
import {
  encodeMessage,
  MessageTypeId,
  Tick,
  OHLC,
  OHLCBatch,
  Market
} from './proto/messages.js';

/**
 * Thin wrapper around a kafkajs Producer that serializes market-data
 * payloads (ticks, OHLC candles, historical batches, marker messages and
 * market metadata) into two-frame protobuf envelopes before publishing.
 */
export class KafkaProducer {
  /**
   * @param {object} config - Configuration object; `config.kafka_brokers`
   *   (string[]) selects the broker list, defaulting to localhost:9092.
   * @param {object} logger - pino-style logger exposing `info`/`debug`
   *   methods with the `(obj, msg)` calling convention.
   */
  constructor(config, logger) {
    this.config = config;
    this.logger = logger;
    this.kafka = new Kafka({
      clientId: 'ccxt-ingestor',
      brokers: config.kafka_brokers || ['localhost:9092'],
      // kafkajs logLevel enum: NOTHING=0, ERROR=1, WARN=2, INFO=4, DEBUG=5.
      // The original used 0 with an "Error only" comment, but 0 silences
      // ALL client logging; 1 (ERROR) matches the stated intent.
      logLevel: 1 // ERROR only
    });
    this.producer = this.kafka.producer();
    this.isConnected = false;
  }

  /**
   * Encode `data` as message type `typeId` and concatenate the resulting
   * two envelope frames into a single Buffer suitable for a Kafka value.
   * @param {number} typeId - MessageTypeId discriminator
   * @param {object} data - Plain payload object
   * @param {object} type - Protobuf message type used for encoding
   * @returns {Buffer} Concatenated envelope + payload
   */
  #encode(typeId, data, type) {
    const [frame1, frame2] = encodeMessage(typeId, data, type);
    return Buffer.concat([frame1, frame2]);
  }

  /** Throw if connect() has not completed successfully. */
  #assertConnected() {
    if (!this.isConnected) {
      throw new Error('Kafka producer not connected');
    }
  }

  /**
   * Connect to Kafka.
   */
  async connect() {
    await this.producer.connect();
    this.isConnected = true;
    this.logger.info('Connected to Kafka');
  }

  /**
   * Write a tick message to Kafka.
   * @param {string} topic - Kafka topic name
   * @param {object} tickData - Tick data object (must carry `ticker` and `timestamp`)
   * @throws {Error} If the producer is not connected
   */
  async writeTick(topic, tickData) {
    this.#assertConnected();

    const message = this.#encode(MessageTypeId.TICK, tickData, Tick);

    await this.producer.send({
      topic,
      messages: [
        {
          key: tickData.ticker,
          value: message,
          // kafkajs expects the message timestamp as a string of epoch millis
          timestamp: tickData.timestamp.toString()
        }
      ]
    });

    this.logger.debug({ ticker: tickData.ticker, topic }, 'Wrote tick to Kafka');
  }

  /**
   * Write multiple ticks to Kafka in a single produce request.
   * @param {string} topic - Kafka topic name
   * @param {Array} ticksData - Array of tick data objects
   * @throws {Error} If the producer is not connected
   */
  async writeTicks(topic, ticksData) {
    this.#assertConnected();

    if (ticksData.length === 0) {
      return;
    }

    const messages = ticksData.map(tickData => ({
      key: tickData.ticker,
      value: this.#encode(MessageTypeId.TICK, tickData, Tick),
      timestamp: tickData.timestamp.toString()
    }));

    await this.producer.send({ topic, messages });

    this.logger.debug(
      { count: ticksData.length, topic },
      'Wrote ticks batch to Kafka'
    );
  }

  /**
   * Write a single OHLC message to Kafka.
   * NOTE(review): unlike writeTick, no explicit Kafka message timestamp is
   * set here, so the broker assigns one — confirm this asymmetry is intended.
   * @param {string} topic - Kafka topic name
   * @param {object} ohlcData - OHLC data object (must carry `ticker`)
   * @throws {Error} If the producer is not connected
   */
  async writeOHLC(topic, ohlcData) {
    this.#assertConnected();

    const message = this.#encode(MessageTypeId.OHLC, ohlcData, OHLC);

    await this.producer.send({
      topic,
      messages: [
        {
          key: ohlcData.ticker,
          value: message
        }
      ]
    });

    this.logger.debug({ ticker: ohlcData.ticker, topic }, 'Wrote OHLC to Kafka');
  }

  /**
   * Write multiple OHLC candles to Kafka.
   *
   * Historical mode: pass explicit metadata and isLastPage flag — candles
   * are wrapped into a single OHLCBatch message keyed by ticker.
   * Realtime mode: omit metadata (null/undefined) — each candle is written
   * as an individual OHLC message instead.
   *
   * @param {string} topic - Kafka topic name
   * @param {Array} ohlcData - Array of OHLC candle objects
   * @param {object|null} metadata - Request metadata for historical batches; null for realtime
   * @param {boolean} isLastPage - True if this is the final page of a historical query
   * @throws {Error} If the producer is not connected
   */
  async writeOHLCs(topic, ohlcData, metadata = null, isLastPage = false) {
    this.#assertConnected();

    if (ohlcData.length === 0) {
      return;
    }

    if (!metadata) {
      // Realtime mode — write individual OHLC messages (no batch wrapper)
      const messages = ohlcData.map(candle => {
        const protoCandle = {
          timestamp: candle.timestamp,
          ticker: candle.ticker,
          open: candle.open,
          high: candle.high,
          low: candle.low,
          close: candle.close,
          volume: candle.volume
        };
        return {
          key: candle.ticker,
          value: this.#encode(MessageTypeId.OHLC, protoCandle, OHLC)
        };
      });

      await this.producer.send({ topic, messages });

      this.logger.debug(
        { count: ohlcData.length, topic, type: 'individual' },
        'Wrote OHLC messages to Kafka'
      );
      return;
    }

    // Historical mode — write as OHLCBatch with metadata
    const batch = {
      metadata: {
        requestId: metadata.request_id,
        clientId: metadata.client_id,
        ticker: metadata.ticker,
        periodSeconds: metadata.period_seconds,
        startTime: metadata.start_time,
        endTime: metadata.end_time,
        status: metadata.status || 'OK',
        errorMessage: metadata.error_message,
        isLastPage
      },
      rows: ohlcData.map(candle => {
        const row = {
          timestamp: candle.timestamp,
          ticker: candle.ticker,
          open: candle.open,
          high: candle.high,
          low: candle.low,
          close: candle.close,
        };
        // volume is optional in the proto schema; only set when present
        if (candle.volume != null) row.volume = candle.volume;
        return row;
      })
    };

    const value = this.#encode(MessageTypeId.OHLC_BATCH, batch, OHLCBatch);

    await this.producer.send({
      topic,
      messages: [{ key: metadata.ticker, value }]
    });

    this.logger.debug(
      { request_id: metadata.request_id, count: ohlcData.length, isLastPage, topic },
      'Wrote OHLCBatch to Kafka'
    );
  }

  /**
   * Write a marker message to Kafka for NOT_FOUND or ERROR cases.
   * This allows Flink to publish notifications even when no data is available.
   * Markers are always the terminal message for a request (isLastPage = true)
   * and carry an empty rows array.
   * @param {string} topic - Kafka topic name
   * @param {object} marker - Marker object with request metadata and status
   * @throws {Error} If the producer is not connected
   */
  async writeMarker(topic, marker) {
    this.#assertConnected();

    // Create an empty OHLCBatch with status in metadata.
    const batch = {
      metadata: {
        requestId: marker.request_id,
        clientId: marker.client_id,
        ticker: marker.ticker,
        periodSeconds: marker.period_seconds,
        startTime: marker.start_time,
        endTime: marker.end_time,
        status: marker.status, // 'NOT_FOUND' or 'ERROR'
        errorMessage: marker.error_message || marker.message,
        isLastPage: true
      },
      rows: [] // Empty rows array indicates marker message
    };

    // Encode as protobuf OHLCBatch with ZMQ envelope
    const value = this.#encode(MessageTypeId.OHLC_BATCH, batch, OHLCBatch);

    await this.producer.send({
      topic,
      messages: [
        {
          key: marker.ticker,
          value
        }
      ]
    });

    this.logger.info(
      { request_id: marker.request_id, status: marker.status, topic },
      'Wrote marker to Kafka'
    );
  }

  /**
   * Write market metadata messages to Kafka.
   * @param {string} topic - Kafka topic name
   * @param {Array} messages - Array of {key, value} objects where value is Market metadata
   * @throws {Error} If the producer is not connected
   */
  async writeMarketMetadata(topic, messages) {
    this.#assertConnected();

    if (messages.length === 0) {
      return;
    }

    const kafkaMessages = messages.map(({ key, value }) => ({
      key,
      value: this.#encode(MessageTypeId.MARKET, value, Market)
    }));

    await this.producer.send({ topic, messages: kafkaMessages });

    this.logger.debug(
      { count: messages.length, topic },
      'Wrote market metadata to Kafka'
    );
  }

  /**
   * Disconnect from Kafka. Safe to call when already disconnected (no-op).
   */
  async disconnect() {
    if (this.isConnected) {
      await this.producer.disconnect();
      this.isConnected = false;
      this.logger.info('Disconnected from Kafka');
    }
  }
}