backend redesign
This commit is contained in:
270
ingestor/src/kafka-producer.js
Normal file
270
ingestor/src/kafka-producer.js
Normal file
@@ -0,0 +1,270 @@
|
||||
// Kafka producer for writing market data
|
||||
import { Kafka } from 'kafkajs';
|
||||
import { encodeMessage, MessageTypeId, Tick, OHLC, OHLCBatch } from './proto/messages.js';
|
||||
|
||||
export class KafkaProducer {
  /**
   * Kafka producer for writing market data (ticks, OHLC candles, batches,
   * and status markers) as protobuf-encoded messages.
   *
   * @param {object} config - Ingestor config; `kafka_brokers` is an optional
   *   array of broker addresses (defaults to ['localhost:9092']).
   * @param {object} logger - Pino-style logger (provides info/debug methods).
   */
  constructor(config, logger) {
    this.config = config;
    this.logger = logger;

    this.kafka = new Kafka({
      clientId: 'ccxt-ingestor',
      brokers: config.kafka_brokers || ['localhost:9092'],
      // kafkajs: logLevel 0 is NOTHING — suppress all internal kafkajs
      // logging (ERROR would be 1); we log through `logger` instead.
      logLevel: 0
    });

    this.producer = this.kafka.producer();
    this.isConnected = false;
  }

  /**
   * Connect the underlying kafkajs producer.
   * Must be called before any write* method.
   */
  async connect() {
    await this.producer.connect();
    this.isConnected = true;
    this.logger.info('Connected to Kafka');
  }

  /**
   * Guard shared by all write paths.
   * @throws {Error} If connect() has not completed.
   */
  #assertConnected() {
    if (!this.isConnected) {
      throw new Error('Kafka producer not connected');
    }
  }

  /**
   * Encode a message as [envelope, payload] protobuf frames and join them
   * into the single buffer Kafka expects as a record value.
   * @param {number} typeId - MessageTypeId discriminator.
   * @param {object} data - Plain object matching the proto type's fields.
   * @param {object} protoType - Protobuf message type (Tick/OHLC/OHLCBatch).
   * @returns {Buffer} Concatenated envelope + payload.
   */
  #encode(typeId, data, protoType) {
    const [envelope, payload] = encodeMessage(typeId, data, protoType);
    return Buffer.concat([envelope, payload]);
  }

  /**
   * Project a candle record onto the plain OHLC proto fields, dropping any
   * extra properties (e.g. the `__metadata` carried on the first record).
   */
  #toProtoCandle({ timestamp, ticker, open, high, low, close, volume }) {
    return { timestamp, ticker, open, high, low, close, volume };
  }

  /**
   * Build an OHLCBatch payload from request metadata and candle rows.
   * Status/errorMessage are passed explicitly because callers resolve their
   * defaults differently (see writeOHLCs vs writeMarker).
   * @param {object} meta - Request metadata (snake_case fields from upstream).
   * @param {Array<object>} rows - Proto candle rows (empty for markers).
   * @param {string} status - Batch status ('OK', 'NOT_FOUND', 'ERROR', ...).
   * @param {string|undefined} errorMessage - Optional error detail.
   */
  #buildBatch(meta, rows, status, errorMessage) {
    return {
      metadata: {
        requestId: meta.request_id,
        clientId: meta.client_id,
        ticker: meta.ticker,
        periodSeconds: meta.period_seconds,
        startTime: meta.start_time,
        endTime: meta.end_time,
        status,
        errorMessage
      },
      rows
    };
  }

  /**
   * Write a single tick message to Kafka, keyed by ticker and stamped with
   * the tick's own timestamp.
   * @param {string} topic - Kafka topic name
   * @param {object} tickData - Tick data object
   * @throws {Error} If the producer is not connected.
   */
  async writeTick(topic, tickData) {
    this.#assertConnected();

    const value = this.#encode(MessageTypeId.TICK, tickData, Tick);
    await this.producer.send({
      topic,
      messages: [
        {
          key: tickData.ticker,
          value,
          timestamp: tickData.timestamp.toString()
        }
      ]
    });

    this.logger.debug({ ticker: tickData.ticker, topic }, 'Wrote tick to Kafka');
  }

  /**
   * Write multiple ticks to Kafka in a single producer.send call.
   * No-op on an empty array.
   * @param {string} topic - Kafka topic name
   * @param {Array<object>} ticksData - Array of tick data objects
   * @throws {Error} If the producer is not connected.
   */
  async writeTicks(topic, ticksData) {
    this.#assertConnected();

    if (ticksData.length === 0) {
      return;
    }

    const messages = ticksData.map((tickData) => ({
      key: tickData.ticker,
      value: this.#encode(MessageTypeId.TICK, tickData, Tick),
      timestamp: tickData.timestamp.toString()
    }));

    await this.producer.send({ topic, messages });

    this.logger.debug(
      { count: ticksData.length, topic },
      'Wrote ticks batch to Kafka'
    );
  }

  /**
   * Write a single OHLC message to Kafka, keyed by ticker.
   * Note: unlike ticks, OHLC records carry no explicit Kafka timestamp.
   * @param {string} topic - Kafka topic name
   * @param {object} ohlcData - OHLC data object
   * @throws {Error} If the producer is not connected.
   */
  async writeOHLC(topic, ohlcData) {
    this.#assertConnected();

    const value = this.#encode(MessageTypeId.OHLC, ohlcData, OHLC);
    await this.producer.send({
      topic,
      messages: [
        {
          key: ohlcData.ticker,
          value
        }
      ]
    });

    this.logger.debug({ ticker: ohlcData.ticker, topic }, 'Wrote OHLC to Kafka');
  }

  /**
   * Write multiple OHLC candles to Kafka. Two modes, selected by the
   * presence of `__metadata` on the first record:
   *  - realtime (no metadata): one individual OHLC message per candle;
   *  - historical (metadata present): a single OHLCBatch message wrapping
   *    all candles plus the request metadata.
   * No-op on an empty array.
   * @param {string} topic - Kafka topic name
   * @param {Array<object>} ohlcData - Array of OHLC data objects (may
   *   include __metadata in the first record)
   * @throws {Error} If the producer is not connected.
   */
  async writeOHLCs(topic, ohlcData) {
    this.#assertConnected();

    if (ohlcData.length === 0) {
      return;
    }

    const metadata = ohlcData[0].__metadata;

    if (!metadata) {
      // Realtime mode: no request metadata — emit individual OHLC messages.
      const messages = ohlcData.map((candle) => ({
        key: candle.ticker,
        value: this.#encode(MessageTypeId.OHLC, this.#toProtoCandle(candle), OHLC)
      }));

      await this.producer.send({ topic, messages });

      this.logger.debug(
        { count: ohlcData.length, topic, type: 'individual' },
        'Wrote OHLC messages to Kafka'
      );
      return;
    }

    // Historical mode: wrap all candles in one OHLCBatch keyed by ticker.
    const batch = this.#buildBatch(
      metadata,
      ohlcData.map((candle) => this.#toProtoCandle(candle)),
      metadata.status || 'OK',
      metadata.error_message
    );
    const value = this.#encode(MessageTypeId.OHLC_BATCH, batch, OHLCBatch);

    await this.producer.send({
      topic,
      messages: [
        {
          key: metadata.ticker,
          value
        }
      ]
    });

    this.logger.debug(
      { request_id: metadata.request_id, count: ohlcData.length, topic, type: 'batch' },
      'Wrote OHLCBatch to Kafka'
    );
  }

  /**
   * Write a marker message (an OHLCBatch with empty rows) for NOT_FOUND or
   * ERROR cases, so downstream Flink can publish a notification even when no
   * data is available.
   * @param {string} topic - Kafka topic name
   * @param {object} marker - Marker object with request metadata and status
   *   ('NOT_FOUND' or 'ERROR'); `error_message` or `message` carries detail.
   * @throws {Error} If the producer is not connected.
   */
  async writeMarker(topic, marker) {
    this.#assertConnected();

    // Empty rows array is the signal that this batch is a marker.
    const batch = this.#buildBatch(
      marker,
      [],
      marker.status,
      marker.error_message || marker.message
    );
    const value = this.#encode(MessageTypeId.OHLC_BATCH, batch, OHLCBatch);

    await this.producer.send({
      topic,
      messages: [
        {
          key: marker.ticker,
          value
        }
      ]
    });

    this.logger.info(
      { request_id: marker.request_id, status: marker.status, topic },
      'Wrote marker to Kafka'
    );
  }

  /**
   * Disconnect from Kafka. Safe to call when already disconnected.
   */
  async disconnect() {
    if (this.isConnected) {
      await this.producer.disconnect();
      this.isConnected = false;
      this.logger.info('Disconnected from Kafka');
    }
  }
}
|
||||
Reference in New Issue
Block a user