Add Ticker24h support: hourly market snapshots with USD-normalized volume filtering

This commit is contained in:
2026-04-26 18:39:52 -04:00
parent 85fcbe1330
commit 0178b5d29d
45 changed files with 1995 additions and 170 deletions

View File

@@ -16,6 +16,12 @@ import com.dexorder.flink.publisher.RealtimeBarFunction;
import com.dexorder.flink.publisher.RealtimeBarPublisher;
import com.dexorder.flink.publisher.TickWrapper;
import com.dexorder.flink.publisher.TickDeserializer;
import com.dexorder.flink.quotes.Ticker24hFunction;
import com.dexorder.flink.quotes.Ticker24hPublisher;
import com.dexorder.flink.quotes.Ticker24hScheduler;
import com.dexorder.flink.quotes.Ticker24hWrapper;
import com.dexorder.flink.quotes.TickerBatchDeserializer;
import com.dexorder.flink.quotes.TickerBatchWrapper;
import com.dexorder.flink.sink.HistoricalBatchWriter;
import com.dexorder.flink.sink.SymbolMetadataWriter;
import com.dexorder.flink.zmq.ZmqChannelManager;
@@ -273,6 +279,35 @@ public class TradingFlinkApp {
LOG.info("Realtime tick pipeline configured: market-tick → OHLC bars → clients (periods={})",
java.util.Arrays.toString(periods));
// Ticker24h pipeline: market-ticker Kafka → QuoteCurrencyIndex → ZMQ XPUB
KafkaSource<TickerBatchWrapper> tickerSource = KafkaSource.<TickerBatchWrapper>builder()
.setBootstrapServers(config.getKafkaBootstrapServers())
.setTopics(config.getKafkaTickerTopic())
.setGroupId("flink-ticker24h-consumer")
.setStartingOffsets(OffsetsInitializer.latest())
.setValueOnlyDeserializer(new TickerBatchDeserializer())
.build();
DataStream<TickerBatchWrapper> tickerBatchStream = env
.fromSource(tickerSource, WatermarkStrategy.noWatermarks(), "TickerBatch Kafka Source");
DataStream<Ticker24hWrapper> ticker24hStream = tickerBatchStream
.flatMap(new Ticker24hFunction())
.setParallelism(1)
.name("Ticker24hFunction");
ticker24hStream.addSink(new Ticker24hPublisher(notificationEndpoint))
.setParallelism(1)
.name("Ticker24hPublisher");
LOG.info("Ticker24h pipeline configured: market-ticker → Ticker24hFunction → clients");
// Start Ticker24h scheduler (fires on startup + hourly for all configured exchanges)
Ticker24hScheduler ticker24hScheduler = new Ticker24hScheduler(
broker, config.getSupportedExchanges());
ticker24hScheduler.start();
LOG.info("Ticker24hScheduler started for exchanges: {}", config.getSupportedExchanges());
// TODO: Set up CEP patterns and triggers
LOG.info("Flink job configured, starting execution");
@@ -281,6 +316,7 @@ public class TradingFlinkApp {
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
LOG.info("Shutting down Trading Flink Application");
try {
ticker24hScheduler.stop();
notificationForwarder.close();
subscriptionManager.stop();
broker.stop();

View File

@@ -4,7 +4,10 @@ import org.yaml.snakeyaml.Yaml;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
@@ -136,6 +139,26 @@ public class AppConfig {
return getString("kafka_ohlc_topic", "market-ohlc");
}
/** Kafka topic carrying TickerBatch snapshot messages. Config key: "kafka_ticker_topic"; default "market-ticker". */
public String getKafkaTickerTopic() {
return getString("kafka_ticker_topic", "market-ticker");
}
/**
 * Comma-separated list of exchange IDs to fetch Ticker24h snapshots for.
 * Entries are trimmed and upper-cased; empty entries are skipped.
 * Default: BINANCE only.
 *
 * @return upper-cased exchange IDs in configuration order (never null, possibly empty)
 */
public List<String> getSupportedExchanges() {
    String raw = getString("supported_exchanges", "BINANCE");
    List<String> result = new ArrayList<>();
    for (String part : raw.split(",")) {
        // Locale.ROOT: avoid locale-sensitive casing (e.g., Turkish dotless-i turning "binance" into "BİNANCE")
        String trimmed = part.trim().toUpperCase(java.util.Locale.ROOT);
        if (!trimmed.isEmpty()) {
            result.add(trimmed);
        }
    }
    return result;
}
// Notification config:
// Task managers PUSH notifications to this endpoint (job manager PULL address)
public String getNotificationPublishEndpoint() {

View File

@@ -61,6 +61,8 @@ public class IngestorBroker implements AutoCloseable {
private static final long HEARTBEAT_TIMEOUT_MS = 25_000;
/** Re-queue historical job if not completed within this window (ms) */
private static final long HISTORICAL_TIMEOUT_MS = 120_000;
/** Re-queue ticker snapshot job if not completed within this window (ms) */
private static final long TICKER_SNAPSHOT_TIMEOUT_MS = 30_000;
private final ZmqChannelManager zmqManager;
private volatile boolean running;
@@ -113,6 +115,23 @@ public class IngestorBroker implements AutoCloseable {
LOG.info("IngestorBroker stopped");
}
/**
 * Submit a TICKER_SNAPSHOT request from outside the broker thread (thread-safe).
 * Called by Ticker24hScheduler on startup and hourly.
 * Uses sentinel ticker "@TICKER24H.{EXCHANGE}" (e.g., "@TICKER24H.BINANCE").
 *
 * @param exchange exchange ID (any case; normalized to upper case for the sentinel ticker)
 */
public void submitTicker24hRequest(String exchange) {
    String jobId = UUID.randomUUID().toString();
    DataRequest request = DataRequest.newBuilder()
            .setRequestId(jobId)
            .setJobId(jobId) // scheduler-originated: requestId and jobId are the same UUID
            .setType(DataRequest.RequestType.TICKER_SNAPSHOT)
            // Locale.ROOT: locale-independent casing so sentinel matching is deterministic
            .setTicker("@TICKER24H." + exchange.toUpperCase(java.util.Locale.ROOT))
            .build();
    // externalSubmissions is drained by the broker loop thread; ZMQ sockets stay single-threaded
    externalSubmissions.add(request);
    LOG.info("Enqueued TICKER_SNAPSHOT request: exchange={}, jobId={}", exchange, jobId);
}
/**
* Submit a realtime data request from outside the broker thread (thread-safe).
* Called by RealtimeSubscriptionManager when subscription ref count goes 0→1.
@@ -219,21 +238,38 @@ public class IngestorBroker implements AutoCloseable {
try {
    SubmitHistoricalRequest req = SubmitHistoricalRequest.parseFrom(payload);
    String jobId = UUID.randomUUID().toString();
    String ticker = req.getTicker();
    String clientId = req.hasClientId() ? req.getClientId() : "";
    DataRequest dataRequest;
    if (ticker.startsWith("@TICKER24H.")) {
        // Client-initiated ticker snapshot — route to TICKER_SNAPSHOT, not OHLC
        dataRequest = DataRequest.newBuilder()
                .setRequestId(req.getRequestId())
                .setJobId(jobId)
                .setType(DataRequest.RequestType.TICKER_SNAPSHOT)
                .setTicker(ticker)
                .setClientId(clientId)
                .build();
        LOG.info("Routing client-initiated TICKER_SNAPSHOT: request_id={}, ticker={}, client_id={}",
                req.getRequestId(), ticker, clientId);
    } else {
        dataRequest = DataRequest.newBuilder()
                .setRequestId(req.getRequestId())
                .setJobId(jobId)
                .setType(DataRequest.RequestType.HISTORICAL_OHLC)
                .setTicker(ticker)
                .setHistorical(com.dexorder.proto.HistoricalParams.newBuilder()
                        .setStartTime(req.getStartTime())
                        .setEndTime(req.getEndTime())
                        .setPeriodSeconds(req.getPeriodSeconds())
                        .build())
                .setClientId(clientId)
                .build();
        LOG.info("Received historical request from relay: request_id={}, ticker={}",
                req.getRequestId(), ticker);
    }
    // Single enqueue; each branch above already logged — the old unconditional log is removed
    enqueueJob(dataRequest);
} catch (Exception e) {
    LOG.error("Failed to parse SubmitHistoricalRequest from relay", e);
}
@@ -411,8 +447,14 @@ public class IngestorBroker implements AutoCloseable {
for (Map.Entry<String, ActiveJob> entry : activeJobs.entrySet()) {
    ActiveJob job = entry.getValue();
    // Per-type timeout: realtime jobs heartbeat frequently, ticker snapshots are
    // short-lived fetches, and historical backfills get the longest window.
    long timeout;
    if (job.type == DataRequest.RequestType.REALTIME_TICKS) {
        timeout = HEARTBEAT_TIMEOUT_MS;
    } else if (job.type == DataRequest.RequestType.TICKER_SNAPSHOT) {
        timeout = TICKER_SNAPSHOT_TIMEOUT_MS;
    } else {
        timeout = HISTORICAL_TIMEOUT_MS;
    }
    if (now - job.lastHeartbeat > timeout) {
        timedOut.add(entry.getKey());
    }
@@ -460,7 +502,8 @@ public class IngestorBroker implements AutoCloseable {
boolean exchangeMatch = exchange.isEmpty() || slot.exchange.equals(exchange);
boolean typeMatch = slot.slotType == SlotType.ANY
|| (slot.slotType == SlotType.HISTORICAL
&& requestType == DataRequest.RequestType.HISTORICAL_OHLC)
&& (requestType == DataRequest.RequestType.HISTORICAL_OHLC
|| requestType == DataRequest.RequestType.TICKER_SNAPSHOT))
|| (slot.slotType == SlotType.REALTIME
&& requestType == DataRequest.RequestType.REALTIME_TICKS);
if (exchangeMatch && typeMatch) {

View File

@@ -19,17 +19,21 @@ import java.util.regex.Pattern;
* must go through {@link #enqueuePublish(byte[]...)} so they are sent from the single loop
* thread — ZMQ sockets are not thread-safe.
*
* Topic format: {@code {ticker}|ohlc:{period_seconds}}
* Example: {@code BTC/USDT.BINANCE|ohlc:60}
* Topic formats:
* Closed bars: {@code {ticker}|ohlc:{period_seconds}} (strategies, existing consumers)
* Open bars: {@code {ticker}|ohlc:{period_seconds}:open} (chart, live price updates)
*
* Both topic forms map to the same underlying ingestor activation for that ticker.
*
* Reference counting:
* tickerRefs — across all periods for a ticker; 0→1 triggers ingestor activation
* topicRefs — per (ticker, period); consulted by RealtimeOHLCPublisher to filter output
* tickerRefs — across all subscribed topics for a ticker; 0→1 triggers ingestor activation
* topicRefs — per topic string; consulted by RealtimeOHLCPublisher to filter output
*/
public class RealtimeSubscriptionManager implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(RealtimeSubscriptionManager.class);
private static final Pattern TOPIC_PATTERN = Pattern.compile("^(.+)\\|ohlc:(\\d+)$");
// Matches both "{ticker}|ohlc:{period}" and "{ticker}|ohlc:{period}:open"
private static final Pattern TOPIC_PATTERN = Pattern.compile("^(.+)\\|ohlc:(\\d+)(:open)?$");
private final ZmqChannelManager zmqManager;
private final ZMQ.Socket xpubSocket;

View File

@@ -3,8 +3,11 @@ package com.dexorder.flink.publisher;
import java.io.Serializable;
/**
* A single completed OHLC bar for a given ticker and period.
* A single OHLC bar for a given ticker and period.
* Output type of RealtimeBarFunction, input type of RealtimeBarPublisher.
*
* isClosed=true → window fully closed; published on topic "{ticker}|ohlc:{period}"
* isClosed=false → window still open (snapshot); published on "{ticker}|ohlc:{period}:open"
*/
public class RealtimeBar implements Serializable {
private static final long serialVersionUID = 1L;
@@ -23,11 +26,14 @@ public class RealtimeBar implements Serializable {
private long volume;
/** Number of ticks in this window */
private int tickCount;
/** True if this bar's time window has fully closed; false if still accumulating. */
private boolean isClosed;
public RealtimeBar() {}
public RealtimeBar(String ticker, int periodSeconds, long windowStartMs,
long open, long high, long low, long close, long volume, int tickCount) {
long open, long high, long low, long close, long volume, int tickCount,
boolean isClosed) {
this.ticker = ticker;
this.periodSeconds = periodSeconds;
this.windowStartMs = windowStartMs;
@@ -37,6 +43,7 @@ public class RealtimeBar implements Serializable {
this.close = close;
this.volume = volume;
this.tickCount = tickCount;
this.isClosed = isClosed;
}
public String getTicker() { return ticker; }
@@ -48,6 +55,7 @@ public class RealtimeBar implements Serializable {
public long getClose() { return close; }
public long getVolume() { return volume; }
public int getTickCount() { return tickCount; }
public boolean isClosed() { return isClosed; }
public void setTicker(String ticker) { this.ticker = ticker; }
public void setPeriodSeconds(int periodSeconds) { this.periodSeconds = periodSeconds; }
@@ -58,16 +66,22 @@ public class RealtimeBar implements Serializable {
public void setClose(long close) { this.close = close; }
public void setVolume(long volume) { this.volume = volume; }
public void setTickCount(int tickCount) { this.tickCount = tickCount; }
public void setClosed(boolean closed) { this.isClosed = closed; }
/** ZMQ topic for this bar: e.g., "BTC/USDT.BINANCE|ohlc:60" */
/**
* ZMQ topic for this bar.
* Closed bars: "{ticker}|ohlc:{period}" (strategies, existing consumers)
* Open bars: "{ticker}|ohlc:{period}:open" (chart, live price updates)
*/
public String topic() {
return ticker + "|ohlc:" + periodSeconds;
return ticker + "|ohlc:" + periodSeconds + (isClosed ? "" : ":open");
}
@Override
public String toString() {
return "RealtimeBar{ticker='" + ticker + "', period=" + periodSeconds +
"s, windowStart=" + windowStartMs + ", O=" + open + " H=" + high +
" L=" + low + " C=" + close + ", ticks=" + tickCount + '}';
" L=" + low + " C=" + close + ", ticks=" + tickCount +
", closed=" + isClosed + '}';
}
}

View File

@@ -19,8 +19,11 @@ import org.slf4j.LoggerFactory;
* emitted immediately when the boundary is crossed, so bars are delayed by at most
* one tick interval (~10s for realtime polling).
*
* Periods are configurable at construction time. All configured periods are computed
* for every ticker receiving ticks; the ZMQ publisher filters to active subscriptions.
* Emits two types of bars per tick:
* - Open bar (isClosed=false): the current accumulator state, every tick.
* Topic: "{ticker}|ohlc:{period}:open" — consumed by charts for live price display.
* - Closed bar (isClosed=true): emitted once when a window boundary is crossed.
* Topic: "{ticker}|ohlc:{period}" — consumed by strategies/triggers.
*
* Accumulator layout (long[7]):
* [0] open
@@ -68,26 +71,31 @@ public class RealtimeBarFunction extends RichFlatMapFunction<TickWrapper, Realti
long[] accum = accumState.get(period);
if (accum == null) {
// First tick for this period
accumState.put(period, openWindow(tick, windowStart));
// First tick for this period — open a new window and emit the first open bar
long[] newAccum = openWindow(tick, windowStart);
accumState.put(period, newAccum);
out.collect(toBar(tick.getTicker(), period, newAccum, false));
} else if (accum[5] != windowStart) {
// Window boundary crossed — emit completed bar then start fresh
// Window boundary crossed — emit the closed bar, then start a fresh window
if (accum[6] > 0) {
out.collect(toBar(tick.getTicker(), period, accum));
LOG.debug("Emitted bar: ticker={}, period={}s, windowStart={}, ticks={}",
out.collect(toBar(tick.getTicker(), period, accum, true));
LOG.debug("Emitted closed bar: ticker={}, period={}s, windowStart={}, ticks={}",
tick.getTicker(), period, accum[5], accum[6]);
}
accumState.put(period, openWindow(tick, windowStart));
long[] newAccum = openWindow(tick, windowStart);
accumState.put(period, newAccum);
out.collect(toBar(tick.getTicker(), period, newAccum, false));
} else {
// Same window — update
// Same window — update accumulator and emit current open bar
accum[1] = Math.max(accum[1], tick.getPrice()); // high
accum[2] = Math.min(accum[2], tick.getPrice()); // low
accum[3] = tick.getPrice(); // close
accum[4] += tick.getAmount(); // volume
accum[6]++; // tick count
accumState.put(period, accum);
out.collect(toBar(tick.getTicker(), period, accum, false));
}
}
}
@@ -104,13 +112,14 @@ public class RealtimeBarFunction extends RichFlatMapFunction<TickWrapper, Realti
};
}
private static RealtimeBar toBar(String ticker, int period, long[] accum) {
private static RealtimeBar toBar(String ticker, int period, long[] accum, boolean isClosed) {
return new RealtimeBar(
ticker, period,
accum[5], // windowStartMs
accum[0], accum[1], accum[2], accum[3], // O H L C
accum[4], // volume
(int) accum[6] // tickCount
(int) accum[6], // tickCount
isClosed
);
}
}

View File

@@ -0,0 +1,172 @@
package com.dexorder.flink.quotes;
import com.dexorder.proto.QuoteCurrencyIndex;
import com.dexorder.proto.QuoteCurrencyRate;
import com.dexorder.proto.Ticker24h;
import com.dexorder.proto.TickerStats;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
 * Flink function that converts TickerBatch messages into Ticker24h snapshots.
 *
 * Maintains an in-memory cross-exchange price index to compute std_quote_volume
 * (quote volume normalized to USD) for each ticker. USD stablecoins are hardcoded
 * to 1.0; crypto quote currencies are looked up from the exchange price index
 * using priority order: BINANCE → COINBASE → KRAKEN → others.
 *
 * Must run with parallelism=1 (maintains non-replicated cross-exchange state).
 */
public class Ticker24hFunction extends RichFlatMapFunction<TickerBatchWrapper, Ticker24hWrapper> {
    private static final long serialVersionUID = 1L;
    private static final Logger LOG = LoggerFactory.getLogger(Ticker24hFunction.class);

    /** Quote currencies treated as pegged 1:1 to USD (no price lookup needed). */
    private static final Set<String> USD_STABLECOINS = new HashSet<>(Arrays.asList(
            "USDT", "USDC", "BUSD", "TUSD", "DAI", "USDP", "GUSD"
    ));

    // Exchanges checked in priority order when looking up cross-currency rates
    private static final List<String> EXCHANGE_PRIORITY = Arrays.asList(
            "BINANCE", "COINBASE", "KRAKEN"
    );

    // exchange → (ticker → lastPrice), maintained across all received batches.
    // Plain heap state (not Flink managed state): lost on restart and rebuilt as
    // batches arrive — hence the parallelism=1 requirement above.
    private transient Map<String, Map<String, Double>> exchangePriceIndex;

    @Override
    public void open(Configuration parameters) {
        exchangePriceIndex = new HashMap<>();
    }

    /**
     * Converts one TickerBatch into a Ticker24h snapshot:
     * 1. refreshes this exchange's slice of the price index,
     * 2. builds a QuoteCurrencyIndex of USD rates for every quote asset seen,
     * 3. emits TickerStats rows with std_quote_volume where a USD rate is known.
     */
    @Override
    public void flatMap(TickerBatchWrapper batch, Collector<Ticker24hWrapper> out) {
        String exchangeId = batch.getExchangeId();
        long fetchedAt = batch.getFetchedAt();
        List<TickerBatchWrapper.TickerStatsRow> rows = batch.getTickers();
        if (rows == null) {
            // Defensive: TickerBatchWrapper tolerates a null ticker list (see getTickerCount)
            rows = Collections.emptyList();
        }
        // Update cross-exchange price index with this batch's prices
        Map<String, Double> priceMap = new HashMap<>(rows.size() * 2);
        for (TickerBatchWrapper.TickerStatsRow row : rows) {
            if (row.lastPrice > 0) {
                priceMap.put(row.ticker, row.lastPrice);
            }
        }
        exchangePriceIndex.put(exchangeId, priceMap);
        // Build QuoteCurrencyIndex from all unique quote assets in this batch
        Set<String> quoteAssets = new LinkedHashSet<>();
        for (TickerBatchWrapper.TickerStatsRow row : rows) {
            quoteAssets.add(row.quoteAsset);
        }
        Map<String, Double> usdRates = new HashMap<>();
        QuoteCurrencyIndex.Builder indexBuilder = QuoteCurrencyIndex.newBuilder()
                .setGeneratedAt(fetchedAt);
        for (String quoteAsset : quoteAssets) {
            QuoteCurrencyRate rate = buildRate(quoteAsset, fetchedAt);
            if (rate != null) {
                indexBuilder.addRates(rate);
                usdRates.put(quoteAsset, rate.getUsdRate());
            }
        }
        QuoteCurrencyIndex currencyIndex = indexBuilder.build();
        // Build Ticker24h with std_quote_volume for each ticker
        Ticker24h.Builder ticker24hBuilder = Ticker24h.newBuilder()
                .setExchangeId(exchangeId)
                .setGeneratedAt(fetchedAt)
                .setCurrencyIndex(currencyIndex);
        for (TickerBatchWrapper.TickerStatsRow row : rows) {
            TickerStats.Builder tsBuilder = TickerStats.newBuilder()
                    .setTicker(row.ticker)
                    .setExchangeId(row.exchangeId)
                    .setBaseAsset(row.baseAsset)
                    .setQuoteAsset(row.quoteAsset)
                    .setLastPrice(row.lastPrice)
                    .setPriceChangePct(row.priceChangePct)
                    .setQuoteVolume24H(row.quoteVolume24h)
                    .setTimestamp(row.timestamp);
            // Optional fields: only set when the exchange provided them
            if (row.bidPrice != null) tsBuilder.setBidPrice(row.bidPrice);
            if (row.askPrice != null) tsBuilder.setAskPrice(row.askPrice);
            if (row.open24h != null) tsBuilder.setOpen24H(row.open24h);
            if (row.high24h != null) tsBuilder.setHigh24H(row.high24h);
            if (row.low24h != null) tsBuilder.setLow24H(row.low24h);
            if (row.volume24h != null) tsBuilder.setVolume24H(row.volume24h);
            if (row.numTrades != null) tsBuilder.setNumTrades(row.numTrades);
            Double usdRate = usdRates.get(row.quoteAsset);
            if (usdRate != null) {
                tsBuilder.setStdQuoteVolume(row.quoteVolume24h * usdRate);
            }
            ticker24hBuilder.addTickers(tsBuilder.build());
        }
        byte[] protoBytes = ticker24hBuilder.build().toByteArray();
        // Client-initiated snapshots go back on a direct-response topic; scheduled
        // snapshots are broadcast on the exchange-wide topic.
        String clientId = batch.getClientId();
        String topic = (clientId != null && !clientId.isEmpty())
                ? "RESPONSE:" + clientId
                : exchangeId + "|ticker24h";
        LOG.info("Built Ticker24h snapshot: exchange={}, tickers={}, bytes={}, topic={}",
                exchangeId, rows.size(), protoBytes.length, topic);
        out.collect(new Ticker24hWrapper(exchangeId, topic, protoBytes));
    }

    /**
     * Build a USD rate for a quote currency.
     * Returns null if no conversion path is known (fiat, or crypto with no available pair).
     */
    private QuoteCurrencyRate buildRate(String currency, long timestampNs) {
        if (USD_STABLECOINS.contains(currency)) {
            return QuoteCurrencyRate.newBuilder()
                    .setCurrency(currency)
                    .setUsdRate(1.0)
                    .setSourceTicker("hardcoded")
                    .setTimestamp(timestampNs)
                    .build();
        }
        // Try priority exchanges first, then any remaining exchange
        List<String> orderedExchanges = new ArrayList<>(EXCHANGE_PRIORITY);
        for (String ex : exchangePriceIndex.keySet()) {
            if (!orderedExchanges.contains(ex)) {
                orderedExchanges.add(ex);
            }
        }
        for (String exchange : orderedExchanges) {
            Map<String, Double> priceMap = exchangePriceIndex.get(exchange);
            if (priceMap == null) continue;
            // Look for a direct {currency}/{stablecoin} pair on this exchange
            for (String stablecoin : Arrays.asList("USDT", "USDC")) {
                String pairTicker = currency + "/" + stablecoin + "." + exchange;
                Double price = priceMap.get(pairTicker);
                if (price != null && price > 0) {
                    return QuoteCurrencyRate.newBuilder()
                            .setCurrency(currency)
                            .setUsdRate(price)
                            .setSourceTicker(pairTicker)
                            .setTimestamp(timestampNs)
                            .build();
                }
            }
        }
        LOG.debug("No USD conversion path for quote currency: {}", currency);
        return null;
    }
}

View File

@@ -0,0 +1,78 @@
package com.dexorder.flink.quotes;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.zeromq.SocketType;
import org.zeromq.ZContext;
import org.zeromq.ZMQ;
/**
 * Flink sink that publishes Ticker24h snapshots to subscribers via ZMQ.
 *
 * Connects a ZMQ PUSH socket to the job manager's notification PULL endpoint.
 * HistoryNotificationForwarder receives these frames and enqueues them to
 * RealtimeSubscriptionManager, which publishes them on the MARKET_DATA_PUB XPUB socket.
 * Clients subscribed to "{exchange_id}|ticker24h" receive the snapshot.
 *
 * Wire format (matches other notification publishers):
 * Frame 1: topic bytes (e.g., "BINANCE|ticker24h")
 * Frame 2: [0x01] (protocol version)
 * Frame 3: [0x0D][Ticker24h protobuf bytes] (type 0x0D = TICKER_24H)
 *
 * Parallelism MUST be 1 — the PUSH socket is not thread-safe.
 */
public class Ticker24hPublisher extends RichSinkFunction<Ticker24hWrapper> {
    private static final Logger LOG = LoggerFactory.getLogger(Ticker24hPublisher.class);
    private static final long serialVersionUID = 1L;

    // Wire framing constants — must match the other notification publishers
    private static final byte PROTOCOL_VERSION = 0x01;
    private static final byte MSG_TYPE_TICKER_24H = 0x0D;

    private final String jobManagerPullEndpoint;
    // Renamed from "context" so Flink's invoke(…, Context) parameter does not shadow it
    private transient ZContext zmqContext;
    private transient ZMQ.Socket pushSocket;

    public Ticker24hPublisher(String jobManagerPullEndpoint) {
        this.jobManagerPullEndpoint = jobManagerPullEndpoint;
    }

    @Override
    public void open(Configuration parameters) {
        zmqContext = new ZContext();
        pushSocket = zmqContext.createSocket(SocketType.PUSH);
        pushSocket.setLinger(1000);
        pushSocket.setSndHWM(10000);
        pushSocket.connect(jobManagerPullEndpoint);
        LOG.info("Ticker24hPublisher PUSH connected to {}", jobManagerPullEndpoint);
    }

    @Override
    public void invoke(Ticker24hWrapper wrapper, Context sinkContext) {
        try {
            byte[] protoBytes = wrapper.getProtoBytes();
            // Frame 3 payload: [type byte][proto bytes]
            byte[] messageFrame = new byte[1 + protoBytes.length];
            messageFrame[0] = MSG_TYPE_TICKER_24H;
            System.arraycopy(protoBytes, 0, messageFrame, 1, protoBytes.length);
            String topic = wrapper.getZmqTopic();
            pushSocket.sendMore(topic.getBytes(ZMQ.CHARSET));
            pushSocket.sendMore(new byte[]{PROTOCOL_VERSION});
            pushSocket.send(messageFrame, 0);
            LOG.info("Published Ticker24h snapshot: topic={}, bytes={}", topic, protoBytes.length);
        } catch (Exception e) {
            // Best-effort publish: log and continue rather than failing the Flink job
            LOG.error("Failed to publish Ticker24h: exchange={}", wrapper.getExchangeId(), e);
        }
    }

    @Override
    public void close() {
        if (pushSocket != null) pushSocket.close();
        if (zmqContext != null) zmqContext.close();
        LOG.info("Ticker24hPublisher closed");
    }
}

View File

@@ -0,0 +1,71 @@
package com.dexorder.flink.quotes;
import com.dexorder.flink.ingestor.IngestorBroker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Schedules periodic TICKER_SNAPSHOT requests for all configured exchanges.
 *
 * Fires immediately on startup, then at the top of each hour.
 * The IngestorBroker dispatches the requests to ingestor workers, which call
 * fetchTickers() and publish TickerBatch messages to the market-ticker Kafka topic.
 */
public class Ticker24hScheduler {
    private static final Logger LOG = LoggerFactory.getLogger(Ticker24hScheduler.class);

    /** One hour in milliseconds — the firing period after the first aligned run. */
    private static final long HOUR_MS = 3_600_000L;

    private final IngestorBroker broker;
    private final List<String> exchanges;
    private final ScheduledExecutorService scheduler;

    public Ticker24hScheduler(IngestorBroker broker, List<String> exchanges) {
        this.broker = broker;
        this.exchanges = exchanges;
        // Daemon worker thread so the scheduler never blocks JVM shutdown
        this.scheduler = Executors.newSingleThreadScheduledExecutor(runnable -> {
            Thread worker = new Thread(runnable, "Ticker24hScheduler");
            worker.setDaemon(true);
            return worker;
        });
    }

    /** Fires once right away, then schedules hourly runs aligned to the wall-clock hour. */
    public void start() {
        fireAll();
        long initialDelayMs = msUntilNextHour();
        scheduler.scheduleAtFixedRate(this::fireAll, initialDelayMs, HOUR_MS, TimeUnit.MILLISECONDS);
        LOG.info("Ticker24hScheduler started: fired immediately for {}, next firing in ~{}min",
                exchanges, initialDelayMs / 60_000);
    }

    /** Cancels any pending firings; in-flight broker requests are not recalled. */
    public void stop() {
        scheduler.shutdownNow();
        LOG.info("Ticker24hScheduler stopped");
    }

    /** Submit one TICKER_SNAPSHOT per exchange; a failure on one does not block the rest. */
    private void fireAll() {
        LOG.info("Ticker24hScheduler firing TICKER_SNAPSHOT requests for exchanges: {}", exchanges);
        for (String exchange : exchanges) {
            try {
                broker.submitTicker24hRequest(exchange);
            } catch (Exception e) {
                LOG.error("Failed to submit TICKER_SNAPSHOT for exchange={}", exchange, e);
            }
        }
    }

    /** Milliseconds until the next full-hour boundary (e.g., 14:00:00.000). */
    private static long msUntilNextHour() {
        long nowMs = System.currentTimeMillis();
        return HOUR_MS - (nowMs % HOUR_MS);
    }
}

View File

@@ -0,0 +1,24 @@
package com.dexorder.flink.quotes;
import java.io.Serializable;
/**
 * Immutable carrier for one serialized Ticker24h proto message, ready for ZMQ publication.
 *
 * NOTE(review): the byte array is stored and returned without copying — callers are
 * trusted not to mutate it after construction; confirm this holds for all producers.
 */
public class Ticker24hWrapper implements Serializable {
    private static final long serialVersionUID = 1L;

    private final String exchangeId;
    // "RESPONSE:{clientId}" for client-initiated requests, "{exchange}|ticker24h" for broadcasts
    private final String zmqTopic;
    // Serialized Ticker24h proto payload
    private final byte[] protoBytes;

    public Ticker24hWrapper(String exchangeId, String zmqTopic, byte[] protoBytes) {
        this.exchangeId = exchangeId;
        this.zmqTopic = zmqTopic;
        this.protoBytes = protoBytes;
    }

    public String getExchangeId() {
        return exchangeId;
    }

    public String getZmqTopic() {
        return zmqTopic;
    }

    public byte[] getProtoBytes() {
        return protoBytes;
    }
}

View File

@@ -0,0 +1,95 @@
package com.dexorder.flink.quotes;
import com.dexorder.proto.TickerBatch;
import com.dexorder.proto.TickerStats;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Kafka deserializer for TickerBatch protobuf messages from the market-ticker topic.
 * Wire format: [0x01 version][0x0C type][protobuf bytes]
 */
public class TickerBatchDeserializer implements DeserializationSchema<TickerBatchWrapper> {
    private static final long serialVersionUID = 1L;
    private static final Logger LOG = LoggerFactory.getLogger(TickerBatchDeserializer.class);

    // Wire framing constants — must match the ingestor-side producer
    private static final byte PROTOCOL_VERSION = 0x01;
    private static final byte MSG_TYPE_TICKER_BATCH = 0x0C;

    /**
     * Validates the 2-byte frame header, then parses the protobuf payload.
     *
     * @throws IOException on a short message, unknown version/type, or protobuf parse failure
     */
    @Override
    public TickerBatchWrapper deserialize(byte[] message) throws IOException {
        // Framing checks throw IOException directly — previously these were caught
        // by our own catch block and double-wrapped/double-logged.
        if (message.length < 2) {
            throw new IOException("Message too short: " + message.length + " bytes");
        }
        byte version = message[0];
        if (version != PROTOCOL_VERSION) {
            throw new IOException("Unsupported protocol version: " + version);
        }
        byte messageType = message[1];
        if (messageType != MSG_TYPE_TICKER_BATCH) {
            throw new IOException("Unexpected message type: 0x" + Integer.toHexString(messageType & 0xFF));
        }
        byte[] protoPayload = new byte[message.length - 2];
        System.arraycopy(message, 2, protoPayload, 0, protoPayload.length);
        try {
            TickerBatchWrapper wrapper = parseTickerBatch(protoPayload);
            LOG.debug("Deserialized TickerBatch: exchange={}, tickers={}",
                    wrapper.getExchangeId(), wrapper.getTickerCount());
            return wrapper;
        } catch (Exception e) {
            LOG.error("Failed to deserialize TickerBatch", e);
            throw new IOException("Failed to deserialize TickerBatch", e);
        }
    }

    /** Unwrap the TickerBatch proto into a plain-Java wrapper; optional proto fields become nulls. */
    private TickerBatchWrapper parseTickerBatch(byte[] payload) throws Exception {
        TickerBatch batch = TickerBatch.parseFrom(payload);
        List<TickerBatchWrapper.TickerStatsRow> rows = new ArrayList<>(batch.getTickersCount());
        for (TickerStats ts : batch.getTickersList()) {
            rows.add(new TickerBatchWrapper.TickerStatsRow(
                    ts.getTicker(),
                    ts.getExchangeId(),
                    ts.getBaseAsset(),
                    ts.getQuoteAsset(),
                    ts.getLastPrice(),
                    ts.getPriceChangePct(),
                    ts.getQuoteVolume24H(),
                    ts.getTimestamp(),
                    ts.hasBidPrice() ? ts.getBidPrice() : null,
                    ts.hasAskPrice() ? ts.getAskPrice() : null,
                    ts.hasOpen24H() ? ts.getOpen24H() : null,
                    ts.hasHigh24H() ? ts.getHigh24H() : null,
                    ts.hasLow24H() ? ts.getLow24H() : null,
                    ts.hasVolume24H() ? ts.getVolume24H() : null,
                    ts.hasNumTrades() ? ts.getNumTrades() : null
            ));
        }
        return new TickerBatchWrapper(
                batch.getExchangeId(), rows, batch.getFetchedAt(),
                batch.hasClientId() ? batch.getClientId() : "",
                batch.hasRequestId() ? batch.getRequestId() : "");
    }

    @Override
    public boolean isEndOfStream(TickerBatchWrapper nextElement) {
        return false;
    }

    @Override
    public TypeInformation<TickerBatchWrapper> getProducedType() {
        return TypeInformation.of(TickerBatchWrapper.class);
    }
}

View File

@@ -0,0 +1,86 @@
package com.dexorder.flink.quotes;
import java.io.Serializable;
import java.util.List;
/**
 * POJO wrapper for TickerBatch Kafka messages from market-ticker topic.
 * Unwraps the protobuf into plain Java fields for Flink processing.
 */
public class TickerBatchWrapper implements Serializable {
    private static final long serialVersionUID = 1L;

    private final String exchangeId;
    private final List<TickerStatsRow> tickers;
    private final long fetchedAt;   // nanoseconds
    private final String clientId;  // non-empty = client-initiated; "" = scheduled broadcast
    private final String requestId; // echoed for tracing

    public TickerBatchWrapper(String exchangeId, List<TickerStatsRow> tickers, long fetchedAt,
                              String clientId, String requestId) {
        this.exchangeId = exchangeId;
        this.tickers = tickers;
        this.fetchedAt = fetchedAt;
        // Normalize nulls to empty strings so downstream code can rely on non-null IDs
        this.clientId = (clientId == null) ? "" : clientId;
        this.requestId = (requestId == null) ? "" : requestId;
    }

    public String getExchangeId() {
        return exchangeId;
    }

    public List<TickerStatsRow> getTickers() {
        return tickers;
    }

    public long getFetchedAt() {
        return fetchedAt;
    }

    public String getClientId() {
        return clientId;
    }

    public String getRequestId() {
        return requestId;
    }

    /** Row count; a null ticker list counts as zero. */
    public int getTickerCount() {
        if (tickers == null) {
            return 0;
        }
        return tickers.size();
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("TickerBatchWrapper{exchangeId='");
        sb.append(exchangeId).append("', count=").append(getTickerCount()).append('}');
        return sb.toString();
    }

    /**
     * Single ticker stats row. Optional fields are null when the exchange did not provide them.
     */
    public static class TickerStatsRow implements Serializable {
        private static final long serialVersionUID = 1L;

        public final String ticker;
        public final String exchangeId;
        public final String baseAsset;
        public final String quoteAsset;
        public final double lastPrice;
        public final double priceChangePct;
        public final double quoteVolume24h;
        public final long timestamp; // nanoseconds

        // Optional fields — null if not provided by exchange
        public final Double bidPrice;
        public final Double askPrice;
        public final Double open24h;
        public final Double high24h;
        public final Double low24h;
        public final Double volume24h;
        public final Integer numTrades;

        public TickerStatsRow(
                String ticker, String exchangeId, String baseAsset, String quoteAsset,
                double lastPrice, double priceChangePct, double quoteVolume24h, long timestamp,
                Double bidPrice, Double askPrice,
                Double open24h, Double high24h, Double low24h, Double volume24h,
                Integer numTrades) {
            // Required fields
            this.ticker = ticker;
            this.exchangeId = exchangeId;
            this.baseAsset = baseAsset;
            this.quoteAsset = quoteAsset;
            this.lastPrice = lastPrice;
            this.priceChangePct = priceChangePct;
            this.quoteVolume24h = quoteVolume24h;
            this.timestamp = timestamp;
            // Optional fields
            this.bidPrice = bidPrice;
            this.askPrice = askPrice;
            this.open24h = open24h;
            this.high24h = high24h;
            this.low24h = low24h;
            this.volume24h = volume24h;
            this.numTrades = numTrades;
        }
    }
}

View File

@@ -28,6 +28,16 @@ topics:
compression.type: snappy
cleanup.policy: delete
# 24-hour rolling ticker snapshots for all symbols on an exchange.
# Written by ingestors on TICKER_SNAPSHOT requests; consumed by the Flink Ticker24h pipeline (TickerBatchDeserializer → Ticker24hFunction).
- name: market-ticker
partitions: 3
replication: 2
config:
retention.ms: 7200000 # 2 hours (hourly refresh; keep one backup)
compression.type: snappy
cleanup.policy: delete
# Symbol metadata from ingestors
- name: symbol-metadata
partitions: 3