redis pipeline autoflush after 10000 entries

commit d49f142fe3
parent 34fa439b3c
Author: tim
Date:   2025-04-01 10:54:25 -04:00

2 changed files with 42 additions and 14 deletions


@@ -11,12 +11,39 @@ from dexorder import config
 log = logging.getLogger(__name__)
 
+
+class FlushingPipeline:
+    def __init__(self, redis: Redis):
+        self.redis = redis
+        self.pipe: Pipeline = redis.pipeline()
+        self.full_pipes: list[Pipeline] = []
+        self.count = 0
+        self.flush_at = 10_000
+
+    def __getattr__(self, item):
+        if item in ('sadd', 'srem', 'hset', 'hdel', 'json'):
+            self.count += 1
+            if self.count >= self.flush_at:
+                self.full_pipes.append(self.pipe)
+                self.pipe = self.redis.pipeline()
+                self.count = 0
+        return getattr(self.pipe, item)
+
+    async def execute(self):
+        for pipe in self.full_pipes:
+            await pipe.execute()
+        await self.pipe.execute()
+        self.pipe = None
+        self.full_pipes.clear()
+        self.count = 0
+
+
 class Memcache:
     @staticmethod
     @asynccontextmanager
     async def batch():
         old_redis: Redis = current_redis.get()
-        pipe: Pipeline = old_redis.pipeline()
+        pipe = FlushingPipeline(old_redis)
         # noinspection PyTypeChecker
         current_redis.set(pipe)
         try:
             yield pipe
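
To make the rollover concrete, here is a minimal usage sketch against redis-py's asyncio client, assuming the FlushingPipeline class above is in scope and a Redis server on the default localhost port; the key and member names are illustrative only. One subtlety: the counter ticks when a whitelisted command attribute is looked up, and the full pipeline is parked before that command is queued, so the command that trips the threshold lands on the fresh pipeline.

    # Minimal rollover sketch; assumes redis-py's asyncio client and the
    # FlushingPipeline class from the diff above. Key/member names are
    # illustrative only.
    import asyncio
    from redis.asyncio import Redis

    async def main():
        redis = Redis()                          # localhost:6379 assumed
        fp = FlushingPipeline(redis)
        for i in range(25_000):
            fp.sadd('demo|set', f'member:{i}')   # buffered, not awaited
        assert len(fp.full_pipes) == 2           # rollovers at lookups 10,000 and 20,000
        assert fp.count == 5_000                 # remainder sits in the live pipe
        await fp.execute()                       # drains parked pipes, then the live one
        await redis.aclose()

    asyncio.run(main())

The point of the cap is that no single pipeline accumulates an unbounded command stack: execute() sends the queued work in round trips of at most roughly flush_at commands each, instead of one enormous one.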


@@ -91,19 +91,20 @@ class RedisState (SeriesCollection):
                 hsets[series][key] = value
             else:
                 raise NotImplementedError
-        r: Redis = current_redis.get()
-        for series, keys in sadds.items():
-            await r.sadd(series, *keys)
-        for series, keys in sdels.items():
-            await r.srem(series, *keys)
-        for series, kvs in hsets.items():
-            await r.hset(series, mapping=kvs)
-        for series, keys in hdels.items():
-            await r.hdel(series, *keys)
-        block_series = f'{chain_id}|head'
-        headstr = hexstr(fork.head)
-        await r.json(json_encoder).set(block_series, '$', [fork.height, headstr])
-        pubs.append((str(chain_id), 'head', [fork.height, headstr]))
+        async with memcache.batch() as r:
+            r: Pipeline
+            for series, keys in sadds.items():
+                r.sadd(series, *keys)
+            for series, keys in sdels.items():
+                r.srem(series, *keys)
+            for series, kvs in hsets.items():
+                r.hset(series, mapping=kvs)
+            for series, keys in hdels.items():
+                r.hdel(series, *keys)
+            block_series = f'{chain_id}|head'
+            headstr = hexstr(fork.head)
+            r.json(json_encoder).set(block_series, '$', [fork.height, headstr])
+            pubs.append((str(chain_id), 'head', [fork.height, headstr]))
         # separate batch for pubs
         if pubs:
             await publish_all(pubs)
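
The second file's change swaps the per-command awaits for one buffered batch: inside memcache.batch() the sadd/srem/hset/hdel/json calls queue onto the FlushingPipeline (note they are no longer awaited), and everything is sent when the context manager exits. The first file's hunk is truncated at `yield pipe`, so the exit path is not shown; the sketch below reconstructs the pattern under the assumption that a finally block awaits execute() and restores the real client, with hypothetical stand-ins for this repo's current_redis ContextVar and memcache module.

    # Self-contained sketch of the batch() pattern; current_redis and batch()
    # here are hypothetical stand-ins, and the finally block is an assumption
    # since the diff above cuts off at `yield pipe`.
    import asyncio
    from contextlib import asynccontextmanager
    from contextvars import ContextVar
    from redis.asyncio import Redis

    current_redis: ContextVar = ContextVar('current_redis')

    @asynccontextmanager
    async def batch():
        old_redis = current_redis.get()
        pipe = FlushingPipeline(old_redis)   # class from the first file
        current_redis.set(pipe)
        try:
            yield pipe
        finally:
            await pipe.execute()             # assumed: flush everything on exit
            current_redis.set(old_redis)     # assumed: restore the real client

    async def main():
        current_redis.set(Redis())
        async with batch() as r:
            r.sadd('demo|set', 'a', 'b')                  # buffered
            r.hset('demo|hash', mapping={'k': 'v'})       # buffered
        # all queued commands were sent, in chunks, when the block exited

    asyncio.run(main())

With this shape, callers like RedisState keep the same flush logic but pay one chunked round trip per batch, and the 10,000-entry autoflush bounds how many commands any single pipeline buffers.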