redis pipeline autoflush after 10000 entries
@@ -11,12 +11,39 @@ from dexorder import config
 log = logging.getLogger(__name__)
 
+class FlushingPipeline:
+
+    def __init__(self, redis: Redis):
+        self.redis = redis
+        self.pipe: Pipeline = redis.pipeline()
+        self.full_pipes: list[Pipeline] = []
+        self.count = 0
+        self.flush_at = 10_000
+
+    def __getattr__(self, item):
+        if item in ('sadd', 'srem', 'hset', 'hdel', 'json'):
+            self.count += 1
+            if self.count >= self.flush_at:
+                self.full_pipes.append(self.pipe)
+                self.pipe = self.redis.pipeline()
+                self.count = 0
+        return getattr(self.pipe, item)
+
+    async def execute(self):
+        for pipe in self.full_pipes:
+            await pipe.execute()
+        await self.pipe.execute()
+        self.pipe = None
+        self.full_pipes.clear()
+        self.count = 0
+
+
 class Memcache:
 
     @staticmethod
     @asynccontextmanager
     async def batch():
         old_redis: Redis = current_redis.get()
-        pipe: Pipeline = old_redis.pipeline()
+        pipe = FlushingPipeline(old_redis)
         # noinspection PyTypeChecker
         current_redis.set(pipe)
         try:
             yield pipe
@@ -91,18 +91,19 @@ class RedisState (SeriesCollection):
                 hsets[series][key] = value
             else:
                 raise NotImplementedError
-        r: Redis = current_redis.get()
+        async with memcache.batch() as r:
+            r: Pipeline
         for series, keys in sadds.items():
-            await r.sadd(series, *keys)
+            r.sadd(series, *keys)
         for series, keys in sdels.items():
-            await r.srem(series, *keys)
+            r.srem(series, *keys)
         for series, kvs in hsets.items():
-            await r.hset(series, mapping=kvs)
+            r.hset(series, mapping=kvs)
         for series, keys in hdels.items():
-            await r.hdel(series, *keys)
+            r.hdel(series, *keys)
         block_series = f'{chain_id}|head'
         headstr = hexstr(fork.head)
-        await r.json(json_encoder).set(block_series,'$',[fork.height, headstr])
+        r.json(json_encoder).set(block_series,'$',[fork.height, headstr])
         pubs.append((str(chain_id), 'head', [fork.height, headstr]))
         # separate batch for pubs
         if pubs:
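For context, a minimal usage sketch of the batching behaviour introduced above, written against redis-py's asyncio client (which appears to match the Redis and Pipeline types used in this diff). The import path for FlushingPipeline and the connection settings are hypothetical; only the class itself comes from this commit. Commands routed through the wrapper are buffered, and once 10,000 counted commands have been issued the current pipeline is set aside and a fresh one started, so no single MULTI batch grows without bound; execute() then flushes the set-aside pipelines followed by the active one.

# Usage sketch (not part of the commit). Assumes redis-py's asyncio API;
# the import path below is hypothetical and should point at wherever the
# class lives in the dexorder codebase.
import asyncio

from redis.asyncio import Redis

from dexorder.memcache import FlushingPipeline  # hypothetical module path


async def main():
    redis = Redis(host='localhost', port=6379)
    pipe = FlushingPipeline(redis)

    # Each counted command ('sadd', 'srem', 'hset', 'hdel', 'json') bumps an
    # internal counter; when it reaches 10_000 the current Pipeline is parked
    # in full_pipes and a fresh one is started, so these 25_000 adds end up
    # spread across three pipelines instead of one unbounded command stack.
    for i in range(25_000):
        pipe.sadd('demo|members', f'member-{i}')

    # execute() drains the parked pipelines first, then the active one.
    await pipe.execute()
    await redis.aclose()  # redis-py >= 5.0


asyncio.run(main())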