backend redesign

This commit is contained in:
2026-03-11 18:47:11 -04:00
parent 8ff277c8c6
commit e99ef5d2dd
210 changed files with 12147 additions and 155 deletions

View File

@@ -0,0 +1,224 @@
"""
Trigger queue - priority queue with sequence number assignment.
All operations flow through this queue:
- WebSocket messages from users
- Cron scheduled tasks
- DataSource bar updates
- Manual triggers
Queue assigns seq numbers on dequeue, executes triggers, and submits to coordinator.
"""
import asyncio
import logging
from typing import Optional
from .context import ExecutionContext, clear_execution_context, set_execution_context
from .coordinator import CommitCoordinator
from .types import Priority, PriorityTuple, Trigger
logger = logging.getLogger(__name__)
class TriggerQueue:
    """
    Priority queue for trigger execution.

    Key responsibilities:
    - Maintain priority queue (lowest priority tuple dequeued first)
    - Assign sequence numbers on dequeue (determines commit order)
    - Execute triggers with execution context set
    - Submit results to CommitCoordinator
    - Handle execution errors gracefully
    """

    def __init__(self, coordinator: CommitCoordinator):
        """
        Initialize trigger queue.

        Args:
            coordinator: CommitCoordinator for handling commits
        """
        self._coordinator = coordinator
        self._queue: asyncio.PriorityQueue = asyncio.PriorityQueue()
        self._seq_counter = 0
        self._seq_lock = asyncio.Lock()
        self._processor_task: Optional[asyncio.Task] = None
        # Strong references to in-flight execution tasks. The event loop
        # keeps only a weak reference to tasks made with asyncio.create_task,
        # so without this set a running execution could be garbage-collected
        # mid-flight (see asyncio.create_task documentation).
        self._execution_tasks: set[asyncio.Task] = set()
        self._running = False

    async def start(self) -> None:
        """Start the queue processor (idempotent; warns if already running)."""
        if self._running:
            logger.warning("TriggerQueue already running")
            return
        self._running = True
        self._processor_task = asyncio.create_task(self._process_loop())
        logger.info("TriggerQueue started")

    async def stop(self) -> None:
        """
        Stop the queue processor gracefully.

        Cancels the dequeue loop, then waits for any already-dequeued
        trigger executions to finish so their commits are not abandoned.
        """
        if not self._running:
            return
        self._running = False
        if self._processor_task:
            self._processor_task.cancel()
            try:
                await self._processor_task
            except asyncio.CancelledError:
                pass
        # Drain in-flight executions; their failures are handled and logged
        # inside _execute_trigger, so exceptions are only collected here.
        if self._execution_tasks:
            await asyncio.gather(*self._execution_tasks, return_exceptions=True)
        logger.info("TriggerQueue stopped")

    async def enqueue(
        self,
        trigger: Trigger,
        priority_override: Optional[Priority | PriorityTuple] = None
    ) -> int:
        """
        Add a trigger to the queue.

        Args:
            trigger: Trigger to execute
            priority_override: Override priority (simple Priority or tuple)
                If None, uses trigger's priority/priority_tuple
                If Priority enum, creates single-element tuple
                If tuple, uses as-is
        Returns:
            Queue sequence number (appended to priority tuple)
        Examples:
            # Simple priority
            await queue.enqueue(trigger, Priority.USER_AGENT)
            # Results in: (Priority.USER_AGENT, queue_seq)
            # Tuple priority with event time
            await queue.enqueue(
                trigger,
                (Priority.DATA_SOURCE, bar_data['time'])
            )
            # Results in: (Priority.DATA_SOURCE, bar_time, queue_seq)
            # Let trigger decide
            await queue.enqueue(trigger)
        """
        # Get monotonic seq for queue ordering. The trailing queue_seq makes
        # every tuple unique, so the heap never has to compare Trigger objects.
        async with self._seq_lock:
            queue_seq = self._seq_counter
            self._seq_counter += 1
        # Determine priority tuple
        if priority_override is not None:
            if isinstance(priority_override, Priority):
                # Convert simple priority to tuple
                priority_tuple = (priority_override.value, queue_seq)
            else:
                # Use provided tuple, append queue_seq.
                # NOTE(review): assumes all enqueued tuples have mutually
                # comparable elements position-by-position (e.g. enum members
                # vs raw .value ints) — verify Priority supports this.
                priority_tuple = priority_override + (queue_seq,)
        else:
            # Let trigger determine its own priority tuple
            priority_tuple = trigger.get_priority_tuple(queue_seq)
        # Priority queue: (priority_tuple, trigger)
        # Python's PriorityQueue compares tuples element-by-element
        await self._queue.put((priority_tuple, trigger))
        logger.debug(
            f"Enqueued: {trigger.name} with priority_tuple={priority_tuple}"
        )
        return queue_seq

    async def _process_loop(self) -> None:
        """
        Main processing loop.

        Dequeues triggers, assigns execution seq, launches execution in the
        background, and keeps a strong reference to each execution task.
        """
        execution_seq = 0  # Separate counter for execution sequence
        while self._running:
            try:
                # Wait for next trigger (with timeout to re-check _running)
                try:
                    priority_tuple, trigger = await asyncio.wait_for(
                        self._queue.get(), timeout=1.0
                    )
                except asyncio.TimeoutError:
                    continue
                # Assign execution sequence number
                execution_seq += 1
                logger.info(
                    f"Dequeued: seq={execution_seq}, trigger={trigger.name}, "
                    f"priority_tuple={priority_tuple}"
                )
                # Execute in background (don't block queue). Hold a strong
                # reference until the task completes, then drop it.
                task = asyncio.create_task(
                    self._execute_trigger(execution_seq, trigger)
                )
                self._execution_tasks.add(task)
                task.add_done_callback(self._execution_tasks.discard)
                # Mark item processed so queue.join() (if ever used) works.
                self._queue.task_done()
            except Exception as e:
                # Boundary catch: the loop must survive unexpected errors.
                logger.error(f"Error in process loop: {e}", exc_info=True)

    async def _execute_trigger(self, seq: int, trigger: Trigger) -> None:
        """
        Execute a trigger with proper context and error handling.

        Args:
            seq: Execution sequence number
            trigger: Trigger to execute
        """
        # Set up execution context
        ctx = ExecutionContext(
            seq=seq,
            trigger_name=trigger.name,
        )
        set_execution_context(ctx)
        # Record execution start with coordinator
        await self._coordinator.start_execution(seq, trigger)
        try:
            logger.info(f"Executing: seq={seq}, trigger={trigger.name}")
            # Execute trigger (can be long-running)
            commit_intents = await trigger.execute()
            logger.info(
                f"Execution complete: seq={seq}, {len(commit_intents)} commit intents"
            )
            # Submit for sequential commit
            await self._coordinator.submit_for_commit(seq, commit_intents)
        except Exception as e:
            logger.error(
                f"Execution failed: seq={seq}, trigger={trigger.name}, error={e}",
                exc_info=True,
            )
            # Notify coordinator of failure
            await self._coordinator.execution_failed(seq, e)
        finally:
            # Always clear context so it cannot leak into unrelated tasks.
            clear_execution_context()

    def get_queue_size(self) -> int:
        """Get current queue size (approximate)."""
        return self._queue.qsize()

    def is_running(self) -> bool:
        """Check if queue processor is running."""
        return self._running

    def __repr__(self) -> str:
        return (
            f"TriggerQueue(running={self._running}, queue_size={self.get_queue_size()})"
        )