1212
logger = make_logger(__name__)

# Default max number of span events drained from the queue in one batch
# (see AsyncSpanQueue._drain_loop).
_DEFAULT_BATCH_SIZE = 50
1517
1618class SpanEventType (str , Enum ):
1719 START = "start"
@@ -28,15 +30,18 @@ class _SpanQueueItem:
2830class AsyncSpanQueue :
2931 """Background FIFO queue for async span processing.
3032
31- Span events are enqueued synchronously (non-blocking) and processed
32- sequentially by a background drain task. This keeps tracing HTTP calls
33- off the critical request path while preserving start-before-end ordering.
33+ Span events are enqueued synchronously (non-blocking) and drained by a
34+ background task. Items are processed in batches: all START events in a
35+ batch are flushed concurrently, then all END events, so that per-span
36+ start-before-end ordering is preserved while HTTP calls for independent
37+ spans execute in parallel.
3438 """
3539
36- def __init__ (self ) -> None :
40+ def __init__ (self , batch_size : int = _DEFAULT_BATCH_SIZE ) -> None :
3741 self ._queue : asyncio .Queue [_SpanQueueItem ] = asyncio .Queue ()
3842 self ._drain_task : asyncio .Task [None ] | None = None
3943 self ._stopping = False
44+ self ._batch_size = batch_size
4045
4146 def enqueue (
4247 self ,
@@ -54,9 +59,45 @@ def _ensure_drain_running(self) -> None:
5459 if self ._drain_task is None or self ._drain_task .done ():
5560 self ._drain_task = asyncio .create_task (self ._drain_loop ())
5661
62+ # ------------------------------------------------------------------
63+ # Drain loop
64+ # ------------------------------------------------------------------
65+
5766 async def _drain_loop (self ) -> None :
5867 while True :
59- item = await self ._queue .get ()
68+ # Block until at least one item is available.
69+ first = await self ._queue .get ()
70+ batch : list [_SpanQueueItem ] = [first ]
71+
72+ # Opportunistically grab more ready items (non-blocking).
73+ while len (batch ) < self ._batch_size :
74+ try :
75+ batch .append (self ._queue .get_nowait ())
76+ except asyncio .QueueEmpty :
77+ break
78+
79+ try :
80+ # Separate START and END events. Processing all STARTs before
81+ # ENDs ensures that on_span_start completes before on_span_end
82+ # for any span whose both events land in the same batch.
83+ starts = [i for i in batch if i .event_type == SpanEventType .START ]
84+ ends = [i for i in batch if i .event_type == SpanEventType .END ]
85+
86+ if starts :
87+ await self ._process_items (starts )
88+ if ends :
89+ await self ._process_items (ends )
90+ finally :
91+ for _ in batch :
92+ self ._queue .task_done ()
93+ # Release span data for GC.
94+ batch .clear ()
95+
96+ @staticmethod
97+ async def _process_items (items : list [_SpanQueueItem ]) -> None :
98+ """Process a list of span events concurrently."""
99+
100+ async def _handle (item : _SpanQueueItem ) -> None :
60101 try :
61102 if item .event_type == SpanEventType .START :
62103 coros = [p .on_span_start (item .span ) for p in item .processors ]
@@ -72,9 +113,15 @@ async def _drain_loop(self) -> None:
72113 exc_info = result ,
73114 )
74115 except Exception :
75- logger .exception ("Unexpected error in span queue drain loop for span %s" , item .span .id )
76- finally :
77- self ._queue .task_done ()
116+ logger .exception (
117+ "Unexpected error in span queue for span %s" , item .span .id
118+ )
119+
120+ await asyncio .gather (* [_handle (item ) for item in items ])
121+
122+ # ------------------------------------------------------------------
123+ # Shutdown
124+ # ------------------------------------------------------------------
78125
79126 async def shutdown (self , timeout : float = 30.0 ) -> None :
80127 self ._stopping = True
0 commit comments