 from ddtrace.internal.datastreams.processor import DataStreamsCtx
 from ddtrace.internal.datastreams.processor import PartitionKey
 from ddtrace.internal.native import DDSketch
+from ddtrace.internal.service import ServiceStatus
+from ddtrace.internal.service import ServiceStatusError
 from tests.datastreams.test_public_api import MockedTracer
 
 
@@ -24,11 +26,24 @@ class CustomError(Exception):
 @pytest.fixture
 def dsm_processor(tracer):
     processor = tracer.data_streams_processor
+    # Clean up any existing context to prevent test pollution
+    try:
+        del processor._current_context.value
+    except AttributeError:
+        pass
+
     with mock.patch("ddtrace.internal.datastreams.data_streams_processor", return_value=processor):
         yield processor
     # flush buckets for the next test run
     processor.periodic()
 
+    try:
+        processor.shutdown(timeout=5)
+    except ServiceStatusError as e:
+        # Expected: processor already stopped by tracer shutdown during test teardown
+        if e.current_status == ServiceStatus.RUNNING:
+            raise
+
 
 @pytest.mark.parametrize("payload_and_length", [("test", 4), ("你".encode("utf-8"), 3), (b"test2", 5)])
 @pytest.mark.parametrize("key_and_length", [("test-key", 8), ("你".encode("utf-8"), 3), (b"t2", 2)])
@@ -44,11 +59,6 @@ def test_data_streams_payload_size(dsm_processor, consumer, producer, kafka_topi
     expected_payload_size += len(PROPAGATION_KEY_BASE_64)  # Add in header key length
     expected_payload_size += DSM_TEST_PATH_HEADER_SIZE  # to account for path header we add
 
-    try:
-        del dsm_processor._current_context.value
-    except AttributeError:
-        pass
-
     producer.produce(kafka_topic, payload, key=key, headers=test_headers)
     producer.flush()
     consumer.poll()
@@ -65,10 +75,6 @@ def test_data_streams_payload_size(dsm_processor, consumer, producer, kafka_topi
 
 def test_data_streams_kafka_serializing(dsm_processor, deserializing_consumer, serializing_producer, kafka_topic):
     PAYLOAD = bytes("data streams", encoding="utf-8")
-    try:
-        del dsm_processor._current_context.value
-    except AttributeError:
-        pass
     serializing_producer.produce(kafka_topic, value=PAYLOAD, key="test_key_2")
     serializing_producer.flush()
     message = None
@@ -80,10 +86,6 @@ def test_data_streams_kafka_serializing(dsm_processor, deserializing_consumer, s
 
 def test_data_streams_kafka(dsm_processor, consumer, producer, kafka_topic):
     PAYLOAD = bytes("data streams", encoding="utf-8")
-    try:
-        del dsm_processor._current_context.value
-    except AttributeError:
-        pass
     producer.produce(kafka_topic, PAYLOAD, key="test_key_1")
     producer.produce(kafka_topic, PAYLOAD, key="test_key_2")
     producer.flush()
@@ -127,10 +129,6 @@ def _read_single_message(consumer):
 
     PAYLOAD = bytes("data streams", encoding="utf-8")
     consumer = non_auto_commit_consumer
-    try:
-        del dsm_processor._current_context.value
-    except AttributeError:
-        pass
     buckets = dsm_processor._buckets
     producer.produce(kafka_topic, PAYLOAD, key="test_key_1")
     producer.produce(kafka_topic, PAYLOAD, key="test_key_2")
@@ -170,10 +168,6 @@ def _read_single_message(consumer):
 
     consumer = non_auto_commit_consumer
     PAYLOAD = bytes("data streams", encoding="utf-8")
-    try:
-        del dsm_processor._current_context.value
-    except AttributeError:
-        pass
     producer.produce(kafka_topic, PAYLOAD, key="test_key_1")
     producer.produce(kafka_topic, PAYLOAD, key="test_key_2")
     producer.flush()
@@ -207,10 +201,6 @@ def _read_single_message(consumer):
         return message
 
     PAYLOAD = bytes("data streams", encoding="utf-8")
-    try:
-        del dsm_processor._current_context.value
-    except AttributeError:
-        pass
     producer.produce(kafka_topic, PAYLOAD, key="test_key_1")
     producer.produce(kafka_topic, PAYLOAD, key="test_key_2")
     producer.flush()
@@ -236,10 +226,6 @@ def test_data_streams_kafka_produce_api_compatibility(dsm_processor, consumer, p
     kafka_topic = empty_kafka_topic
 
     PAYLOAD = bytes("data streams", encoding="utf-8")
-    try:
-        del dsm_processor._current_context.value
-    except AttributeError:
-        pass
 
     # All of these should work
     producer.produce(kafka_topic)
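
The teardown logic added to the dsm_processor fixture above follows a common pattern: tolerate a shutdown error only when the service is already stopped, and re-raise otherwise. A minimal standalone sketch of that pattern, using stand-in classes (FakeProcessor, plus local ServiceStatus/ServiceStatusError doubles that mirror ddtrace.internal.service but are not the real implementations):

import threading

import pytest


class ServiceStatus:
    # Stand-in for ddtrace.internal.service.ServiceStatus
    RUNNING = "running"
    STOPPED = "stopped"


class ServiceStatusError(Exception):
    # Stand-in carrying the status the service was in when the call failed
    def __init__(self, current_status):
        super().__init__("service in unexpected status: %s" % current_status)
        self.current_status = current_status


class FakeProcessor:
    """Stand-in for the data streams processor exercised by the tests above."""

    def __init__(self):
        self.status = ServiceStatus.RUNNING
        self._current_context = threading.local()  # per-thread DSM pathway context

    def periodic(self):
        pass  # would flush stats buckets

    def shutdown(self, timeout=None):
        if self.status != ServiceStatus.RUNNING:
            # Shutting down a non-running service raises, as in the real fixture
            raise ServiceStatusError(self.status)
        self.status = ServiceStatus.STOPPED


@pytest.fixture
def processor():
    proc = FakeProcessor()
    # Drop any context leaked by a previous test before this one starts
    try:
        del proc._current_context.value
    except AttributeError:
        pass
    yield proc
    proc.periodic()  # flush buckets for the next test run
    try:
        proc.shutdown(timeout=5)
    except ServiceStatusError as e:
        # Already stopped (e.g. by tracer shutdown) is expected; anything else is not
        if e.current_status == ServiceStatus.RUNNING:
            raise

Because the AttributeError guard and the status check both live in the fixture, none of the individual tests need their own cleanup boilerplate, which is exactly what the deletions in the hunks above accomplish.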