@@ -194,24 +194,23 @@ def process_images(
     task_logger.info(f"Sending {len(images)} images to Pipeline {pipeline}")
     urls = [source_image.public_url() for source_image in images if source_image.public_url()]

-    source_images: list[SourceImageRequest] = []
+    source_image_requests: list[SourceImageRequest] = []
     detection_requests: list[DetectionRequest] = []

     for source_image, url in zip(images, urls):
         if url:
-            source_images.append(
-                SourceImageRequest(
-                    id=str(source_image.pk),
-                    url=url,
-                )
+            source_image_request = SourceImageRequest(
+                id=str(source_image.pk),
+                url=url,
             )
-            # Only re-process detections created by the pipeline's detector
+            source_image_requests.append(source_image_request)
+            # Re-process all existing detections if they exist
             for detection in source_image.detections.all():
                 bbox = detection.get_bbox()
                 if bbox and detection.detection_algorithm:
                     detection_requests.append(
                         DetectionRequest(
-                            source_image=source_images[-1],
+                            source_image=source_image_request,
                             bbox=bbox,
                             crop_image_url=detection.url(),
                             algorithm=AlgorithmReference(
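In this hunk the per-image request object gets its own name: `source_image_request` is built once, appended to `source_image_requests`, and then passed directly into every `DetectionRequest` for that image, replacing the positional `source_images[-1]` lookup. A minimal sketch of the pattern, using hypothetical stand-in dataclasses rather than the project's real request models:

```python
# Stand-in types for illustration only; the real SourceImageRequest /
# DetectionRequest models in the codebase carry more fields.
from dataclasses import dataclass


@dataclass
class SourceImageRequest:
    id: str
    url: str


@dataclass
class DetectionRequest:
    source_image: SourceImageRequest
    bbox: tuple[int, int, int, int]


source_image_requests: list[SourceImageRequest] = []
detection_requests: list[DetectionRequest] = []

# Fake input standing in for SourceImage rows and their existing detections.
fake_images = [("101", "https://example.org/101.jpg", [(0, 0, 50, 50), (10, 10, 40, 40)])]

for pk, url, bboxes in fake_images:
    source_image_request = SourceImageRequest(id=pk, url=url)
    source_image_requests.append(source_image_request)
    for bbox in bboxes:
        # Passing the object itself keeps each detection tied to the exact
        # image request it belongs to, even if the list is filtered later.
        detection_requests.append(
            DetectionRequest(source_image=source_image_request, bbox=bbox)
        )

assert all(d.source_image in source_image_requests for d in detection_requests)
```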
@@ -231,7 +230,7 @@ def process_images(

     request_data = PipelineRequest(
         pipeline=pipeline.slug,
-        source_images=source_images,
+        source_images=source_image_requests,
         config=config,
         detections=detection_requests,
     )
@@ -253,7 +252,8 @@ def process_images(
             pipeline=pipeline.slug,
             total_time=0,
             source_images=[
-                SourceImageResponse(id=source_image.id, url=source_image.url) for source_image in source_images
+                SourceImageResponse(id=source_image_request.id, url=source_image_request.url)
+                for source_image_request in source_image_requests
             ],
             detections=[],
             errors=msg,
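On the error path, the response is assembled from the same `source_image_requests` that were just built: each request is echoed back as a `SourceImageResponse`, with an empty `detections` list and the error message attached, so the failure stays traceable per image. A rough sketch of that fallback shape, again with hypothetical stand-in classes rather than the project's real response models:

```python
# Illustrative stand-ins; the real response models are defined elsewhere
# in the codebase and likely carry more fields.
from dataclasses import dataclass, field


@dataclass
class SourceImageResponse:
    id: str
    url: str


@dataclass
class FallbackPipelineResponse:
    pipeline: str
    total_time: float
    source_images: list[SourceImageResponse]
    detections: list = field(default_factory=list)
    errors: str = ""


def build_error_response(pipeline_slug, source_image_requests, msg):
    # Echo every attempted image back to the caller, even though no
    # detections were produced.
    return FallbackPipelineResponse(
        pipeline=pipeline_slug,
        total_time=0,
        source_images=[
            SourceImageResponse(id=req.id, url=req.url)
            for req in source_image_requests
        ],
        detections=[],
        errors=msg,
    )
```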
@@ -992,7 +992,7 @@ def collect_images(
         )

     def choose_processing_service_for_pipeline(
-        self, job_id: int, pipeline_name: str, project_id: int
+        self, job_id: int | None, pipeline_name: str, project_id: int
     ) -> ProcessingService:
         # @TODO use the cached `last_checked_latency` and a max age to avoid checking every time
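Widening `job_id` to `int | None` lets callers resolve a processing service outside of a Job context (for example, ad-hoc or test runs). The method body is not shown in this hunk, so the guard below is only a plausible sketch of how an optional job id tends to be handled, not the project's actual implementation; every name except the `int | None` annotation is illustrative:

```python
# Hypothetical sketch of handling an optional job id.
import logging

logger = logging.getLogger(__name__)


def choose_service(job_id: int | None, pipeline_name: str, project_id: int) -> str:
    if job_id is not None:
        # Attach job context to logs only when a Job is actually driving the run.
        logger.info("Choosing service for job %s, pipeline %s", job_id, pipeline_name)
    else:
        logger.info("Choosing service for ad-hoc run of pipeline %s", pipeline_name)
    # ... latency checks and service selection would happen here in the real method ...
    return f"service-for-{pipeline_name}-project-{project_id}"
```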