17
17
TaxonRank ,
18
18
group_images_into_events ,
19
19
)
20
+ from ami .ml .models .backend import Backend
20
21
from ami .ml .tasks import create_detection_images
21
22
from ami .tests .fixtures .storage import GeneratedTestFrame , create_storage_source , populate_bucket
22
23
@@ -33,62 +34,56 @@ def update_site_settings(**kwargs):
33
34
return site
34
35
35
36
36
# @TODO: To test this: delete project in admin, then run migrate
# (this will execute the signal in ami-platform/ami/tests/fixtures/signals.py)
def create_ml_backends(project):
    """Get or create the ML processing Backend records used by the test fixtures.

    For each entry in ``backends_to_add``, look up a ``Backend`` by its
    ``endpoint_url`` (creating it if missing), attach every named project to
    it, then ask the backend to register its pipelines.

    Args:
        project: The Project whose name is linked to the fixture backend(s).

    Returns:
        The last ``Backend`` processed (``None`` only if ``backends_to_add``
        were ever empty).
    """
    backends_to_add = [
        {
            "projects": [{"name": project.name}],
            "endpoint_url": "http://ml_backend:2000",
        },
    ]

    backend = None
    for backend_data in backends_to_add:
        # endpoint_url is the natural key for a backend in these fixtures.
        backend, created = Backend.objects.get_or_create(
            endpoint_url=backend_data["endpoint_url"],
        )

        if created:
            logger.info(f'Successfully created backend with {backend_data["endpoint_url"]}.')
        else:
            logger.info(f'Using existing backend with {backend_data["endpoint_url"]}.')

        for project_data in backend_data["projects"]:
            # Catch only the "project not found" case so genuine programming
            # or database errors still surface. Use a local name instead of
            # shadowing the `project` parameter.
            try:
                linked_project = Project.objects.get(name=project_data["name"])
            except Project.DoesNotExist:
                logger.error(f'Could not find project {project_data["name"]}.')
            else:
                backend.projects.add(linked_project)

        backend.save()

        # Let the backend register/refresh its pipelines after the projects
        # have been associated.
        backend.create_pipelines()

    return backend
75
70
76
71
def setup_test_project(reuse=True) -> tuple[Project, Deployment]:
    """Create (or reuse) a test Project with a Deployment and ML backends.

    Args:
        reuse: When True, get-or-create a deterministic fixture project so
            repeated runs share the same records. When False, create a brand
            new project/deployment with a unique suffix.

    Returns:
        A ``(project, deployment)`` tuple.
    """
    # Deterministic suffix for the shared/reusable fixture project.
    short_id = "1ed10463"
    if reuse:
        project, _ = Project.objects.get_or_create(name=f"Test Project {short_id}")
        data_source = create_storage_source(project, "Test Data Source")
        deployment, _ = Deployment.objects.get_or_create(
            project=project, name="Test Deployment", defaults=dict(data_source=data_source)
        )
        create_ml_backends(project)
    else:
        # A fresh, unique suffix is required here: `.create()` with a fixed
        # name would collide with records left over from a previous run.
        short_id = uuid.uuid4().hex[:8]
        project = Project.objects.create(name=f"Test Project {short_id}")
        data_source = create_storage_source(project, f"Test Data Source {short_id}")
        deployment = Deployment.objects.create(
            project=project, name=f"Test Deployment {short_id}", data_source=data_source
        )
        create_ml_backends(project)
    return project, deployment
93
88
94
89
0 commit comments