Skip to content

Commit b60246a

Browse files
authored
Merge branch 'develop' into aurelien/project-thumbnail-endpoint
2 parents e417dd9 + 7c40092 commit b60246a

40 files changed

+646
-584
lines changed
Lines changed: 154 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,154 @@
1+
"""schema
2+
3+
Revision ID: daf666ef6f34
4+
Revises:
5+
Create Date: 2025-09-17 16:00:56.718056
6+
7+
"""
8+
9+
from collections.abc import Sequence
10+
11+
import sqlalchemy as sa
12+
from alembic import op
13+
14+
# revision identifiers, used by Alembic.
15+
revision: str = "daf666ef6f34"
16+
down_revision: str | Sequence[str] | None = None
17+
branch_labels: str | Sequence[str] | None = None
18+
depends_on: str | Sequence[str] | None = None
19+
20+
21+
def upgrade() -> None:
    """Upgrade schema: create every table of the initial application schema."""

    def _audit_columns() -> list:
        # Build fresh Column objects on each call — a SQLAlchemy Column
        # instance cannot be attached to more than one table.
        return [
            sa.Column("created_at", sa.DateTime(), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
            sa.Column("updated_at", sa.DateTime(), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False),
        ]

    # Tables are created parents-first so that foreign keys always resolve.
    op.create_table(
        "projects",
        sa.Column("id", sa.Text(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("task_type", sa.String(length=50), nullable=False),
        sa.Column("exclusive_labels", sa.Boolean(), nullable=False),
        *_audit_columns(),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "sinks",
        sa.Column("id", sa.Text(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("sink_type", sa.String(length=50), nullable=False),
        sa.Column("rate_limit", sa.Float(), nullable=True),
        sa.Column("config_data", sa.JSON(), nullable=False),
        sa.Column("output_formats", sa.JSON(), nullable=False),
        *_audit_columns(),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "sources",
        sa.Column("id", sa.Text(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("source_type", sa.String(length=50), nullable=False),
        sa.Column("config_data", sa.JSON(), nullable=False),
        *_audit_columns(),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "dataset_revisions",
        sa.Column("id", sa.Text(), nullable=False),
        sa.Column("project_id", sa.Text(), nullable=False),
        sa.Column("files_deleted", sa.Boolean(), nullable=False),
        *_audit_columns(),
        sa.ForeignKeyConstraint(["project_id"], ["projects.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "labels",
        sa.Column("id", sa.Text(), nullable=False),
        sa.Column("project_id", sa.Text(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        # NOTE: audit columns precede color/hotkey here, matching the
        # autogenerated column order.
        *_audit_columns(),
        sa.Column("color", sa.String(length=7), nullable=True),
        sa.Column("hotkey", sa.String(length=10), nullable=True),
        sa.ForeignKeyConstraint(["project_id"], ["projects.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("project_id", "hotkey", name="uq_project_label_hotkey"),
        sa.UniqueConstraint("project_id", "name", name="uq_project_label_name"),
    )
    op.create_table(
        "model_revisions",
        sa.Column("id", sa.Text(), nullable=False),
        sa.Column("project_id", sa.Text(), nullable=False),
        sa.Column("architecture", sa.String(length=100), nullable=False),
        sa.Column("parent_revision", sa.Text(), nullable=True),
        sa.Column("training_status", sa.String(length=50), nullable=False),
        sa.Column("training_configuration", sa.JSON(), nullable=False),
        sa.Column("training_dataset_id", sa.Text(), nullable=True),
        sa.Column("label_schema_revision", sa.JSON(), nullable=False),
        sa.Column("training_started_at", sa.DateTime(), nullable=True),
        sa.Column("training_finished_at", sa.DateTime(), nullable=True),
        sa.Column("files_deleted", sa.Boolean(), nullable=False),
        *_audit_columns(),
        sa.ForeignKeyConstraint(["parent_revision"], ["model_revisions.id"]),
        sa.ForeignKeyConstraint(["project_id"], ["projects.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["training_dataset_id"], ["dataset_revisions.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "dataset_items",
        sa.Column("id", sa.Text(), nullable=False),
        sa.Column("project_id", sa.Text(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("format", sa.String(length=50), nullable=False),
        sa.Column("width", sa.Integer(), nullable=False),
        sa.Column("height", sa.Integer(), nullable=False),
        sa.Column("size", sa.Integer(), nullable=False),
        sa.Column("annotation_data", sa.JSON(), nullable=True),
        sa.Column("user_reviewed", sa.Boolean(), nullable=False),
        sa.Column("prediction_model_id", sa.Text(), nullable=True),
        sa.Column("source_id", sa.Text(), nullable=True),
        sa.Column("subset", sa.String(length=20), nullable=False),
        sa.Column("subset_assigned_at", sa.DateTime(), nullable=True),
        *_audit_columns(),
        sa.ForeignKeyConstraint(["prediction_model_id"], ["model_revisions.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["project_id"], ["projects.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "pipelines",
        sa.Column("project_id", sa.Text(), nullable=False),
        sa.Column("source_id", sa.Text(), nullable=True),
        sa.Column("sink_id", sa.Text(), nullable=True),
        sa.Column("model_revision_id", sa.Text(), nullable=True),
        sa.Column("is_running", sa.Boolean(), nullable=False),
        sa.Column("data_collection_policies", sa.JSON(), nullable=False),
        *_audit_columns(),
        sa.ForeignKeyConstraint(["model_revision_id"], ["model_revisions.id"], ondelete="RESTRICT"),
        sa.ForeignKeyConstraint(["project_id"], ["projects.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["sink_id"], ["sinks.id"], ondelete="RESTRICT"),
        sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="RESTRICT"),
        # One pipeline per project: project_id doubles as the primary key.
        sa.PrimaryKeyConstraint("project_id"),
    )
141+
142+
143+
def downgrade() -> None:
    """Downgrade schema: drop every table created by :func:`upgrade`."""
    # Drop order is the reverse of creation — children before parents —
    # so no foreign-key reference is left dangling mid-way.
    for table_name in (
        "pipelines",
        "dataset_items",
        "model_revisions",
        "labels",
        "dataset_revisions",
        "sources",
        "sinks",
        "projects",
    ):
        op.drop_table(table_name)

backend/app/api/endpoints/models.py

Lines changed: 1 addition & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
from typing import Annotated
55
from uuid import UUID
66

7-
from fastapi import APIRouter, Body, Depends, HTTPException, status
7+
from fastapi import APIRouter, Depends, HTTPException, status
88
from fastapi.openapi.models import Example
99

1010
from app.api.dependencies import get_model_id, get_model_service
@@ -55,28 +55,6 @@ async def get_model(
5555
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e))
5656

5757

58-
@router.patch(
59-
"/{model_id}",
60-
responses={
61-
status.HTTP_200_OK: {"description": "Model successfully updated", "model": Model},
62-
status.HTTP_400_BAD_REQUEST: {"description": "Invalid model ID or request body"},
63-
status.HTTP_404_NOT_FOUND: {"description": "Model not found"},
64-
},
65-
)
66-
async def update_model_metadata(
67-
model_id: Annotated[UUID, Depends(get_model_id)],
68-
model_metadata: Annotated[dict, Body(openapi_examples=UPDATE_MODEL_BODY_EXAMPLES)],
69-
model_service: Annotated[ModelService, Depends(get_model_service)],
70-
) -> Model:
71-
"""Update the metadata of an existing model"""
72-
if "format" in model_metadata:
73-
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="The 'format' field cannot be changed")
74-
try:
75-
return model_service.update_model(model_id, model_metadata)
76-
except ResourceNotFoundError as e:
77-
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e))
78-
79-
8058
@router.delete(
8159
"/{model_id}",
8260
status_code=status.HTTP_204_NO_CONTENT,

backend/app/cli.py

Lines changed: 23 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -5,19 +5,14 @@
55

66
import logging
77
import sys
8+
from datetime import datetime, timedelta
89

910
import click
1011

1112
from app.db import MigrationManager, get_db_session
12-
from app.db.schema import DatasetItemDB, LabelDB, ModelDB, PipelineDB, ProjectDB, SinkDB, SourceDB
13-
from app.schemas import (
14-
DisconnectedSinkConfig,
15-
DisconnectedSourceConfig,
16-
ModelFormat,
17-
OutputFormat,
18-
SinkType,
19-
SourceType,
20-
)
13+
from app.db.schema import DatasetItemDB, LabelDB, ModelRevisionDB, PipelineDB, ProjectDB, SinkDB, SourceDB
14+
from app.schemas import DisconnectedSinkConfig, DisconnectedSourceConfig, OutputFormat, SinkType, SourceType
15+
from app.schemas.model import TrainingStatus
2116
from app.schemas.project import TaskType
2217
from app.settings import get_settings
2318

@@ -84,8 +79,7 @@ def check_db() -> None:
8479

8580
@cli.command()
8681
@click.option("--with-model", default=False)
87-
@click.option("--model-name", default="card-detection-ssd")
88-
def seed(with_model: bool, model_name: str) -> None:
82+
def seed(with_model: bool) -> None:
8983
"""Seed the database with test data."""
9084
# If the app is running, it needs to be restarted since it doesn't track direct DB changes
9185
# Fixed IDs are used to ensure consistency in tests
@@ -97,7 +91,12 @@ def seed(with_model: bool, model_name: str) -> None:
9791
task_type=TaskType.DETECTION,
9892
exclusive_labels=True,
9993
)
100-
pipeline = PipelineDB()
94+
project.labels = [
95+
LabelDB(name="card", color="#FF0000", hotkey="c"),
96+
LabelDB(name="person", color="#00FF00", hotkey="p"),
97+
]
98+
db.add(project)
99+
db.flush()
101100

102101
# Create default disconnected source and sink
103102
disconnected_source_cfg = DisconnectedSourceConfig()
@@ -115,10 +114,9 @@ def seed(with_model: bool, model_name: str) -> None:
115114
output_formats=[],
116115
config_data={},
117116
)
118-
db.add(disconnected_source)
119-
db.add(disconnected_sink)
117+
db.add_all([disconnected_source, disconnected_sink])
120118

121-
project.pipeline = pipeline
119+
pipeline = PipelineDB(project_id=project.id)
122120
pipeline.source = SourceDB(
123121
id="f6b1ac22-e36c-4b36-9a23-62b0881e4223",
124122
name="Video Source",
@@ -134,17 +132,18 @@ def seed(with_model: bool, model_name: str) -> None:
134132
config_data={"folder_path": "data/output"},
135133
)
136134
if with_model:
137-
pipeline.model = ModelDB(
135+
pipeline.model_revision = ModelRevisionDB(
138136
id="977eeb18-eaac-449d-bc80-e340fbe052ad",
139-
name=model_name,
140-
format=ModelFormat.OPENVINO,
137+
project_id=project.id,
138+
architecture="Object_Detection_SSD",
139+
training_status=TrainingStatus.SUCCESSFUL,
140+
training_started_at=datetime.now() - timedelta(hours=24),
141+
training_finished_at=datetime.now() - timedelta(hours=23),
142+
training_configuration={},
143+
label_schema_revision={},
141144
)
142145
pipeline.is_running = True
143-
project.labels = [
144-
LabelDB(name="card", color="#FF0000", hotkey="c"),
145-
LabelDB(name="person", color="#00FF00", hotkey="p"),
146-
]
147-
db.add(project)
146+
db.add(pipeline)
148147
db.commit()
149148
click.echo("✓ Seeding successful!")
150149

@@ -155,7 +154,7 @@ def clean_db() -> None:
155154
with get_db_session() as db:
156155
db.query(DatasetItemDB).delete()
157156
db.query(ProjectDB).delete()
158-
db.query(ModelDB).delete()
157+
db.query(ModelRevisionDB).delete()
159158
db.query(SinkDB).delete()
160159
db.query(SourceDB).delete()
161160
db.commit()

backend/app/db/schema.py

Lines changed: 25 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ class PipelineDB(Base):
4545
project_id: Mapped[str] = mapped_column(Text, ForeignKey("projects.id", ondelete="CASCADE"), primary_key=True)
4646
source_id: Mapped[str | None] = mapped_column(Text, ForeignKey("sources.id", ondelete="RESTRICT"))
4747
sink_id: Mapped[str | None] = mapped_column(Text, ForeignKey("sinks.id", ondelete="RESTRICT"))
48-
model_id: Mapped[str | None] = mapped_column(Text, ForeignKey("models.id", ondelete="RESTRICT"))
48+
model_revision_id: Mapped[str | None] = mapped_column(Text, ForeignKey("model_revisions.id", ondelete="RESTRICT"))
4949
is_running: Mapped[bool] = mapped_column(Boolean, default=False)
5050
data_collection_policies: Mapped[list] = mapped_column(JSON, nullable=False, default=list)
5151
created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp())
@@ -54,7 +54,7 @@ class PipelineDB(Base):
5454
project = relationship("ProjectDB", back_populates="pipeline")
5555
sink = relationship("SinkDB", uselist=False)
5656
source = relationship("SourceDB", uselist=False)
57-
model = relationship("ModelDB", uselist=False)
57+
model_revision = relationship("ModelRevisionDB", uselist=False)
5858

5959

6060
class SinkDB(Base):
@@ -70,12 +70,30 @@ class SinkDB(Base):
7070
updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp())
7171

7272

73-
class ModelDB(Base):
74-
__tablename__ = "models"
73+
class ModelRevisionDB(Base):
    """ORM row for the ``model_revisions`` table: one model revision of a project."""

    __tablename__ = "model_revisions"

    # Primary key stored as text; defaults to a freshly generated UUID string.
    id: Mapped[str] = mapped_column(Text, primary_key=True, default=lambda: str(uuid4()))
    # Owning project; rows are deleted together with their project (CASCADE).
    project_id: Mapped[str] = mapped_column(Text, ForeignKey("projects.id", ondelete="CASCADE"), nullable=False)
    architecture: Mapped[str] = mapped_column(String(100), nullable=False)
    # Self-referential link to a parent model revision, if any.
    parent_revision: Mapped[str | None] = mapped_column(Text, ForeignKey("model_revisions.id"), nullable=True)
    training_status: Mapped[str] = mapped_column(String(50), nullable=False)
    training_configuration: Mapped[dict] = mapped_column(JSON, nullable=False)
    # Dataset revision used for training; nullable (no ondelete behavior declared).
    training_dataset_id: Mapped[str | None] = mapped_column(Text, ForeignKey("dataset_revisions.id"), nullable=True)
    label_schema_revision: Mapped[dict] = mapped_column(JSON, nullable=False)
    training_started_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    training_finished_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    # NOTE(review): presumably marks that this revision's stored files were
    # removed while the row is kept — confirm against the deletion code path.
    files_deleted: Mapped[bool] = mapped_column(Boolean, default=False)
    # DB-side timestamps; updated_at has no onupdate, so it is only set on insert.
    created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp())
    updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp())
89+
90+
91+
class DatasetRevisionDB(Base):
    """ORM row for the ``dataset_revisions`` table: one dataset revision of a project."""

    __tablename__ = "dataset_revisions"

    # Primary key stored as text; defaults to a freshly generated UUID string.
    id: Mapped[str] = mapped_column(Text, primary_key=True, default=lambda: str(uuid4()))
    # Owning project; rows are deleted together with their project (CASCADE).
    project_id: Mapped[str] = mapped_column(Text, ForeignKey("projects.id", ondelete="CASCADE"), nullable=False)
    # NOTE(review): presumably marks that this revision's stored files were
    # removed while the row is kept — confirm against the deletion code path.
    files_deleted: Mapped[bool] = mapped_column(Boolean, default=False)
    # DB-side timestamps; updated_at has no onupdate, so it is only set on insert.
    created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp())
    updated_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.current_timestamp())
8199

@@ -93,7 +111,7 @@ class DatasetItemDB(Base):
93111
annotation_data: Mapped[list] = mapped_column(JSON, nullable=False, default=list)
94112
user_reviewed: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
95113
prediction_model_id: Mapped[str | None] = mapped_column(
96-
Text, ForeignKey("models.id", ondelete="SET NULL"), nullable=True
114+
Text, ForeignKey("model_revisions.id", ondelete="SET NULL"), nullable=True
97115
)
98116
source_id: Mapped[str | None] = mapped_column(Text, ForeignKey("sources.id", ondelete="SET NULL"), nullable=True)
99117
subset: Mapped[str | None] = mapped_column(String(20), nullable=False)

backend/app/entities/stream_data.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33

44
from dataclasses import dataclass
55
from typing import Any
6+
from uuid import UUID
67

78
import numpy as np
89
from model_api.models.result import Result
@@ -16,7 +17,7 @@ class InferenceData:
1617

1718
prediction: Result # prediction result, e.g., bounding boxes, masks, etc.
1819
visualized_prediction: np.ndarray # visualized prediction (e.g., bounding boxes, masks, etc. drawn on the frame)
19-
model_name: str # name of the model that produced the prediction
20+
model_id: UUID # ID of the model that produced the prediction
2021

2122

2223
@dataclass(kw_only=True)

backend/app/repositories/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33

44
from .dataset_item_repo import DatasetItemRepository
55
from .label_repo import LabelRepository
6-
from .model_repo import ModelRepository
6+
from .model_revision_repo import ModelRevisionRepository
77
from .pipeline_repo import PipelineRepository
88
from .project_repo import ProjectRepository
99
from .sink_repo import SinkRepository
@@ -12,7 +12,7 @@
1212
__all__ = [
1313
"DatasetItemRepository",
1414
"LabelRepository",
15-
"ModelRepository",
15+
"ModelRevisionRepository",
1616
"PipelineRepository",
1717
"ProjectRepository",
1818
"SinkRepository",

0 commit comments

Comments
 (0)