Implement FastAPI Temporal MVP pipeline
This commit is contained in:
22
app/api/routers/assets.py
Normal file
22
app/api/routers/assets.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""Asset routes."""
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.schemas.asset import AssetRead
|
||||
from app.application.services.asset_service import AssetService
|
||||
from app.infra.db.session import get_db_session
|
||||
|
||||
# Mounted under /orders because assets are always scoped to a parent order.
router = APIRouter(prefix="/orders", tags=["assets"])
# Module-level singleton: AssetService holds no per-request state.
asset_service = AssetService()


@router.get("/{order_id}/assets", response_model=list[AssetRead])
async def list_order_assets(
    order_id: int,
    session: AsyncSession = Depends(get_db_session),
) -> list[AssetRead]:
    """List assets generated for an order.

    Delegates to AssetService, which raises a 404 HTTPException when the
    order does not exist.
    """
    return await asset_service.list_order_assets(session, order_id)
|
||||
|
||||
13
app/api/routers/health.py
Normal file
13
app/api/routers/health.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""Health check routes."""
|
||||
|
||||
from fastapi import APIRouter
|
||||
|
||||
router = APIRouter(tags=["health"])


@router.get("/healthz")
async def healthcheck() -> dict[str, str]:
    """Return a simple health check response.

    Always responds {"status": "ok"}; suitable as a liveness probe.
    """
    return {"status": "ok"}
|
||||
|
||||
32
app/api/routers/orders.py
Normal file
32
app/api/routers/orders.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""Order routes."""
|
||||
|
||||
from fastapi import APIRouter, Depends, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.schemas.order import CreateOrderRequest, CreateOrderResponse, OrderDetailResponse
|
||||
from app.application.services.order_service import OrderService
|
||||
from app.infra.db.session import get_db_session
|
||||
|
||||
router = APIRouter(prefix="/orders", tags=["orders"])
# Module-level singleton; OrderService holds no per-request state.
order_service = OrderService()


@router.post("", response_model=CreateOrderResponse, status_code=status.HTTP_201_CREATED)
async def create_order(
    payload: CreateOrderRequest,
    session: AsyncSession = Depends(get_db_session),
) -> CreateOrderResponse:
    """Create a new image pipeline order.

    The service validates the customer-level / service-mode pairing (400 on
    mismatch) and starts the Temporal workflow (503 if that fails).
    """
    return await order_service.create_order(session, payload)


@router.get("/{order_id}", response_model=OrderDetailResponse)
async def get_order(
    order_id: int,
    session: AsyncSession = Depends(get_db_session),
) -> OrderDetailResponse:
    """Fetch order details.

    Raises a 404 via the service when the order does not exist.
    """
    return await order_service.get_order(session, order_id)
|
||||
|
||||
32
app/api/routers/reviews.py
Normal file
32
app/api/routers/reviews.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""Review routes."""
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.schemas.review import PendingReviewResponse, SubmitReviewRequest, SubmitReviewResponse
|
||||
from app.application.services.review_service import ReviewService
|
||||
from app.infra.db.session import get_db_session
|
||||
|
||||
router = APIRouter(prefix="/reviews", tags=["reviews"])
# Module-level singleton; ReviewService holds no per-request state.
review_service = ReviewService()


@router.get("/pending", response_model=list[PendingReviewResponse])
async def list_pending_reviews(
    session: AsyncSession = Depends(get_db_session),
) -> list[PendingReviewResponse]:
    """List review tasks waiting for manual input."""
    return await review_service.list_pending_reviews(session)


@router.post("/{order_id}/submit", response_model=SubmitReviewResponse)
async def submit_review(
    order_id: int,
    payload: SubmitReviewRequest,
    session: AsyncSession = Depends(get_db_session),
) -> SubmitReviewResponse:
    """Submit a review decision for a workflow.

    The service enforces that the order is in WAITING_REVIEW (409 otherwise)
    and relays the decision to Temporal (503 on signal failure).
    """
    return await review_service.submit_review(session, order_id, payload)
|
||||
|
||||
22
app/api/routers/workflows.py
Normal file
22
app/api/routers/workflows.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""Workflow routes."""
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.schemas.workflow import WorkflowStatusResponse
|
||||
from app.application.services.workflow_service import WorkflowService
|
||||
from app.infra.db.session import get_db_session
|
||||
|
||||
router = APIRouter(prefix="/workflows", tags=["workflows"])
# Module-level singleton; WorkflowService holds no per-request state.
workflow_service = WorkflowService()


@router.get("/{order_id}", response_model=WorkflowStatusResponse)
async def get_workflow_status(
    order_id: int,
    session: AsyncSession = Depends(get_db_session),
) -> WorkflowStatusResponse:
    """Fetch persisted workflow status for an order.

    Reads from the database only; it does not query Temporal directly.
    Raises a 404 via the service when no workflow run exists.
    """
    return await workflow_service.get_workflow_status(session, order_id)
|
||||
|
||||
23
app/api/schemas/asset.py
Normal file
23
app/api/schemas/asset.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""Asset API schemas."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
from app.domain.enums import AssetType, WorkflowStepName
|
||||
|
||||
|
||||
class AssetRead(BaseModel):
    """Serialized asset response."""

    # from_attributes lets this model be built directly from AssetORM rows.
    model_config = ConfigDict(from_attributes=True)

    id: int
    order_id: int
    asset_type: AssetType
    # Pipeline step that produced the asset; None when not tied to a step.
    step_name: WorkflowStepName | None
    uri: str
    # Free-form producer metadata; stored as a JSON column on the assets table.
    metadata_json: dict[str, Any] | None
    created_at: datetime
|
||||
|
||||
47
app/api/schemas/order.py
Normal file
47
app/api/schemas/order.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Order API schemas."""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.api.schemas.asset import AssetRead
|
||||
from app.domain.enums import CustomerLevel, OrderStatus, ServiceMode, WorkflowStepName
|
||||
|
||||
|
||||
class CreateOrderRequest(BaseModel):
    """Request payload for creating an order."""

    customer_level: CustomerLevel
    service_mode: ServiceMode
    # References to pre-existing catalog/asset records by id.
    model_id: int
    pose_id: int
    garment_asset_id: int
    scene_ref_asset_id: int


class CreateOrderResponse(BaseModel):
    """Response returned after an order has been created."""

    order_id: int
    # Temporal workflow id; "order-{order_id}" by convention (see OrderService).
    workflow_id: str
    status: OrderStatus


class OrderDetailResponse(BaseModel):
    """Order detail response."""

    order_id: int
    customer_level: CustomerLevel
    service_mode: ServiceMode
    status: OrderStatus
    model_id: int
    pose_id: int
    garment_asset_id: int
    scene_ref_asset_id: int
    # Id of the exported final asset once the pipeline completes, else None.
    final_asset_id: int | None
    # None when no workflow run has been recorded for the order.
    workflow_id: str | None
    current_step: WorkflowStepName | None
    # Fully serialized final asset, when final_asset_id resolves to one.
    final_asset: AssetRead | None
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
36
app/api/schemas/review.py
Normal file
36
app/api/schemas/review.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Review API schemas."""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.domain.enums import ReviewDecision, WorkflowStepName
|
||||
|
||||
|
||||
class SubmitReviewRequest(BaseModel):
    """Request payload for review submission."""

    decision: ReviewDecision
    reviewer_id: int
    # Optional at the schema level; when set it must belong to the order
    # under review (validated by ReviewService).
    selected_asset_id: int | None = None
    comment: str | None = None


class SubmitReviewResponse(BaseModel):
    """Response returned after a review signal is sent."""

    order_id: int
    workflow_id: str
    decision: ReviewDecision
    # Literal "submitted" in the current implementation.
    status: str


class PendingReviewResponse(BaseModel):
    """Response model for pending review items."""

    review_task_id: int
    order_id: int
    workflow_id: str
    current_step: WorkflowStepName | None
    created_at: datetime
|
||||
|
||||
38
app/api/schemas/workflow.py
Normal file
38
app/api/schemas/workflow.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Workflow API schemas."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
from app.domain.enums import OrderStatus, StepStatus, WorkflowStepName
|
||||
|
||||
|
||||
class WorkflowStepRead(BaseModel):
    """Serialized workflow step record."""

    # Allows construction directly from WorkflowStepORM rows.
    model_config = ConfigDict(from_attributes=True)

    id: int
    workflow_run_id: int
    step_name: WorkflowStepName
    step_status: StepStatus
    input_json: dict[str, Any] | None
    output_json: dict[str, Any] | None
    # Populated only when the step failed.
    error_message: str | None
    started_at: datetime
    # None while the step is still running or waiting.
    ended_at: datetime | None


class WorkflowStatusResponse(BaseModel):
    """Serialized workflow run details."""

    order_id: int
    workflow_id: str
    # Workflow class name (LowEndPipelineWorkflow / MidEndPipelineWorkflow).
    workflow_type: str
    # Reuses OrderStatus for the run's lifecycle state.
    workflow_status: OrderStatus
    current_step: WorkflowStepName | None
    steps: list[WorkflowStepRead]
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
26
app/application/services/asset_service.py
Normal file
26
app/application/services/asset_service.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""Asset application service."""
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.api.schemas.asset import AssetRead
|
||||
from app.infra.db.models.asset import AssetORM
|
||||
from app.infra.db.models.order import OrderORM
|
||||
|
||||
|
||||
class AssetService:
    """Application service for asset queries."""

    async def list_order_assets(self, session: AsyncSession, order_id: int) -> list[AssetRead]:
        """Return every asset belonging to the given order, oldest first.

        Raises an HTTP 404 when the order itself does not exist, so an empty
        list unambiguously means "order exists but has no assets yet".
        """
        if await session.get(OrderORM, order_id) is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Order not found")

        stmt = (
            select(AssetORM)
            .where(AssetORM.order_id == order_id)
            .order_by(AssetORM.created_at.asc())
        )
        rows = (await session.execute(stmt)).scalars().all()
        return [AssetRead.model_validate(row) for row in rows]
|
||||
|
||||
122
app/application/services/order_service.py
Normal file
122
app/application/services/order_service.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""Order application service."""
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.api.schemas.asset import AssetRead
|
||||
from app.api.schemas.order import CreateOrderRequest, CreateOrderResponse, OrderDetailResponse
|
||||
from app.application.services.workflow_service import WorkflowService
|
||||
from app.domain.enums import CustomerLevel, OrderStatus, ServiceMode
|
||||
from app.infra.db.models.order import OrderORM
|
||||
from app.infra.db.models.workflow_run import WorkflowRunORM
|
||||
from app.workers.workflows.types import PipelineWorkflowInput
|
||||
|
||||
|
||||
class OrderService:
    """Application service for order management."""

    def __init__(self) -> None:
        # All Temporal interaction (starting workflows) is delegated here.
        self.workflow_service = WorkflowService()

    async def create_order(self, session, payload: CreateOrderRequest) -> CreateOrderResponse:
        """Create an order, persist a workflow run, and start Temporal execution.

        Args:
            session: Async SQLAlchemy session (injected by the router).
            payload: Validated order-creation request.

        Raises:
            HTTPException: 400 when the customer-level / service-mode pairing
                is not allowed; 503 when the Temporal workflow cannot be
                started (order and run are marked FAILED first).
        """
        self._validate_mode(payload.customer_level, payload.service_mode)

        order = OrderORM(
            customer_level=payload.customer_level,
            service_mode=payload.service_mode,
            status=OrderStatus.CREATED,
            model_id=payload.model_id,
            pose_id=payload.pose_id,
            garment_asset_id=payload.garment_asset_id,
            scene_ref_asset_id=payload.scene_ref_asset_id,
        )
        session.add(order)
        # Flush (not commit) so order.id is assigned for the workflow id below.
        await session.flush()

        # Convention: one workflow per order, id "order-{order_id}".
        # WorkflowService.start_workflow derives the same id from its input.
        workflow_id = f"order-{order.id}"
        workflow_run = WorkflowRunORM(
            order_id=order.id,
            workflow_id=workflow_id,
            workflow_type=self.workflow_service.workflow_type_for_mode(payload.service_mode),
            status=OrderStatus.CREATED,
        )
        session.add(workflow_run)
        # Commit before starting Temporal so workers can see both rows.
        await session.commit()

        workflow_input = PipelineWorkflowInput(
            order_id=order.id,
            workflow_run_id=workflow_run.id,
            customer_level=order.customer_level,
            service_mode=order.service_mode,
            model_id=order.model_id,
            pose_id=order.pose_id,
            garment_asset_id=order.garment_asset_id,
            scene_ref_asset_id=order.scene_ref_asset_id,
        )

        try:
            await self.workflow_service.start_workflow(workflow_input)
        except Exception as exc:
            # Startup failed: record the failure so the order is not left
            # looking in-progress forever, then surface a 503 to the caller.
            order.status = OrderStatus.FAILED
            workflow_run.status = OrderStatus.FAILED
            await session.commit()
            raise HTTPException(
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                detail=f"Failed to start Temporal workflow: {exc}",
            ) from exc

        return CreateOrderResponse(order_id=order.id, workflow_id=workflow_id, status=order.status)

    async def get_order(self, session, order_id: int) -> OrderDetailResponse:
        """Return a single order with workflow context and final asset.

        Raises:
            HTTPException: 404 when the order does not exist.
        """
        result = await session.execute(
            select(OrderORM)
            .where(OrderORM.id == order_id)
            # Eager-load relationships so the attribute access below does not
            # trigger lazy loads (which fail on async sessions).
            .options(
                selectinload(OrderORM.assets),
                selectinload(OrderORM.workflow_runs),
            )
        )
        order = result.scalar_one_or_none()
        if order is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Order not found")

        # Assumes at most one workflow run per order — TODO confirm ordering
        # if reruns ever create multiple rows.
        workflow_run = order.workflow_runs[0] if order.workflow_runs else None
        final_asset = next((asset for asset in order.assets if asset.id == order.final_asset_id), None)

        return OrderDetailResponse(
            order_id=order.id,
            customer_level=order.customer_level,
            service_mode=order.service_mode,
            status=order.status,
            model_id=order.model_id,
            pose_id=order.pose_id,
            garment_asset_id=order.garment_asset_id,
            scene_ref_asset_id=order.scene_ref_asset_id,
            final_asset_id=order.final_asset_id,
            workflow_id=workflow_run.workflow_id if workflow_run else None,
            current_step=workflow_run.current_step if workflow_run else None,
            final_asset=AssetRead.model_validate(final_asset) if final_asset else None,
            created_at=order.created_at,
            updated_at=order.updated_at,
        )

    @staticmethod
    def _validate_mode(customer_level: CustomerLevel, service_mode: ServiceMode) -> None:
        """Validate the allowed customer-level and service-mode combinations.

        Allowed pairs: LOW -> AUTO_BASIC, MID -> SEMI_PRO; anything else 400s.
        """
        if customer_level == CustomerLevel.LOW and service_mode != ServiceMode.AUTO_BASIC:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Low-level customers only support auto_basic",
            )
        if customer_level == CustomerLevel.MID and service_mode != ServiceMode.SEMI_PRO:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Mid-level customers only support semi_pro",
            )
|
||||
|
||||
112
app/application/services/review_service.py
Normal file
112
app/application/services/review_service.py
Normal file
@@ -0,0 +1,112 @@
|
||||
"""Review application service."""
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy import select
|
||||
|
||||
from app.api.schemas.review import PendingReviewResponse, SubmitReviewRequest, SubmitReviewResponse
|
||||
from app.application.services.workflow_service import WorkflowService
|
||||
from app.domain.enums import OrderStatus, ReviewTaskStatus
|
||||
from app.infra.db.models.asset import AssetORM
|
||||
from app.infra.db.models.order import OrderORM
|
||||
from app.infra.db.models.review_task import ReviewTaskORM
|
||||
from app.infra.db.models.workflow_run import WorkflowRunORM
|
||||
from app.workers.workflows.types import ReviewSignalPayload
|
||||
|
||||
|
||||
class ReviewService:
    """Application service for review flows."""

    def __init__(self) -> None:
        # Used to signal the running Temporal workflow after persisting.
        self.workflow_service = WorkflowService()

    async def list_pending_reviews(self, session) -> list[PendingReviewResponse]:
        """Return all pending review tasks, oldest first.

        Joins each task to its order's workflow run to expose the workflow id
        and current step alongside the task.
        """
        result = await session.execute(
            select(ReviewTaskORM, WorkflowRunORM)
            .join(WorkflowRunORM, WorkflowRunORM.order_id == ReviewTaskORM.order_id)
            .where(ReviewTaskORM.status == ReviewTaskStatus.PENDING)
            .order_by(ReviewTaskORM.created_at.asc())
        )

        return [
            PendingReviewResponse(
                review_task_id=review_task.id,
                order_id=review_task.order_id,
                workflow_id=workflow_run.workflow_id,
                current_step=workflow_run.current_step,
                created_at=review_task.created_at,
            )
            for review_task, workflow_run in result.all()
        ]

    async def submit_review(self, session, order_id: int, payload: SubmitReviewRequest) -> SubmitReviewResponse:
        """Persist the review submission and signal the Temporal workflow.

        Raises:
            HTTPException: 404 if the order or its workflow run is missing;
                409 if the order is not waiting for review; 400 if the
                selected asset belongs to another order; 503 if signaling
                Temporal fails.
        """
        order = await session.get(OrderORM, order_id)
        if order is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Order not found")
        # Reviews are only accepted while the workflow is blocked on one.
        if order.status != OrderStatus.WAITING_REVIEW:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail="Order is not waiting for review",
            )

        workflow_result = await session.execute(
            select(WorkflowRunORM).where(WorkflowRunORM.order_id == order_id)
        )
        workflow_run = workflow_result.scalar_one_or_none()
        if workflow_run is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Workflow not found")

        # A selected asset (if any) must belong to the order under review.
        if payload.selected_asset_id is not None:
            asset = await session.get(AssetORM, payload.selected_asset_id)
            if asset is None or asset.order_id != order_id:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail="Selected asset does not belong to the order",
                )

        # Attach the decision to the newest pending task, or create a fresh
        # one if no pending task was ever recorded for this order.
        pending_result = await session.execute(
            select(ReviewTaskORM)
            .where(
                ReviewTaskORM.order_id == order_id,
                ReviewTaskORM.status == ReviewTaskStatus.PENDING,
            )
            .order_by(ReviewTaskORM.created_at.desc())
        )
        review_task = pending_result.scalars().first()
        if review_task is None:
            review_task = ReviewTaskORM(order_id=order_id, status=ReviewTaskStatus.SUBMITTED)
            session.add(review_task)

        review_task.status = ReviewTaskStatus.SUBMITTED
        review_task.decision = payload.decision
        review_task.reviewer_id = payload.reviewer_id
        review_task.selected_asset_id = payload.selected_asset_id
        review_task.comment = payload.comment
        # Commit before signaling so the decision survives even if the signal
        # fails. NOTE(review): on signal failure the task stays SUBMITTED
        # while the workflow never hears the decision — confirm retry story.
        await session.commit()

        try:
            await self.workflow_service.signal_review(
                workflow_run.workflow_id,
                ReviewSignalPayload(
                    decision=payload.decision,
                    reviewer_id=payload.reviewer_id,
                    selected_asset_id=payload.selected_asset_id,
                    comment=payload.comment,
                ),
            )
        except Exception as exc:
            raise HTTPException(
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                detail=f"Failed to signal Temporal workflow: {exc}",
            ) from exc

        return SubmitReviewResponse(
            order_id=order_id,
            workflow_id=workflow_run.workflow_id,
            decision=payload.decision,
            status="submitted",
        )
|
||||
|
||||
77
app/application/services/workflow_service.py
Normal file
77
app/application/services/workflow_service.py
Normal file
@@ -0,0 +1,77 @@
|
||||
"""Temporal workflow application service."""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.api.schemas.workflow import WorkflowStatusResponse, WorkflowStepRead
|
||||
from app.domain.enums import ServiceMode
|
||||
from app.infra.db.models.workflow_run import WorkflowRunORM
|
||||
from app.infra.temporal.client import get_temporal_client
|
||||
from app.infra.temporal.task_queues import IMAGE_PIPELINE_CONTROL_TASK_QUEUE
|
||||
from app.workers.workflows.low_end_pipeline import LowEndPipelineWorkflow
|
||||
from app.workers.workflows.mid_end_pipeline import MidEndPipelineWorkflow
|
||||
from app.workers.workflows.types import PipelineWorkflowInput, ReviewSignalPayload
|
||||
|
||||
|
||||
class WorkflowService:
    """Application service for Temporal workflow orchestration."""

    @staticmethod
    def workflow_type_for_mode(service_mode: ServiceMode) -> str:
        """Return the workflow class name for a service mode.

        AUTO_BASIC maps to the low-end pipeline; every other mode gets the
        mid-end pipeline.
        """
        if service_mode == ServiceMode.AUTO_BASIC:
            return LowEndPipelineWorkflow.__name__
        return MidEndPipelineWorkflow.__name__

    async def start_workflow(self, workflow_input: PipelineWorkflowInput) -> None:
        """Start the appropriate Temporal workflow for an order.

        Uses workflow id "order-{order_id}" — the same convention the order
        service persists on the WorkflowRun row.
        """
        client = await get_temporal_client()
        workflow_id = f"order-{workflow_input.order_id}"
        # Selection here must stay consistent with workflow_type_for_mode.
        workflow_callable = (
            LowEndPipelineWorkflow.run
            if workflow_input.service_mode == ServiceMode.AUTO_BASIC
            else MidEndPipelineWorkflow.run
        )
        await client.start_workflow(
            workflow_callable,
            workflow_input,
            id=workflow_id,
            task_queue=IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
            # Hard ceilings: whole run capped at 30 min, each workflow task 30 s.
            run_timeout=timedelta(minutes=30),
            task_timeout=timedelta(seconds=30),
        )

    async def signal_review(self, workflow_id: str, payload: ReviewSignalPayload) -> None:
        """Send a review signal to a running Temporal workflow.

        The signal name "submit_review" must match the handler declared on
        the workflow classes — TODO confirm against their definitions.
        """
        client = await get_temporal_client()
        handle = client.get_workflow_handle(workflow_id=workflow_id)
        await handle.signal("submit_review", payload)

    async def get_workflow_status(self, session, order_id: int) -> WorkflowStatusResponse:
        """Return persisted workflow execution state for an order.

        Raises:
            HTTPException: 404 when no workflow run exists for the order.
        """
        result = await session.execute(
            select(WorkflowRunORM)
            .where(WorkflowRunORM.order_id == order_id)
            # Eager-load steps to avoid async lazy-load on access below.
            .options(selectinload(WorkflowRunORM.steps))
        )
        # NOTE(review): scalar_one_or_none raises if multiple runs share an
        # order — assumes at most one workflow run per order, confirm.
        workflow_run = result.scalar_one_or_none()
        if workflow_run is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Workflow not found")

        return WorkflowStatusResponse(
            order_id=workflow_run.order_id,
            workflow_id=workflow_run.workflow_id,
            workflow_type=workflow_run.workflow_type,
            workflow_status=workflow_run.status,
            current_step=workflow_run.current_step,
            steps=[WorkflowStepRead.model_validate(step) for step in workflow_run.steps],
            created_at=workflow_run.created_at,
            updated_at=workflow_run.updated_at,
        )
|
||||
37
app/config/settings.py
Normal file
37
app/config/settings.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""Application settings."""
|
||||
|
||||
from functools import lru_cache
|
||||
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Runtime settings loaded from environment variables (and .env)."""

    app_name: str = "temporal-image-pipeline"
    api_prefix: str = "/api/v1"
    debug: bool = False
    # Presumably triggers table creation at startup in lieu of migrations —
    # confirm against the application bootstrap code.
    auto_create_tables: bool = True
    database_url: str = "sqlite+aiosqlite:///./temporal_demo.db"
    # host:port of the Temporal frontend service.
    temporal_address: str = "localhost:7233"
    temporal_namespace: str = "default"

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
    )

    @property
    def sync_database_url(self) -> str:
        """Return a synchronous SQLAlchemy URL for migrations.

        NOTE(review): only strips the "+aiosqlite" driver suffix; other async
        drivers (e.g. "+asyncpg") would pass through unchanged — confirm if a
        non-SQLite backend is ever configured.
        """
        return self.database_url.replace("+aiosqlite", "")


@lru_cache(maxsize=1)
def get_settings() -> Settings:
    """Return the cached application settings (constructed once per process)."""
    return Settings()
|
||||
|
||||
83
app/domain/enums.py
Normal file
83
app/domain/enums.py
Normal file
@@ -0,0 +1,83 @@
|
||||
"""Domain enums shared across the application."""
|
||||
|
||||
from enum import Enum
|
||||
|
||||
|
||||
# All enums inherit str so their values serialize cleanly to JSON; the ORM
# persists them as plain strings (columns use native_enum=False).
class CustomerLevel(str, Enum):
    """Supported customer tiers."""

    LOW = "low"
    MID = "mid"


class ServiceMode(str, Enum):
    """Supported service delivery modes."""

    # Fully automated pipeline (paired with LOW customers by OrderService).
    AUTO_BASIC = "auto_basic"
    # Pipeline including a manual review step (paired with MID customers).
    SEMI_PRO = "semi_pro"


class OrderStatus(str, Enum):
    """Lifecycle states for an order and workflow run."""

    CREATED = "created"
    RUNNING = "running"
    # Blocked until a human submits a review decision.
    WAITING_REVIEW = "waiting_review"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    CANCELLED = "cancelled"


class WorkflowStepName(str, Enum):
    """Canonical workflow step names."""

    PREPARE_MODEL = "prepare_model"
    TRYON = "tryon"
    SCENE = "scene"
    TEXTURE = "texture"
    FACE = "face"
    FUSION = "fusion"
    QC = "qc"
    EXPORT = "export"
    REVIEW = "review"


class ReviewDecision(str, Enum):
    """Supported review decisions for the mid-end workflow."""

    APPROVE = "approve"
    # RERUN_* presumably route the workflow back to the named step —
    # confirm against the workflow signal handlers.
    RERUN_SCENE = "rerun_scene"
    RERUN_FACE = "rerun_face"
    RERUN_FUSION = "rerun_fusion"
    REJECT = "reject"


class StepStatus(str, Enum):
    """Execution status of a single workflow step record."""

    PENDING = "pending"
    RUNNING = "running"
    WAITING = "waiting"
    SUCCEEDED = "succeeded"
    FAILED = "failed"


class ReviewTaskStatus(str, Enum):
    """Status of a human review task."""

    PENDING = "pending"
    SUBMITTED = "submitted"


class AssetType(str, Enum):
    """Asset classes produced by the pipeline."""

    PREPARED_MODEL = "prepared_model"
    TRYON = "tryon"
    SCENE = "scene"
    TEXTURE = "texture"
    FACE = "face"
    FUSION = "fusion"
    QC_CANDIDATE = "qc_candidate"
    FINAL = "final"
|
||||
|
||||
21
app/domain/models/asset.py
Normal file
21
app/domain/models/asset.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Domain asset model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from app.domain.enums import AssetType, WorkflowStepName
|
||||
|
||||
|
||||
@dataclass(slots=True)
class Asset:
    """Business representation of a generated asset."""

    id: int
    order_id: int
    asset_type: AssetType
    # Pipeline step that produced the asset; None when unassociated.
    step_name: WorkflowStepName | None
    uri: str
    metadata_json: dict[str, Any] | None
    created_at: datetime
|
||||
|
||||
26
app/domain/models/order.py
Normal file
26
app/domain/models/order.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""Domain order model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
from app.domain.enums import CustomerLevel, OrderStatus, ServiceMode, WorkflowStepName
|
||||
|
||||
|
||||
@dataclass(slots=True)
class Order:
    """Business representation of an image order."""

    id: int
    customer_level: CustomerLevel
    service_mode: ServiceMode
    status: OrderStatus
    # References to catalog/asset records by id.
    model_id: int
    pose_id: int
    garment_asset_id: int
    scene_ref_asset_id: int
    # Set once the pipeline exports a final asset.
    final_asset_id: int | None
    # Temporal workflow id; None before a workflow run is recorded.
    workflow_id: str | None
    current_step: WorkflowStepName | None
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
22
app/domain/models/review_task.py
Normal file
22
app/domain/models/review_task.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""Domain review task model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
from app.domain.enums import ReviewDecision, ReviewTaskStatus
|
||||
|
||||
|
||||
@dataclass(slots=True)
class ReviewTask:
    """Business representation of a review task."""

    id: int
    order_id: int
    status: ReviewTaskStatus
    # Decision/reviewer fields are None until a review is submitted.
    decision: ReviewDecision | None
    reviewer_id: int | None
    selected_asset_id: int | None
    comment: str | None
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
21
app/domain/models/workflow_run.py
Normal file
21
app/domain/models/workflow_run.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Domain workflow run model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
from app.domain.enums import OrderStatus, WorkflowStepName
|
||||
|
||||
|
||||
@dataclass(slots=True)
class WorkflowRun:
    """Business representation of a workflow execution."""

    id: int
    order_id: int
    # Temporal workflow id ("order-{order_id}" by convention).
    workflow_id: str
    # Workflow class name (e.g. LowEndPipelineWorkflow).
    workflow_type: str
    # Reuses OrderStatus for the run's lifecycle state.
    status: OrderStatus
    current_step: WorkflowStepName | None
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
23
app/domain/models/workflow_step.py
Normal file
23
app/domain/models/workflow_step.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""Domain workflow step model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from app.domain.enums import StepStatus, WorkflowStepName
|
||||
|
||||
|
||||
@dataclass(slots=True)
class WorkflowStep:
    """Business representation of a workflow step attempt."""

    id: int
    workflow_run_id: int
    step_name: WorkflowStepName
    step_status: StepStatus
    input_json: dict[str, Any] | None
    output_json: dict[str, Any] | None
    # Populated only when the step failed.
    error_message: str | None
    started_at: datetime
    # None while the step is still in flight.
    ended_at: datetime | None
|
||||
|
||||
33
app/infra/db/base.py
Normal file
33
app/infra/db/base.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""Database base declarations."""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import DateTime
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
|
||||
|
||||
def utc_now() -> datetime:
    """Return the current moment as a timezone-aware UTC datetime."""
    return datetime.now(tz=timezone.utc)
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Shared declarative base."""


class TimestampMixin:
    """Mixin that adds created and updated timestamps (timezone-aware UTC)."""

    # Set once at INSERT time via the Python-side default.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utc_now,
        nullable=False,
    )
    # Refreshed by SQLAlchemy on every UPDATE via onupdate.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utc_now,
        onupdate=utc_now,
        nullable=False,
    )
|
||||
|
||||
31
app/infra/db/models/asset.py
Normal file
31
app/infra/db/models/asset.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""Asset ORM model."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import Enum, ForeignKey, Integer, JSON, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.domain.enums import AssetType, WorkflowStepName
|
||||
from app.infra.db.base import Base, TimestampMixin
|
||||
|
||||
|
||||
class AssetORM(TimestampMixin, Base):
    """Persisted generated asset."""

    __tablename__ = "assets"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Owning order; indexed because assets are listed per order.
    order_id: Mapped[int] = mapped_column(ForeignKey("orders.id"), nullable=False, index=True)
    # Kind of artifact; stored as a plain string column (native_enum=False).
    asset_type: Mapped[AssetType] = mapped_column(
        Enum(AssetType, native_enum=False),
        nullable=False,
    )
    # Pipeline step that produced the asset, when known.
    step_name: Mapped[WorkflowStepName | None] = mapped_column(
        Enum(WorkflowStepName, native_enum=False),
        nullable=True,
    )
    # Storage location of the asset (mock:// URIs in this MVP).
    uri: Mapped[str] = mapped_column(String(500), nullable=False)
    # Free-form JSON metadata attached by the producing activity.
    metadata_json: Mapped[dict[str, Any] | None] = mapped_column(JSON, nullable=True)

    # Reverse side of OrderORM.assets.
    order = relationship("OrderORM", back_populates="assets")
|
||||
|
||||
38
app/infra/db/models/order.py
Normal file
38
app/infra/db/models/order.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Order ORM model."""
|
||||
|
||||
from sqlalchemy import Enum, Integer
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.domain.enums import CustomerLevel, OrderStatus, ServiceMode
|
||||
from app.infra.db.base import Base, TimestampMixin
|
||||
|
||||
|
||||
class OrderORM(TimestampMixin, Base):
    """Persisted order record."""

    __tablename__ = "orders"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Enums stored as plain strings (native_enum=False) for portability.
    customer_level: Mapped[CustomerLevel] = mapped_column(
        Enum(CustomerLevel, native_enum=False),
        nullable=False,
    )
    service_mode: Mapped[ServiceMode] = mapped_column(
        Enum(ServiceMode, native_enum=False),
        nullable=False,
    )
    # Lifecycle state; new orders start in CREATED.
    status: Mapped[OrderStatus] = mapped_column(
        Enum(OrderStatus, native_enum=False),
        nullable=False,
        default=OrderStatus.CREATED,
    )
    # External references kept as plain integers (no FK constraints in the MVP).
    model_id: Mapped[int] = mapped_column(Integer, nullable=False)
    pose_id: Mapped[int] = mapped_column(Integer, nullable=False)
    garment_asset_id: Mapped[int] = mapped_column(Integer, nullable=False)
    scene_ref_asset_id: Mapped[int] = mapped_column(Integer, nullable=False)
    # Filled in when the export step finalizes the order.
    final_asset_id: Mapped[int | None] = mapped_column(Integer, nullable=True)

    # Child collections eagerly loaded with SELECT IN to avoid N+1 queries.
    assets = relationship("AssetORM", back_populates="order", lazy="selectin")
    review_tasks = relationship("ReviewTaskORM", back_populates="order", lazy="selectin")
    workflow_runs = relationship("WorkflowRunORM", back_populates="order", lazy="selectin")
|
||||
|
||||
31
app/infra/db/models/review_task.py
Normal file
31
app/infra/db/models/review_task.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""Review task ORM model."""
|
||||
|
||||
from sqlalchemy import Enum, ForeignKey, Integer, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.domain.enums import ReviewDecision, ReviewTaskStatus
|
||||
from app.infra.db.base import Base, TimestampMixin
|
||||
|
||||
|
||||
class ReviewTaskORM(TimestampMixin, Base):
    """Persisted review task."""

    __tablename__ = "review_tasks"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Order awaiting review; indexed for per-order lookups.
    order_id: Mapped[int] = mapped_column(ForeignKey("orders.id"), nullable=False, index=True)
    # Tasks start PENDING until a reviewer acts.
    status: Mapped[ReviewTaskStatus] = mapped_column(
        Enum(ReviewTaskStatus, native_enum=False),
        nullable=False,
        default=ReviewTaskStatus.PENDING,
    )
    # Reviewer's verdict; None until a decision is recorded.
    decision: Mapped[ReviewDecision | None] = mapped_column(
        Enum(ReviewDecision, native_enum=False),
        nullable=True,
    )
    # Plain integer reference to the reviewer (no FK in the MVP).
    reviewer_id: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Candidate asset chosen for review, when one exists.
    selected_asset_id: Mapped[int | None] = mapped_column(Integer, nullable=True)
    comment: Mapped[str | None] = mapped_column(Text, nullable=True)

    # Reverse side of OrderORM.review_tasks.
    order = relationship("OrderORM", back_populates="review_tasks")
|
||||
|
||||
36
app/infra/db/models/workflow_run.py
Normal file
36
app/infra/db/models/workflow_run.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Workflow run ORM model."""
|
||||
|
||||
from sqlalchemy import Enum, ForeignKey, Integer, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.domain.enums import OrderStatus, WorkflowStepName
|
||||
from app.infra.db.base import Base, TimestampMixin
|
||||
|
||||
|
||||
class WorkflowRunORM(TimestampMixin, Base):
    """Persisted workflow execution state."""

    __tablename__ = "workflow_runs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Order this run belongs to; indexed for per-order lookups.
    order_id: Mapped[int] = mapped_column(ForeignKey("orders.id"), nullable=False, index=True)
    # External (Temporal) workflow identifier; unique across all runs.
    workflow_id: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    workflow_type: Mapped[str] = mapped_column(String(255), nullable=False)
    # Run status reuses the order's status enum and starts at CREATED.
    status: Mapped[OrderStatus] = mapped_column(
        Enum(OrderStatus, native_enum=False),
        nullable=False,
        default=OrderStatus.CREATED,
    )
    # Last step the run progressed to; None before any step runs.
    current_step: Mapped[WorkflowStepName | None] = mapped_column(
        Enum(WorkflowStepName, native_enum=False),
        nullable=True,
    )

    # Reverse side of OrderORM.workflow_runs.
    order = relationship("OrderORM", back_populates="workflow_runs")
    # Step attempts in chronological order, eagerly loaded.
    steps = relationship(
        "WorkflowStepORM",
        back_populates="workflow_run",
        lazy="selectin",
        order_by="WorkflowStepORM.started_at",
    )
|
||||
|
||||
42
app/infra/db/models/workflow_step.py
Normal file
42
app/infra/db/models/workflow_step.py
Normal file
@@ -0,0 +1,42 @@
|
||||
"""Workflow step ORM model."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import DateTime, Enum, ForeignKey, Integer, JSON, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.domain.enums import StepStatus, WorkflowStepName
|
||||
from app.infra.db.base import Base, utc_now
|
||||
|
||||
|
||||
class WorkflowStepORM(Base):
    """Persisted workflow step execution record.

    Note: unlike the other models this one does not use TimestampMixin;
    it tracks its own started_at/ended_at pair instead.
    """

    __tablename__ = "workflow_steps"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Owning run; indexed for per-run step listings.
    workflow_run_id: Mapped[int] = mapped_column(
        ForeignKey("workflow_runs.id"),
        nullable=False,
        index=True,
    )
    step_name: Mapped[WorkflowStepName] = mapped_column(
        Enum(WorkflowStepName, native_enum=False),
        nullable=False,
    )
    step_status: Mapped[StepStatus] = mapped_column(
        Enum(StepStatus, native_enum=False),
        nullable=False,
    )
    # Activity input/output snapshots, serialized by the jsonable() helper.
    input_json: Mapped[dict[str, Any] | None] = mapped_column(JSON, nullable=True)
    output_json: Mapped[dict[str, Any] | None] = mapped_column(JSON, nullable=True)
    error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Defaults to the insert time; ended_at stays None until the attempt ends.
    started_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=utc_now,
        nullable=False,
    )
    ended_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)

    # Reverse side of WorkflowRunORM.steps.
    workflow_run = relationship("WorkflowRunORM", back_populates="steps")
|
||||
65
app/infra/db/session.py
Normal file
65
app/infra/db/session.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""Async database engine and session helpers."""
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
|
||||
|
||||
from app.config.settings import get_settings
|
||||
from app.infra.db.base import Base
|
||||
|
||||
_engine: AsyncEngine | None = None
|
||||
_session_factory: async_sessionmaker[AsyncSession] | None = None
|
||||
|
||||
|
||||
def get_async_engine() -> AsyncEngine:
    """Return the lazily created async SQLAlchemy engine.

    The engine is built on first use from the configured database URL and
    cached in a module-level global for every later call.
    """

    global _engine
    if _engine is not None:
        return _engine
    _engine = create_async_engine(
        get_settings().database_url,
        future=True,
        echo=False,
    )
    return _engine
|
||||
|
||||
|
||||
def get_session_factory() -> async_sessionmaker[AsyncSession]:
    """Return the lazily created async session factory.

    Built once against the shared engine; expire_on_commit=False keeps ORM
    objects usable after commit.
    """

    global _session_factory
    if _session_factory is not None:
        return _session_factory
    _session_factory = async_sessionmaker(get_async_engine(), expire_on_commit=False)
    return _session_factory
|
||||
|
||||
|
||||
async def get_db_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield a database session for FastAPI dependencies.

    The context manager closes the session when the request finishes.
    """

    factory = get_session_factory()
    async with factory() as session:
        yield session
|
||||
|
||||
|
||||
async def init_database() -> None:
    """Create database tables when running the MVP without migrations."""

    # Import the model modules locally so their tables are registered on
    # Base.metadata before create_all runs; only the import side effect matters.
    from app.infra.db.models.asset import AssetORM
    from app.infra.db.models.order import OrderORM
    from app.infra.db.models.review_task import ReviewTaskORM
    from app.infra.db.models.workflow_run import WorkflowRunORM
    from app.infra.db.models.workflow_step import WorkflowStepORM

    # Drop the names immediately — they are intentionally unused.
    del AssetORM, OrderORM, ReviewTaskORM, WorkflowRunORM, WorkflowStepORM

    async with get_async_engine().begin() as connection:
        # create_all is synchronous; run it on the async connection's sync facade.
        await connection.run_sync(Base.metadata.create_all)
|
||||
|
||||
|
||||
async def dispose_database() -> None:
    """Dispose the active engine and clear cached session objects."""

    global _engine, _session_factory
    if _engine is not None:
        await _engine.dispose()
        _engine = None
    # Always drop the factory so the next use rebuilds against a fresh engine.
    _session_factory = None
|
||||
35
app/infra/temporal/client.py
Normal file
35
app/infra/temporal/client.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""Temporal client helpers."""
|
||||
|
||||
import asyncio
|
||||
|
||||
from temporalio.client import Client
|
||||
|
||||
from app.config.settings import get_settings
|
||||
|
||||
_client: Client | None = None
|
||||
_client_lock = asyncio.Lock()
|
||||
|
||||
|
||||
async def get_temporal_client() -> Client:
    """Return a cached Temporal client.

    Uses double-checked locking: the fast path returns an existing client
    without touching the lock; the slow path re-checks under the asyncio
    lock so concurrent callers connect at most once.
    """

    global _client
    if _client is not None:
        return _client

    async with _client_lock:
        if _client is None:
            settings = get_settings()
            _client = await Client.connect(
                settings.temporal_address,
                namespace=settings.temporal_namespace,
            )
    return _client
|
||||
|
||||
|
||||
def set_temporal_client(client: Client | None) -> None:
    """Override the cached Temporal client, primarily for tests.

    Pass None to clear the cache so the next get_temporal_client() call
    reconnects.
    """

    global _client
    _client = client
|
||||
|
||||
8
app/infra/temporal/task_queues.py
Normal file
8
app/infra/temporal/task_queues.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""Temporal task queue names."""
|
||||
|
||||
IMAGE_PIPELINE_CONTROL_TASK_QUEUE = "image-pipeline-control"
|
||||
IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE = "image-pipeline-image-gen"
|
||||
IMAGE_PIPELINE_POST_PROCESS_TASK_QUEUE = "image-pipeline-post-process"
|
||||
IMAGE_PIPELINE_QC_TASK_QUEUE = "image-pipeline-qc"
|
||||
IMAGE_PIPELINE_EXPORT_TASK_QUEUE = "image-pipeline-export"
|
||||
|
||||
39
app/main.py
Normal file
39
app/main.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""FastAPI application entrypoint."""
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
||||
from app.api.routers.assets import router as assets_router
|
||||
from app.api.routers.health import router as health_router
|
||||
from app.api.routers.orders import router as orders_router
|
||||
from app.api.routers.reviews import router as reviews_router
|
||||
from app.api.routers.workflows import router as workflows_router
|
||||
from app.config.settings import get_settings
|
||||
from app.infra.db.session import init_database
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(_: FastAPI):
    """Initialize local resources for the MVP runtime.

    Creates tables on startup when auto_create_tables is enabled; nothing
    is torn down on shutdown.
    """

    if get_settings().auto_create_tables:
        await init_database()
    yield
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """Create and configure the FastAPI application.

    The health router is mounted at the root; all business routers share
    the configured API prefix.
    """

    settings = get_settings()
    application = FastAPI(title=settings.app_name, debug=settings.debug, lifespan=lifespan)
    application.include_router(health_router)
    for business_router in (orders_router, assets_router, reviews_router, workflows_router):
        application.include_router(business_router, prefix=settings.api_prefix)
    return application


# Module-level ASGI application for uvicorn/gunicorn entry points.
app = create_app()
|
||||
20
app/workers/activities/export_activities.py
Normal file
20
app/workers/activities/export_activities.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""Export mock activity."""
|
||||
|
||||
from temporalio import activity
|
||||
|
||||
from app.domain.enums import AssetType
|
||||
from app.workers.activities.tryon_activities import execute_asset_step
|
||||
from app.workers.workflows.types import MockActivityResult, StepActivityInput
|
||||
|
||||
|
||||
@activity.defn
async def run_export_activity(payload: StepActivityInput) -> MockActivityResult:
    """Mock final asset export; finalize=True marks the order finished."""

    result = await execute_asset_step(
        payload, AssetType.FINAL, filename="final.png", finalize=True
    )
    return result
|
||||
|
||||
15
app/workers/activities/face_activities.py
Normal file
15
app/workers/activities/face_activities.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Face mock activity."""
|
||||
|
||||
from temporalio import activity
|
||||
|
||||
from app.domain.enums import AssetType
|
||||
from app.workers.activities.tryon_activities import execute_asset_step
|
||||
from app.workers.workflows.types import MockActivityResult, StepActivityInput
|
||||
|
||||
|
||||
@activity.defn
async def run_face_activity(payload: StepActivityInput) -> MockActivityResult:
    """Mock face enhancement backed by the shared asset-step helper."""

    result = await execute_asset_step(payload, AssetType.FACE)
    return result
|
||||
|
||||
19
app/workers/activities/fusion_activities.py
Normal file
19
app/workers/activities/fusion_activities.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Fusion mock activity."""
|
||||
|
||||
from temporalio import activity
|
||||
|
||||
from app.domain.enums import AssetType
|
||||
from app.workers.activities.tryon_activities import execute_asset_step
|
||||
from app.workers.workflows.types import MockActivityResult, StepActivityInput
|
||||
|
||||
|
||||
@activity.defn
async def run_fusion_activity(payload: StepActivityInput) -> MockActivityResult:
    """Mock face and body fusion."""

    # Record which face asset the reviewer (or QC) selected for fusion.
    fusion_metadata = {"face_asset_id": payload.selected_asset_id}
    return await execute_asset_step(payload, AssetType.FUSION, extra_metadata=fusion_metadata)
|
||||
|
||||
69
app/workers/activities/qc_activities.py
Normal file
69
app/workers/activities/qc_activities.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""Quality-control mock activity."""
|
||||
|
||||
from temporalio import activity
|
||||
|
||||
from app.domain.enums import AssetType, OrderStatus, StepStatus
|
||||
from app.infra.db.models.asset import AssetORM
|
||||
from app.infra.db.session import get_session_factory
|
||||
from app.workers.activities.tryon_activities import create_step_record, jsonable, load_order_and_run, mock_uri, utc_now
|
||||
from app.workers.workflows.types import MockActivityResult, StepActivityInput
|
||||
|
||||
|
||||
@activity.defn
async def run_qc_activity(payload: StepActivityInput) -> MockActivityResult:
    """Mock automated quality control.

    Records a step row, optionally creates a QC candidate asset, and
    returns a result whose `passed` flag the workflow branches on. The
    activity itself succeeds (success=True) even when QC rejects; only
    the step row is marked FAILED in that case.
    """

    async with get_session_factory()() as session:
        order, workflow_run = await load_order_and_run(session, payload.order_id, payload.workflow_run_id)
        step = create_step_record(payload)
        session.add(step)

        # Reflect progress before doing the work so observers see RUNNING.
        order.status = OrderStatus.RUNNING
        workflow_run.status = OrderStatus.RUNNING
        workflow_run.current_step = payload.step_name
        await session.flush()

        try:
            # Test hook: callers can force a QC rejection via metadata.
            passed = not payload.metadata.get("force_fail", False)
            candidate_asset_ids: list[int] = []
            candidate_uri: str | None = None

            if passed:
                # Only passing runs produce a candidate asset for review.
                candidate = AssetORM(
                    order_id=payload.order_id,
                    asset_type=AssetType.QC_CANDIDATE,
                    step_name=payload.step_name,
                    uri=mock_uri(payload.order_id, payload.step_name.value, "candidate.png"),
                    metadata_json=jsonable({"source_asset_id": payload.source_asset_id}),
                )
                session.add(candidate)
                # Flush to obtain the autoincrement id before building the result.
                await session.flush()
                candidate_asset_ids = [candidate.id]
                candidate_uri = candidate.uri

            result = MockActivityResult(
                step_name=payload.step_name,
                success=True,
                asset_id=candidate_asset_ids[0] if candidate_asset_ids else None,
                uri=candidate_uri,
                score=0.95 if passed else 0.35,
                passed=passed,
                message="mock success" if passed else "mock qc rejected",
                candidate_asset_ids=candidate_asset_ids,
                metadata={"source_asset_id": payload.source_asset_id},
            )

            # The step row mirrors the QC verdict even though the activity succeeds.
            step.step_status = StepStatus.SUCCEEDED if passed else StepStatus.FAILED
            step.output_json = jsonable(result)
            step.error_message = None if passed else "QC rejected the asset"
            step.ended_at = utc_now()
            await session.commit()
            return result
        except Exception as exc:
            # Unexpected failure: persist the error and fail order + run,
            # then re-raise so Temporal records the activity failure.
            step.step_status = StepStatus.FAILED
            step.error_message = str(exc)
            step.ended_at = utc_now()
            order.status = OrderStatus.FAILED
            workflow_run.status = OrderStatus.FAILED
            await session.commit()
            raise
|
||||
117
app/workers/activities/review_activities.py
Normal file
117
app/workers/activities/review_activities.py
Normal file
@@ -0,0 +1,117 @@
|
||||
"""Review state management mock activities."""
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
from temporalio import activity
|
||||
|
||||
from app.domain.enums import OrderStatus, ReviewDecision, ReviewTaskStatus, StepStatus, WorkflowStepName
|
||||
from app.infra.db.models.review_task import ReviewTaskORM
|
||||
from app.infra.db.models.workflow_step import WorkflowStepORM
|
||||
from app.infra.db.session import get_session_factory
|
||||
from app.workers.activities.tryon_activities import jsonable, load_order_and_run, utc_now
|
||||
from app.workers.workflows.types import (
|
||||
ReviewResolutionActivityInput,
|
||||
ReviewWaitActivityInput,
|
||||
WorkflowFailureActivityInput,
|
||||
)
|
||||
|
||||
|
||||
@activity.defn
async def mark_waiting_for_review_activity(payload: ReviewWaitActivityInput) -> None:
    """Mark a workflow as waiting for a human review.

    Inserts a WAITING review step plus a PENDING review task, and flips
    the order and run into WAITING_REVIEW in one transaction.
    """

    async with get_session_factory()() as session:
        order, workflow_run = await load_order_and_run(session, payload.order_id, payload.workflow_run_id)
        # Open-ended step row; complete_review_wait_activity closes it later.
        review_step = WorkflowStepORM(
            workflow_run_id=payload.workflow_run_id,
            step_name=WorkflowStepName.REVIEW,
            step_status=StepStatus.WAITING,
            input_json=jsonable(payload),
            started_at=utc_now(),
        )
        session.add(review_step)
        session.add(
            ReviewTaskORM(
                order_id=payload.order_id,
                status=ReviewTaskStatus.PENDING,
                # Pre-select the first QC candidate, when one exists.
                selected_asset_id=payload.candidate_asset_ids[0] if payload.candidate_asset_ids else None,
                comment=payload.comment,
            )
        )

        order.status = OrderStatus.WAITING_REVIEW
        workflow_run.status = OrderStatus.WAITING_REVIEW
        workflow_run.current_step = WorkflowStepName.REVIEW
        await session.commit()
|
||||
|
||||
|
||||
@activity.defn
async def complete_review_wait_activity(payload: ReviewResolutionActivityInput) -> None:
    """Resolve the current waiting-review step before the next branch runs.

    Closes the most recent WAITING review step with the reviewer's
    decision and moves order/run to FAILED (reject) or back to RUNNING.
    """

    async with get_session_factory()() as session:
        order, workflow_run = await load_order_and_run(session, payload.order_id, payload.workflow_run_id)
        # Pick the most recent WAITING review step; id desc breaks timestamp ties.
        step_result = await session.execute(
            select(WorkflowStepORM)
            .where(
                WorkflowStepORM.workflow_run_id == payload.workflow_run_id,
                WorkflowStepORM.step_name == WorkflowStepName.REVIEW,
                WorkflowStepORM.step_status == StepStatus.WAITING,
            )
            .order_by(WorkflowStepORM.started_at.desc(), WorkflowStepORM.id.desc())
        )
        review_step = step_result.scalars().first()
        # Tolerate a missing step (e.g. resolution retried after it was closed).
        if review_step is not None:
            review_step.step_status = (
                StepStatus.FAILED if payload.decision == ReviewDecision.REJECT else StepStatus.SUCCEEDED
            )
            review_step.output_json = jsonable(payload)
            review_step.error_message = payload.comment if payload.decision == ReviewDecision.REJECT else None
            review_step.ended_at = utc_now()

        if payload.decision == ReviewDecision.REJECT:
            order.status = OrderStatus.FAILED
            workflow_run.status = OrderStatus.FAILED
        else:
            # Any non-reject decision resumes the pipeline.
            order.status = OrderStatus.RUNNING
            workflow_run.status = OrderStatus.RUNNING

        workflow_run.current_step = WorkflowStepName.REVIEW
        await session.commit()
|
||||
|
||||
|
||||
@activity.defn
async def mark_workflow_failed_activity(payload: WorkflowFailureActivityInput) -> None:
    """Mark the persisted workflow state as failed.

    Finds (or creates) the step row for the failing step, records the
    failure message, and sets order/run to the payload's terminal status.
    """

    async with get_session_factory()() as session:
        order, workflow_run = await load_order_and_run(session, payload.order_id, payload.workflow_run_id)

        # Latest attempt of the failing step; id desc breaks timestamp ties.
        step_result = await session.execute(
            select(WorkflowStepORM)
            .where(
                WorkflowStepORM.workflow_run_id == payload.workflow_run_id,
                WorkflowStepORM.step_name == payload.current_step,
            )
            .order_by(WorkflowStepORM.started_at.desc(), WorkflowStepORM.id.desc())
        )
        workflow_step = step_result.scalars().first()
        if workflow_step is None:
            # No attempt was recorded (failure before the step persisted itself):
            # synthesize one so the failure is visible in the step history.
            workflow_step = WorkflowStepORM(
                workflow_run_id=payload.workflow_run_id,
                step_name=payload.current_step,
                step_status=StepStatus.FAILED,
                input_json=jsonable(payload),
                started_at=utc_now(),
            )
            session.add(workflow_step)

        workflow_step.step_status = StepStatus.FAILED
        workflow_step.error_message = payload.message
        workflow_step.output_json = jsonable({"message": payload.message, "status": payload.status.value})
        # Preserve an existing end time; only stamp it if the step never ended.
        workflow_step.ended_at = workflow_step.ended_at or utc_now()

        order.status = payload.status
        workflow_run.status = payload.status
        workflow_run.current_step = payload.current_step
        await session.commit()
|
||||
19
app/workers/activities/scene_activities.py
Normal file
19
app/workers/activities/scene_activities.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Scene mock activity."""
|
||||
|
||||
from temporalio import activity
|
||||
|
||||
from app.domain.enums import AssetType
|
||||
from app.workers.activities.tryon_activities import execute_asset_step
|
||||
from app.workers.workflows.types import MockActivityResult, StepActivityInput
|
||||
|
||||
|
||||
@activity.defn
|
||||
async def run_scene_activity(payload: StepActivityInput) -> MockActivityResult:
|
||||
"""Mock scene replacement."""
|
||||
|
||||
return await execute_asset_step(
|
||||
payload,
|
||||
AssetType.SCENE,
|
||||
extra_metadata={"scene_ref_asset_id": payload.scene_ref_asset_id},
|
||||
)
|
||||
|
||||
15
app/workers/activities/texture_activities.py
Normal file
15
app/workers/activities/texture_activities.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Texture mock activity."""
|
||||
|
||||
from temporalio import activity
|
||||
|
||||
from app.domain.enums import AssetType
|
||||
from app.workers.activities.tryon_activities import execute_asset_step
|
||||
from app.workers.workflows.types import MockActivityResult, StepActivityInput
|
||||
|
||||
|
||||
@activity.defn
|
||||
async def run_texture_activity(payload: StepActivityInput) -> MockActivityResult:
|
||||
"""Mock garment texture enhancement."""
|
||||
|
||||
return await execute_asset_step(payload, AssetType.TEXTURE)
|
||||
|
||||
170
app/workers/activities/tryon_activities.py
Normal file
170
app/workers/activities/tryon_activities.py
Normal file
@@ -0,0 +1,170 @@
|
||||
"""Prepare-model and try-on mock activities plus shared helpers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict, is_dataclass
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
from temporalio import activity
|
||||
|
||||
from app.domain.enums import AssetType, OrderStatus, StepStatus
|
||||
from app.infra.db.models.asset import AssetORM
|
||||
from app.infra.db.models.order import OrderORM
|
||||
from app.infra.db.models.workflow_run import WorkflowRunORM
|
||||
from app.infra.db.models.workflow_step import WorkflowStepORM
|
||||
from app.infra.db.session import get_session_factory
|
||||
from app.workers.workflows.types import MockActivityResult, StepActivityInput
|
||||
|
||||
|
||||
def utc_now() -> datetime:
    """Return the current timezone-aware UTC timestamp."""

    return datetime.now(tz=timezone.utc)
|
||||
|
||||
|
||||
def jsonable(value: Any) -> Any:
|
||||
"""Convert enums, dataclasses, and nested values to JSON-safe structures."""
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, Enum):
|
||||
return value.value
|
||||
if isinstance(value, datetime):
|
||||
return value.isoformat()
|
||||
if is_dataclass(value):
|
||||
return jsonable(asdict(value))
|
||||
if isinstance(value, dict):
|
||||
return {key: jsonable(item) for key, item in value.items() if item is not None}
|
||||
if isinstance(value, (list, tuple, set)):
|
||||
return [jsonable(item) for item in value]
|
||||
return value
|
||||
|
||||
|
||||
def mock_uri(order_id: int, step_name: str, filename: str = "result.png") -> str:
    """Build a deterministic-looking mock URI for an order step.

    Fix: the original body never used the ``filename`` parameter (the
    f-string ended with a stray literal instead of ``{filename}``), so the
    filenames callers pass — e.g. "final.png" from the export activity and
    "candidate.png" from QC — were silently dropped.

    The short uuid4 token keeps URIs unique across retries of the same step.
    """

    return f"mock://orders/{order_id}/{step_name}/{uuid4().hex[:8]}-{filename}"
|
||||
|
||||
|
||||
async def load_order_and_run(session, order_id: int, workflow_run_id: int) -> tuple[OrderORM, WorkflowRunORM]:
    """Load the order and workflow run required by an activity.

    Raises ValueError when either record is missing so the activity fails
    loudly instead of operating on partial state.
    """

    order = await session.get(OrderORM, order_id)
    run = await session.get(WorkflowRunORM, workflow_run_id)
    if order is None or run is None:
        raise ValueError("Order or workflow run not found for activity execution")
    return order, run
|
||||
|
||||
|
||||
def create_step_record(payload: StepActivityInput) -> WorkflowStepORM:
    """Create a running workflow step row for an activity execution."""

    record = WorkflowStepORM(
        workflow_run_id=payload.workflow_run_id,
        step_name=payload.step_name,
        step_status=StepStatus.RUNNING,
        input_json=jsonable(payload),
        started_at=utc_now(),
    )
    return record
|
||||
|
||||
|
||||
async def execute_asset_step(
    payload: StepActivityInput,
    asset_type: AssetType,
    *,
    score: float = 0.95,
    filename: str = "result.png",
    message: str = "mock success",
    extra_metadata: dict[str, Any] | None = None,
    finalize: bool = False,
) -> MockActivityResult:
    """Persist a mock asset-producing step and return its result.

    Shared by every asset-producing activity: records a RUNNING step row,
    creates one asset of *asset_type*, then closes the step as SUCCEEDED.
    With finalize=True the asset also becomes the order's final asset and
    order/run move to SUCCEEDED. On any exception the step, order, and run
    are marked FAILED before re-raising (so Temporal sees the failure).
    """

    async with get_session_factory()() as session:
        order, workflow_run = await load_order_and_run(session, payload.order_id, payload.workflow_run_id)
        step = create_step_record(payload)
        session.add(step)

        # Reflect progress before doing the work so observers see RUNNING.
        order.status = OrderStatus.RUNNING
        workflow_run.status = OrderStatus.RUNNING
        workflow_run.current_step = payload.step_name
        await session.flush()

        try:
            # Merge caller metadata, lineage ids, and per-step extras;
            # later keys win, and None entries are stripped.
            metadata = {
                **payload.metadata,
                "source_asset_id": payload.source_asset_id,
                "selected_asset_id": payload.selected_asset_id,
                **(extra_metadata or {}),
            }
            metadata = {key: value for key, value in metadata.items() if value is not None}

            asset = AssetORM(
                order_id=payload.order_id,
                asset_type=asset_type,
                step_name=payload.step_name,
                uri=mock_uri(payload.order_id, payload.step_name.value, filename),
                metadata_json=jsonable(metadata),
            )
            session.add(asset)
            # Flush to obtain the autoincrement id before building the result.
            await session.flush()

            result = MockActivityResult(
                step_name=payload.step_name,
                success=True,
                asset_id=asset.id,
                uri=asset.uri,
                score=score,
                passed=True,
                message=message,
                metadata=jsonable(metadata) or {},
            )

            if finalize:
                # Terminal step: record the final asset and close out the run.
                order.final_asset_id = asset.id
                order.status = OrderStatus.SUCCEEDED
                workflow_run.status = OrderStatus.SUCCEEDED

            step.step_status = StepStatus.SUCCEEDED
            step.output_json = jsonable(result)
            step.ended_at = utc_now()
            await session.commit()
            return result
        except Exception as exc:
            # Persist the failure state, then re-raise for Temporal's retry logic.
            step.step_status = StepStatus.FAILED
            step.error_message = str(exc)
            step.ended_at = utc_now()
            order.status = OrderStatus.FAILED
            workflow_run.status = OrderStatus.FAILED
            await session.commit()
            raise
|
||||
|
||||
|
||||
@activity.defn
async def prepare_model_activity(payload: StepActivityInput) -> MockActivityResult:
    """Mock model preparation for the pipeline."""

    # Capture every input reference on the prepared-model asset for traceability.
    preparation_inputs = {
        "model_id": payload.model_id,
        "pose_id": payload.pose_id,
        "garment_asset_id": payload.garment_asset_id,
        "scene_ref_asset_id": payload.scene_ref_asset_id,
    }
    return await execute_asset_step(
        payload, AssetType.PREPARED_MODEL, extra_metadata=preparation_inputs
    )
|
||||
|
||||
|
||||
@activity.defn
async def run_tryon_activity(payload: StepActivityInput) -> MockActivityResult:
    """Mock try-on rendering."""

    # Link back to the prepared-model asset this render consumed.
    lineage = {"prepared_asset_id": payload.source_asset_id}
    return await execute_asset_step(payload, AssetType.TRYON, extra_metadata=lineage)
|
||||
84
app/workers/runner.py
Normal file
84
app/workers/runner.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""Temporal worker runner."""
|
||||
|
||||
import asyncio
|
||||
from contextlib import AsyncExitStack
|
||||
|
||||
from temporalio.client import Client
|
||||
from temporalio.worker import Worker
|
||||
|
||||
from app.infra.temporal.client import get_temporal_client
|
||||
from app.infra.temporal.task_queues import (
|
||||
IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_EXPORT_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_POST_PROCESS_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_QC_TASK_QUEUE,
|
||||
)
|
||||
from app.workers.activities.export_activities import run_export_activity
|
||||
from app.workers.activities.face_activities import run_face_activity
|
||||
from app.workers.activities.fusion_activities import run_fusion_activity
|
||||
from app.workers.activities.qc_activities import run_qc_activity
|
||||
from app.workers.activities.review_activities import (
|
||||
complete_review_wait_activity,
|
||||
mark_waiting_for_review_activity,
|
||||
mark_workflow_failed_activity,
|
||||
)
|
||||
from app.workers.activities.scene_activities import run_scene_activity
|
||||
from app.workers.activities.texture_activities import run_texture_activity
|
||||
from app.workers.activities.tryon_activities import prepare_model_activity, run_tryon_activity
|
||||
from app.workers.workflows.low_end_pipeline import LowEndPipelineWorkflow
|
||||
from app.workers.workflows.mid_end_pipeline import MidEndPipelineWorkflow
|
||||
|
||||
|
||||
def build_workers(client: Client) -> list[Worker]:
    """Assemble one Worker per task queue used by this MVP.

    The control-queue worker hosts both workflow definitions together
    with the orchestration activities; every other worker hosts only
    the activities belonging to its queue.
    """

    control_worker = Worker(
        client,
        task_queue=IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
        workflows=[LowEndPipelineWorkflow, MidEndPipelineWorkflow],
        activities=[
            prepare_model_activity,
            mark_waiting_for_review_activity,
            complete_review_wait_activity,
            mark_workflow_failed_activity,
        ],
    )
    # Queue -> activities map; insertion order keeps the returned worker
    # list in the same order as before.
    activity_queues = {
        IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE: [run_tryon_activity, run_scene_activity],
        IMAGE_PIPELINE_POST_PROCESS_TASK_QUEUE: [run_texture_activity, run_face_activity, run_fusion_activity],
        IMAGE_PIPELINE_QC_TASK_QUEUE: [run_qc_activity],
        IMAGE_PIPELINE_EXPORT_TASK_QUEUE: [run_export_activity],
    }
    workers = [control_worker]
    for queue_name, queue_activities in activity_queues.items():
        workers.append(Worker(client, task_queue=queue_name, activities=queue_activities))
    return workers
|
||||
|
||||
|
||||
async def run_workers() -> None:
    """Start all Temporal workers and keep the process alive."""

    temporal_client = await get_temporal_client()
    async with AsyncExitStack() as worker_stack:
        # Enter every worker's async context so they all poll concurrently
        # and are shut down together when the stack unwinds.
        for temporal_worker in build_workers(temporal_client):
            await worker_stack.enter_async_context(temporal_worker)
        # An Event that is never set: blocks forever until the process
        # is terminated externally.
        await asyncio.Event().wait()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Entry point for running the worker process directly.
    asyncio.run(run_workers())
|
||||
|
||||
152
app/workers/workflows/low_end_pipeline.py
Normal file
152
app/workers/workflows/low_end_pipeline.py
Normal file
@@ -0,0 +1,152 @@
|
||||
"""Low-end image pipeline workflow."""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from temporalio import workflow
|
||||
from temporalio.common import RetryPolicy
|
||||
|
||||
with workflow.unsafe.imports_passed_through():
|
||||
from app.domain.enums import OrderStatus, WorkflowStepName
|
||||
from app.infra.temporal.task_queues import (
|
||||
IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_EXPORT_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_QC_TASK_QUEUE,
|
||||
)
|
||||
from app.workers.activities.export_activities import run_export_activity
|
||||
from app.workers.activities.qc_activities import run_qc_activity
|
||||
from app.workers.activities.review_activities import mark_workflow_failed_activity
|
||||
from app.workers.activities.scene_activities import run_scene_activity
|
||||
from app.workers.activities.tryon_activities import prepare_model_activity, run_tryon_activity
|
||||
from app.workers.workflows.types import (
|
||||
PipelineWorkflowInput,
|
||||
StepActivityInput,
|
||||
WorkflowFailureActivityInput,
|
||||
)
|
||||
|
||||
|
||||
# Per-attempt execution budget applied to every activity in this workflow.
ACTIVITY_TIMEOUT = timedelta(seconds=30)
# Retry transient activity failures with exponential backoff (1s, 2s, 4s),
# giving up after three attempts.
ACTIVITY_RETRY_POLICY = RetryPolicy(
    initial_interval=timedelta(seconds=1),
    backoff_coefficient=2.0,
    maximum_attempts=3,
)
|
||||
|
||||
|
||||
@workflow.defn
class LowEndPipelineWorkflow:
    """Low-end fully automated image pipeline.

    Runs prepare-model -> try-on -> scene -> QC -> export with no human
    review. A QC failure or an unhandled activity error marks the run as
    failed via the control task queue.
    """

    @workflow.run
    async def run(self, payload: PipelineWorkflowInput) -> dict[str, int | str | None]:
        """Execute the low-end workflow from start to finish.

        Args:
            payload: Order/run identifiers plus the input asset ids.

        Returns:
            Summary dict with ``order_id``, terminal ``status`` and
            ``final_asset_id`` (``None`` when the run failed).
        """

        # Track the active step so the except-branch can report an
        # accurate failure location.
        current_step = WorkflowStepName.PREPARE_MODEL
        try:
            prepared = await workflow.execute_activity(
                prepare_model_activity,
                StepActivityInput(
                    order_id=payload.order_id,
                    workflow_run_id=payload.workflow_run_id,
                    step_name=WorkflowStepName.PREPARE_MODEL,
                    model_id=payload.model_id,
                    pose_id=payload.pose_id,
                    garment_asset_id=payload.garment_asset_id,
                    scene_ref_asset_id=payload.scene_ref_asset_id,
                ),
                task_queue=IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
                start_to_close_timeout=ACTIVITY_TIMEOUT,
                retry_policy=ACTIVITY_RETRY_POLICY,
            )

            current_step = WorkflowStepName.TRYON
            tryon_result = await workflow.execute_activity(
                run_tryon_activity,
                StepActivityInput(
                    order_id=payload.order_id,
                    workflow_run_id=payload.workflow_run_id,
                    step_name=WorkflowStepName.TRYON,
                    # Try-on consumes the asset produced by prepare-model.
                    source_asset_id=prepared.asset_id,
                    garment_asset_id=payload.garment_asset_id,
                ),
                task_queue=IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE,
                start_to_close_timeout=ACTIVITY_TIMEOUT,
                retry_policy=ACTIVITY_RETRY_POLICY,
            )

            current_step = WorkflowStepName.SCENE
            scene_result = await workflow.execute_activity(
                run_scene_activity,
                StepActivityInput(
                    order_id=payload.order_id,
                    workflow_run_id=payload.workflow_run_id,
                    step_name=WorkflowStepName.SCENE,
                    source_asset_id=tryon_result.asset_id,
                    scene_ref_asset_id=payload.scene_ref_asset_id,
                ),
                task_queue=IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE,
                start_to_close_timeout=ACTIVITY_TIMEOUT,
                retry_policy=ACTIVITY_RETRY_POLICY,
            )

            current_step = WorkflowStepName.QC
            qc_result = await workflow.execute_activity(
                run_qc_activity,
                StepActivityInput(
                    order_id=payload.order_id,
                    workflow_run_id=payload.workflow_run_id,
                    step_name=WorkflowStepName.QC,
                    source_asset_id=scene_result.asset_id,
                ),
                task_queue=IMAGE_PIPELINE_QC_TASK_QUEUE,
                start_to_close_timeout=ACTIVITY_TIMEOUT,
                retry_policy=ACTIVITY_RETRY_POLICY,
            )

            # No human review in the low-end tier: a QC failure ends the
            # run as FAILED immediately.
            if not qc_result.passed:
                await self._mark_failed(payload, current_step, qc_result.message)
                return {"order_id": payload.order_id, "status": OrderStatus.FAILED.value, "final_asset_id": None}

            current_step = WorkflowStepName.EXPORT
            final_result = await workflow.execute_activity(
                run_export_activity,
                StepActivityInput(
                    order_id=payload.order_id,
                    workflow_run_id=payload.workflow_run_id,
                    step_name=WorkflowStepName.EXPORT,
                    # Export the first QC candidate; fall back to the scene
                    # output when QC produced no candidate list.
                    source_asset_id=(qc_result.candidate_asset_ids or [scene_result.asset_id])[0],
                ),
                task_queue=IMAGE_PIPELINE_EXPORT_TASK_QUEUE,
                start_to_close_timeout=ACTIVITY_TIMEOUT,
                retry_policy=ACTIVITY_RETRY_POLICY,
            )
            return {
                "order_id": payload.order_id,
                "status": OrderStatus.SUCCEEDED.value,
                "final_asset_id": final_result.asset_id,
            }
        except Exception as exc:
            # Persist the failure state, then re-raise so Temporal records
            # the workflow execution itself as failed.
            await self._mark_failed(payload, current_step, str(exc))
            raise

    async def _mark_failed(
        self,
        payload: PipelineWorkflowInput,
        current_step: WorkflowStepName,
        message: str,
    ) -> None:
        """Persist workflow failure state.

        Args:
            payload: Workflow input carrying order/run identifiers.
            current_step: Step that was active when the failure occurred.
            message: Human-readable failure reason.
        """

        await workflow.execute_activity(
            mark_workflow_failed_activity,
            WorkflowFailureActivityInput(
                order_id=payload.order_id,
                workflow_run_id=payload.workflow_run_id,
                current_step=current_step,
                message=message,
            ),
            task_queue=IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
            start_to_close_timeout=ACTIVITY_TIMEOUT,
            retry_policy=ACTIVITY_RETRY_POLICY,
        )
|
||||
|
||||
315
app/workers/workflows/mid_end_pipeline.py
Normal file
315
app/workers/workflows/mid_end_pipeline.py
Normal file
@@ -0,0 +1,315 @@
|
||||
"""Mid-end image pipeline workflow with review signal support."""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from temporalio import workflow
|
||||
from temporalio.common import RetryPolicy
|
||||
|
||||
with workflow.unsafe.imports_passed_through():
|
||||
from app.domain.enums import OrderStatus, ReviewDecision, WorkflowStepName
|
||||
from app.infra.temporal.task_queues import (
|
||||
IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_EXPORT_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_POST_PROCESS_TASK_QUEUE,
|
||||
IMAGE_PIPELINE_QC_TASK_QUEUE,
|
||||
)
|
||||
from app.workers.activities.export_activities import run_export_activity
|
||||
from app.workers.activities.face_activities import run_face_activity
|
||||
from app.workers.activities.fusion_activities import run_fusion_activity
|
||||
from app.workers.activities.qc_activities import run_qc_activity
|
||||
from app.workers.activities.review_activities import (
|
||||
complete_review_wait_activity,
|
||||
mark_waiting_for_review_activity,
|
||||
mark_workflow_failed_activity,
|
||||
)
|
||||
from app.workers.activities.scene_activities import run_scene_activity
|
||||
from app.workers.activities.texture_activities import run_texture_activity
|
||||
from app.workers.activities.tryon_activities import prepare_model_activity, run_tryon_activity
|
||||
from app.workers.workflows.types import (
|
||||
MockActivityResult,
|
||||
PipelineWorkflowInput,
|
||||
ReviewResolutionActivityInput,
|
||||
ReviewSignalPayload,
|
||||
ReviewWaitActivityInput,
|
||||
StepActivityInput,
|
||||
WorkflowFailureActivityInput,
|
||||
)
|
||||
|
||||
|
||||
# Per-attempt execution budget applied to every activity in this workflow.
ACTIVITY_TIMEOUT = timedelta(seconds=30)
# Retry transient activity failures with exponential backoff (1s, 2s, 4s),
# giving up after three attempts.
ACTIVITY_RETRY_POLICY = RetryPolicy(
    initial_interval=timedelta(seconds=1),
    backoff_coefficient=2.0,
    maximum_attempts=3,
)
|
||||
|
||||
|
||||
@workflow.defn
class MidEndPipelineWorkflow:
    """Mid-end workflow that pauses for human review and supports reruns.

    Pipeline: prepare-model -> try-on -> scene -> texture -> face ->
    fusion -> QC, then a review loop in which a human can approve,
    reject, or request a rerun of the scene/face/fusion stages.
    """

    def __init__(self) -> None:
        # Latest review decision delivered via the ``submit_review``
        # signal; ``None`` while no (unconsumed) decision is pending.
        self._review_payload: ReviewSignalPayload | None = None

    @workflow.signal
    def submit_review(self, payload: ReviewSignalPayload) -> None:
        """Receive a review decision from the API layer."""

        self._review_payload = payload

    @workflow.run
    async def run(self, payload: PipelineWorkflowInput) -> dict[str, int | str | None]:
        """Execute the mid-end workflow with a human review loop.

        Args:
            payload: Order/run identifiers plus the input asset ids.

        Returns:
            Summary dict with ``order_id``, terminal ``status`` and
            ``final_asset_id`` (``None`` when the run failed).
        """

        # Track the active step so the except-branch can report an
        # accurate failure location.
        current_step = WorkflowStepName.PREPARE_MODEL
        try:
            prepared = await workflow.execute_activity(
                prepare_model_activity,
                StepActivityInput(
                    order_id=payload.order_id,
                    workflow_run_id=payload.workflow_run_id,
                    step_name=WorkflowStepName.PREPARE_MODEL,
                    model_id=payload.model_id,
                    pose_id=payload.pose_id,
                    garment_asset_id=payload.garment_asset_id,
                    scene_ref_asset_id=payload.scene_ref_asset_id,
                ),
                task_queue=IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
                start_to_close_timeout=ACTIVITY_TIMEOUT,
                retry_policy=ACTIVITY_RETRY_POLICY,
            )

            current_step = WorkflowStepName.TRYON
            tryon_result = await workflow.execute_activity(
                run_tryon_activity,
                StepActivityInput(
                    order_id=payload.order_id,
                    workflow_run_id=payload.workflow_run_id,
                    step_name=WorkflowStepName.TRYON,
                    # Try-on consumes the asset produced by prepare-model.
                    source_asset_id=prepared.asset_id,
                    garment_asset_id=payload.garment_asset_id,
                ),
                task_queue=IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE,
                start_to_close_timeout=ACTIVITY_TIMEOUT,
                retry_policy=ACTIVITY_RETRY_POLICY,
            )

            # Each stage chains off the previous stage's output asset.
            current_step = WorkflowStepName.SCENE
            scene_result = await self._run_scene(payload, tryon_result.asset_id)

            current_step = WorkflowStepName.TEXTURE
            texture_result = await self._run_texture(payload, scene_result.asset_id)

            current_step = WorkflowStepName.FACE
            face_result = await self._run_face(payload, texture_result.asset_id)

            current_step = WorkflowStepName.FUSION
            fusion_result = await self._run_fusion(payload, scene_result.asset_id, face_result.asset_id)

            current_step = WorkflowStepName.QC
            qc_result = await self._run_qc(payload, fusion_result.asset_id)
            if not qc_result.passed:
                await self._mark_failed(payload, current_step, qc_result.message)
                return {"order_id": payload.order_id, "status": OrderStatus.FAILED.value, "final_asset_id": None}

            # Review loop: wait for a human decision, then either export
            # (approve), fail (reject), or rerun the requested stages and
            # re-run QC before looping back for another review.
            while True:
                current_step = WorkflowStepName.REVIEW
                await workflow.execute_activity(
                    mark_waiting_for_review_activity,
                    ReviewWaitActivityInput(
                        order_id=payload.order_id,
                        workflow_run_id=payload.workflow_run_id,
                        candidate_asset_ids=qc_result.candidate_asset_ids,
                    ),
                    task_queue=IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
                    start_to_close_timeout=ACTIVITY_TIMEOUT,
                    retry_policy=ACTIVITY_RETRY_POLICY,
                )

                review_payload = await self._wait_for_review()
                # Record the decision before acting on it.
                await workflow.execute_activity(
                    complete_review_wait_activity,
                    ReviewResolutionActivityInput(
                        order_id=payload.order_id,
                        workflow_run_id=payload.workflow_run_id,
                        decision=review_payload.decision,
                        reviewer_id=review_payload.reviewer_id,
                        selected_asset_id=review_payload.selected_asset_id,
                        comment=review_payload.comment,
                    ),
                    task_queue=IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
                    start_to_close_timeout=ACTIVITY_TIMEOUT,
                    retry_policy=ACTIVITY_RETRY_POLICY,
                )

                if review_payload.decision == ReviewDecision.APPROVE:
                    current_step = WorkflowStepName.EXPORT
                    # Prefer the reviewer's explicit pick; otherwise export
                    # the first QC candidate, falling back to the fusion
                    # output when QC produced no candidate list.
                    export_source_id = review_payload.selected_asset_id
                    if export_source_id is None:
                        export_source_id = (qc_result.candidate_asset_ids or [fusion_result.asset_id])[0]
                    final_result = await workflow.execute_activity(
                        run_export_activity,
                        StepActivityInput(
                            order_id=payload.order_id,
                            workflow_run_id=payload.workflow_run_id,
                            step_name=WorkflowStepName.EXPORT,
                            source_asset_id=export_source_id,
                        ),
                        task_queue=IMAGE_PIPELINE_EXPORT_TASK_QUEUE,
                        start_to_close_timeout=ACTIVITY_TIMEOUT,
                        retry_policy=ACTIVITY_RETRY_POLICY,
                    )
                    return {
                        "order_id": payload.order_id,
                        "status": OrderStatus.SUCCEEDED.value,
                        "final_asset_id": final_result.asset_id,
                    }

                if review_payload.decision == ReviewDecision.REJECT:
                    return {"order_id": payload.order_id, "status": OrderStatus.FAILED.value, "final_asset_id": None}

                # Rerun branches re-execute from the requested stage down;
                # later stages reuse earlier results still in scope.
                if review_payload.decision == ReviewDecision.RERUN_SCENE:
                    current_step = WorkflowStepName.SCENE
                    scene_result = await self._run_scene(payload, tryon_result.asset_id)
                    current_step = WorkflowStepName.TEXTURE
                    texture_result = await self._run_texture(payload, scene_result.asset_id)
                    current_step = WorkflowStepName.FACE
                    face_result = await self._run_face(payload, texture_result.asset_id)
                    current_step = WorkflowStepName.FUSION
                    fusion_result = await self._run_fusion(payload, scene_result.asset_id, face_result.asset_id)
                elif review_payload.decision == ReviewDecision.RERUN_FACE:
                    current_step = WorkflowStepName.FACE
                    face_result = await self._run_face(payload, texture_result.asset_id)
                    current_step = WorkflowStepName.FUSION
                    fusion_result = await self._run_fusion(payload, scene_result.asset_id, face_result.asset_id)
                elif review_payload.decision == ReviewDecision.RERUN_FUSION:
                    current_step = WorkflowStepName.FUSION
                    fusion_result = await self._run_fusion(payload, scene_result.asset_id, face_result.asset_id)

                # NOTE(review): an unrecognized decision skips all branches
                # above and re-runs QC on the unchanged fusion output —
                # confirm this fall-through is intended.
                current_step = WorkflowStepName.QC
                qc_result = await self._run_qc(payload, fusion_result.asset_id)
                if not qc_result.passed:
                    await self._mark_failed(payload, current_step, qc_result.message)
                    return {"order_id": payload.order_id, "status": OrderStatus.FAILED.value, "final_asset_id": None}
        except Exception as exc:
            # Persist the failure state, then re-raise so Temporal records
            # the workflow execution itself as failed.
            await self._mark_failed(payload, current_step, str(exc))
            raise

    async def _wait_for_review(self) -> ReviewSignalPayload:
        """Suspend the workflow until a review signal arrives."""

        if self._review_payload is None:
            await workflow.wait_condition(lambda: self._review_payload is not None)
        assert self._review_payload is not None
        review_payload = self._review_payload
        # Clear the slot so the next loop iteration waits for a fresh signal.
        self._review_payload = None
        return review_payload

    async def _run_scene(self, payload: PipelineWorkflowInput, source_asset_id: int | None) -> MockActivityResult:
        """Execute the scene activity against *source_asset_id*."""

        return await workflow.execute_activity(
            run_scene_activity,
            StepActivityInput(
                order_id=payload.order_id,
                workflow_run_id=payload.workflow_run_id,
                step_name=WorkflowStepName.SCENE,
                source_asset_id=source_asset_id,
                scene_ref_asset_id=payload.scene_ref_asset_id,
            ),
            task_queue=IMAGE_PIPELINE_IMAGE_GEN_TASK_QUEUE,
            start_to_close_timeout=ACTIVITY_TIMEOUT,
            retry_policy=ACTIVITY_RETRY_POLICY,
        )

    async def _run_texture(self, payload: PipelineWorkflowInput, source_asset_id: int | None) -> MockActivityResult:
        """Execute the texture activity against *source_asset_id*."""

        return await workflow.execute_activity(
            run_texture_activity,
            StepActivityInput(
                order_id=payload.order_id,
                workflow_run_id=payload.workflow_run_id,
                step_name=WorkflowStepName.TEXTURE,
                source_asset_id=source_asset_id,
            ),
            task_queue=IMAGE_PIPELINE_POST_PROCESS_TASK_QUEUE,
            start_to_close_timeout=ACTIVITY_TIMEOUT,
            retry_policy=ACTIVITY_RETRY_POLICY,
        )

    async def _run_face(self, payload: PipelineWorkflowInput, source_asset_id: int | None) -> MockActivityResult:
        """Execute the face activity against *source_asset_id*."""

        return await workflow.execute_activity(
            run_face_activity,
            StepActivityInput(
                order_id=payload.order_id,
                workflow_run_id=payload.workflow_run_id,
                step_name=WorkflowStepName.FACE,
                source_asset_id=source_asset_id,
            ),
            task_queue=IMAGE_PIPELINE_POST_PROCESS_TASK_QUEUE,
            start_to_close_timeout=ACTIVITY_TIMEOUT,
            retry_policy=ACTIVITY_RETRY_POLICY,
        )

    async def _run_fusion(
        self,
        payload: PipelineWorkflowInput,
        source_asset_id: int | None,
        face_asset_id: int | None,
    ) -> MockActivityResult:
        """Execute the fusion activity, combining the scene output with the face output."""

        return await workflow.execute_activity(
            run_fusion_activity,
            StepActivityInput(
                order_id=payload.order_id,
                workflow_run_id=payload.workflow_run_id,
                step_name=WorkflowStepName.FUSION,
                source_asset_id=source_asset_id,
                # The face asset rides in the ``selected_asset_id`` slot of
                # the shared activity input.
                selected_asset_id=face_asset_id,
            ),
            task_queue=IMAGE_PIPELINE_POST_PROCESS_TASK_QUEUE,
            start_to_close_timeout=ACTIVITY_TIMEOUT,
            retry_policy=ACTIVITY_RETRY_POLICY,
        )

    async def _run_qc(self, payload: PipelineWorkflowInput, source_asset_id: int | None) -> MockActivityResult:
        """Execute the QC activity against *source_asset_id*."""

        return await workflow.execute_activity(
            run_qc_activity,
            StepActivityInput(
                order_id=payload.order_id,
                workflow_run_id=payload.workflow_run_id,
                step_name=WorkflowStepName.QC,
                source_asset_id=source_asset_id,
            ),
            task_queue=IMAGE_PIPELINE_QC_TASK_QUEUE,
            start_to_close_timeout=ACTIVITY_TIMEOUT,
            retry_policy=ACTIVITY_RETRY_POLICY,
        )

    async def _mark_failed(
        self,
        payload: PipelineWorkflowInput,
        current_step: WorkflowStepName,
        message: str,
    ) -> None:
        """Persist workflow failure state.

        Args:
            payload: Workflow input carrying order/run identifiers.
            current_step: Step that was active when the failure occurred.
            message: Human-readable failure reason.
        """

        await workflow.execute_activity(
            mark_workflow_failed_activity,
            WorkflowFailureActivityInput(
                order_id=payload.order_id,
                workflow_run_id=payload.workflow_run_id,
                current_step=current_step,
                message=message,
            ),
            task_queue=IMAGE_PIPELINE_CONTROL_TASK_QUEUE,
            start_to_close_timeout=ACTIVITY_TIMEOUT,
            retry_policy=ACTIVITY_RETRY_POLICY,
        )
|
||||
137
app/workers/workflows/types.py
Normal file
137
app/workers/workflows/types.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""Shared workflow and activity payload types."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
from app.domain.enums import CustomerLevel, OrderStatus, ReviewDecision, ServiceMode, WorkflowStepName
|
||||
|
||||
|
||||
def _coerce_enum(value: Any, enum_cls: type[Enum]) -> Any:
|
||||
"""Coerce raw Temporal payload values back into enum instances."""
|
||||
|
||||
if value is None or isinstance(value, enum_cls):
|
||||
return value
|
||||
if isinstance(value, list):
|
||||
value = "".join(str(item) for item in value)
|
||||
return enum_cls(value)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class PipelineWorkflowInput:
    """Temporal workflow input for an image pipeline order."""

    # Identifier of the order being processed.
    order_id: int
    # Identifier of the workflow-run record tracking this execution.
    workflow_run_id: int
    # Tier that selects which pipeline workflow handles the order.
    customer_level: CustomerLevel
    service_mode: ServiceMode
    # Model/pose selection consumed by the prepare-model step.
    model_id: int
    pose_id: int
    # Input asset ids referenced by the try-on and scene steps.
    garment_asset_id: int
    scene_ref_asset_id: int

    def __post_init__(self) -> None:
        """Normalize enum-like values after Temporal deserialization."""

        self.customer_level = _coerce_enum(self.customer_level, CustomerLevel)
        self.service_mode = _coerce_enum(self.service_mode, ServiceMode)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class StepActivityInput:
    """Input payload shared by the mock pipeline activities."""

    order_id: int
    workflow_run_id: int
    # Pipeline step this activity invocation belongs to.
    step_name: WorkflowStepName
    # Optional per-step inputs; fields a given step does not use stay None.
    model_id: int | None = None
    pose_id: int | None = None
    garment_asset_id: int | None = None
    scene_ref_asset_id: int | None = None
    # Output asset of the preceding step, consumed as this step's input.
    source_asset_id: int | None = None
    # Secondary asset slot (e.g. fusion passes the face asset here).
    selected_asset_id: int | None = None
    # Free-form extra data attached by the caller.
    metadata: dict[str, Any] = field(default_factory=dict)

    def __post_init__(self) -> None:
        """Normalize enum-like values after Temporal deserialization."""

        self.step_name = _coerce_enum(self.step_name, WorkflowStepName)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class MockActivityResult:
    """Common mock activity result structure."""

    # Step that produced this result.
    step_name: WorkflowStepName
    success: bool
    # Asset produced by the step, if any, and its storage URI.
    asset_id: int | None
    uri: str | None
    # QC-specific fields; None/defaults for non-QC steps.
    score: float | None = None
    passed: bool | None = None
    message: str = "mock success"
    # Assets offered for review/export selection (QC populates this).
    candidate_asset_ids: list[int] = field(default_factory=list)
    metadata: dict[str, Any] = field(default_factory=dict)

    def __post_init__(self) -> None:
        """Normalize enum-like values after Temporal deserialization."""

        self.step_name = _coerce_enum(self.step_name, WorkflowStepName)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class ReviewSignalPayload:
    """Signal payload sent from the API to the mid-end workflow."""

    # Reviewer's verdict: approve, reject, or rerun a stage.
    decision: ReviewDecision
    reviewer_id: int
    # Asset the reviewer picked for export (approve only); optional.
    selected_asset_id: int | None = None
    comment: str | None = None

    def __post_init__(self) -> None:
        """Normalize enum-like values after Temporal deserialization."""

        self.decision = _coerce_enum(self.decision, ReviewDecision)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class ReviewWaitActivityInput:
    """Input for marking a workflow as waiting for review."""

    order_id: int
    workflow_run_id: int
    # Assets presented to the reviewer as export candidates.
    candidate_asset_ids: list[int] = field(default_factory=list)
    comment: str | None = None
|
||||
|
||||
|
||||
@dataclass(slots=True)
class ReviewResolutionActivityInput:
    """Input for completing a waiting review state."""

    order_id: int
    workflow_run_id: int
    # Decision that resolved the review, and who made it.
    decision: ReviewDecision
    reviewer_id: int
    selected_asset_id: int | None = None
    comment: str | None = None

    def __post_init__(self) -> None:
        """Normalize enum-like values after Temporal deserialization."""

        self.decision = _coerce_enum(self.decision, ReviewDecision)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class WorkflowFailureActivityInput:
    """Input for marking a workflow as failed."""

    order_id: int
    workflow_run_id: int
    # Step that was active when the failure occurred.
    current_step: WorkflowStepName
    # Human-readable failure reason.
    message: str
    # Terminal status to record; defaults to FAILED.
    status: OrderStatus = OrderStatus.FAILED

    def __post_init__(self) -> None:
        """Normalize enum-like values after Temporal deserialization."""

        self.current_step = _coerce_enum(self.current_step, WorkflowStepName)
        self.status = _coerce_enum(self.status, OrderStatus)
|
||||
Reference in New Issue
Block a user