itcloud/backend/src/app/api/v1/batch.py

175 lines
4.6 KiB
Python
Raw Normal View History

2025-12-30 23:18:13 +01:00
"""Batch operations API routes."""
2026-01-05 18:20:34 +01:00
import os
from pathlib import Path
from fastapi import APIRouter, BackgroundTasks, status
from fastapi.responses import FileResponse
2025-12-30 23:18:13 +01:00
from app.api.dependencies import CurrentUser, DatabaseSession, S3ClientDep
from app.api.schemas import (
BatchDeleteRequest,
BatchDeleteResponse,
BatchDownloadRequest,
BatchMoveRequest,
BatchMoveResponse,
)
from app.services.batch_operations_service import BatchOperationsService
# All batch-operation endpoints live under the /batch prefix.
router = APIRouter(prefix="/batch", tags=["batch"])
@router.post("/delete", response_model=BatchDeleteResponse)
async def batch_delete(
    request: BatchDeleteRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
):
    """
    Delete multiple assets in one batch operation.

    Args:
        request: Batch delete request carrying the asset IDs.
        current_user: Current authenticated user.
        session: Database session.
        s3_client: S3 client.

    Returns:
        Deletion statistics.
    """
    service = BatchOperationsService(session, s3_client)
    return await service.delete_assets_batch(
        user_id=current_user.id,
        asset_ids=request.asset_ids,
    )
@router.post("/move", response_model=BatchMoveResponse)
async def batch_move(
    request: BatchMoveRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
):
    """
    Move multiple assets into a target folder in one batch operation.

    Args:
        request: Batch move request carrying the asset IDs and target folder.
        current_user: Current authenticated user.
        session: Database session.
        s3_client: S3 client.

    Returns:
        Move statistics.
    """
    service = BatchOperationsService(session, s3_client)
    return await service.move_assets_batch(
        user_id=current_user.id,
        asset_ids=request.asset_ids,
        target_folder_id=request.folder_id,
    )
@router.post("/download")
async def batch_download(
    request: BatchDownloadRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
    background_tasks: BackgroundTasks,
):
    """
    Download multiple assets as a ZIP archive using streaming.

    Uses a temp file and FileResponse so the full ZIP is never held in
    memory; the temp file is removed after the response has been sent.

    Args:
        request: Batch download request carrying the asset IDs.
        current_user: Current authenticated user.
        session: Database session.
        s3_client: S3 client.
        background_tasks: Background tasks used to schedule temp-file cleanup.

    Returns:
        Streaming ZIP file response with a correct Content-Disposition.
    """
    batch_service = BatchOperationsService(session, s3_client)
    temp_zip_path, filename = await batch_service.download_assets_batch(
        user_id=current_user.id,
        asset_ids=request.asset_ids,
    )

    # Delete the temp ZIP only after the response body has been streamed.
    def cleanup_temp_file() -> None:
        try:
            Path(temp_zip_path).unlink(missing_ok=True)
        except OSError:
            # Best-effort cleanup: a stale temp file beats failing the request.
            pass

    background_tasks.add_task(cleanup_temp_file)

    # BUG FIX: the previous explicit Content-Disposition header hard-coded a
    # bogus literal filename, and because FileResponse only *setdefault*s the
    # header derived from `filename=`, the bogus value won for every download.
    # Passing `filename=` alone yields a correctly quoted/encoded header.
    return FileResponse(
        path=temp_zip_path,
        media_type="application/zip",
        filename=filename,
    )
@router.get("/folders/{folder_id}/download")
async def download_folder(
    folder_id: str,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
    background_tasks: BackgroundTasks,
):
    """
    Download all assets in a folder as a ZIP archive using streaming.

    Uses a temp file and FileResponse so the full ZIP is never held in
    memory; the temp file is removed after the response has been sent.

    Args:
        folder_id: Folder ID.
        current_user: Current authenticated user.
        session: Database session.
        s3_client: S3 client.
        background_tasks: Background tasks used to schedule temp-file cleanup.

    Returns:
        Streaming ZIP file response with a correct Content-Disposition.
    """
    batch_service = BatchOperationsService(session, s3_client)
    temp_zip_path, filename = await batch_service.download_folder(
        user_id=current_user.id,
        folder_id=folder_id,
    )

    # Delete the temp ZIP only after the response body has been streamed.
    def cleanup_temp_file() -> None:
        try:
            Path(temp_zip_path).unlink(missing_ok=True)
        except OSError:
            # Best-effort cleanup: a stale temp file beats failing the request.
            pass

    background_tasks.add_task(cleanup_temp_file)

    # BUG FIX: the previous explicit Content-Disposition header hard-coded a
    # bogus literal filename, and because FileResponse only *setdefault*s the
    # header derived from `filename=`, the bogus value won for every download.
    # Passing `filename=` alone yields a correctly quoted/encoded header.
    return FileResponse(
        path=temp_zip_path,
        media_type="application/zip",
        filename=filename,
    )