# itcloud/backend/src/app/api/v1/batch.py
"""Batch operations API routes."""
from pathlib import Path
from urllib.parse import quote

from fastapi import APIRouter, BackgroundTasks
from fastapi.responses import FileResponse

from app.api.dependencies import CurrentUser, DatabaseSession, S3ClientDep
from app.api.schemas import (
    BatchDeleteRequest,
    BatchDeleteResponse,
    BatchDownloadRequest,
    BatchMoveRequest,
    BatchMoveResponse,
)
from app.services.batch_operations_service import BatchOperationsService

router = APIRouter(prefix="/batch", tags=["batch"])

def make_content_disposition(filename: str) -> str:
    """
    Create a Content-Disposition header value with proper encoding for
    non-ASCII filenames. Uses RFC 5987/2231 encoding to support UTF-8
    filenames.

    Args:
        filename: Original filename (may contain non-ASCII characters)

    Returns:
        Properly formatted Content-Disposition header value
    """
    # ASCII-safe fallback (non-ASCII characters become "?")
    ascii_filename = filename.encode("ascii", errors="replace").decode("ascii")
    # UTF-8 percent-encoded filename (RFC 5987)
    utf8_filename = quote(filename.encode("utf-8"))
    # Send both forms for maximum client compatibility
    return f"attachment; filename=\"{ascii_filename}\"; filename*=UTF-8''{utf8_filename}"
@router.post("/delete", response_model=BatchDeleteResponse)
async def batch_delete(
    request: BatchDeleteRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
):
"""
Delete multiple assets.
Args:
request: Batch delete request
current_user: Current authenticated user
session: Database session
s3_client: S3 client
Returns:
Deletion statistics
"""
batch_service = BatchOperationsService(session, s3_client)
result = await batch_service.delete_assets_batch(
user_id=current_user.id,
asset_ids=request.asset_ids,
)
return result
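

# Illustrative client sketch (assumes the router is mounted under /api/v1 and
# an already-authenticated httpx.AsyncClient; the JSON field matches
# BatchDeleteRequest.asset_ids, and the IDs are hypothetical):
#
#     resp = await client.post(
#         "/api/v1/batch/delete",
#         json={"asset_ids": ["asset-1", "asset-2"]},
#     )
#     stats = resp.json()  # deletion statistics (BatchDeleteResponse)
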
@router.post("/move", response_model=BatchMoveResponse)
async def batch_move(
    request: BatchMoveRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
):
"""
Move multiple assets to a folder.
Args:
request: Batch move request
current_user: Current authenticated user
session: Database session
s3_client: S3 client
Returns:
Move statistics
"""
batch_service = BatchOperationsService(session, s3_client)
result = await batch_service.move_assets_batch(
user_id=current_user.id,
asset_ids=request.asset_ids,
target_folder_id=request.folder_id,
)
return result
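

# Illustrative client sketch (same assumptions as the delete example; the JSON
# fields match BatchMoveRequest, with hypothetical IDs):
#
#     resp = await client.post(
#         "/api/v1/batch/move",
#         json={"asset_ids": ["asset-1", "asset-2"], "folder_id": "folder-9"},
#     )
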
@router.post("/download")
async def batch_download(
    request: BatchDownloadRequest,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
    background_tasks: BackgroundTasks,
):
"""
2026-01-05 18:20:34 +01:00
Download multiple assets as a ZIP archive using streaming.
Uses temp file and FileResponse to avoid loading entire ZIP into memory.
Temp file is automatically cleaned up after response is sent.
2025-12-30 23:18:13 +01:00
Args:
request: Batch download request
current_user: Current authenticated user
session: Database session
s3_client: S3 client
2026-01-05 18:20:34 +01:00
background_tasks: Background tasks for cleanup
2025-12-30 23:18:13 +01:00
Returns:
ZIP file response
"""
    batch_service = BatchOperationsService(session, s3_client)
    temp_zip_path, filename = await batch_service.download_assets_batch(
        user_id=current_user.id,
        asset_ids=request.asset_ids,
    )

    # Schedule temp file cleanup after the response is sent
    def cleanup_temp_file():
        try:
            Path(temp_zip_path).unlink(missing_ok=True)
        except Exception:
            pass

    background_tasks.add_task(cleanup_temp_file)

    # Return the file using a streaming FileResponse. The explicit
    # Content-Disposition header takes precedence over the one FileResponse
    # derives from `filename`, so non-ASCII names get RFC 5987 treatment.
    return FileResponse(
        path=temp_zip_path,
        media_type="application/zip",
        filename=filename,
        headers={
            "Content-Disposition": make_content_disposition(filename),
        },
    )
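

# Illustrative client sketch for the streaming download (assumes httpx; the
# body is written to disk chunk by chunk instead of being buffered in memory):
#
#     async with client.stream(
#         "POST", "/api/v1/batch/download", json={"asset_ids": ["asset-1"]}
#     ) as resp:
#         with open("assets.zip", "wb") as fh:
#             async for chunk in resp.aiter_bytes():
#                 fh.write(chunk)
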
@router.get("/folders/{folder_id}/download")
async def download_folder(
    folder_id: str,
    current_user: CurrentUser,
    session: DatabaseSession,
    s3_client: S3ClientDep,
    background_tasks: BackgroundTasks,
):
"""
2026-01-05 18:20:34 +01:00
Download all assets in a folder as a ZIP archive using streaming.
Uses temp file and FileResponse to avoid loading entire ZIP into memory.
Temp file is automatically cleaned up after response is sent.
2025-12-30 23:18:13 +01:00
Args:
folder_id: Folder ID
current_user: Current authenticated user
session: Database session
s3_client: S3 client
2026-01-05 18:20:34 +01:00
background_tasks: Background tasks for cleanup
2025-12-30 23:18:13 +01:00
Returns:
ZIP file response
"""
    batch_service = BatchOperationsService(session, s3_client)
    temp_zip_path, filename = await batch_service.download_folder(
        user_id=current_user.id,
        folder_id=folder_id,
    )

    # Schedule temp file cleanup after the response is sent
    def cleanup_temp_file():
        try:
            Path(temp_zip_path).unlink(missing_ok=True)
        except Exception:
            pass

    background_tasks.add_task(cleanup_temp_file)

    # Return the file using a streaming FileResponse (Content-Disposition is
    # handled the same way as in batch_download)
    return FileResponse(
        path=temp_zip_path,
        media_type="application/zip",
        filename=filename,
        headers={
            "Content-Disposition": make_content_disposition(filename),
        },
    )
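

# Illustrative client sketch for the folder variant (GET with a path
# parameter; folder_id here is a hypothetical value):
#
#     resp = await client.get(f"/api/v1/batch/folders/{folder_id}/download")
#     open("folder.zip", "wb").write(resp.content)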