kanban-app/backend/app/services/storage/mock_adapter.py
2026-03-21 23:54:09 +03:00

217 lines
6.1 KiB
Python

"""Mock storage adapter for testing (in-memory storage)"""
import uuid
from datetime import datetime, timezone
from io import BytesIO
from typing import Optional

from app.services.storage.base import StorageAdapter
class MockStorageAdapter(StorageAdapter):
    """In-memory storage adapter for testing purposes.

    All state lives in class-level containers so it persists across
    adapter instances within a test run; call :meth:`clear_all` between
    tests to reset it.
    """

    # Class-level storage to persist across instances
    _files = {}  # {(bucket, object_name): BytesIO}
    _file_info = {}  # {(bucket, object_name): metadata dict}
    _buckets = set()  # Track created buckets

    def __init__(self):
        """Initialize mock storage (storage is shared at class level)"""
        pass

    def upload_file(
        self,
        file_data: BytesIO,
        bucket_name: str,
        object_name: str,
        content_type: str,
        length: Optional[int] = None,
    ) -> dict:
        """
        Upload a file to in-memory storage

        Args:
            file_data: File data as BytesIO
            bucket_name: Name of bucket
            object_name: Name of object in storage
            content_type: MIME type of file
            length: File size in bytes (optional; derived from the data
                when omitted)

        Returns:
            dict: Upload result with metadata
        """
        # Ensure bucket exists
        self.ensure_bucket_exists(bucket_name)
        # Snapshot the whole stream so later mutation of the caller's
        # buffer cannot affect stored data.
        file_data.seek(0)
        file_copy = BytesIO(file_data.read())
        # Derive size from the bytes actually stored when not provided
        # (avoids a separate seek-to-end/seek-back pass on the input).
        if length is None:
            length = file_copy.getbuffer().nbytes
        # Store in memory
        key = (bucket_name, object_name)
        self._files[key] = file_copy
        # Store metadata
        metadata = {
            "size": length,
            "content_type": content_type,
            "last_modified": datetime.now(timezone.utc),
            "etag": uuid.uuid4().hex,
        }
        self._file_info[key] = metadata
        return {
            "success": True,
            "bucket": bucket_name,
            "object_name": object_name,
            "size": length,
            "etag": metadata["etag"],
        }

    def download_file(self, bucket_name: str, object_name: str) -> BytesIO:
        """
        Download a file from in-memory storage

        Args:
            bucket_name: Name of bucket
            object_name: Name of the object in storage

        Returns:
            BytesIO: File data

        Raises:
            ValueError: If file doesn't exist
        """
        key = (bucket_name, object_name)
        if key not in self._files:
            raise ValueError(f"File {bucket_name}/{object_name} not found")
        # Return a copy so callers can't mutate the stored buffer
        self._files[key].seek(0)
        return BytesIO(self._files[key].read())

    def delete_file(self, bucket_name: str, object_name: str) -> bool:
        """
        Delete a file from in-memory storage

        Args:
            bucket_name: Name of bucket
            object_name: Name of the object in storage

        Returns:
            bool: True if successful, False if file doesn't exist
        """
        key = (bucket_name, object_name)
        if key not in self._files:
            return False
        # Remove from storage; pop() with a default so a missing metadata
        # entry (inconsistent state) can't raise KeyError after the data
        # has already been deleted.
        del self._files[key]
        self._file_info.pop(key, None)
        return True

    def generate_presigned_url(
        self, bucket_name: str, object_name: str, expires_in: int = 3600
    ) -> str:
        """
        Generate a mock presigned URL

        In tests, this returns a fake URL that looks like a real MinIO URL
        but doesn't actually need to work since we use Flask proxy routes.

        Args:
            bucket_name: Name of bucket
            object_name: Name of the object in storage
            expires_in: URL expiration time in seconds (ignored in mock)

        Returns:
            str: Mock presigned URL
        """
        return f"http://minio:9000/{bucket_name}/{object_name}?X-Amz-Token=mock"

    def file_exists(self, bucket_name: str, object_name: str) -> bool:
        """
        Check if a file exists in in-memory storage

        Args:
            bucket_name: Name of bucket
            object_name: Name of the object in storage

        Returns:
            bool: True if file exists
        """
        return (bucket_name, object_name) in self._files

    def get_file_info(self, bucket_name: str, object_name: str) -> dict:
        """
        Get file metadata from in-memory storage

        Args:
            bucket_name: Name of bucket
            object_name: Name of the object in storage

        Returns:
            dict: File metadata (a shallow copy, safe to mutate)

        Raises:
            ValueError: If file doesn't exist
        """
        key = (bucket_name, object_name)
        if key not in self._file_info:
            raise ValueError(f"File {bucket_name}/{object_name} not found")
        return self._file_info[key].copy()

    def ensure_bucket_exists(self, bucket_name: str) -> None:
        """
        Ensure a bucket exists, create if it doesn't

        Args:
            bucket_name: Name of bucket
        """
        # set.add is idempotent; the membership test is kept only to
        # mirror the real adapter's check-then-create flow.
        if bucket_name not in self._buckets:
            self._buckets.add(bucket_name)

    def generate_unique_object_name(self, original_filename: str) -> str:
        """
        Generate a unique object name for a file

        Args:
            original_filename: Original filename

        Returns:
            str: Unique object name (UUID hex, preserving the original
            lower-cased extension if present)
        """
        # Extract the extension after the LAST dot, lower-cased
        ext = (
            original_filename.rsplit(".", 1)[1].lower()
            if "." in original_filename
            else ""
        )
        # Generate unique filename with UUID
        unique_name = f"{uuid.uuid4().hex}"
        return f"{unique_name}.{ext}" if ext else unique_name

    @classmethod
    def clear_all(cls) -> None:
        """Clear all files and buckets (useful for test cleanup)"""
        cls._files.clear()
        cls._file_info.clear()
        cls._buckets.clear()

    @classmethod
    def get_all_files(cls) -> list:
        """
        Get all stored files (useful for test assertions)

        Returns:
            list: List of (bucket, object_name) tuples
        """
        return list(cls._files.keys())