Merge branch 'fix-cdn-urls'

Oleg Proskurin 2025-12-27 20:13:49 +07:00
commit b5bfc91949
18 changed files with 728 additions and 539 deletions

View File

@@ -144,19 +144,19 @@ cdnRouter.get(
}
// Download image from storage
+// Storage key format: {orgSlug}/{projectSlug}/img/{imageId}
const storageService = await StorageFactory.getInstance();
const keyParts = image.storageKey.split('/');
-if (keyParts.length < 4) {
+if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
-const orgId = keyParts[0]!;
-const projectId = keyParts[1]!;
-const category = keyParts[2]! as 'uploads' | 'generated' | 'references';
-const filename = keyParts.slice(3).join('/');
-const buffer = await storageService.downloadFile(orgId, projectId, category, filename);
+const storedOrgSlug = keyParts[0]!;
+const storedProjectSlug = keyParts[1]!;
+const imageId = keyParts[3]!;
+const buffer = await storageService.downloadFile(storedOrgSlug, storedProjectSlug, imageId);
// Set headers
res.setHeader('Content-Type', image.mimeType);

@@ -345,19 +345,19 @@ cdnRouter.get(
if (cachedImage) {
// Cache HIT - serve existing image
+// Storage key format: {orgSlug}/{projectSlug}/img/{imageId}
const storageService = await StorageFactory.getInstance();
const keyParts = cachedImage.storageKey.split('/');
-if (keyParts.length < 4) {
+if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
-const orgId = keyParts[0]!;
-const projectId = keyParts[1]!;
-const category = keyParts[2]! as 'uploads' | 'generated' | 'references';
-const filename = keyParts.slice(3).join('/');
-const buffer = await storageService.downloadFile(orgId, projectId, category, filename);
+const storedOrgSlug = keyParts[0]!;
+const storedProjectSlug = keyParts[1]!;
+const imageId = keyParts[3]!;
+const buffer = await storageService.downloadFile(storedOrgSlug, storedProjectSlug, imageId);
// Set headers
res.setHeader('Content-Type', cachedImage.mimeType);

@@ -445,19 +445,19 @@ cdnRouter.get(
});
// Download newly generated image
+// Storage key format: {orgSlug}/{projectSlug}/img/{imageId}
const storageService = await StorageFactory.getInstance();
const keyParts = generation.outputImage.storageKey.split('/');
-if (keyParts.length < 4) {
+if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
-const orgId = keyParts[0]!;
-const projectId = keyParts[1]!;
-const category = keyParts[2]! as 'uploads' | 'generated' | 'references';
-const filename = keyParts.slice(3).join('/');
-const buffer = await storageService.downloadFile(orgId, projectId, category, filename);
+const storedOrgSlug = keyParts[0]!;
+const storedProjectSlug = keyParts[1]!;
+const imageId = keyParts[3]!;
+const buffer = await storageService.downloadFile(storedOrgSlug, storedProjectSlug, imageId);
// Set headers
res.setHeader('Content-Type', generation.outputImage.mimeType);

View File

@@ -9,16 +9,17 @@ import { rateLimitByApiKey } from '../middleware/auth/rateLimiter';
export const imagesRouter: RouterType = Router();
/**
- * GET /api/images/:orgId/:projectId/:category/:filename
- * Serves images via presigned URLs (redirect approach)
+ * GET /api/images/:orgSlug/:projectSlug/img/:imageId
+ * Serves images directly (streaming approach)
+ * New format: {orgSlug}/{projectSlug}/img/{imageId}
 */
imagesRouter.get(
-'/images/:orgId/:projectId/:category/:filename',
+'/images/:orgSlug/:projectSlug/img/:imageId',
asyncHandler(async (req: Request, res: Response): Promise<void> => {
-const { orgId, projectId, category, filename } = req.params;
+const { orgSlug, projectSlug, imageId } = req.params;
// Validate required params (these are guaranteed by route pattern)
-if (!orgId || !projectId || !category || !filename) {
+if (!orgSlug || !projectSlug || !imageId) {
res.status(400).json({
success: false,
message: 'Missing required parameters',

@@ -26,25 +27,11 @@ imagesRouter.get(
return;
}
-// Validate category
-if (!['uploads', 'generated', 'references'].includes(category)) {
-res.status(400).json({
-success: false,
-message: 'Invalid category',
-});
-return;
-}
const storageService = await StorageFactory.getInstance();
try {
// Check if file exists first (fast check)
-const exists = await storageService.fileExists(
-orgId,
-projectId,
-category as 'uploads' | 'generated' | 'references',
-filename,
-);
+const exists = await storageService.fileExists(orgSlug, projectSlug, imageId);
if (!exists) {
res.status(404).json({

@@ -54,37 +41,20 @@ imagesRouter.get(
return;
}
-// Determine content type from filename
-const ext = filename.toLowerCase().split('.').pop();
-const contentType =
-{
-png: 'image/png',
-jpg: 'image/jpeg',
-jpeg: 'image/jpeg',
-gif: 'image/gif',
-webp: 'image/webp',
-svg: 'image/svg+xml',
-}[ext || ''] || 'application/octet-stream';
// Set headers for optimal caching and performance
-res.setHeader('Content-Type', contentType);
-res.setHeader('Cache-Control', 'public, max-age=86400, immutable'); // 24 hours + immutable
-res.setHeader('ETag', `"${orgId}-${projectId}-${filename}"`); // Simple ETag
+// Note: Content-Type will be set from MinIO metadata
+res.setHeader('Cache-Control', 'public, max-age=31536000, immutable'); // 1 year + immutable
+res.setHeader('ETag', `"${imageId}"`); // UUID as ETag
// Handle conditional requests (304 Not Modified)
const ifNoneMatch = req.headers['if-none-match'];
-if (ifNoneMatch === `"${orgId}-${projectId}-${filename}"`) {
+if (ifNoneMatch === `"${imageId}"`) {
res.status(304).end(); // Not Modified
return;
}
// Stream the file directly through our API (memory efficient)
-const fileStream = await storageService.streamFile(
-orgId,
-projectId,
-category as 'uploads' | 'generated' | 'references',
-filename,
-);
+const fileStream = await storageService.streamFile(orgSlug, projectSlug, imageId);
// Handle stream errors
fileStream.on('error', (streamError) => {

@@ -110,17 +80,17 @@ imagesRouter.get(
);
/**
- * GET /api/images/url/:orgId/:projectId/:category/:filename
+ * GET /api/images/url/:orgSlug/:projectSlug/img/:imageId
 * Returns a presigned URL instead of redirecting
 */
imagesRouter.get(
-'/images/url/:orgId/:projectId/:category/:filename',
+'/images/url/:orgSlug/:projectSlug/img/:imageId',
asyncHandler(async (req: Request, res: Response): Promise<void> => {
-const { orgId, projectId, category, filename } = req.params;
+const { orgSlug, projectSlug, imageId } = req.params;
const { expiry = '3600' } = req.query; // Default 1 hour
// Validate required params (these are guaranteed by route pattern)
-if (!orgId || !projectId || !category || !filename) {
+if (!orgSlug || !projectSlug || !imageId) {
res.status(400).json({
success: false,
message: 'Missing required parameters',

@@ -128,22 +98,13 @@ imagesRouter.get(
return;
}
-if (!['uploads', 'generated', 'references'].includes(category)) {
-res.status(400).json({
-success: false,
-message: 'Invalid category',
-});
-return;
-}
const storageService = await StorageFactory.getInstance();
try {
const presignedUrl = await storageService.getPresignedDownloadUrl(
-orgId,
-projectId,
-category as 'uploads' | 'generated' | 'references',
-filename,
+orgSlug,
+projectSlug,
+imageId,
parseInt(expiry as string, 10),
);

@@ -191,19 +152,19 @@ imagesRouter.get(
}
// Extract org/project from validated API key
-const orgId = req.apiKey?.organizationSlug || 'default';
-const projectId = req.apiKey?.projectSlug!;
+const orgSlug = req.apiKey?.organizationSlug || 'default';
+const projectSlug = req.apiKey?.projectSlug!;
console.log(
-`[${timestamp}] [${requestId}] Listing generated images for org:${orgId}, project:${projectId}, limit:${limit}, offset:${offset}, prefix:${prefix || 'none'}`,
+`[${timestamp}] [${requestId}] Listing images for org:${orgSlug}, project:${projectSlug}, limit:${limit}, offset:${offset}, prefix:${prefix || 'none'}`,
);
try {
// Get storage service instance
const storageService = await StorageFactory.getInstance();
-// List files in generated category
-const allFiles = await storageService.listFiles(orgId, projectId, 'generated', prefix);
+// List files in img folder
+const allFiles = await storageService.listFiles(orgSlug, projectSlug, prefix);
// Sort by lastModified descending (newest first)
allFiles.sort((a, b) => {

@@ -218,8 +179,8 @@
// Map to response format with public URLs
const images = paginatedFiles.map((file) => ({
-filename: file.filename,
-url: storageService.getPublicUrl(orgId, projectId, 'generated', file.filename),
+imageId: file.filename,
+url: storageService.getPublicUrl(orgSlug, projectSlug, file.filename),
size: file.size,
contentType: file.contentType,
lastModified: file.lastModified ? file.lastModified.toISOString() : new Date().toISOString(),

@@ -228,7 +189,7 @@ imagesRouter.get(
const hasMore = offset + limit < total;
console.log(
-`[${timestamp}] [${requestId}] Successfully listed ${images.length} of ${total} generated images`,
+`[${timestamp}] [${requestId}] Successfully listed ${images.length} of ${total} images`,
);
return res.status(200).json({

@@ -242,11 +203,11 @@
},
});
} catch (error) {
-console.error(`[${timestamp}] [${requestId}] Failed to list generated images:`, error);
+console.error(`[${timestamp}] [${requestId}] Failed to list images:`, error);
return res.status(500).json({
success: false,
-message: 'Failed to list generated images',
+message: 'Failed to list images',
error: error instanceof Error ? error.message : 'Unknown error occurred',
});
}

View File

@@ -1,5 +1,6 @@
import { Response, Router } from 'express';
import type { Router as RouterType } from 'express';
+import { randomUUID } from 'crypto';
import { ImageGenService } from '../services/ImageGenService';
import { validateTextToImageRequest, logTextToImageRequest } from '../middleware/jsonValidation';
import { autoEnhancePrompt, logEnhancementResult } from '../middleware/promptEnhancement';

@@ -48,14 +49,17 @@ textToImageRouter.post(
const timestamp = new Date().toISOString();
const requestId = req.requestId;
-const { prompt, filename, aspectRatio, meta } = req.body;
+const { prompt, aspectRatio, meta } = req.body;
// Extract org/project slugs from validated API key
-const orgId = req.apiKey?.organizationSlug || undefined;
-const projectId = req.apiKey?.projectSlug!; // Guaranteed by requireProjectKey middleware
+const orgSlug = req.apiKey?.organizationSlug || undefined;
+const projectSlug = req.apiKey?.projectSlug!; // Guaranteed by requireProjectKey middleware
+// Generate imageId (UUID) - this will be the filename in storage
+const imageId = randomUUID();
console.log(
-`[${timestamp}] [${requestId}] Starting text-to-image generation process for org:${orgId}, project:${projectId}`,
+`[${timestamp}] [${requestId}] Starting text-to-image generation process for org:${orgSlug}, project:${projectSlug}`,
);
try {

@@ -66,10 +70,10 @@
const result = await imageGenService.generateImage({
prompt,
-filename,
+imageId,
...(aspectRatio && { aspectRatio }),
-orgId,
-projectId,
+orgSlug,
+projectSlug,
...(meta && { meta }),
});

@@ -77,7 +81,7 @@
console.log(`[${timestamp}] [${requestId}] Text-to-image generation completed:`, {
success: result.success,
model: result.model,
-filename: result.filename,
+imageId: result.imageId,
hasError: !!result.error,
});

@@ -87,7 +91,7 @@
success: true,
message: 'Image generated successfully',
data: {
-filename: result.filename!,
+filename: result.imageId!,
filepath: result.filepath!,
...(result.url && { url: result.url }),
...(result.description && { description: result.description }),

View File

@@ -1,5 +1,6 @@
import { Response, Router } from 'express';
import type { Router as RouterType } from 'express';
+import { randomUUID } from 'crypto';
import { StorageFactory } from '../services/StorageFactory';
import { asyncHandler } from '../middleware/errorHandler';
import { validateApiKey } from '../middleware/auth/validateApiKey';

@@ -53,18 +54,22 @@ uploadRouter.post(
// Initialize storage service
const storageService = await StorageFactory.getInstance();
-// Upload file to MinIO in 'uploads' category
+// Generate imageId (UUID) - this will be the filename in storage
+const imageId = randomUUID();
+// Upload file to MinIO
+// Path format: {orgSlug}/{projectSlug}/img/{imageId}
console.log(
-`[${timestamp}] [${requestId}] Uploading file: ${file.originalname} (${file.size} bytes)`,
+`[${timestamp}] [${requestId}] Uploading file: ${file.originalname} as ${imageId} (${file.size} bytes)`,
);
const uploadResult = await storageService.uploadFile(
orgSlug,
projectSlug,
-'uploads',
-file.originalname,
+imageId,
file.buffer,
file.mimetype,
+file.originalname,
);
if (!uploadResult.success) {

View File

@@ -88,23 +88,21 @@ liveRouter.get(
const storageService = await StorageFactory.getInstance();
// Parse storage key to get components
-// Format: orgId/projectId/category/filename.ext
+// Format: {orgSlug}/{projectSlug}/img/{imageId}
const keyParts = image.storageKey.split('/');
-if (keyParts.length < 4) {
+if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
-const orgId = keyParts[0];
-const projectIdSlug = keyParts[1];
-const category = keyParts[2] as 'uploads' | 'generated' | 'references';
-const filename = keyParts.slice(3).join('/');
+const storedOrgSlug = keyParts[0]!;
+const storedProjectSlug = keyParts[1]!;
+const imageId = keyParts[3]!;
// Download image from storage
const buffer = await storageService.downloadFile(
-orgId!,
-projectIdSlug!,
-category,
-filename!
+storedOrgSlug,
+storedProjectSlug,
+imageId
);
// Set cache headers

@@ -157,22 +155,20 @@ liveRouter.get(
// Download newly generated image
const storageService = await StorageFactory.getInstance();
-// Format: orgId/projectId/category/filename.ext
+// Format: {orgSlug}/{projectSlug}/img/{imageId}
const keyParts = generation.outputImage.storageKey.split('/');
-if (keyParts.length < 4) {
+if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
-const orgId = keyParts[0];
-const projectIdSlug = keyParts[1];
-const category = keyParts[2] as 'uploads' | 'generated' | 'references';
-const filename = keyParts.slice(3).join('/');
+const storedOrgSlug = keyParts[0]!;
+const storedProjectSlug = keyParts[1]!;
+const imageId = keyParts[3]!;
const buffer = await storageService.downloadFile(
-orgId!,
-projectIdSlug!,
-category,
-filename!
+storedOrgSlug,
+storedProjectSlug,
+imageId
);
// Set cache headers

View File

@@ -29,11 +29,11 @@ export class ImageGenService {
 * This method separates image generation from storage for clear error handling
 */
async generateImage(options: ImageGenerationOptions): Promise<ImageGenerationResult> {
-const { prompt, filename, referenceImages, aspectRatio, orgId, projectId, meta } = options;
+const { prompt, imageId, referenceImages, aspectRatio, orgSlug, projectSlug, meta } = options;
// Use default values if not provided
-const finalOrgId = orgId || process.env['DEFAULT_ORG_ID'] || 'default';
-const finalProjectId = projectId || process.env['DEFAULT_PROJECT_ID'] || 'main';
+const finalOrgSlug = orgSlug || process.env['DEFAULT_ORG_SLUG'] || 'default';
+const finalProjectSlug = projectSlug || process.env['DEFAULT_PROJECT_SLUG'] || 'main';
const finalAspectRatio = aspectRatio || '1:1'; // Default to square
// Step 1: Generate image from Gemini AI

@@ -44,8 +44,8 @@ export class ImageGenService {
prompt,
referenceImages,
finalAspectRatio,
-finalOrgId,
-finalProjectId,
+finalOrgSlug,
+finalProjectSlug,
meta,
);
generatedData = aiResult.generatedData;

@@ -61,22 +61,25 @@ export class ImageGenService {
}
// Step 2: Save generated image to storage
+// Path format: {orgSlug}/{projectSlug}/img/{imageId}
try {
-const finalFilename = `${filename}.${generatedData.fileExtension}`;
const storageService = await StorageFactory.getInstance();
+// Original filename for metadata (e.g., "my-image.png")
+const originalFilename = `generated-image.${generatedData.fileExtension}`;
const uploadResult = await storageService.uploadFile(
-finalOrgId,
-finalProjectId,
-'generated',
-finalFilename,
+finalOrgSlug,
+finalProjectSlug,
+imageId,
generatedData.buffer,
generatedData.mimeType,
+originalFilename,
);
if (uploadResult.success) {
return {
success: true,
-filename: uploadResult.filename,
+imageId: uploadResult.filename,
filepath: uploadResult.path,
url: uploadResult.url,
size: uploadResult.size,

@@ -125,8 +128,8 @@ export class ImageGenService {
prompt: string,
referenceImages: ReferenceImage[] | undefined,
aspectRatio: string,
-orgId: string,
-projectId: string,
+orgSlug: string,
+projectSlug: string,
meta?: { tags?: string[] },
): Promise<{
generatedData: GeneratedImageData;

@@ -182,8 +185,8 @@ export class ImageGenService {
const ttiLogger = TTILogger.getInstance();
const logEntry: TTILogEntry = {
timestamp: new Date().toISOString(),
-orgId,
-projectId,
+orgId: orgSlug,
+projectId: projectSlug,
prompt,
model: this.primaryModel,
config,

View File

@@ -4,7 +4,7 @@ import { StorageService, FileMetadata, UploadResult } from './StorageService';
export class MinioStorageService implements StorageService {
private client: MinioClient;
private bucketName: string;
-private publicUrl: string;
+private cdnBaseUrl: string;
constructor(
endpoint: string,

@@ -12,7 +12,7 @@ export class MinioStorageService implements StorageService {
secretKey: string,
useSSL: boolean = false,
bucketName: string = 'banatie',
-publicUrl?: string,
+cdnBaseUrl?: string,
) {
// Parse endpoint to separate hostname and port
const cleanEndpoint = endpoint.replace(/^https?:\/\//, '');

@@ -31,119 +31,59 @@ export class MinioStorageService implements StorageService {
secretKey,
});
this.bucketName = bucketName;
-this.publicUrl = publicUrl || `${useSSL ? 'https' : 'http'}://${endpoint}`;
+// CDN base URL without bucket name (e.g., https://cdn.banatie.app)
+this.cdnBaseUrl = cdnBaseUrl || process.env['CDN_BASE_URL'] || `${useSSL ? 'https' : 'http'}://${endpoint}/${bucketName}`;
}
-private getFilePath(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
-): string {
-// Simplified path without date folder for now
-return `${orgId}/${projectId}/${category}/${filename}`;
+/**
+ * Get file path in storage
+ * Format: {orgSlug}/{projectSlug}/img/{imageId}
+ */
+private getFilePath(orgSlug: string, projectSlug: string, imageId: string): string {
+return `${orgSlug}/${projectSlug}/img/${imageId}`;
}
-private generateUniqueFilename(originalFilename: string): string {
-// Sanitize filename first
-const sanitized = this.sanitizeFilename(originalFilename);
-const timestamp = Date.now();
-const random = Math.random().toString(36).substring(2, 8);
-const ext = sanitized.includes('.') ? sanitized.substring(sanitized.lastIndexOf('.')) : '';
-const name = sanitized.includes('.')
-? sanitized.substring(0, sanitized.lastIndexOf('.'))
-: sanitized;
-return `${name}-${timestamp}-${random}${ext}`;
+/**
+ * Extract file extension from original filename
+ */
+private extractExtension(filename: string): string | undefined {
+if (!filename) return undefined;
+const lastDotIndex = filename.lastIndexOf('.');
+if (lastDotIndex <= 0) return undefined;
+return filename.substring(lastDotIndex + 1).toLowerCase();
}
-private sanitizeFilename(filename: string): string {
-// Remove path traversal attempts FIRST from entire filename
-let cleaned = filename.replace(/\.\./g, '').trim();
-// Split filename and extension
-const lastDotIndex = cleaned.lastIndexOf('.');
-let baseName = lastDotIndex > 0 ? cleaned.substring(0, lastDotIndex) : cleaned;
-const extension = lastDotIndex > 0 ? cleaned.substring(lastDotIndex) : '';
-// Remove dangerous characters from base name
-baseName = baseName
-.replace(/[<>:"/\\|?*\x00-\x1f]/g, '') // Remove dangerous chars
-.trim();
-// Replace non-ASCII characters with ASCII equivalents or remove them
-// This prevents S3 signature mismatches with MinIO
-baseName = baseName
-.normalize('NFD') // Decompose combined characters (é -> e + ´)
-.replace(/[\u0300-\u036f]/g, '') // Remove diacritical marks
-.replace(/[^\x20-\x7E]/g, '_') // Replace any remaining non-ASCII with underscore
-.replace(/[^\w\s\-_.]/g, '_') // Replace special chars (except word chars, space, dash, underscore, dot) with underscore
-.replace(/\s+/g, '_') // Replace spaces with underscores
-.replace(/_{2,}/g, '_') // Collapse multiple underscores
-.replace(/^_+|_+$/g, ''); // Remove leading/trailing underscores
-// Ensure we still have a valid base name
-if (baseName.length === 0) {
-baseName = 'file';
-}
-// Sanitize extension (remove only dangerous chars, keep the dot)
-let sanitizedExt = extension
-.replace(/[<>:"/\\|?*\x00-\x1f]/g, '')
-.replace(/[^\x20-\x7E]/g, '')
-.toLowerCase();
-// Ensure extension starts with a dot and is reasonable
-if (sanitizedExt && !sanitizedExt.startsWith('.')) {
-sanitizedExt = '.' + sanitizedExt;
-}
-if (sanitizedExt.length > 10) {
-sanitizedExt = sanitizedExt.substring(0, 10);
-}
-const result = baseName + sanitizedExt;
-return result.substring(0, 255); // Limit total length
-}
-private validateFilePath(
-orgId: string,
-projectId: string,
-category: string,
-filename: string,
-): void {
-// Validate orgId
-if (!orgId || !/^[a-zA-Z0-9_-]+$/.test(orgId) || orgId.length > 50) {
+/**
+ * Validate storage path components
+ */
+private validatePath(orgSlug: string, projectSlug: string, imageId: string): void {
+// Validate orgSlug
+if (!orgSlug || !/^[a-zA-Z0-9_-]+$/.test(orgSlug) || orgSlug.length > 50) {
throw new Error(
-'Invalid organization ID: must be alphanumeric with dashes/underscores, max 50 chars',
+'Invalid organization slug: must be alphanumeric with dashes/underscores, max 50 chars',
);
}
-// Validate projectId
-if (!projectId || !/^[a-zA-Z0-9_-]+$/.test(projectId) || projectId.length > 50) {
+// Validate projectSlug
+if (!projectSlug || !/^[a-zA-Z0-9_-]+$/.test(projectSlug) || projectSlug.length > 50) {
throw new Error(
-'Invalid project ID: must be alphanumeric with dashes/underscores, max 50 chars',
+'Invalid project slug: must be alphanumeric with dashes/underscores, max 50 chars',
);
}
-// Validate category
-if (!['uploads', 'generated', 'references'].includes(category)) {
-throw new Error('Invalid category: must be uploads, generated, or references');
-}
-// Validate filename
-if (!filename || filename.length === 0 || filename.length > 255) {
-throw new Error('Invalid filename: must be 1-255 characters');
+// Validate imageId (UUID format)
+if (!imageId || imageId.length === 0 || imageId.length > 50) {
+throw new Error('Invalid imageId: must be 1-50 characters');
}
// Check for path traversal and dangerous patterns
-if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
-throw new Error('Invalid characters in filename: path traversal not allowed');
+if (imageId.includes('..') || imageId.includes('/') || imageId.includes('\\')) {
+throw new Error('Invalid characters in imageId: path traversal not allowed');
}
// Prevent null bytes and control characters
-if (/[\x00-\x1f]/.test(filename)) {
-throw new Error('Invalid filename: control characters not allowed');
+if (/[\x00-\x1f]/.test(imageId)) {
+throw new Error('Invalid imageId: control characters not allowed');
}
}
@@ -154,8 +94,8 @@ export class MinioStorageService implements StorageService {
console.log(`Created bucket: ${this.bucketName}`);
}
-// Note: With SNMD and presigned URLs, we don't need bucket policies
-console.log(`Bucket ${this.bucketName} ready for presigned URL access`);
+// Bucket should be public for CDN access (configured via mc anonymous set download)
+console.log(`Bucket ${this.bucketName} ready for CDN access`);
}
async bucketExists(): Promise<boolean> {

@@ -163,15 +103,15 @@
}
async uploadFile(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
buffer: Buffer,
contentType: string,
+originalFilename?: string,
): Promise<UploadResult> {
// Validate inputs first
-this.validateFilePath(orgId, projectId, category, filename);
+this.validatePath(orgSlug, projectSlug, imageId);
if (!buffer || buffer.length === 0) {
throw new Error('Buffer cannot be empty');

@@ -184,24 +124,34 @@
// Ensure bucket exists
await this.createBucket();
-// Generate unique filename to avoid conflicts
-const uniqueFilename = this.generateUniqueFilename(filename);
-const filePath = this.getFilePath(orgId, projectId, category, uniqueFilename);
+// Get file path: {orgSlug}/{projectSlug}/img/{imageId}
+const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
+// Extract file extension from original filename
+const fileExtension = originalFilename ? this.extractExtension(originalFilename) : undefined;
// Encode original filename to Base64 to safely store non-ASCII characters in metadata
-const originalNameEncoded = Buffer.from(filename, 'utf-8').toString('base64');
-const metadata = {
+const originalNameEncoded = originalFilename
+? Buffer.from(originalFilename, 'utf-8').toString('base64')
+: undefined;
+const metadata: Record<string, string> = {
'Content-Type': contentType,
-'X-Amz-Meta-Original-Name': originalNameEncoded,
-'X-Amz-Meta-Original-Name-Encoding': 'base64',
-'X-Amz-Meta-Category': category,
-'X-Amz-Meta-Project': projectId,
-'X-Amz-Meta-Organization': orgId,
+'X-Amz-Meta-Project': projectSlug,
+'X-Amz-Meta-Organization': orgSlug,
'X-Amz-Meta-Upload-Time': new Date().toISOString(),
};
-console.log(`Uploading file to: ${this.bucketName}/${filePath}`);
+if (originalNameEncoded) {
+metadata['X-Amz-Meta-Original-Name'] = originalNameEncoded;
+metadata['X-Amz-Meta-Original-Name-Encoding'] = 'base64';
+}
+if (fileExtension) {
+metadata['X-Amz-Meta-File-Extension'] = fileExtension;
+}
+console.log(`[MinIO] Uploading file to: ${this.bucketName}/${filePath}`);
await this.client.putObject(
this.bucketName,

@@ -211,28 +161,29 @@
metadata,
);
-const url = this.getPublicUrl(orgId, projectId, category, uniqueFilename);
-console.log(`Generated API URL: ${url}`);
+const url = this.getPublicUrl(orgSlug, projectSlug, imageId);
+console.log(`[MinIO] CDN URL: ${url}`);
return {
success: true,
-filename: uniqueFilename,
+filename: imageId,
path: filePath,
url,
size: buffer.length,
contentType,
+...(originalFilename && { originalFilename }),
+...(fileExtension && { fileExtension }),
};
}
async downloadFile(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
): Promise<Buffer> {
-this.validateFilePath(orgId, projectId, category, filename);
-const filePath = this.getFilePath(orgId, projectId, category, filename);
+this.validatePath(orgSlug, projectSlug, imageId);
+const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
const stream = await this.client.getObject(this.bucketName, filePath);
@@ -245,203 +196,91 @@
}
async streamFile(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
): Promise<import('stream').Readable> {
-this.validateFilePath(orgId, projectId, category, filename);
-const filePath = this.getFilePath(orgId, projectId, category, filename);
+this.validatePath(orgSlug, projectSlug, imageId);
+const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
// Return the stream directly without buffering - memory efficient!
return await this.client.getObject(this.bucketName, filePath);
}
async deleteFile(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
): Promise<void> {
-this.validateFilePath(orgId, projectId, category, filename);
-const filePath = this.getFilePath(orgId, projectId, category, filename);
+this.validatePath(orgSlug, projectSlug, imageId);
+const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
await this.client.removeObject(this.bucketName, filePath);
}
/**
- * Get public URL for file access
- * Returns CDN URL if MINIO_PUBLIC_URL is configured (production),
- * otherwise falls back to API endpoint URL (development)
- *
- * @returns {string} URL for accessing the file
+ * Get public CDN URL for file access
+ * Returns: https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{imageId}
 */
-getPublicUrl(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
-): string {
-this.validateFilePath(orgId, projectId, category, filename);
-// If MINIO_PUBLIC_URL is configured, use direct CDN access
-// This provides better performance and reduces API server load
-if (this.publicUrl && process.env['USE_DIRECT_CDN'] !== 'false') {
-const filePath = this.getFilePath(orgId, projectId, category, filename);
-const cdnUrl = `${this.publicUrl}/${this.bucketName}/${filePath}`;
-console.log(`[MinIO] Using CDN URL: ${cdnUrl}`);
-return cdnUrl;
-}
-// Fallback to API URL for local development or when CDN is disabled
-const apiBaseUrl = process.env['API_BASE_URL'] || 'http://localhost:3000';
-const apiUrl = `${apiBaseUrl}/api/images/${orgId}/${projectId}/${category}/${filename}`;
-console.log(`[MinIO] Using API URL: ${apiUrl}`);
-return apiUrl;
+getPublicUrl(orgSlug: string, projectSlug: string, imageId: string): string {
+this.validatePath(orgSlug, projectSlug, imageId);
+const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
+return `${this.cdnBaseUrl}/${filePath}`;
}
async getPresignedUploadUrl(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
expirySeconds: number,
contentType: string,
): Promise<string> {
-this.validateFilePath(orgId, projectId, category, filename);
+this.validatePath(orgSlug, projectSlug, imageId);
if (!contentType || contentType.trim().length === 0) {
throw new Error('Content type is required for presigned upload URL');
}
-const filePath = this.getFilePath(orgId, projectId, category, filename);
+const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
return await this.client.presignedPutObject(this.bucketName, filePath, expirySeconds);
}
async getPresignedDownloadUrl(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
expirySeconds: number = 86400, // 24 hours default
): Promise<string> {
-this.validateFilePath(orgId, projectId, category, filename);
-const filePath = this.getFilePath(orgId, projectId, category, filename);
+this.validatePath(orgSlug, projectSlug, imageId);
+const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
const presignedUrl = await this.client.presignedGetObject(
this.bucketName,
filePath,
expirySeconds,
);
-// Replace internal Docker hostname with public URL if configured
-if (this.publicUrl) {
+// Replace internal Docker hostname with CDN URL if configured
+if (this.cdnBaseUrl) {
// Access protected properties via type assertion for URL replacement
const client = this.client as unknown as { host: string; port: number; protocol: string };
const clientEndpoint = client.host + (client.port ? `:${client.port}` : '');
-return presignedUrl.replace(`${client.protocol}//${clientEndpoint}`, this.publicUrl);
+return presignedUrl.replace(`${client.protocol}//${clientEndpoint}/${this.bucketName}`, this.cdnBaseUrl);
}
return presignedUrl;
}
-async listProjectFiles(
-orgId: string,
-projectId: string,
-category?: 'uploads' | 'generated' | 'references',
-): Promise<FileMetadata[]> {
-const prefix = category ? `${orgId}/${projectId}/${category}/` : `${orgId}/${projectId}/`;
-const files: FileMetadata[] = [];
-return new Promise((resolve, reject) => {
-const stream = this.client.listObjects(this.bucketName, prefix, true);
-stream.on('data', async (obj) => {
-try {
-if (!obj.name) return;
-const metadata = await this.client.statObject(this.bucketName, obj.name);
-const pathParts = obj.name.split('/');
-const filename = pathParts[pathParts.length - 1];
-const categoryFromPath = pathParts[2] as 'uploads' | 'generated' | 'references';
-if (!filename || !categoryFromPath) {
-return;
-}
-files.push({
-filename,
-contentType: metadata.metaData?.['content-type'] || 'application/octet-stream',
-size: obj.size || 0,
-lastModified: obj.lastModified || new Date(),
-path: obj.name,
-});
-} catch (error) {}
-});
-stream.on('end', () => resolve(files));
-stream.on('error', reject);
-});
-}
-parseKey(key: string): {
-orgId: string;
-projectId: string;
-category: 'uploads' | 'generated' | 'references';
-filename: string;
-} | null {
-try {
-const match = key.match(
-/^banatie\/([^/]+)\/([^/]+)\/(uploads|generated|references)\/[^/]+\/(.+)$/,
-);
-if (!match) {
-return null;
-}
-const [, orgId, projectId, category, filename] = match;
-if (!orgId || !projectId || !category || !filename) {
-return null;
-}
-return {
-orgId,
-projectId,
-category: category as 'uploads' | 'generated' | 'references',
-filename,
-};
-} catch {
-return null;
-}
-}
-async fileExists(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
-): Promise<boolean> {
-try {
-this.validateFilePath(orgId, projectId, category, filename);
-const filePath = this.getFilePath(orgId, projectId, category, filename);
-await this.client.statObject(this.bucketName, filePath);
-return true;
-} catch (error) {
-return false;
-}
-}
+/**
+ * List files in a project's img folder
+ */
async listFiles(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
+orgSlug: string,
+projectSlug: string,
prefix?: string,
): Promise<FileMetadata[]> {
-this.validateFilePath(orgId, projectId, category, 'dummy.txt');
-const basePath = `${orgId}/${projectId}/${category}/`;
+this.validatePath(orgSlug, projectSlug, 'dummy');
+const basePath = `${orgSlug}/${projectSlug}/img/`;
const searchPrefix = prefix ? `${basePath}${prefix}` : basePath;
const files: FileMetadata[] = [];
@@ -449,31 +288,22 @@ export class MinioStorageService implements StorageService {
return new Promise((resolve, reject) => {
const stream = this.client.listObjects(this.bucketName, searchPrefix, true);
-stream.on('data', (obj) => {
+stream.on('data', async (obj) => {
if (!obj.name || !obj.size) return;
try {
const pathParts = obj.name.split('/');
-const filename = pathParts[pathParts.length - 1];
-if (!filename) return;
-// Infer content type from file extension (more efficient than statObject)
-const ext = filename.toLowerCase().split('.').pop();
-const contentType =
-{
-png: 'image/png',
-jpg: 'image/jpeg',
-jpeg: 'image/jpeg',
-gif: 'image/gif',
-webp: 'image/webp',
-svg: 'image/svg+xml',
-}[ext || ''] || 'application/octet-stream';
+const imageId = pathParts[pathParts.length - 1];
+if (!imageId) return;
+// Get metadata to find content type (no extension in filename)
+const metadata = await this.client.statObject(this.bucketName, obj.name);
files.push({
-filename,
+filename: imageId!,
size: obj.size,
-contentType,
+contentType: metadata.metaData?.['content-type'] || 'application/octet-stream',
lastModified: obj.lastModified || new Date(),
etag: obj.etag || '',
path: obj.name,

@@ -493,4 +323,52 @@ export class MinioStorageService implements StorageService {
});
});
}
+/**
+ * Parse storage key to extract components
+ * Format: {orgSlug}/{projectSlug}/img/{imageId}
+ */
+parseKey(key: string): {
+orgSlug: string;
+projectSlug: string;
+imageId: string;
+} | null {
+try {
+// Match: orgSlug/projectSlug/img/imageId
+const match = key.match(/^([^/]+)\/([^/]+)\/img\/([^/]+)$/);
+if (!match) {
+return null;
+}
+const [, orgSlug, projectSlug, imageId] = match;
+if (!orgSlug || !projectSlug || !imageId) {
+return null;
+}
+return {
+orgSlug,
+projectSlug,
+imageId,
+};
+} catch {
+return null;
+}
+}
+async fileExists(
+orgSlug: string,
+projectSlug: string,
+imageId: string,
+): Promise<boolean> {
+try {
+this.validatePath(orgSlug, projectSlug, imageId);
+const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
+await this.client.statObject(this.bucketName, filePath);
+return true;
+} catch (error) {
+return false;
+}
+}
}

View File

@@ -11,11 +11,13 @@ export interface FileMetadata {
export interface UploadResult {
success: boolean;
-filename: string;
+filename: string; // UUID (same as image.id)
path: string;
-url: string; // API URL for accessing the file
+url: string; // CDN URL for accessing the file
size: number;
contentType: string;
+originalFilename?: string; // User's original filename
+fileExtension?: string; // Original extension (png, jpg, etc.)
error?: string;
}

@@ -32,137 +34,125 @@ export interface StorageService {
/**
 * Upload a file to storage
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category (uploads, generated, references)
- * @param filename Original filename
+ * Path format: {orgSlug}/{projectSlug}/img/{imageId}
+ *
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
+ * @param imageId UUID for the file (same as image.id in DB)
 * @param buffer File buffer
 * @param contentType MIME type
+ * @param originalFilename Original filename from user (for metadata)
 */
uploadFile(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
buffer: Buffer,
contentType: string,
+originalFilename?: string,
): Promise<UploadResult>;
/**
 * Download a file from storage
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category
- * @param filename Filename to download
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
+ * @param imageId UUID filename
 */
downloadFile(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
): Promise<Buffer>;
/**
 * Stream a file from storage (memory efficient)
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category
- * @param filename Filename to stream
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
+ * @param imageId UUID filename
 */
streamFile(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
): Promise<Readable>;
/**
 * Generate a presigned URL for downloading a file
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category
- * @param filename Filename
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
+ * @param imageId UUID filename
 * @param expirySeconds URL expiry time in seconds
 */
getPresignedDownloadUrl(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
expirySeconds: number,
): Promise<string>;
/**
 * Generate a presigned URL for uploading a file
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category
- * @param filename Filename
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
+ * @param imageId UUID filename
 * @param expirySeconds URL expiry time in seconds
 * @param contentType MIME type
 */
getPresignedUploadUrl(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
expirySeconds: number,
contentType: string,
): Promise<string>;
/**
- * List files in a specific path
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category
+ * List files in a project's img folder
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
 * @param prefix Optional prefix to filter files
 */
listFiles(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
+orgSlug: string,
+projectSlug: string,
prefix?: string,
): Promise<FileMetadata[]>;
/**
 * Delete a file from storage
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category
- * @param filename Filename to delete
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
+ * @param imageId UUID filename to delete
 */
deleteFile(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
): Promise<void>;
/**
 * Check if a file exists
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category
- * @param filename Filename to check
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
+ * @param imageId UUID filename to check
 */
fileExists(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
): Promise<boolean>;
/**
- * Get the public URL for a file
- * @param orgId Organization ID
- * @param projectId Project ID
- * @param category File category
- * @param filename Filename
+ * Get the public CDN URL for a file
+ * Returns: https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{imageId}
+ *
+ * @param orgSlug Organization slug
+ * @param projectSlug Project slug
+ * @param imageId UUID filename
 */
getPublicUrl(
-orgId: string,
-projectId: string,
-category: 'uploads' | 'generated' | 'references',
-filename: string,
+orgSlug: string,
+projectSlug: string,
+imageId: string,
): string;
}

View File

@@ -148,13 +148,16 @@ export class GenerationService {
.where(eq(generations.id, generation.id));
}
+// Generate imageId (UUID) upfront - this will be the filename in storage
+const imageId = randomUUID();
const genResult = await this.imageGenService.generateImage({
prompt: usedPrompt, // Use the prompt that was stored (enhanced or original)
-filename: `gen_${generation.id}`,
+imageId, // UUID used as filename: {orgSlug}/{projectSlug}/img/{imageId}
referenceImages: referenceImageBuffers,
aspectRatio: params.aspectRatio || GENERATION_LIMITS.DEFAULT_ASPECT_RATIO,
-orgId: params.organizationSlug, // Use slug for storage path
-projectId: params.projectSlug, // Use slug for storage path
+orgSlug: params.organizationSlug,
+projectSlug: params.projectSlug,
meta: params.meta || {},
});

@@ -172,13 +175,14 @@ export class GenerationService {
const fileHash = null;
const imageRecord = await this.imageService.create({
+id: imageId, // Use the same UUID for image record
projectId: params.projectId,
flowId: finalFlowId,
generationId: generation.id,
apiKeyId: params.apiKeyId,
storageKey,
storageUrl: genResult.url!,
-mimeType: 'image/jpeg',
+mimeType: genResult.generatedImageData?.mimeType || 'image/png',
fileSize: genResult.size || 0,
fileHash,
source: 'generated',

@@ -186,6 +190,8 @@ export class GenerationService {
meta: params.meta || {},
width: genResult.generatedImageData?.width ?? null,
height: genResult.generatedImageData?.height ?? null,
+originalFilename: `generated-image.${genResult.generatedImageData?.fileExtension || 'png'}`,
+fileExtension: genResult.generatedImageData?.fileExtension || 'png',
});
// Reassign project alias if provided (override behavior per Section 5.2)

@@ -272,27 +278,22 @@ export class GenerationService {
throw new Error(`${ERROR_MESSAGES.ALIAS_NOT_FOUND}: ${alias}`);
}
+// Parse storage key: {orgSlug}/{projectSlug}/img/{imageId}
const parts = resolution.image.storageKey.split('/');
-if (parts.length < 4) {
+if (parts.length < 4 || parts[2] !== 'img') {
throw new Error(`Invalid storage key format: ${resolution.image.storageKey}`);
}
-const orgId = parts[0]!;
-const projId = parts[1]!;
-const category = parts[2]! as 'uploads' | 'generated' | 'references';
-const filename = parts.slice(3).join('/');
-const buffer = await storageService.downloadFile(
-orgId,
-projId,
-category,
-filename
-);
+const orgSlug = parts[0]!;
+const projectSlug = parts[1]!;
+const imageId = parts[3]!;
+const buffer = await storageService.downloadFile(orgSlug, projectSlug, imageId);
buffers.push({
buffer,
mimetype: resolution.image.mimeType,
-originalname: filename,
+originalname: resolution.image.originalFilename || imageId,
});
metadata.push({

@@ -517,14 +518,18 @@ export class GenerationService {
// Get slugs for storage paths
const { orgSlug, projectSlug } = await this.getSlugs(generation.projectId);
+// Use the existing output image ID as the imageId for storage
+// This ensures the file is overwritten at the same path
+const imageId = generation.outputImageId;
// Use EXACT same parameters as original (no overrides)
const genResult = await this.imageGenService.generateImage({
prompt: generation.prompt,
-filename: `gen_${id}`,
+imageId, // Same UUID to overwrite existing file
referenceImages: [], // TODO: Re-resolve referenced images if needed
aspectRatio: generation.aspectRatio || GENERATION_LIMITS.DEFAULT_ASPECT_RATIO,
-orgId: orgSlug,
-projectId: projectSlug,
+orgSlug,
+projectSlug,
meta: generation.meta as Record<string, unknown> || {},
});

@@ -634,14 +639,17 @@ export class GenerationService {
// Get slugs for storage paths
const { orgSlug, projectSlug } = await this.getSlugs(generation.projectId);
+// Use the existing output image ID as the imageId for storage
+const imageId = generation.outputImageId!;
// Regenerate image
const genResult = await this.imageGenService.generateImage({
prompt: promptToUse,
-filename: `gen_${id}`,
+imageId, // Same UUID to overwrite existing file
referenceImages: [],
aspectRatio: aspectRatioToUse,
-orgId: orgSlug,
-projectId: projectSlug,
+orgSlug,
+projectSlug,
meta: updates.meta || generation.meta || {},
});

View File

@ -154,16 +154,16 @@ export class ImageService {
try { try {
// 1. Delete physical file from MinIO storage // 1. Delete physical file from MinIO storage
// Storage key format: {orgSlug}/{projectSlug}/img/{imageId}
const storageService = await StorageFactory.getInstance(); const storageService = await StorageFactory.getInstance();
const storageParts = image.storageKey.split('/'); const storageParts = image.storageKey.split('/');
if (storageParts.length >= 4) { if (storageParts.length >= 4 && storageParts[2] === 'img') {
const orgId = storageParts[0]!; const orgSlug = storageParts[0]!;
const projectId = storageParts[1]!; const projectSlug = storageParts[1]!;
const category = storageParts[2]! as 'uploads' | 'generated' | 'references'; const imageId = storageParts[3]!;
const filename = storageParts.slice(3).join('/');
await storageService.deleteFile(orgId, projectId, category, filename); await storageService.deleteFile(orgSlug, projectSlug, imageId);
} }
// 2. Cascade: Set outputImageId = NULL in related generations // 2. Cascade: Set outputImageId = NULL in related generations

View File

@ -57,11 +57,11 @@ export interface GenerateImageRequestWithFiles extends Request {
// Image generation service types // Image generation service types
export interface ImageGenerationOptions { export interface ImageGenerationOptions {
prompt: string; prompt: string;
filename: string; imageId: string; // UUID used as filename in storage (same as image.id in DB)
referenceImages?: ReferenceImage[]; referenceImages?: ReferenceImage[];
aspectRatio?: string; aspectRatio?: string;
orgId?: string; orgSlug?: string;
projectId?: string; projectSlug?: string;
userId?: string; userId?: string;
meta?: { meta?: {
tags?: string[]; tags?: string[];
@ -91,9 +91,9 @@ export interface GeminiParams {
export interface ImageGenerationResult { export interface ImageGenerationResult {
success: boolean; success: boolean;
filename?: string; imageId?: string; // UUID filename (same as image.id in DB)
filepath?: string; filepath?: string;
url?: string; // API URL for accessing the image url?: string; // CDN URL for accessing the image
size?: number; // File size in bytes size?: number; // File size in bytes
description?: string; description?: string;
model: string; model: string;

View File

@ -0,0 +1,158 @@
# CDN URL Architecture Fix - Cloudflare Configuration
This document describes the Cloudflare configuration for the new CDN URL architecture.
## Domain Structure
| Domain | Purpose | Cloudflare Proxy |
|--------|---------|------------------|
| cdn.banatie.app | CDN for images | Yes (orange cloud) |
| api.banatie.app | API server | Yes (orange cloud) |
| banatie.app | Landing page | Yes (orange cloud) |
## Cache Rules
### Rule 1: Cache UUID Images (High Priority)
Cache static images with UUID filenames for maximum performance.
**When:** Custom filter expression
```
(http.host eq "cdn.banatie.app" and http.request.uri.path matches "^/[^/]+/[^/]+/img/[0-9a-f-]{36}$")
```
**Then:**
- Cache eligibility: Eligible for cache
- Edge TTL: Override origin, 7 days
- Browser TTL: Override origin, 1 year (31536000 seconds)
- Cache Key: Include query string = No
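For a quick sanity check, the TypeScript snippet below exercises the same pattern against illustrative paths (the slugs and UUID are made up); alias and live paths fall through to Rules 2 and 3.
```typescript
// Same pattern as the Cache Rule filter expression above.
const uuidImagePath = /^\/[^/]+\/[^/]+\/img\/[0-9a-f-]{36}$/;

// Illustrative paths only; Cloudflare matches on the path, without the query string.
const samples = [
  '/acme/website/img/123e4567-e89b-12d3-a456-426614174000', // Rule 1: cached at the edge
  '/acme/website/img/@hero',                                // Rule 2: bypassed
  '/acme/website/live/banner',                              // Rule 3: bypassed
];

for (const path of samples) {
  console.log(path, uuidImagePath.test(path) ? '-> Rule 1 (cache)' : '-> not Rule 1');
}
```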
### Rule 2: Bypass Cache for Aliases
Aliases require dynamic resolution, so they bypass the cache.
**When:** Custom filter expression
```
(http.host eq "cdn.banatie.app" and http.request.uri.path matches "^/[^/]+/[^/]+/img/@")
```
**Then:**
- Cache eligibility: Bypass cache
### Rule 3: Bypass Cache for Live URLs
Live URLs require dynamic generation, so they bypass the cache.
**When:** Custom filter expression
```
(http.host eq "cdn.banatie.app" and http.request.uri.path matches "^/[^/]+/[^/]+/live/")
```
**Then:**
- Cache eligibility: Bypass cache
## Page Rules (Alternative)
If you are not using Cache Rules, configure Page Rules instead:
### Page Rule 1: Cache UUID Images
- URL: `cdn.banatie.app/*/img/*`
- Cache Level: Cache Everything
- Edge Cache TTL: 7 days
- Browser Cache TTL: 1 year
### Page Rules 2 and 3: Bypass Aliases and Live
Each pattern needs its own Page Rule, and because only the first matching Page Rule applies, both bypass rules must be ordered above Page Rule 1:
- URL: `cdn.banatie.app/*/img/@*`
- Cache Level: Bypass
- URL: `cdn.banatie.app/*/live/*`
- Cache Level: Bypass
## DNS Configuration
Ensure DNS records point to your VPS:
```
cdn.banatie.app A YOUR_VPS_IP Proxied (orange cloud)
```
## SSL/TLS Configuration
- SSL Mode: Full (strict)
- Always Use HTTPS: On
- Automatic HTTPS Rewrites: On
- Minimum TLS Version: TLS 1.2
## Performance Settings
- Auto Minify: CSS, JavaScript (not HTML for API responses)
- Brotli: On
- Early Hints: On
- Rocket Loader: Off (may break API responses)
## Security Settings
- Security Level: Medium
- Challenge Passage: 30 minutes
- Browser Integrity Check: On
## Rate Limiting (Optional)
Create a rate limiting rule to prevent abuse of live URLs:
**Rule Name:** Live URL Rate Limit
**When:**
```
(http.host eq "cdn.banatie.app" and http.request.uri.path matches "^/[^/]+/[^/]+/live/")
```
**Then:**
- Rate limit: 10 requests per minute per IP
- Action: Block for 1 minute
## Verification
After configuration:
1. **Test UUID caching:**
```bash
curl -I "https://cdn.banatie.app/org/proj/img/uuid-here"
# Check for: cf-cache-status: HIT (on second request)
```
2. **Test alias bypass:**
```bash
curl -I "https://cdn.banatie.app/org/proj/img/@alias"
# Check for: cf-cache-status: DYNAMIC or BYPASS
```
3. **Test live URL bypass:**
```bash
curl -I "https://cdn.banatie.app/org/proj/live/scope?prompt=test"
# Check for: cf-cache-status: DYNAMIC or BYPASS
```
## Troubleshooting
### Images not caching
- Verify the URL matches the UUID pattern (a 36-character UUID: 32 hex digits plus 4 hyphens)
- Check Cache Rules order (UUID rule should be first)
- Purge cache and retry
### Alias/Live URLs being cached
- Verify bypass rules are active
- Check rule order (bypass rules should run before catch-all)
- Development mode may disable caching
### Slow first requests
- This is expected behavior for a cache MISS
- Subsequent requests served from the same edge location should be a HIT
- Consider using Cache Reserve for longer edge retention
## Notes
- The UUID pattern ensures only static, immutable images are cached at the edge
- Aliases and live URLs are always served fresh from the origin
- A 1-year browser cache is safe because a UUID always points to immutable content (see the header sketch after these notes)
- Cloudflare caches at the edge; the browser caches locally
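If the origin should reinforce this policy itself, a minimal Express middleware sketch is shown below; the function name and wiring are illustrative assumptions, not the project's actual CDN router, which sets its own headers.
```typescript
import type { Request, Response, NextFunction } from 'express';

// Sketch only: mirrors the caching policy described above for UUID-addressed images.
// The name and placement are assumptions; the real CDN router manages its own headers.
export function uuidImageCacheHeaders(_req: Request, res: Response, next: NextFunction): void {
  // Safe because a UUID-addressed image never changes after it is written.
  res.setHeader('Cache-Control', 'public, max-age=31536000, immutable');
  next();
}
```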

docs/url-fix-vps-site.md Normal file
View File

@ -0,0 +1,152 @@
# CDN URL Architecture Fix - VPS Deployment
This document describes the changes needed on VPS to support the new CDN URL architecture.
## Problem
The previous URL structure used presigned URLs with a 24-hour expiry, which does not work for permanent image embedding on websites.
## Solution
The new URL structure provides direct CDN access:
- `cdn.banatie.app/{org}/{proj}/img/{imageId}` - Direct MinIO access for static images
- `cdn.banatie.app/{org}/{proj}/img/@{alias}` - API-mediated alias resolution
- `cdn.banatie.app/{org}/{proj}/live/{scope}?prompt=...` - API-mediated live generation
## Storage Path Format
```
Old: {orgSlug}/{projectSlug}/{category}/{timestamp-filename.ext}
New: {orgSlug}/{projectSlug}/img/{imageId}
```
Here `imageId` is a UUID, identical to `images.id` in the database; the sketch below shows how such keys are built and parsed.
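A minimal TypeScript sketch of the format, assuming standalone helper functions (the actual services parse `storageKey` inline rather than through named helpers):
```typescript
// Illustrative helpers for the {orgSlug}/{projectSlug}/img/{imageId} format.
// The real services split storageKey inline; these function names are assumptions.
export function buildStorageKey(orgSlug: string, projectSlug: string, imageId: string): string {
  return `${orgSlug}/${projectSlug}/img/${imageId}`;
}

export function parseStorageKey(storageKey: string): {
  orgSlug: string;
  projectSlug: string;
  imageId: string;
} {
  const parts = storageKey.split('/');
  if (parts.length < 4 || parts[2] !== 'img') {
    throw new Error(`Invalid storage key format: ${storageKey}`);
  }
  return { orgSlug: parts[0]!, projectSlug: parts[1]!, imageId: parts[3]! };
}
```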
## VPS Deployment Steps
### 1. Update Caddy Configuration
Add the following routing rules to your Caddy config:
```caddyfile
# CDN Domain
cdn.banatie.app {
# UUID pattern - direct to MinIO (no extension in URL)
@uuid path_regexp uuid ^/([^/]+)/([^/]+)/img/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})$
handle @uuid {
reverse_proxy banatie-minio:9000 {
# Rewrite to bucket path
header_up Host cdn.banatie.app
rewrite * /banatie{uri}
}
}
# Alias pattern (@name) - proxy to API
@alias path_regexp alias ^/([^/]+)/([^/]+)/img/@(.+)$
handle @alias {
reverse_proxy banatie-api:3000 {
rewrite * /cdn{uri}
}
}
# Live URL pattern - proxy to API
@live path_regexp live ^/([^/]+)/([^/]+)/live/(.+)$
handle @live {
reverse_proxy banatie-api:3000 {
rewrite * /cdn{uri}
}
}
# Fallback for other patterns
handle {
reverse_proxy banatie-minio:9000 {
header_up Host cdn.banatie.app
rewrite * /banatie{uri}
}
}
}
```
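To make the routing behaviour concrete, the TypeScript sketch below reproduces the decision the three matchers make; the function is purely illustrative and not part of the deployment, though the upstream names mirror the Caddyfile above.
```typescript
// Illustrative routing table mirroring the Caddy matchers above.
const UUID_RE = /^\/[^/]+\/[^/]+\/img\/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/;
const ALIAS_RE = /^\/[^/]+\/[^/]+\/img\/@.+$/;
const LIVE_RE = /^\/[^/]+\/[^/]+\/live\/.+$/;

export function routeCdnRequest(path: string): { upstream: string; rewrittenPath: string } {
  if (UUID_RE.test(path)) {
    // Static image: served straight from the MinIO bucket.
    return { upstream: 'banatie-minio:9000', rewrittenPath: `/banatie${path}` };
  }
  if (ALIAS_RE.test(path) || LIVE_RE.test(path)) {
    // Alias resolution and live generation go through the API.
    return { upstream: 'banatie-api:3000', rewrittenPath: `/cdn${path}` };
  }
  // Fallback: anything else is tried against MinIO.
  return { upstream: 'banatie-minio:9000', rewrittenPath: `/banatie${path}` };
}
```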
### 2. Update Environment Variables
Add to `/opt/banatie/.env`:
```env
CDN_BASE_URL=https://cdn.banatie.app
```
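A hedged sketch of how a service might combine this variable with a storage key to produce the public URL; the helper name and fallback value are assumptions rather than the actual implementation:
```typescript
// Illustrative only; the real service may construct URLs differently.
const CDN_BASE_URL = process.env.CDN_BASE_URL ?? 'https://cdn.banatie.app'; // fallback is an assumption

// storageKey already has the shape {orgSlug}/{projectSlug}/img/{imageId},
// so the public URL is the CDN base joined with the key.
export function publicImageUrl(storageKey: string): string {
  return `${CDN_BASE_URL}/${storageKey}`;
}
```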
### 3. Reset Database and MinIO Storage
Since this is a breaking change to the storage path format:
```bash
# Stop services
cd /opt/banatie
docker compose down
# Clean database (WARNING: deletes all data)
rm -rf /opt/banatie/data/postgres/*
# Clean MinIO storage (WARNING: deletes all files)
rm -rf /opt/banatie/data/minio/drive{1,2,3,4}/*
# Rebuild and start services
docker compose up -d --build
```
### 4. Run Storage Initialization
After rebuild, the `banatie-storage-init` container will:
1. Create the `banatie` bucket
2. Configure service user with readwrite access
3. Enable public anonymous download access for CDN
Verify public access is enabled:
```bash
docker exec banatie-minio mc anonymous get local/banatie
# Should show: Access permission for `local/banatie` is `download`
```
## Verification
### Test Direct UUID Access
```bash
# After generating an image, get its UUID from database or API response
# Then test direct CDN access:
curl -I "https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{uuid}"
# Expected: HTTP 200 with Content-Type: image/png (or similar)
```
### Test Alias Resolution
```bash
# Assign an alias to an image via API, then test:
curl -I "https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/@hero"
# Expected: HTTP 200 (API resolves alias and streams image)
```
### Test Live URL Generation
```bash
curl -I "https://cdn.banatie.app/{orgSlug}/{projectSlug}/live/test?prompt=mountain"
# Expected: HTTP 200 (generates or returns cached image)
```
## Rollback
If issues occur:
1. Revert code changes
2. Rebuild API container
3. Regenerate any images (old storage paths won't work)
## Notes
- `filename = image.id` (UUID) ensures consistent identification across DB, storage, and URLs
- Files are stored without extension; Content-Type is served from MinIO metadata
- Cloudflare caching can be enabled for UUID patterns (see url-fix-cloudflare-site.md)

View File

@ -31,13 +31,9 @@ MINIO_BUCKET_NAME=banatie
MINIO_USE_SSL=false MINIO_USE_SSL=false
STORAGE_TYPE=minio STORAGE_TYPE=minio
# Public URL for CDN access (used in API responses) # CDN Base URL for image access
MINIO_PUBLIC_URL=https://cdn.banatie.app # Format: https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{imageId}
CDN_BASE_URL=https://cdn.banatie.app
# Use direct CDN URLs instead of API proxy (recommended for production)
# Set to 'false' to force API URLs even when MINIO_PUBLIC_URL is configured
# Default: true (CDN enabled when MINIO_PUBLIC_URL is present)
USE_DIRECT_CDN=true
# ---------------------------------------- # ----------------------------------------
# API Configuration # API Configuration
@ -57,6 +53,6 @@ CORS_ORIGIN=https://banatie.app,https://api.banatie.app
# ---------------------------------------- # ----------------------------------------
# Multi-tenancy Defaults # Multi-tenancy Defaults
# ---------------------------------------- # ----------------------------------------
DEFAULT_ORG_ID=default DEFAULT_ORG_SLUG=default
DEFAULT_PROJECT_ID=main DEFAULT_PROJECT_SLUG=main
DEFAULT_USER_ID=system DEFAULT_USER_ID=system

View File

@ -129,6 +129,9 @@ services:
{"Rules":[{"ID":"temp-cleanup","Status":"Enabled","Filter":{"Prefix":"temp/"},"Expiration":{"Days":7}}]} {"Rules":[{"ID":"temp-cleanup","Status":"Enabled","Filter":{"Prefix":"temp/"},"Expiration":{"Days":7}}]}
LCEOF LCEOF
mc ilm import storage/banatie < /tmp/lifecycle.json || echo 'Lifecycle policy may already exist' mc ilm import storage/banatie < /tmp/lifecycle.json || echo 'Lifecycle policy may already exist'
# Enable public read access for CDN
mc anonymous set download storage/banatie || echo 'Anonymous access may already be set'
echo 'Public read access enabled for CDN'
echo '=== Storage Initialization Completed ===' echo '=== Storage Initialization Completed ==='
exit 0 exit 0
restart: "no" restart: "no"

View File

@ -46,7 +46,8 @@ export const images = pgTable(
pendingFlowId: text('pending_flow_id'), // Temporary UUID for lazy flow pattern pendingFlowId: text('pending_flow_id'), // Temporary UUID for lazy flow pattern
apiKeyId: uuid('api_key_id').references(() => apiKeys.id, { onDelete: 'set null' }), apiKeyId: uuid('api_key_id').references(() => apiKeys.id, { onDelete: 'set null' }),
// Storage (MinIO path format: orgSlug/projectSlug/category/YYYY-MM/filename.ext) // Storage (MinIO path format: orgSlug/projectSlug/img/{imageId})
// Note: imageId = this record's UUID, filename has no extension
storageKey: varchar('storage_key', { length: 500 }).notNull().unique(), storageKey: varchar('storage_key', { length: 500 }).notNull().unique(),
storageUrl: text('storage_url').notNull(), storageUrl: text('storage_url').notNull(),
@ -54,6 +55,8 @@ export const images = pgTable(
mimeType: varchar('mime_type', { length: 100 }).notNull(), mimeType: varchar('mime_type', { length: 100 }).notNull(),
fileSize: integer('file_size').notNull(), fileSize: integer('file_size').notNull(),
fileHash: varchar('file_hash', { length: 64 }), // SHA-256 for deduplication fileHash: varchar('file_hash', { length: 64 }), // SHA-256 for deduplication
originalFilename: varchar('original_filename', { length: 255 }), // User's original filename
fileExtension: varchar('file_extension', { length: 20 }), // Original extension (png, jpg, etc.)
// Dimensions // Dimensions
width: integer('width'), width: integer('width'),

View File

@ -42,6 +42,37 @@ async function main() {
log.detail('Source', response.source); log.detail('Source', response.source);
}); });
// Test 1.5: Validate storage path format
await runTest('Storage path format validation', async () => {
// Get uploaded image
const result = await api(`${endpoints.images}/${testContext.uploadedImageId}`);
const image = result.data.data;
// Validate storageKey format: {orgSlug}/{projectSlug}/img/{uuid}
const keyRegex = /^[\w-]+\/[\w-]+\/img\/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/;
if (!keyRegex.test(image.storageKey)) {
throw new Error(`Invalid storageKey format: ${image.storageKey}. Expected: {org}/{proj}/img/{uuid}`);
}
// Validate storageUrl format: https://.../img/{uuid}
const urlRegex = /^https?:\/\/[^\/]+\/[\w-]+\/[\w-]+\/img\/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/;
if (!urlRegex.test(image.storageUrl)) {
throw new Error(`Invalid storageUrl format: ${image.storageUrl}. Expected: https://.../img/{uuid}`);
}
// Verify imageId matches the UUID in path
const pathParts = image.storageKey.split('/');
const uuidFromPath = pathParts[3];
if (uuidFromPath !== image.id) {
throw new Error(`storageKey UUID (${uuidFromPath}) doesn't match image.id (${image.id})`);
}
log.detail('storageKey', image.storageKey);
log.detail('storageUrl', image.storageUrl);
log.detail('UUID in path = image.id', '✓');
log.detail('Format', '{org}/{proj}/img/{uuid}');
});
// Test 2: Upload image without alias // Test 2: Upload image without alias
await runTest('Upload image without alias', async () => { await runTest('Upload image without alias', async () => {
const fixturePath = join(__dirname, config.fixturesDir, 'test-image.png'); const fixturePath = join(__dirname, config.fixturesDir, 'test-image.png');

View File

@ -236,11 +236,12 @@ X-API-Key: {{apiKey}}
### Step 6.1: CDN image by path (if implemented) ### Step 6.1: CDN image by path (if implemented)
# @name cdnImage # @name cdnImage
GET {{base}}/api/v1/cdn/default/test-project/generated/2024-01/test.jpg # New format: {org}/{project}/img/{uuid}
GET {{base}}/api/v1/cdn/default/test-project/img/00000000-0000-0000-0000-000000000000
X-API-Key: {{apiKey}} X-API-Key: {{apiKey}}
### ###
# Note: Endpoint structure check only - actual paths depend on storage # Note: Endpoint structure check only - uses placeholder UUID
### Step 6.2: Health check ### Step 6.2: Health check
# @name healthCheck # @name healthCheck