Merge branch 'fix-cdn-urls'

Oleg Proskurin 2025-12-27 20:13:49 +07:00
commit b5bfc91949
18 changed files with 728 additions and 539 deletions

View File

@ -144,19 +144,19 @@ cdnRouter.get(
}
// Download image from storage
// Storage key format: {orgSlug}/{projectSlug}/img/{imageId}
const storageService = await StorageFactory.getInstance();
const keyParts = image.storageKey.split('/');
if (keyParts.length < 4) {
if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
const orgId = keyParts[0]!;
const projectId = keyParts[1]!;
const category = keyParts[2]! as 'uploads' | 'generated' | 'references';
const filename = keyParts.slice(3).join('/');
const storedOrgSlug = keyParts[0]!;
const storedProjectSlug = keyParts[1]!;
const imageId = keyParts[3]!;
const buffer = await storageService.downloadFile(orgId, projectId, category, filename);
const buffer = await storageService.downloadFile(storedOrgSlug, storedProjectSlug, imageId);
// Set headers
res.setHeader('Content-Type', image.mimeType);
@ -345,19 +345,19 @@ cdnRouter.get(
if (cachedImage) {
// Cache HIT - serve existing image
// Storage key format: {orgSlug}/{projectSlug}/img/{imageId}
const storageService = await StorageFactory.getInstance();
const keyParts = cachedImage.storageKey.split('/');
if (keyParts.length < 4) {
if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
const orgId = keyParts[0]!;
const projectId = keyParts[1]!;
const category = keyParts[2]! as 'uploads' | 'generated' | 'references';
const filename = keyParts.slice(3).join('/');
const storedOrgSlug = keyParts[0]!;
const storedProjectSlug = keyParts[1]!;
const imageId = keyParts[3]!;
const buffer = await storageService.downloadFile(orgId, projectId, category, filename);
const buffer = await storageService.downloadFile(storedOrgSlug, storedProjectSlug, imageId);
// Set headers
res.setHeader('Content-Type', cachedImage.mimeType);
@ -445,19 +445,19 @@ cdnRouter.get(
});
// Download newly generated image
// Storage key format: {orgSlug}/{projectSlug}/img/{imageId}
const storageService = await StorageFactory.getInstance();
const keyParts = generation.outputImage.storageKey.split('/');
if (keyParts.length < 4) {
if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
const orgId = keyParts[0]!;
const projectId = keyParts[1]!;
const category = keyParts[2]! as 'uploads' | 'generated' | 'references';
const filename = keyParts.slice(3).join('/');
const storedOrgSlug = keyParts[0]!;
const storedProjectSlug = keyParts[1]!;
const imageId = keyParts[3]!;
const buffer = await storageService.downloadFile(orgId, projectId, category, filename);
const buffer = await storageService.downloadFile(storedOrgSlug, storedProjectSlug, imageId);
// Set headers
res.setHeader('Content-Type', generation.outputImage.mimeType);

View File

@ -9,16 +9,17 @@ import { rateLimitByApiKey } from '../middleware/auth/rateLimiter';
export const imagesRouter: RouterType = Router();
/**
* GET /api/images/:orgId/:projectId/:category/:filename
* Serves images via presigned URLs (redirect approach)
* GET /api/images/:orgSlug/:projectSlug/img/:imageId
* Serves images directly (streaming approach)
* New format: {orgSlug}/{projectSlug}/img/{imageId}
*/
imagesRouter.get(
'/images/:orgId/:projectId/:category/:filename',
'/images/:orgSlug/:projectSlug/img/:imageId',
asyncHandler(async (req: Request, res: Response): Promise<void> => {
const { orgId, projectId, category, filename } = req.params;
const { orgSlug, projectSlug, imageId } = req.params;
// Validate required params (these are guaranteed by route pattern)
if (!orgId || !projectId || !category || !filename) {
if (!orgSlug || !projectSlug || !imageId) {
res.status(400).json({
success: false,
message: 'Missing required parameters',
@ -26,25 +27,11 @@ imagesRouter.get(
return;
}
// Validate category
if (!['uploads', 'generated', 'references'].includes(category)) {
res.status(400).json({
success: false,
message: 'Invalid category',
});
return;
}
const storageService = await StorageFactory.getInstance();
try {
// Check if file exists first (fast check)
const exists = await storageService.fileExists(
orgId,
projectId,
category as 'uploads' | 'generated' | 'references',
filename,
);
const exists = await storageService.fileExists(orgSlug, projectSlug, imageId);
if (!exists) {
res.status(404).json({
@ -54,37 +41,20 @@ imagesRouter.get(
return;
}
// Determine content type from filename
const ext = filename.toLowerCase().split('.').pop();
const contentType =
{
png: 'image/png',
jpg: 'image/jpeg',
jpeg: 'image/jpeg',
gif: 'image/gif',
webp: 'image/webp',
svg: 'image/svg+xml',
}[ext || ''] || 'application/octet-stream';
// Set headers for optimal caching and performance
res.setHeader('Content-Type', contentType);
res.setHeader('Cache-Control', 'public, max-age=86400, immutable'); // 24 hours + immutable
res.setHeader('ETag', `"${orgId}-${projectId}-${filename}"`); // Simple ETag
// Note: Content-Type will be set from MinIO metadata
res.setHeader('Cache-Control', 'public, max-age=31536000, immutable'); // 1 year + immutable
res.setHeader('ETag', `"${imageId}"`); // UUID as ETag
// Handle conditional requests (304 Not Modified)
const ifNoneMatch = req.headers['if-none-match'];
if (ifNoneMatch === `"${orgId}-${projectId}-${filename}"`) {
if (ifNoneMatch === `"${imageId}"`) {
res.status(304).end(); // Not Modified
return;
}
// Stream the file directly through our API (memory efficient)
const fileStream = await storageService.streamFile(
orgId,
projectId,
category as 'uploads' | 'generated' | 'references',
filename,
);
const fileStream = await storageService.streamFile(orgSlug, projectSlug, imageId);
// Handle stream errors
fileStream.on('error', (streamError) => {
@ -110,17 +80,17 @@ imagesRouter.get(
);
/**
* GET /api/images/url/:orgId/:projectId/:category/:filename
* GET /api/images/url/:orgSlug/:projectSlug/img/:imageId
* Returns a presigned URL instead of redirecting
*/
imagesRouter.get(
'/images/url/:orgId/:projectId/:category/:filename',
'/images/url/:orgSlug/:projectSlug/img/:imageId',
asyncHandler(async (req: Request, res: Response): Promise<void> => {
const { orgId, projectId, category, filename } = req.params;
const { orgSlug, projectSlug, imageId } = req.params;
const { expiry = '3600' } = req.query; // Default 1 hour
// Validate required params (these are guaranteed by route pattern)
if (!orgId || !projectId || !category || !filename) {
if (!orgSlug || !projectSlug || !imageId) {
res.status(400).json({
success: false,
message: 'Missing required parameters',
@ -128,22 +98,13 @@ imagesRouter.get(
return;
}
if (!['uploads', 'generated', 'references'].includes(category)) {
res.status(400).json({
success: false,
message: 'Invalid category',
});
return;
}
const storageService = await StorageFactory.getInstance();
try {
const presignedUrl = await storageService.getPresignedDownloadUrl(
orgId,
projectId,
category as 'uploads' | 'generated' | 'references',
filename,
orgSlug,
projectSlug,
imageId,
parseInt(expiry as string, 10),
);
@ -191,19 +152,19 @@ imagesRouter.get(
}
// Extract org/project from validated API key
const orgId = req.apiKey?.organizationSlug || 'default';
const projectId = req.apiKey?.projectSlug!;
const orgSlug = req.apiKey?.organizationSlug || 'default';
const projectSlug = req.apiKey?.projectSlug!;
console.log(
`[${timestamp}] [${requestId}] Listing generated images for org:${orgId}, project:${projectId}, limit:${limit}, offset:${offset}, prefix:${prefix || 'none'}`,
`[${timestamp}] [${requestId}] Listing images for org:${orgSlug}, project:${projectSlug}, limit:${limit}, offset:${offset}, prefix:${prefix || 'none'}`,
);
try {
// Get storage service instance
const storageService = await StorageFactory.getInstance();
// List files in generated category
const allFiles = await storageService.listFiles(orgId, projectId, 'generated', prefix);
// List files in img folder
const allFiles = await storageService.listFiles(orgSlug, projectSlug, prefix);
// Sort by lastModified descending (newest first)
allFiles.sort((a, b) => {
@ -218,8 +179,8 @@ imagesRouter.get(
// Map to response format with public URLs
const images = paginatedFiles.map((file) => ({
filename: file.filename,
url: storageService.getPublicUrl(orgId, projectId, 'generated', file.filename),
imageId: file.filename,
url: storageService.getPublicUrl(orgSlug, projectSlug, file.filename),
size: file.size,
contentType: file.contentType,
lastModified: file.lastModified ? file.lastModified.toISOString() : new Date().toISOString(),
@ -228,7 +189,7 @@ imagesRouter.get(
const hasMore = offset + limit < total;
console.log(
`[${timestamp}] [${requestId}] Successfully listed ${images.length} of ${total} generated images`,
`[${timestamp}] [${requestId}] Successfully listed ${images.length} of ${total} images`,
);
return res.status(200).json({
@ -242,11 +203,11 @@ imagesRouter.get(
},
});
} catch (error) {
console.error(`[${timestamp}] [${requestId}] Failed to list generated images:`, error);
console.error(`[${timestamp}] [${requestId}] Failed to list images:`, error);
return res.status(500).json({
success: false,
message: 'Failed to list generated images',
message: 'Failed to list images',
error: error instanceof Error ? error.message : 'Unknown error occurred',
});
}

View File

@ -1,5 +1,6 @@
import { Response, Router } from 'express';
import type { Router as RouterType } from 'express';
import { randomUUID } from 'crypto';
import { ImageGenService } from '../services/ImageGenService';
import { validateTextToImageRequest, logTextToImageRequest } from '../middleware/jsonValidation';
import { autoEnhancePrompt, logEnhancementResult } from '../middleware/promptEnhancement';
@ -48,14 +49,17 @@ textToImageRouter.post(
const timestamp = new Date().toISOString();
const requestId = req.requestId;
const { prompt, filename, aspectRatio, meta } = req.body;
const { prompt, aspectRatio, meta } = req.body;
// Extract org/project slugs from validated API key
const orgId = req.apiKey?.organizationSlug || undefined;
const projectId = req.apiKey?.projectSlug!; // Guaranteed by requireProjectKey middleware
const orgSlug = req.apiKey?.organizationSlug || undefined;
const projectSlug = req.apiKey?.projectSlug!; // Guaranteed by requireProjectKey middleware
// Generate imageId (UUID) - this will be the filename in storage
const imageId = randomUUID();
console.log(
`[${timestamp}] [${requestId}] Starting text-to-image generation process for org:${orgId}, project:${projectId}`,
`[${timestamp}] [${requestId}] Starting text-to-image generation process for org:${orgSlug}, project:${projectSlug}`,
);
try {
@ -66,10 +70,10 @@ textToImageRouter.post(
const result = await imageGenService.generateImage({
prompt,
filename,
imageId,
...(aspectRatio && { aspectRatio }),
orgId,
projectId,
orgSlug,
projectSlug,
...(meta && { meta }),
});
@ -77,7 +81,7 @@ textToImageRouter.post(
console.log(`[${timestamp}] [${requestId}] Text-to-image generation completed:`, {
success: result.success,
model: result.model,
filename: result.filename,
imageId: result.imageId,
hasError: !!result.error,
});
@ -87,7 +91,7 @@ textToImageRouter.post(
success: true,
message: 'Image generated successfully',
data: {
filename: result.filename!,
filename: result.imageId!,
filepath: result.filepath!,
...(result.url && { url: result.url }),
...(result.description && { description: result.description }),

View File

@ -1,5 +1,6 @@
import { Response, Router } from 'express';
import type { Router as RouterType } from 'express';
import { randomUUID } from 'crypto';
import { StorageFactory } from '../services/StorageFactory';
import { asyncHandler } from '../middleware/errorHandler';
import { validateApiKey } from '../middleware/auth/validateApiKey';
@ -53,18 +54,22 @@ uploadRouter.post(
// Initialize storage service
const storageService = await StorageFactory.getInstance();
// Upload file to MinIO in 'uploads' category
// Generate imageId (UUID) - this will be the filename in storage
const imageId = randomUUID();
// Upload file to MinIO
// Path format: {orgSlug}/{projectSlug}/img/{imageId}
console.log(
`[${timestamp}] [${requestId}] Uploading file: ${file.originalname} (${file.size} bytes)`,
`[${timestamp}] [${requestId}] Uploading file: ${file.originalname} as ${imageId} (${file.size} bytes)`,
);
const uploadResult = await storageService.uploadFile(
orgSlug,
projectSlug,
'uploads',
file.originalname,
imageId,
file.buffer,
file.mimetype,
file.originalname,
);
if (!uploadResult.success) {

View File

@ -88,23 +88,21 @@ liveRouter.get(
const storageService = await StorageFactory.getInstance();
// Parse storage key to get components
// Format: orgId/projectId/category/filename.ext
// Format: {orgSlug}/{projectSlug}/img/{imageId}
const keyParts = image.storageKey.split('/');
if (keyParts.length < 4) {
if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
const orgId = keyParts[0];
const projectIdSlug = keyParts[1];
const category = keyParts[2] as 'uploads' | 'generated' | 'references';
const filename = keyParts.slice(3).join('/');
const storedOrgSlug = keyParts[0]!;
const storedProjectSlug = keyParts[1]!;
const imageId = keyParts[3]!;
// Download image from storage
const buffer = await storageService.downloadFile(
orgId!,
projectIdSlug!,
category,
filename!
storedOrgSlug,
storedProjectSlug,
imageId
);
// Set cache headers
@ -157,22 +155,20 @@ liveRouter.get(
// Download newly generated image
const storageService = await StorageFactory.getInstance();
// Format: orgId/projectId/category/filename.ext
// Format: {orgSlug}/{projectSlug}/img/{imageId}
const keyParts = generation.outputImage.storageKey.split('/');
if (keyParts.length < 4) {
if (keyParts.length < 4 || keyParts[2] !== 'img') {
throw new Error('Invalid storage key format');
}
const orgId = keyParts[0];
const projectIdSlug = keyParts[1];
const category = keyParts[2] as 'uploads' | 'generated' | 'references';
const filename = keyParts.slice(3).join('/');
const storedOrgSlug = keyParts[0]!;
const storedProjectSlug = keyParts[1]!;
const imageId = keyParts[3]!;
const buffer = await storageService.downloadFile(
orgId!,
projectIdSlug!,
category,
filename!
storedOrgSlug,
storedProjectSlug,
imageId
);
// Set cache headers

View File

@ -29,11 +29,11 @@ export class ImageGenService {
* This method separates image generation from storage for clear error handling
*/
async generateImage(options: ImageGenerationOptions): Promise<ImageGenerationResult> {
const { prompt, filename, referenceImages, aspectRatio, orgId, projectId, meta } = options;
const { prompt, imageId, referenceImages, aspectRatio, orgSlug, projectSlug, meta } = options;
// Use default values if not provided
const finalOrgId = orgId || process.env['DEFAULT_ORG_ID'] || 'default';
const finalProjectId = projectId || process.env['DEFAULT_PROJECT_ID'] || 'main';
const finalOrgSlug = orgSlug || process.env['DEFAULT_ORG_SLUG'] || 'default';
const finalProjectSlug = projectSlug || process.env['DEFAULT_PROJECT_SLUG'] || 'main';
const finalAspectRatio = aspectRatio || '1:1'; // Default to square
// Step 1: Generate image from Gemini AI
@ -44,8 +44,8 @@ export class ImageGenService {
prompt,
referenceImages,
finalAspectRatio,
finalOrgId,
finalProjectId,
finalOrgSlug,
finalProjectSlug,
meta,
);
generatedData = aiResult.generatedData;
@ -61,22 +61,25 @@ export class ImageGenService {
}
// Step 2: Save generated image to storage
// Path format: {orgSlug}/{projectSlug}/img/{imageId}
try {
const finalFilename = `${filename}.${generatedData.fileExtension}`;
const storageService = await StorageFactory.getInstance();
// Original filename for metadata (e.g., "my-image.png")
const originalFilename = `generated-image.${generatedData.fileExtension}`;
const uploadResult = await storageService.uploadFile(
finalOrgId,
finalProjectId,
'generated',
finalFilename,
finalOrgSlug,
finalProjectSlug,
imageId,
generatedData.buffer,
generatedData.mimeType,
originalFilename,
);
if (uploadResult.success) {
return {
success: true,
filename: uploadResult.filename,
imageId: uploadResult.filename,
filepath: uploadResult.path,
url: uploadResult.url,
size: uploadResult.size,
@ -125,8 +128,8 @@ export class ImageGenService {
prompt: string,
referenceImages: ReferenceImage[] | undefined,
aspectRatio: string,
orgId: string,
projectId: string,
orgSlug: string,
projectSlug: string,
meta?: { tags?: string[] },
): Promise<{
generatedData: GeneratedImageData;
@ -182,8 +185,8 @@ export class ImageGenService {
const ttiLogger = TTILogger.getInstance();
const logEntry: TTILogEntry = {
timestamp: new Date().toISOString(),
orgId,
projectId,
orgId: orgSlug,
projectId: projectSlug,
prompt,
model: this.primaryModel,
config,

View File

@ -4,7 +4,7 @@ import { StorageService, FileMetadata, UploadResult } from './StorageService';
export class MinioStorageService implements StorageService {
private client: MinioClient;
private bucketName: string;
private publicUrl: string;
private cdnBaseUrl: string;
constructor(
endpoint: string,
@ -12,7 +12,7 @@ export class MinioStorageService implements StorageService {
secretKey: string,
useSSL: boolean = false,
bucketName: string = 'banatie',
publicUrl?: string,
cdnBaseUrl?: string,
) {
// Parse endpoint to separate hostname and port
const cleanEndpoint = endpoint.replace(/^https?:\/\//, '');
@ -31,119 +31,59 @@ export class MinioStorageService implements StorageService {
secretKey,
});
this.bucketName = bucketName;
this.publicUrl = publicUrl || `${useSSL ? 'https' : 'http'}://${endpoint}`;
// CDN base URL without bucket name (e.g., https://cdn.banatie.app)
this.cdnBaseUrl = cdnBaseUrl || process.env['CDN_BASE_URL'] || `${useSSL ? 'https' : 'http'}://${endpoint}/${bucketName}`;
}
private getFilePath(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
): string {
// Simplified path without date folder for now
return `${orgId}/${projectId}/${category}/${filename}`;
/**
* Get file path in storage
* Format: {orgSlug}/{projectSlug}/img/{imageId}
*/
private getFilePath(orgSlug: string, projectSlug: string, imageId: string): string {
return `${orgSlug}/${projectSlug}/img/${imageId}`;
}
private generateUniqueFilename(originalFilename: string): string {
// Sanitize filename first
const sanitized = this.sanitizeFilename(originalFilename);
const timestamp = Date.now();
const random = Math.random().toString(36).substring(2, 8);
const ext = sanitized.includes('.') ? sanitized.substring(sanitized.lastIndexOf('.')) : '';
const name = sanitized.includes('.')
? sanitized.substring(0, sanitized.lastIndexOf('.'))
: sanitized;
return `${name}-${timestamp}-${random}${ext}`;
/**
* Extract file extension from original filename
*/
private extractExtension(filename: string): string | undefined {
if (!filename) return undefined;
const lastDotIndex = filename.lastIndexOf('.');
if (lastDotIndex <= 0) return undefined;
return filename.substring(lastDotIndex + 1).toLowerCase();
}
private sanitizeFilename(filename: string): string {
// Remove path traversal attempts FIRST from entire filename
let cleaned = filename.replace(/\.\./g, '').trim();
// Split filename and extension
const lastDotIndex = cleaned.lastIndexOf('.');
let baseName = lastDotIndex > 0 ? cleaned.substring(0, lastDotIndex) : cleaned;
const extension = lastDotIndex > 0 ? cleaned.substring(lastDotIndex) : '';
// Remove dangerous characters from base name
baseName = baseName
.replace(/[<>:"/\\|?*\x00-\x1f]/g, '') // Remove dangerous chars
.trim();
// Replace non-ASCII characters with ASCII equivalents or remove them
// This prevents S3 signature mismatches with MinIO
baseName = baseName
.normalize('NFD') // Decompose combined characters (é -> e + ´)
.replace(/[\u0300-\u036f]/g, '') // Remove diacritical marks
.replace(/[^\x20-\x7E]/g, '_') // Replace any remaining non-ASCII with underscore
.replace(/[^\w\s\-_.]/g, '_') // Replace special chars (except word chars, space, dash, underscore, dot) with underscore
.replace(/\s+/g, '_') // Replace spaces with underscores
.replace(/_{2,}/g, '_') // Collapse multiple underscores
.replace(/^_+|_+$/g, ''); // Remove leading/trailing underscores
// Ensure we still have a valid base name
if (baseName.length === 0) {
baseName = 'file';
}
// Sanitize extension (remove only dangerous chars, keep the dot)
let sanitizedExt = extension
.replace(/[<>:"/\\|?*\x00-\x1f]/g, '')
.replace(/[^\x20-\x7E]/g, '')
.toLowerCase();
// Ensure extension starts with a dot and is reasonable
if (sanitizedExt && !sanitizedExt.startsWith('.')) {
sanitizedExt = '.' + sanitizedExt;
}
if (sanitizedExt.length > 10) {
sanitizedExt = sanitizedExt.substring(0, 10);
}
const result = baseName + sanitizedExt;
return result.substring(0, 255); // Limit total length
}
private validateFilePath(
orgId: string,
projectId: string,
category: string,
filename: string,
): void {
// Validate orgId
if (!orgId || !/^[a-zA-Z0-9_-]+$/.test(orgId) || orgId.length > 50) {
/**
* Validate storage path components
*/
private validatePath(orgSlug: string, projectSlug: string, imageId: string): void {
// Validate orgSlug
if (!orgSlug || !/^[a-zA-Z0-9_-]+$/.test(orgSlug) || orgSlug.length > 50) {
throw new Error(
'Invalid organization ID: must be alphanumeric with dashes/underscores, max 50 chars',
'Invalid organization slug: must be alphanumeric with dashes/underscores, max 50 chars',
);
}
// Validate projectId
if (!projectId || !/^[a-zA-Z0-9_-]+$/.test(projectId) || projectId.length > 50) {
// Validate projectSlug
if (!projectSlug || !/^[a-zA-Z0-9_-]+$/.test(projectSlug) || projectSlug.length > 50) {
throw new Error(
'Invalid project ID: must be alphanumeric with dashes/underscores, max 50 chars',
'Invalid project slug: must be alphanumeric with dashes/underscores, max 50 chars',
);
}
// Validate category
if (!['uploads', 'generated', 'references'].includes(category)) {
throw new Error('Invalid category: must be uploads, generated, or references');
}
// Validate filename
if (!filename || filename.length === 0 || filename.length > 255) {
throw new Error('Invalid filename: must be 1-255 characters');
// Validate imageId (UUID format)
if (!imageId || imageId.length === 0 || imageId.length > 50) {
throw new Error('Invalid imageId: must be 1-50 characters');
}
// Check for path traversal and dangerous patterns
if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
throw new Error('Invalid characters in filename: path traversal not allowed');
if (imageId.includes('..') || imageId.includes('/') || imageId.includes('\\')) {
throw new Error('Invalid characters in imageId: path traversal not allowed');
}
// Prevent null bytes and control characters
if (/[\x00-\x1f]/.test(filename)) {
throw new Error('Invalid filename: control characters not allowed');
if (/[\x00-\x1f]/.test(imageId)) {
throw new Error('Invalid imageId: control characters not allowed');
}
}
@ -154,8 +94,8 @@ export class MinioStorageService implements StorageService {
console.log(`Created bucket: ${this.bucketName}`);
}
// Note: With SNMD and presigned URLs, we don't need bucket policies
console.log(`Bucket ${this.bucketName} ready for presigned URL access`);
// Bucket should be public for CDN access (configured via mc anonymous set download)
console.log(`Bucket ${this.bucketName} ready for CDN access`);
}
async bucketExists(): Promise<boolean> {
@ -163,15 +103,15 @@ export class MinioStorageService implements StorageService {
}
async uploadFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
buffer: Buffer,
contentType: string,
originalFilename?: string,
): Promise<UploadResult> {
// Validate inputs first
this.validateFilePath(orgId, projectId, category, filename);
this.validatePath(orgSlug, projectSlug, imageId);
if (!buffer || buffer.length === 0) {
throw new Error('Buffer cannot be empty');
@ -184,24 +124,34 @@ export class MinioStorageService implements StorageService {
// Ensure bucket exists
await this.createBucket();
// Generate unique filename to avoid conflicts
const uniqueFilename = this.generateUniqueFilename(filename);
const filePath = this.getFilePath(orgId, projectId, category, uniqueFilename);
// Get file path: {orgSlug}/{projectSlug}/img/{imageId}
const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
// Extract file extension from original filename
const fileExtension = originalFilename ? this.extractExtension(originalFilename) : undefined;
// Encode original filename to Base64 to safely store non-ASCII characters in metadata
const originalNameEncoded = Buffer.from(filename, 'utf-8').toString('base64');
const originalNameEncoded = originalFilename
? Buffer.from(originalFilename, 'utf-8').toString('base64')
: undefined;
const metadata = {
const metadata: Record<string, string> = {
'Content-Type': contentType,
'X-Amz-Meta-Original-Name': originalNameEncoded,
'X-Amz-Meta-Original-Name-Encoding': 'base64',
'X-Amz-Meta-Category': category,
'X-Amz-Meta-Project': projectId,
'X-Amz-Meta-Organization': orgId,
'X-Amz-Meta-Project': projectSlug,
'X-Amz-Meta-Organization': orgSlug,
'X-Amz-Meta-Upload-Time': new Date().toISOString(),
};
console.log(`Uploading file to: ${this.bucketName}/${filePath}`);
if (originalNameEncoded) {
metadata['X-Amz-Meta-Original-Name'] = originalNameEncoded;
metadata['X-Amz-Meta-Original-Name-Encoding'] = 'base64';
}
if (fileExtension) {
metadata['X-Amz-Meta-File-Extension'] = fileExtension;
}
console.log(`[MinIO] Uploading file to: ${this.bucketName}/${filePath}`);
await this.client.putObject(
this.bucketName,
@ -211,28 +161,29 @@ export class MinioStorageService implements StorageService {
metadata,
);
const url = this.getPublicUrl(orgId, projectId, category, uniqueFilename);
const url = this.getPublicUrl(orgSlug, projectSlug, imageId);
console.log(`Generated API URL: ${url}`);
console.log(`[MinIO] CDN URL: ${url}`);
return {
success: true,
filename: uniqueFilename,
filename: imageId,
path: filePath,
url,
size: buffer.length,
contentType,
...(originalFilename && { originalFilename }),
...(fileExtension && { fileExtension }),
};
}
async downloadFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
): Promise<Buffer> {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
this.validatePath(orgSlug, projectSlug, imageId);
const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
const stream = await this.client.getObject(this.bucketName, filePath);
@ -245,203 +196,91 @@ export class MinioStorageService implements StorageService {
}
async streamFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
): Promise<import('stream').Readable> {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
this.validatePath(orgSlug, projectSlug, imageId);
const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
// Return the stream directly without buffering - memory efficient!
return await this.client.getObject(this.bucketName, filePath);
}
async deleteFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
): Promise<void> {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
this.validatePath(orgSlug, projectSlug, imageId);
const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
await this.client.removeObject(this.bucketName, filePath);
}
/**
* Get public URL for file access
* Returns CDN URL if MINIO_PUBLIC_URL is configured (production),
* otherwise falls back to API endpoint URL (development)
*
* @returns {string} URL for accessing the file
* Get public CDN URL for file access
* Returns: https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{imageId}
*/
getPublicUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
): string {
this.validateFilePath(orgId, projectId, category, filename);
// If MINIO_PUBLIC_URL is configured, use direct CDN access
// This provides better performance and reduces API server load
if (this.publicUrl && process.env['USE_DIRECT_CDN'] !== 'false') {
const filePath = this.getFilePath(orgId, projectId, category, filename);
const cdnUrl = `${this.publicUrl}/${this.bucketName}/${filePath}`;
console.log(`[MinIO] Using CDN URL: ${cdnUrl}`);
return cdnUrl;
}
// Fallback to API URL for local development or when CDN is disabled
const apiBaseUrl = process.env['API_BASE_URL'] || 'http://localhost:3000';
const apiUrl = `${apiBaseUrl}/api/images/${orgId}/${projectId}/${category}/${filename}`;
console.log(`[MinIO] Using API URL: ${apiUrl}`);
return apiUrl;
getPublicUrl(orgSlug: string, projectSlug: string, imageId: string): string {
this.validatePath(orgSlug, projectSlug, imageId);
const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
return `${this.cdnBaseUrl}/${filePath}`;
}
async getPresignedUploadUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
expirySeconds: number,
contentType: string,
): Promise<string> {
this.validateFilePath(orgId, projectId, category, filename);
this.validatePath(orgSlug, projectSlug, imageId);
if (!contentType || contentType.trim().length === 0) {
throw new Error('Content type is required for presigned upload URL');
}
const filePath = this.getFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
return await this.client.presignedPutObject(this.bucketName, filePath, expirySeconds);
}
async getPresignedDownloadUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
expirySeconds: number = 86400, // 24 hours default
): Promise<string> {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
this.validatePath(orgSlug, projectSlug, imageId);
const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
const presignedUrl = await this.client.presignedGetObject(
this.bucketName,
filePath,
expirySeconds,
);
// Replace internal Docker hostname with public URL if configured
if (this.publicUrl) {
// Replace internal Docker hostname with CDN URL if configured
if (this.cdnBaseUrl) {
// Access protected properties via type assertion for URL replacement
const client = this.client as unknown as { host: string; port: number; protocol: string };
const clientEndpoint = client.host + (client.port ? `:${client.port}` : '');
return presignedUrl.replace(`${client.protocol}//${clientEndpoint}`, this.publicUrl);
return presignedUrl.replace(`${client.protocol}//${clientEndpoint}/${this.bucketName}`, this.cdnBaseUrl);
}
return presignedUrl;
}
async listProjectFiles(
orgId: string,
projectId: string,
category?: 'uploads' | 'generated' | 'references',
): Promise<FileMetadata[]> {
const prefix = category ? `${orgId}/${projectId}/${category}/` : `${orgId}/${projectId}/`;
const files: FileMetadata[] = [];
return new Promise((resolve, reject) => {
const stream = this.client.listObjects(this.bucketName, prefix, true);
stream.on('data', async (obj) => {
try {
if (!obj.name) return;
const metadata = await this.client.statObject(this.bucketName, obj.name);
const pathParts = obj.name.split('/');
const filename = pathParts[pathParts.length - 1];
const categoryFromPath = pathParts[2] as 'uploads' | 'generated' | 'references';
if (!filename || !categoryFromPath) {
return;
}
files.push({
filename,
contentType: metadata.metaData?.['content-type'] || 'application/octet-stream',
size: obj.size || 0,
lastModified: obj.lastModified || new Date(),
path: obj.name,
});
} catch (error) {}
});
stream.on('end', () => resolve(files));
stream.on('error', reject);
});
}
parseKey(key: string): {
orgId: string;
projectId: string;
category: 'uploads' | 'generated' | 'references';
filename: string;
} | null {
try {
const match = key.match(
/^banatie\/([^/]+)\/([^/]+)\/(uploads|generated|references)\/[^/]+\/(.+)$/,
);
if (!match) {
return null;
}
const [, orgId, projectId, category, filename] = match;
if (!orgId || !projectId || !category || !filename) {
return null;
}
return {
orgId,
projectId,
category: category as 'uploads' | 'generated' | 'references',
filename,
};
} catch {
return null;
}
}
async fileExists(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
): Promise<boolean> {
try {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
await this.client.statObject(this.bucketName, filePath);
return true;
} catch (error) {
return false;
}
}
/**
* List files in a project's img folder
*/
async listFiles(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
orgSlug: string,
projectSlug: string,
prefix?: string,
): Promise<FileMetadata[]> {
this.validateFilePath(orgId, projectId, category, 'dummy.txt');
this.validatePath(orgSlug, projectSlug, 'dummy');
const basePath = `${orgId}/${projectId}/${category}/`;
const basePath = `${orgSlug}/${projectSlug}/img/`;
const searchPrefix = prefix ? `${basePath}${prefix}` : basePath;
const files: FileMetadata[] = [];
@ -449,31 +288,22 @@ export class MinioStorageService implements StorageService {
return new Promise((resolve, reject) => {
const stream = this.client.listObjects(this.bucketName, searchPrefix, true);
stream.on('data', (obj) => {
stream.on('data', async (obj) => {
if (!obj.name || !obj.size) return;
try {
const pathParts = obj.name.split('/');
const filename = pathParts[pathParts.length - 1];
const imageId = pathParts[pathParts.length - 1];
if (!filename) return;
if (!imageId) return;
// Infer content type from file extension (more efficient than statObject)
const ext = filename.toLowerCase().split('.').pop();
const contentType =
{
png: 'image/png',
jpg: 'image/jpeg',
jpeg: 'image/jpeg',
gif: 'image/gif',
webp: 'image/webp',
svg: 'image/svg+xml',
}[ext || ''] || 'application/octet-stream';
// Get metadata to find content type (no extension in filename)
const metadata = await this.client.statObject(this.bucketName, obj.name);
files.push({
filename,
filename: imageId!,
size: obj.size,
contentType,
contentType: metadata.metaData?.['content-type'] || 'application/octet-stream',
lastModified: obj.lastModified || new Date(),
etag: obj.etag || '',
path: obj.name,
@ -493,4 +323,52 @@ export class MinioStorageService implements StorageService {
});
});
}
/**
* Parse storage key to extract components
* Format: {orgSlug}/{projectSlug}/img/{imageId}
*/
parseKey(key: string): {
orgSlug: string;
projectSlug: string;
imageId: string;
} | null {
try {
// Match: orgSlug/projectSlug/img/imageId
const match = key.match(/^([^/]+)\/([^/]+)\/img\/([^/]+)$/);
if (!match) {
return null;
}
const [, orgSlug, projectSlug, imageId] = match;
if (!orgSlug || !projectSlug || !imageId) {
return null;
}
return {
orgSlug,
projectSlug,
imageId,
};
} catch {
return null;
}
}
async fileExists(
orgSlug: string,
projectSlug: string,
imageId: string,
): Promise<boolean> {
try {
this.validatePath(orgSlug, projectSlug, imageId);
const filePath = this.getFilePath(orgSlug, projectSlug, imageId);
await this.client.statObject(this.bucketName, filePath);
return true;
} catch (error) {
return false;
}
}
}

View File

@ -11,11 +11,13 @@ export interface FileMetadata {
export interface UploadResult {
success: boolean;
filename: string;
filename: string; // UUID (same as image.id)
path: string;
url: string; // API URL for accessing the file
url: string; // CDN URL for accessing the file
size: number;
contentType: string;
originalFilename?: string; // User's original filename
fileExtension?: string; // Original extension (png, jpg, etc.)
error?: string;
}
@ -32,137 +34,125 @@ export interface StorageService {
/**
* Upload a file to storage
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category (uploads, generated, references)
* @param filename Original filename
* Path format: {orgSlug}/{projectSlug}/img/{imageId}
*
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param imageId UUID for the file (same as image.id in DB)
* @param buffer File buffer
* @param contentType MIME type
* @param originalFilename Original filename from user (for metadata)
*/
uploadFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
buffer: Buffer,
contentType: string,
originalFilename?: string,
): Promise<UploadResult>;
/**
* Download a file from storage
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category
* @param filename Filename to download
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param imageId UUID filename
*/
downloadFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
): Promise<Buffer>;
/**
* Stream a file from storage (memory efficient)
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category
* @param filename Filename to stream
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param imageId UUID filename
*/
streamFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
): Promise<Readable>;
/**
* Generate a presigned URL for downloading a file
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category
* @param filename Filename
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param imageId UUID filename
* @param expirySeconds URL expiry time in seconds
*/
getPresignedDownloadUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
expirySeconds: number,
): Promise<string>;
/**
* Generate a presigned URL for uploading a file
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category
* @param filename Filename
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param imageId UUID filename
* @param expirySeconds URL expiry time in seconds
* @param contentType MIME type
*/
getPresignedUploadUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
expirySeconds: number,
contentType: string,
): Promise<string>;
/**
* List files in a specific path
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category
* List files in a project's img folder
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param prefix Optional prefix to filter files
*/
listFiles(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
orgSlug: string,
projectSlug: string,
prefix?: string,
): Promise<FileMetadata[]>;
/**
* Delete a file from storage
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category
* @param filename Filename to delete
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param imageId UUID filename to delete
*/
deleteFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
): Promise<void>;
/**
* Check if a file exists
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category
* @param filename Filename to check
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param imageId UUID filename to check
*/
fileExists(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
): Promise<boolean>;
/**
* Get the public URL for a file
* @param orgId Organization ID
* @param projectId Project ID
* @param category File category
* @param filename Filename
* Get the public CDN URL for a file
* Returns: https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{imageId}
*
* @param orgSlug Organization slug
* @param projectSlug Project slug
* @param imageId UUID filename
*/
getPublicUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string,
orgSlug: string,
projectSlug: string,
imageId: string,
): string;
}
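Taken together, the reshaped interface is exercised like this in the upload and generation routes elsewhere in this diff. A condensed sketch; the `storeImage` wrapper and its parameter list are illustrative, not part of the change:
```typescript
import { randomUUID } from 'crypto';
import { StorageFactory } from '../services/StorageFactory';

// Condensed from the upload route in this diff: the caller mints the UUID up front,
// stores the object under {orgSlug}/{projectSlug}/img/{imageId}, and records the CDN URL.
async function storeImage(
  orgSlug: string,
  projectSlug: string,
  buffer: Buffer,
  mimeType: string,
  originalFilename: string,
) {
  const storageService = await StorageFactory.getInstance();
  const imageId = randomUUID(); // same value later used as image.id in the database

  const uploadResult = await storageService.uploadFile(
    orgSlug,
    projectSlug,
    imageId,
    buffer,
    mimeType,
    originalFilename, // kept only as metadata; the stored object has no extension
  );

  // uploadResult.url is the public CDN URL:
  // https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{imageId}
  return { imageId, url: uploadResult.url };
}
```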

View File

@ -148,13 +148,16 @@ export class GenerationService {
.where(eq(generations.id, generation.id));
}
// Generate imageId (UUID) upfront - this will be the filename in storage
const imageId = randomUUID();
const genResult = await this.imageGenService.generateImage({
prompt: usedPrompt, // Use the prompt that was stored (enhanced or original)
filename: `gen_${generation.id}`,
imageId, // UUID used as filename: {orgSlug}/{projectSlug}/img/{imageId}
referenceImages: referenceImageBuffers,
aspectRatio: params.aspectRatio || GENERATION_LIMITS.DEFAULT_ASPECT_RATIO,
orgId: params.organizationSlug, // Use slug for storage path
projectId: params.projectSlug, // Use slug for storage path
orgSlug: params.organizationSlug,
projectSlug: params.projectSlug,
meta: params.meta || {},
});
@ -172,13 +175,14 @@ export class GenerationService {
const fileHash = null;
const imageRecord = await this.imageService.create({
id: imageId, // Use the same UUID for image record
projectId: params.projectId,
flowId: finalFlowId,
generationId: generation.id,
apiKeyId: params.apiKeyId,
storageKey,
storageUrl: genResult.url!,
mimeType: 'image/jpeg',
mimeType: genResult.generatedImageData?.mimeType || 'image/png',
fileSize: genResult.size || 0,
fileHash,
source: 'generated',
@ -186,6 +190,8 @@ export class GenerationService {
meta: params.meta || {},
width: genResult.generatedImageData?.width ?? null,
height: genResult.generatedImageData?.height ?? null,
originalFilename: `generated-image.${genResult.generatedImageData?.fileExtension || 'png'}`,
fileExtension: genResult.generatedImageData?.fileExtension || 'png',
});
// Reassign project alias if provided (override behavior per Section 5.2)
@ -272,27 +278,22 @@ export class GenerationService {
throw new Error(`${ERROR_MESSAGES.ALIAS_NOT_FOUND}: ${alias}`);
}
// Parse storage key: {orgSlug}/{projectSlug}/img/{imageId}
const parts = resolution.image.storageKey.split('/');
if (parts.length < 4) {
if (parts.length < 4 || parts[2] !== 'img') {
throw new Error(`Invalid storage key format: ${resolution.image.storageKey}`);
}
const orgId = parts[0]!;
const projId = parts[1]!;
const category = parts[2]! as 'uploads' | 'generated' | 'references';
const filename = parts.slice(3).join('/');
const orgSlug = parts[0]!;
const projectSlug = parts[1]!;
const imageId = parts[3]!;
const buffer = await storageService.downloadFile(
orgId,
projId,
category,
filename
);
const buffer = await storageService.downloadFile(orgSlug, projectSlug, imageId);
buffers.push({
buffer,
mimetype: resolution.image.mimeType,
originalname: filename,
originalname: resolution.image.originalFilename || imageId,
});
metadata.push({
@ -517,14 +518,18 @@ export class GenerationService {
// Get slugs for storage paths
const { orgSlug, projectSlug } = await this.getSlugs(generation.projectId);
// Use the existing output image ID as the imageId for storage
// This ensures the file is overwritten at the same path
const imageId = generation.outputImageId;
// Use EXACT same parameters as original (no overrides)
const genResult = await this.imageGenService.generateImage({
prompt: generation.prompt,
filename: `gen_${id}`,
imageId, // Same UUID to overwrite existing file
referenceImages: [], // TODO: Re-resolve referenced images if needed
aspectRatio: generation.aspectRatio || GENERATION_LIMITS.DEFAULT_ASPECT_RATIO,
orgId: orgSlug,
projectId: projectSlug,
orgSlug,
projectSlug,
meta: generation.meta as Record<string, unknown> || {},
});
@ -634,14 +639,17 @@ export class GenerationService {
// Get slugs for storage paths
const { orgSlug, projectSlug } = await this.getSlugs(generation.projectId);
// Use the existing output image ID as the imageId for storage
const imageId = generation.outputImageId!;
// Regenerate image
const genResult = await this.imageGenService.generateImage({
prompt: promptToUse,
filename: `gen_${id}`,
imageId, // Same UUID to overwrite existing file
referenceImages: [],
aspectRatio: aspectRatioToUse,
orgId: orgSlug,
projectId: projectSlug,
orgSlug,
projectSlug,
meta: updates.meta || generation.meta || {},
});

View File

@ -154,16 +154,16 @@ export class ImageService {
try {
// 1. Delete physical file from MinIO storage
// Storage key format: {orgSlug}/{projectSlug}/img/{imageId}
const storageService = await StorageFactory.getInstance();
const storageParts = image.storageKey.split('/');
if (storageParts.length >= 4) {
const orgId = storageParts[0]!;
const projectId = storageParts[1]!;
const category = storageParts[2]! as 'uploads' | 'generated' | 'references';
const filename = storageParts.slice(3).join('/');
if (storageParts.length >= 4 && storageParts[2] === 'img') {
const orgSlug = storageParts[0]!;
const projectSlug = storageParts[1]!;
const imageId = storageParts[3]!;
await storageService.deleteFile(orgId, projectId, category, filename);
await storageService.deleteFile(orgSlug, projectSlug, imageId);
}
// 2. Cascade: Set outputImageId = NULL in related generations

View File

@ -57,11 +57,11 @@ export interface GenerateImageRequestWithFiles extends Request {
// Image generation service types
export interface ImageGenerationOptions {
prompt: string;
filename: string;
imageId: string; // UUID used as filename in storage (same as image.id in DB)
referenceImages?: ReferenceImage[];
aspectRatio?: string;
orgId?: string;
projectId?: string;
orgSlug?: string;
projectSlug?: string;
userId?: string;
meta?: {
tags?: string[];
@ -91,9 +91,9 @@ export interface GeminiParams {
export interface ImageGenerationResult {
success: boolean;
filename?: string;
imageId?: string; // UUID filename (same as image.id in DB)
filepath?: string;
url?: string; // API URL for accessing the image
url?: string; // CDN URL for accessing the image
size?: number; // File size in bytes
description?: string;
model: string;

View File

@ -0,0 +1,158 @@
# CDN URL Architecture Fix - Cloudflare Configuration
This document describes the Cloudflare configuration for the new CDN URL architecture.
## Domain Structure
| Domain | Purpose | Cloudflare Proxy |
|--------|---------|------------------|
| cdn.banatie.app | CDN for images | Yes (orange cloud) |
| api.banatie.app | API server | Yes (orange cloud) |
| banatie.app | Landing page | Yes (orange cloud) |
## Cache Rules
### Rule 1: Cache UUID Images (High Priority)
Cache static images with UUID filenames for maximum performance.
**When:** Custom filter expression
```
(http.host eq "cdn.banatie.app" and http.request.uri.path matches "^/[^/]+/[^/]+/img/[0-9a-f-]{36}$")
```
**Then:**
- Cache eligibility: Eligible for cache
- Edge TTL: Override origin, 7 days
- Browser TTL: Override origin, 1 year (31536000 seconds)
- Cache Key: Include query string = No
### Rule 2: Bypass Cache for Aliases
Aliases need dynamic resolution, bypass cache.
**When:** Custom filter expression
```
(http.host eq "cdn.banatie.app" and http.request.uri.path matches "^/[^/]+/[^/]+/img/@")
```
**Then:**
- Cache eligibility: Bypass cache
### Rule 3: Bypass Cache for Live URLs
Live URLs require dynamic generation, so bypass the cache.
**When:** Custom filter expression
```
(http.host eq "cdn.banatie.app" and http.request.uri.path matches "^/[^/]+/[^/]+/live/")
```
**Then:**
- Cache eligibility: Bypass cache
## Page Rules (Alternative)
If not using Cache Rules, use Page Rules:
### Page Rule 1: Cache UUID Images
- URL: `cdn.banatie.app/*/img/*`
- Cache Level: Cache Everything
- Edge Cache TTL: 7 days
- Browser Cache TTL: 1 year
### Page Rule 2: Bypass Aliases and Live
- URL: `cdn.banatie.app/*/img/@*`
- Cache Level: Bypass
- URL: `cdn.banatie.app/*/live/*`
- Cache Level: Bypass
Note: Page Rules are evaluated top to bottom and only the first match applies, so these bypass rules must sit above the cache rule; otherwise `cdn.banatie.app/*/img/*` would catch alias URLs first.
## DNS Configuration
Ensure DNS records point to your VPS:
```
cdn.banatie.app A YOUR_VPS_IP Proxied (orange cloud)
```
## SSL/TLS Configuration
- SSL Mode: Full (strict)
- Always Use HTTPS: On
- Automatic HTTPS Rewrites: On
- Minimum TLS Version: TLS 1.2
## Performance Settings
- Auto Minify: CSS, JavaScript (not HTML for API responses)
- Brotli: On
- Early Hints: On
- Rocket Loader: Off (may break API responses)
## Security Settings
- Security Level: Medium
- Challenge Passage: 30 minutes
- Browser Integrity Check: On
## Rate Limiting (Optional)
Create a rate-limiting rule to prevent abuse of live URLs:
**Rule Name:** Live URL Rate Limit
**When:**
```
(http.host eq "cdn.banatie.app" and http.request.uri.path matches "^/[^/]+/[^/]+/live/")
```
**Then:**
- Rate limit: 10 requests per minute per IP
- Action: Block for 1 minute
## Verification
After configuration:
1. **Test UUID caching:**
```bash
curl -I "https://cdn.banatie.app/org/proj/img/uuid-here"
# Check for: cf-cache-status: HIT (on second request)
```
2. **Test alias bypass:**
```bash
curl -I "https://cdn.banatie.app/org/proj/img/@alias"
# Check for: cf-cache-status: DYNAMIC or BYPASS
```
3. **Test live URL bypass:**
```bash
curl -I "https://cdn.banatie.app/org/proj/live/scope?prompt=test"
# Check for: cf-cache-status: DYNAMIC or BYPASS
```
## Troubleshooting
### Images not caching
- Verify the URL matches the UUID pattern (a 36-character UUID)
- Check Cache Rules order (UUID rule should be first)
- Purge cache and retry
### Alias/Live URLs being cached
- Verify bypass rules are active
- Check rule order (bypass rules should run before catch-all)
- Development mode may disable caching
### Slow first requests
- Expected behavior for cache MISS
- Subsequent requests from the same edge location should be a HIT
- Consider using Cache Reserve for longer edge retention
## Notes
- UUID pattern ensures only static, immutable images are cached at edge
- Aliases and live URLs are always fresh from origin
- 1-year browser cache is safe because UUID = immutable content
- Cloudflare caches at the edge; browsers cache locally
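For context, the header side of this contract lives in the API (the images route in this diff). A condensed Express sketch of the caching and conditional-request behavior that makes the 1-year immutable policy safe; the helper name is illustrative:
```typescript
import type { Request, Response } from 'express';

// Condensed from the /api/images/:orgSlug/:projectSlug/img/:imageId handler in this diff.
// Because the UUID never changes for a given image, a 1-year immutable cache and a
// UUID-based ETag are safe; conditional requests short-circuit with 304.
function applyImageCacheHeaders(req: Request, res: Response, imageId: string): boolean {
  res.setHeader('Cache-Control', 'public, max-age=31536000, immutable'); // 1 year + immutable
  res.setHeader('ETag', `"${imageId}"`); // UUID as ETag

  if (req.headers['if-none-match'] === `"${imageId}"`) {
    res.status(304).end(); // Not Modified - the browser already has this exact image
    return true; // handled; caller should not stream the body
  }
  return false; // caller streams the image body
}
```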

docs/url-fix-vps-site.md (new file, +152 lines)
View File

@ -0,0 +1,152 @@
# CDN URL Architecture Fix - VPS Deployment
This document describes the changes needed on VPS to support the new CDN URL architecture.
## Problem
The previous URL structure used presigned URLs with a 24-hour expiry, which doesn't work for permanently embedding images on websites.
## Solution
New URL structure with direct CDN access:
- `cdn.banatie.app/{org}/{proj}/img/{imageId}` - Direct MinIO access for static images
- `cdn.banatie.app/{org}/{proj}/img/@{alias}` - API-mediated alias resolution
- `cdn.banatie.app/{org}/{proj}/live/{scope}?prompt=...` - API-mediated live generation
## Storage Path Format
```
Old: {orgSlug}/{projectSlug}/{category}/{timestamp-filename.ext}
New: {orgSlug}/{projectSlug}/img/{imageId}
```
Here `imageId` is the image's UUID (the same value as `images.id` in the database).
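As a reference, a minimal TypeScript sketch of building and parsing keys in this format, mirroring the `getFilePath` and `parseKey` helpers added to `MinioStorageService` (the function names here are illustrative):
```typescript
// Build the object key for an image: {orgSlug}/{projectSlug}/img/{imageId}
function buildStorageKey(orgSlug: string, projectSlug: string, imageId: string): string {
  return `${orgSlug}/${projectSlug}/img/${imageId}`;
}

// Parse a storage key back into its components; returns null if it doesn't match the format.
function parseStorageKey(
  key: string,
): { orgSlug: string; projectSlug: string; imageId: string } | null {
  const match = key.match(/^([^/]+)\/([^/]+)\/img\/([^/]+)$/);
  if (!match) return null;
  const [, orgSlug, projectSlug, imageId] = match;
  return { orgSlug: orgSlug!, projectSlug: projectSlug!, imageId: imageId! };
}

// Example: buildStorageKey('acme', 'website', '1f0c2a36-...') === 'acme/website/img/1f0c2a36-...'
```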
## VPS Deployment Steps
### 1. Update Caddy Configuration
Add the following routing rules to your Caddy config:
```caddyfile
# CDN Domain
cdn.banatie.app {
    # UUID pattern - direct to MinIO (no extension in URL)
    @uuid path_regexp uuid ^/([^/]+)/([^/]+)/img/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})$
    handle @uuid {
        # Rewrite to the bucket path before proxying
        rewrite * /banatie{uri}
        reverse_proxy banatie-minio:9000 {
            header_up Host cdn.banatie.app
        }
    }

    # Alias pattern (@name) - proxy to API
    @alias path_regexp alias ^/([^/]+)/([^/]+)/img/@(.+)$
    handle @alias {
        rewrite * /cdn{uri}
        reverse_proxy banatie-api:3000
    }

    # Live URL pattern - proxy to API
    @live path_regexp live ^/([^/]+)/([^/]+)/live/(.+)$
    handle @live {
        rewrite * /cdn{uri}
        reverse_proxy banatie-api:3000
    }

    # Fallback for other patterns
    handle {
        rewrite * /banatie{uri}
        reverse_proxy banatie-minio:9000 {
            header_up Host cdn.banatie.app
        }
    }
}
```
### 2. Update Environment Variables
Add to `/opt/banatie/.env`:
```env
CDN_BASE_URL=https://cdn.banatie.app
```
### 3. Reset Database and MinIO Storage
Since this is a breaking change to the storage path format:
```bash
# Stop services
cd /opt/banatie
docker compose down
# Clean database (WARNING: deletes all data)
rm -rf /opt/banatie/data/postgres/*
# Clean MinIO storage (WARNING: deletes all files)
rm -rf /opt/banatie/data/minio/drive{1,2,3,4}/*
# Rebuild and start services
docker compose up -d --build
```
### 4. Run Storage Initialization
After rebuild, the `banatie-storage-init` container will:
1. Create the `banatie` bucket
2. Configure service user with readwrite access
3. Enable public anonymous download access for CDN
Verify public access is enabled:
```bash
docker exec banatie-minio mc anonymous get local/banatie
# Should show: Access permission for `local/banatie` is `download`
```
## Verification
### Test Direct UUID Access
```bash
# After generating an image, get its UUID from database or API response
# Then test direct CDN access:
curl -I "https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{uuid}"
# Expected: HTTP 200 with Content-Type: image/png (or similar)
```
### Test Alias Resolution
```bash
# Assign an alias to an image via API, then test:
curl -I "https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/@hero"
# Expected: HTTP 200 (API resolves alias and streams image)
```
### Test Live URL Generation
```bash
curl -I "https://cdn.banatie.app/{orgSlug}/{projectSlug}/live/test?prompt=mountain"
# Expected: HTTP 200 (generates or returns cached image)
```
## Rollback
If issues occur:
1. Revert code changes
2. Rebuild API container
3. Regenerate any images (old storage paths won't work)
## Notes
- `filename = image.id` (UUID) ensures consistent identification across DB, storage, and URLs
- Files are stored without extension; Content-Type is served from MinIO metadata
- Cloudflare caching can be enabled for UUID patterns (see url-fix-cloudflare-site.md)
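Since the stored object name carries no extension, consumers read the MIME type from object metadata rather than from the filename. A small sketch using the MinIO client, mirroring how `listFiles` in this diff reads `content-type`; the bucket name and helper are placeholders:
```typescript
import { Client as MinioClient } from 'minio';

// The Content-Type written at upload time is the source of truth for an image's MIME type,
// because the object key ({orgSlug}/{projectSlug}/img/{imageId}) has no file extension.
async function getImageContentType(
  client: MinioClient,
  bucket: string, // e.g. 'banatie'
  storageKey: string, // {orgSlug}/{projectSlug}/img/{imageId}
): Promise<string> {
  const stat = await client.statObject(bucket, storageKey);
  return stat.metaData?.['content-type'] || 'application/octet-stream';
}
```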

View File

@ -31,13 +31,9 @@ MINIO_BUCKET_NAME=banatie
MINIO_USE_SSL=false
STORAGE_TYPE=minio
# Public URL for CDN access (used in API responses)
MINIO_PUBLIC_URL=https://cdn.banatie.app
# Use direct CDN URLs instead of API proxy (recommended for production)
# Set to 'false' to force API URLs even when MINIO_PUBLIC_URL is configured
# Default: true (CDN enabled when MINIO_PUBLIC_URL is present)
USE_DIRECT_CDN=true
# CDN Base URL for image access
# Format: https://cdn.banatie.app/{orgSlug}/{projectSlug}/img/{imageId}
CDN_BASE_URL=https://cdn.banatie.app
# ----------------------------------------
# API Configuration
@ -57,6 +53,6 @@ CORS_ORIGIN=https://banatie.app,https://api.banatie.app
# ----------------------------------------
# Multi-tenancy Defaults
# ----------------------------------------
DEFAULT_ORG_ID=default
DEFAULT_PROJECT_ID=main
DEFAULT_ORG_SLUG=default
DEFAULT_PROJECT_SLUG=main
DEFAULT_USER_ID=system

View File

@ -129,6 +129,9 @@ services:
{"Rules":[{"ID":"temp-cleanup","Status":"Enabled","Filter":{"Prefix":"temp/"},"Expiration":{"Days":7}}]}
LCEOF
mc ilm import storage/banatie < /tmp/lifecycle.json || echo 'Lifecycle policy may already exist'
# Enable public read access for CDN
mc anonymous set download storage/banatie || echo 'Anonymous access may already be set'
echo 'Public read access enabled for CDN'
echo '=== Storage Initialization Completed ==='
exit 0
restart: "no"

View File

@ -46,7 +46,8 @@ export const images = pgTable(
pendingFlowId: text('pending_flow_id'), // Temporary UUID for lazy flow pattern
apiKeyId: uuid('api_key_id').references(() => apiKeys.id, { onDelete: 'set null' }),
// Storage (MinIO path format: orgSlug/projectSlug/category/YYYY-MM/filename.ext)
// Storage (MinIO path format: orgSlug/projectSlug/img/{imageId})
// Note: imageId = this record's UUID, filename has no extension
storageKey: varchar('storage_key', { length: 500 }).notNull().unique(),
storageUrl: text('storage_url').notNull(),
@ -54,6 +55,8 @@ export const images = pgTable(
mimeType: varchar('mime_type', { length: 100 }).notNull(),
fileSize: integer('file_size').notNull(),
fileHash: varchar('file_hash', { length: 64 }), // SHA-256 for deduplication
originalFilename: varchar('original_filename', { length: 255 }), // User's original filename
fileExtension: varchar('file_extension', { length: 20 }), // Original extension (png, jpg, etc.)
// Dimensions
width: integer('width'),

View File

@ -42,6 +42,37 @@ async function main() {
log.detail('Source', response.source);
});
// Test 1.5: Validate storage path format
await runTest('Storage path format validation', async () => {
// Get uploaded image
const result = await api(`${endpoints.images}/${testContext.uploadedImageId}`);
const image = result.data.data;
// Validate storageKey format: {orgSlug}/{projectSlug}/img/{uuid}
const keyRegex = /^[\w-]+\/[\w-]+\/img\/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/;
if (!keyRegex.test(image.storageKey)) {
throw new Error(`Invalid storageKey format: ${image.storageKey}. Expected: {org}/{proj}/img/{uuid}`);
}
// Validate storageUrl format: https://.../img/{uuid}
const urlRegex = /^https?:\/\/[^\/]+\/[\w-]+\/[\w-]+\/img\/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/;
if (!urlRegex.test(image.storageUrl)) {
throw new Error(`Invalid storageUrl format: ${image.storageUrl}. Expected: https://.../img/{uuid}`);
}
// Verify imageId matches the UUID in path
const pathParts = image.storageKey.split('/');
const uuidFromPath = pathParts[3];
if (uuidFromPath !== image.id) {
throw new Error(`storageKey UUID (${uuidFromPath}) doesn't match image.id (${image.id})`);
}
log.detail('storageKey', image.storageKey);
log.detail('storageUrl', image.storageUrl);
log.detail('UUID in path = image.id', '✓');
log.detail('Format', '{org}/{proj}/img/{uuid}');
});
// Test 2: Upload image without alias
await runTest('Upload image without alias', async () => {
const fixturePath = join(__dirname, config.fixturesDir, 'test-image.png');

View File

@ -236,11 +236,12 @@ X-API-Key: {{apiKey}}
### Step 6.1: CDN image by path (if implemented)
# @name cdnImage
GET {{base}}/api/v1/cdn/default/test-project/generated/2024-01/test.jpg
# New format: {org}/{project}/img/{uuid}
GET {{base}}/api/v1/cdn/default/test-project/img/00000000-0000-0000-0000-000000000000
X-API-Key: {{apiKey}}
###
# Note: Endpoint structure check only - actual paths depend on storage
# Note: Endpoint structure check only - uses placeholder UUID
### Step 6.2: Health check
# @name healthCheck