fix: service config

Oleg Proskurin 2025-10-05 16:50:09 +07:00
parent 91ba71cc23
commit e2cfd6e27f
13 changed files with 527 additions and 447 deletions

View File

@ -0,0 +1,31 @@
---
description: Start API dev server with automatic error monitoring and cleanup
---
You need to launch the API development server. Follow these steps carefully:
1. **Kill any existing server on port 3000**:
- First, check whether a background shell in Claude Code is already running the dev server; if so, kill it with the KillShell tool
- Then check for any process using port 3000 with `lsof -ti:3000` and kill it with `kill -9 $(lsof -ti:3000)` if found
- This ensures a clean restart regardless of where the server was started
2. **Start the dev server in background**:
- Navigate to the API service directory and start the server: `cd /projects/my-projects/banatie-service/apps/api-service && pnpm dev`
- Run this command in the background using the Bash tool with `run_in_background: true`
- Save the shell ID for monitoring
3. **Monitor the logs**:
- Wait 3-5 seconds for the server to start
- Use the BashOutput tool to check the background process output for errors
- Read the api-dev.log file to verify the server started successfully
- Look for:
- Server startup message (should show "Server running on port 3000")
- Any error messages or stack traces
- Database/MinIO connection status
4. **Report status**:
- Inform the user whether the server started successfully
- Show any errors found in the logs
- Provide the shell ID so the user can monitor it later if needed
CRITICAL: Always kill existing servers before starting a new one to avoid port conflicts.
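
For reference, the same kill-and-restart flow can be scripted directly. A minimal Node/TypeScript sketch (an illustration only, not part of this commit; it assumes a Unix host with `lsof` on PATH and reuses the paths above):

```ts
// restart-dev.ts - sketch of the restart flow described above
import { execSync, spawn } from "child_process";
import { openSync } from "fs";

// Step 1: free port 3000; the command exits non-zero when nothing matches, so swallow that
try {
  execSync("kill -9 $(lsof -ti:3000)", { stdio: "ignore" });
} catch {
  // no process was listening on port 3000
}

// Step 2: start the dev server detached, appending output to api-dev.log
const log = openSync("api-dev.log", "a");
const child = spawn("pnpm", ["dev"], {
  cwd: "/projects/my-projects/banatie-service/apps/api-service",
  detached: true,
  stdio: ["ignore", log, log],
});
child.unref();

// Step 3: the caller can now tail api-dev.log and report the pid
console.log(`Dev server started (pid ${child.pid}); check api-dev.log for startup errors.`);
```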

View File

@ -1,49 +0,0 @@
# Application Configuration
NODE_ENV=development
PORT=3000
LOG_LEVEL=info
API_BASE_URL=http://localhost:3000
# CORS Configuration
CORS_ORIGIN=*
# Database Configuration
DB_HOST=postgres
DB_PORT=5432
DB_NAME=banatie_db
DB_USER=banatie_user
DB_PASSWORD=banatie_secure_password
DATABASE_URL=postgresql://banatie_user:banatie_secure_password@postgres:5432/banatie_db
# MinIO Storage Configuration (SNMD)
MINIO_ROOT_USER=banatie_admin
MINIO_ROOT_PASSWORD=banatie_storage_secure_key_2024
STORAGE_TYPE=minio
MINIO_ENDPOINT=minio:9000
MINIO_ACCESS_KEY=banatie_service
MINIO_SECRET_KEY=banatie_service_key_2024
MINIO_USE_SSL=false
MINIO_BUCKET_NAME=banatie
MINIO_PUBLIC_URL=http://localhost:9000
# AI Service Configuration
GEMINI_API_KEY=AIzaSyBaOt9JMPGKA3811FL-ssf1n5Hh9Jauly8
# File Upload Configuration
MAX_FILE_SIZE=5242880
MAX_FILES=3
# Multi-tenancy Configuration (Production-Ready Names)
DEFAULT_ORG_ID=default
DEFAULT_PROJECT_ID=main
DEFAULT_USER_ID=system
# Presigned URL Configuration
PRESIGNED_URL_EXPIRY=86400 # 24 hours
# Directory Configuration
RESULTS_DIR=/app/results
UPLOADS_DIR=/app/uploads/temp
# Logging Configuration
LOG_LEVEL=info

View File

@ -122,25 +122,36 @@ Key table: `api_keys`
## Environment Configuration
### Root Environment (`.env.docker`)
**Important**: We use TWO `.env` files with different purposes:
- `DATABASE_URL` - PostgreSQL connection string (for Docker: `postgresql://banatie_user:banatie_secure_password@postgres:5432/banatie_db`)
- `MINIO_ROOT_USER` - MinIO admin username
- `MINIO_ROOT_PASSWORD` - MinIO admin password
### Root `.env` (Docker Compose Infrastructure)
### API Service Environment (`apps/api-service/.env`)
Used by Docker Compose services (MinIO, Postgres, API container). Key differences from local:
- `DATABASE_URL=postgresql://banatie_user:banatie_secure_password@postgres:5432/banatie_db` (Docker network hostname)
- `MINIO_ENDPOINT=minio:9000` (Docker network hostname)
- `MINIO_ROOT_USER` and `MINIO_ROOT_PASSWORD` - MinIO admin credentials
- All variables are passed to the app container via the `environment` section of docker-compose.yml
Required environment variables:
### API Service `.env` (Local Development Only)
- `DATABASE_URL` - PostgreSQL connection string (for local dev: `postgresql://banatie_user:banatie_secure_password@localhost:5434/banatie_db`)
Located at `apps/api-service/.env` - used ONLY when running `pnpm dev:api` locally:
- `DATABASE_URL=postgresql://banatie_user:banatie_secure_password@localhost:5434/banatie_db` (port-forwarded)
- `MINIO_ENDPOINT=localhost:9000` (port-forwarded)
- **NOTE**: This file is excluded from Docker builds (see Dockerfile.mono)
### Required Environment Variables
- `DATABASE_URL` - PostgreSQL connection string
- `GEMINI_API_KEY` - Google Gemini API key (required)
- `MINIO_ENDPOINT` - MinIO endpoint (`localhost:9000` for local dev, `minio:9000` for Docker)
- `MINIO_ACCESS_KEY` - MinIO service account key
- `MINIO_SECRET_KEY` - MinIO service account secret
- `MINIO_ENDPOINT` - MinIO endpoint
- `MINIO_ACCESS_KEY` - MinIO service account key (`banatie_service`)
- `MINIO_SECRET_KEY` - MinIO service account secret (`banatie_service_key_2024`)
- `MINIO_BUCKET_NAME` - Storage bucket name (default: `banatie`)
- `MINIO_ROOT_USER` - MinIO admin user (`banatie_admin`)
- `MINIO_ROOT_PASSWORD` - MinIO admin password
- `PORT` - Server port (default: 3000)
- `NODE_ENV` - Environment mode
- `CORS_ORIGIN` - CORS origin setting (default: multiple localhost URLs for frontend apps)
- `CORS_ORIGIN` - CORS origin setting
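
A startup guard for these variables might look like the following sketch (not part of this commit; it simply fails fast when a required value is absent and applies the defaults documented above):

```ts
// env-check.ts - validate required configuration at boot (sketch)
const REQUIRED = [
  "DATABASE_URL",
  "GEMINI_API_KEY",
  "MINIO_ENDPOINT",
  "MINIO_ACCESS_KEY",
  "MINIO_SECRET_KEY",
] as const;

const missing = REQUIRED.filter((name) => !process.env[name]);
if (missing.length > 0) {
  throw new Error(`Missing required environment variables: ${missing.join(", ")}`);
}

// Optional values fall back to the documented defaults
export const config = {
  port: parseInt(process.env["PORT"] || "3000", 10),
  bucketName: process.env["MINIO_BUCKET_NAME"] || "banatie",
};
```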
## Key Dependencies

View File

@ -11,7 +11,11 @@ COPY pnpm-workspace.yaml package.json pnpm-lock.yaml ./
# Copy all workspace packages
COPY packages/ ./packages/
COPY apps/api-service/ ./apps/api-service/
# Copy API service (exclude .env file - it's for local dev only)
COPY apps/api-service/package.json ./apps/api-service/
COPY apps/api-service/tsconfig.json ./apps/api-service/
COPY apps/api-service/src/ ./apps/api-service/src/
# Install all dependencies (workspace-aware)
RUN pnpm install --frozen-lockfile

View File

@ -4,7 +4,7 @@
"description": "Nano Banana Image Generation Service - REST API for AI-powered image generation using Gemini Flash Image model",
"main": "dist/server.js",
"scripts": {
"dev": "tsx --watch src/server.ts",
"dev": "echo 'Logs will be saved to api-dev.log' && tsx --watch src/server.ts 2>&1 | tee api-dev.log",
"start": "node dist/server.js",
"build": "tsc",
"typecheck": "tsc --noEmit",

View File

@ -1,5 +1,5 @@
import express from 'express';
import { ApiKeyService } from '../services/ApiKeyService';
import express from "express";
import { ApiKeyService } from "../services/ApiKeyService";
const router = express.Router();
const apiKeyService = new ApiKeyService();
@ -10,36 +10,42 @@ const apiKeyService = new ApiKeyService();
*
* POST /api/bootstrap/initial-key
*/
router.post('/initial-key', async (req, res) => {
router.post("/initial-key", async (req, res) => {
try {
// Check if any keys already exist
const hasKeys = await apiKeyService.hasAnyKeys();
if (hasKeys) {
console.warn(`[${new Date().toISOString()}] Bootstrap attempt when keys already exist`);
console.warn(
`[${new Date().toISOString()}] Bootstrap attempt when keys already exist`,
);
return res.status(403).json({
error: 'Bootstrap not allowed',
message: 'API keys already exist. Use /api/admin/keys to create new keys.',
error: "Bootstrap not allowed",
message:
"API keys already exist. Use /api/admin/keys to create new keys.",
});
}
// Create first master key
const { key, metadata } = await apiKeyService.createMasterKey('Initial Master Key');
const { key, metadata } =
await apiKeyService.createMasterKey("Initial Master Key");
console.log(`[${new Date().toISOString()}] Initial master key created via bootstrap: ${metadata.id}`);
console.log(
`[${new Date().toISOString()}] Initial master key created via bootstrap: ${metadata.id}`,
);
res.status(201).json({
apiKey: key,
type: metadata.keyType,
name: metadata.name,
expiresAt: metadata.expiresAt,
message: 'IMPORTANT: Save this key securely. You will not see it again!',
message: "IMPORTANT: Save this key securely. You will not see it again!",
});
} catch (error) {
console.error(`[${new Date().toISOString()}] Bootstrap error:`, error);
res.status(500).json({
error: 'Bootstrap failed',
message: 'Failed to create initial API key',
error: "Bootstrap failed",
message: "Failed to create initial API key",
});
}
});

View File

@ -1,6 +1,6 @@
import { Router, Request, Response } from 'express';
import { StorageFactory } from '../services/StorageFactory';
import { asyncHandler } from '../middleware/errorHandler';
import { Router, Request, Response } from "express";
import { StorageFactory } from "../services/StorageFactory";
import { asyncHandler } from "../middleware/errorHandler";
export const imagesRouter = Router();
@ -9,54 +9,55 @@ export const imagesRouter = Router();
* Serves images via presigned URLs (redirect approach)
*/
imagesRouter.get(
'/images/:orgId/:projectId/:category/:filename',
"/images/:orgId/:projectId/:category/:filename",
asyncHandler(async (req: Request, res: Response) => {
const { orgId, projectId, category, filename } = req.params;
// Validate category
if (!['uploads', 'generated', 'references'].includes(category)) {
if (!["uploads", "generated", "references"].includes(category)) {
return res.status(400).json({
success: false,
message: 'Invalid category'
message: "Invalid category",
});
}
const storageService = StorageFactory.getInstance();
const storageService = await StorageFactory.getInstance();
try {
// Check if file exists first (fast check)
const exists = await storageService.fileExists(
orgId,
projectId,
category as 'uploads' | 'generated' | 'references',
filename
category as "uploads" | "generated" | "references",
filename,
);
if (!exists) {
return res.status(404).json({
success: false,
message: 'File not found'
message: "File not found",
});
}
// Determine content type from filename
const ext = filename.toLowerCase().split('.').pop();
const contentType = {
'png': 'image/png',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'gif': 'image/gif',
'webp': 'image/webp',
'svg': 'image/svg+xml'
}[ext || ''] || 'application/octet-stream';
const ext = filename.toLowerCase().split(".").pop();
const contentType =
{
png: "image/png",
jpg: "image/jpeg",
jpeg: "image/jpeg",
gif: "image/gif",
webp: "image/webp",
svg: "image/svg+xml",
}[ext || ""] || "application/octet-stream";
// Set headers for optimal caching and performance
res.setHeader('Content-Type', contentType);
res.setHeader('Cache-Control', 'public, max-age=86400, immutable'); // 24 hours + immutable
res.setHeader('ETag', `"${orgId}-${projectId}-${filename}"`); // Simple ETag
res.setHeader("Content-Type", contentType);
res.setHeader("Cache-Control", "public, max-age=86400, immutable"); // 24 hours + immutable
res.setHeader("ETag", `"${orgId}-${projectId}-${filename}"`); // Simple ETag
// Handle conditional requests (304 Not Modified)
const ifNoneMatch = req.headers['if-none-match'];
const ifNoneMatch = req.headers["if-none-match"];
if (ifNoneMatch === `"${orgId}-${projectId}-${filename}"`) {
return res.status(304).end(); // Not Modified
}
@ -65,32 +66,31 @@ imagesRouter.get(
const fileStream = await storageService.streamFile(
orgId,
projectId,
category as 'uploads' | 'generated' | 'references',
filename
category as "uploads" | "generated" | "references",
filename,
);
// Handle stream errors
fileStream.on('error', (streamError) => {
console.error('Stream error:', streamError);
fileStream.on("error", (streamError) => {
console.error("Stream error:", streamError);
if (!res.headersSent) {
res.status(500).json({
success: false,
message: 'Error streaming file'
message: "Error streaming file",
});
}
});
// Stream the file without loading into memory
fileStream.pipe(res);
} catch (error) {
console.error('Failed to stream file:', error);
console.error("Failed to stream file:", error);
return res.status(404).json({
success: false,
message: 'File not found'
message: "File not found",
});
}
})
}),
);
/**
@ -98,41 +98,40 @@ imagesRouter.get(
* Returns a presigned URL instead of redirecting
*/
imagesRouter.get(
'/images/url/:orgId/:projectId/:category/:filename',
"/images/url/:orgId/:projectId/:category/:filename",
asyncHandler(async (req: Request, res: Response) => {
const { orgId, projectId, category, filename } = req.params;
const { expiry = '3600' } = req.query; // Default 1 hour
const { expiry = "3600" } = req.query; // Default 1 hour
if (!['uploads', 'generated', 'references'].includes(category)) {
if (!["uploads", "generated", "references"].includes(category)) {
return res.status(400).json({
success: false,
message: 'Invalid category'
message: "Invalid category",
});
}
const storageService = StorageFactory.getInstance();
const storageService = await StorageFactory.getInstance();
try {
const presignedUrl = await storageService.getPresignedDownloadUrl(
orgId,
projectId,
category as 'uploads' | 'generated' | 'references',
category as "uploads" | "generated" | "references",
filename,
parseInt(expiry as string, 10)
parseInt(expiry as string, 10),
);
return res.json({
success: true,
url: presignedUrl,
expiresIn: parseInt(expiry as string, 10)
expiresIn: parseInt(expiry as string, 10),
});
} catch (error) {
console.error('Failed to generate presigned URL:', error);
console.error("Failed to generate presigned URL:", error);
return res.status(404).json({
success: false,
message: 'File not found or access denied'
message: "File not found or access denied",
});
}
})
}),
);

View File

@ -1,23 +1,24 @@
import crypto from 'crypto';
import { db } from '../db';
import { apiKeys, type ApiKey, type NewApiKey } from '@banatie/database';
import { eq, and, desc } from 'drizzle-orm';
import crypto from "crypto";
import { db } from "../db";
import { apiKeys, type ApiKey, type NewApiKey } from "@banatie/database";
import { eq, and, desc } from "drizzle-orm";
export class ApiKeyService {
/**
* Generate a new API key
* Format: bnt_{64_hex_chars}
*/
private generateKey(): { fullKey: string; keyHash: string; keyPrefix: string } {
const secret = crypto.randomBytes(32).toString('hex'); // 64 chars
const keyPrefix = 'bnt_';
private generateKey(): {
fullKey: string;
keyHash: string;
keyPrefix: string;
} {
const secret = crypto.randomBytes(32).toString("hex"); // 64 chars
const keyPrefix = "bnt_";
const fullKey = keyPrefix + secret;
// Hash for storage (SHA-256)
const keyHash = crypto
.createHash('sha256')
.update(fullKey)
.digest('hex');
const keyHash = crypto.createHash("sha256").update(fullKey).digest("hex");
return { fullKey, keyHash, keyPrefix };
}
@ -25,21 +26,29 @@ export class ApiKeyService {
/**
* Create a master key (admin access, never expires)
*/
async createMasterKey(name?: string, createdBy?: string): Promise<{ key: string; metadata: ApiKey }> {
async createMasterKey(
name?: string,
createdBy?: string,
): Promise<{ key: string; metadata: ApiKey }> {
const { fullKey, keyHash, keyPrefix } = this.generateKey();
const [newKey] = await db.insert(apiKeys).values({
keyHash,
keyPrefix,
keyType: 'master',
projectId: null,
scopes: ['*'], // Full access
name: name || 'Master Key',
expiresAt: null, // Never expires
createdBy: createdBy || null,
}).returning();
const [newKey] = await db
.insert(apiKeys)
.values({
keyHash,
keyPrefix,
keyType: "master",
projectId: null,
scopes: ["*"], // Full access
name: name || "Master Key",
expiresAt: null, // Never expires
createdBy: createdBy || null,
})
.returning();
console.log(`[${new Date().toISOString()}] Master key created: ${newKey?.id} - ${newKey?.name}`);
console.log(
`[${new Date().toISOString()}] Master key created: ${newKey?.id} - ${newKey?.name}`,
);
return { key: fullKey, metadata: newKey! };
}
@ -52,26 +61,31 @@ export class ApiKeyService {
organizationId?: string,
name?: string,
createdBy?: string,
expiresInDays: number = 90
expiresInDays: number = 90,
): Promise<{ key: string; metadata: ApiKey }> {
const { fullKey, keyHash, keyPrefix } = this.generateKey();
const expiresAt = new Date();
expiresAt.setDate(expiresAt.getDate() + expiresInDays);
const [newKey] = await db.insert(apiKeys).values({
keyHash,
keyPrefix,
keyType: 'project',
projectId,
organizationId: organizationId || null,
scopes: ['generate', 'read'],
name: name || `Project Key - ${projectId}`,
expiresAt,
createdBy: createdBy || null,
}).returning();
const [newKey] = await db
.insert(apiKeys)
.values({
keyHash,
keyPrefix,
keyType: "project",
projectId,
organizationId: organizationId || null,
scopes: ["generate", "read"],
name: name || `Project Key - ${projectId}`,
expiresAt,
createdBy: createdBy || null,
})
.returning();
console.log(`[${new Date().toISOString()}] Project key created: ${newKey?.id} - ${projectId} - expires: ${expiresAt.toISOString()}`);
console.log(
`[${new Date().toISOString()}] Project key created: ${newKey?.id} - ${projectId} - expires: ${expiresAt.toISOString()}`,
);
return { key: fullKey, metadata: newKey! };
}
@ -81,36 +95,35 @@ export class ApiKeyService {
* Returns null if invalid/expired/revoked
*/
async validateKey(providedKey: string): Promise<ApiKey | null> {
if (!providedKey || !providedKey.startsWith('bnt_')) {
if (!providedKey || !providedKey.startsWith("bnt_")) {
return null;
}
// Hash the provided key
const keyHash = crypto
.createHash('sha256')
.createHash("sha256")
.update(providedKey)
.digest('hex');
.digest("hex");
// Find in database
const [key] = await db
.select()
.from(apiKeys)
.where(
and(
eq(apiKeys.keyHash, keyHash),
eq(apiKeys.isActive, true)
)
)
.where(and(eq(apiKeys.keyHash, keyHash), eq(apiKeys.isActive, true)))
.limit(1);
if (!key) {
console.warn(`[${new Date().toISOString()}] Invalid API key attempt: ${providedKey.substring(0, 10)}...`);
console.warn(
`[${new Date().toISOString()}] Invalid API key attempt: ${providedKey.substring(0, 10)}...`,
);
return null;
}
// Check expiration
if (key.expiresAt && key.expiresAt < new Date()) {
console.warn(`[${new Date().toISOString()}] Expired API key used: ${key.id} - expired: ${key.expiresAt.toISOString()}`);
console.warn(
`[${new Date().toISOString()}] Expired API key used: ${key.id} - expired: ${key.expiresAt.toISOString()}`,
);
return null;
}
@ -119,7 +132,12 @@ export class ApiKeyService {
.set({ lastUsedAt: new Date() })
.where(eq(apiKeys.id, key.id))
.execute()
.catch(err => console.error(`[${new Date().toISOString()}] Failed to update lastUsedAt:`, err));
.catch((err) =>
console.error(
`[${new Date().toISOString()}] Failed to update lastUsedAt:`,
err,
),
);
return key;
}
@ -146,20 +164,14 @@ export class ApiKeyService {
* List all keys (for admin)
*/
async listKeys(): Promise<ApiKey[]> {
return db
.select()
.from(apiKeys)
.orderBy(desc(apiKeys.createdAt));
return db.select().from(apiKeys).orderBy(desc(apiKeys.createdAt));
}
/**
* Check if any keys exist (for bootstrap)
*/
async hasAnyKeys(): Promise<boolean> {
const keys = await db
.select({ id: apiKeys.id })
.from(apiKeys)
.limit(1);
const keys = await db.select({ id: apiKeys.id }).from(apiKeys).limit(1);
return keys.length > 0;
}

View File

@ -1,19 +1,17 @@
import { GoogleGenAI } from "@google/genai";
// eslint-disable-next-line @typescript-eslint/no-var-requires
const mime = require("mime") as any;
import path from "path";
import {
ImageGenerationOptions,
ImageGenerationResult,
ReferenceImage,
GeneratedImageData,
} from "../types/api";
import { StorageFactory } from "./StorageFactory";
import { UploadResult } from "./StorageService";
export class ImageGenService {
private ai: GoogleGenAI;
private primaryModel = "gemini-2.5-flash-image-preview";
private fallbackModel = "imagen-4.0-generate-001";
constructor(apiKey: string) {
if (!apiKey) {
@ -24,182 +22,168 @@ export class ImageGenService {
/**
* Generate an image from text prompt with optional reference images
* This method separates image generation from storage for clear error handling
*/
async generateImage(
options: ImageGenerationOptions,
): Promise<ImageGenerationResult> {
const { prompt, filename, referenceImages, orgId, projectId, userId } =
options;
const timestamp = new Date().toISOString();
const { prompt, filename, referenceImages, orgId, projectId } = options;
// Use default values if not provided
const finalOrgId = orgId || process.env["DEFAULT_ORG_ID"] || "default";
const finalProjectId =
projectId || process.env["DEFAULT_PROJECT_ID"] || "main";
const finalUserId = userId || process.env["DEFAULT_USER_ID"] || "system";
// Step 1: Generate image from Gemini AI
let generatedData: GeneratedImageData;
try {
const result = await this.tryGeneration({
model: this.primaryModel,
config: { responseModalities: ["IMAGE", "TEXT"] },
prompt,
filename,
orgId: finalOrgId,
projectId: finalProjectId,
userId: finalUserId,
...(referenceImages && { referenceImages }),
modelName: "Primary Model",
});
if (result.success) {
return result;
}
return await this.tryGeneration({
model: this.fallbackModel,
config: { responseModalities: ["IMAGE"] },
prompt,
filename: `${filename}_fallback`,
orgId: finalOrgId,
projectId: finalProjectId,
userId: finalUserId,
...(referenceImages && { referenceImages }),
modelName: "Fallback Model",
});
generatedData = await this.generateImageWithAI(prompt, referenceImages);
} catch (error) {
// Generation failed - return explicit error
return {
success: false,
model: "none",
model: this.primaryModel,
error:
error instanceof Error ? error.message : "Unknown error occurred",
error instanceof Error ? error.message : "Image generation failed",
errorType: "generation",
};
}
// Step 2: Save generated image to storage
try {
const finalFilename = `${filename}.${generatedData.fileExtension}`;
const storageService = await StorageFactory.getInstance();
const uploadResult = await storageService.uploadFile(
finalOrgId,
finalProjectId,
"generated",
finalFilename,
generatedData.buffer,
generatedData.mimeType,
);
if (uploadResult.success) {
return {
success: true,
filename: uploadResult.filename,
filepath: uploadResult.path,
url: uploadResult.url,
model: this.primaryModel,
...(generatedData.description && {
description: generatedData.description,
}),
};
} else {
// Storage failed but image was generated
return {
success: false,
model: this.primaryModel,
error: `Image generated successfully but storage failed: ${uploadResult.error || "Unknown storage error"}`,
errorType: "storage",
generatedImageData: generatedData,
...(generatedData.description && {
description: generatedData.description,
}),
};
}
} catch (error) {
// Storage exception - image was generated but couldn't be saved
return {
success: false,
model: this.primaryModel,
error: `Image generated successfully but storage failed: ${error instanceof Error ? error.message : "Unknown storage error"}`,
errorType: "storage",
generatedImageData: generatedData,
...(generatedData.description && {
description: generatedData.description,
}),
};
}
}
private async tryGeneration(params: {
model: string;
config: { responseModalities: string[] };
prompt: string;
filename: string;
orgId: string;
projectId: string;
userId: string;
referenceImages?: ReferenceImage[];
modelName: string;
}): Promise<ImageGenerationResult> {
const {
model,
config,
prompt,
filename,
orgId,
projectId,
userId,
referenceImages,
modelName,
} = params;
/**
* Generate image using Gemini AI - isolated from storage logic
* @throws Error if generation fails
*/
private async generateImageWithAI(
prompt: string,
referenceImages?: ReferenceImage[],
): Promise<GeneratedImageData> {
const contentParts: any[] = [];
// Add reference images if provided
if (referenceImages && referenceImages.length > 0) {
for (const refImage of referenceImages) {
contentParts.push({
inlineData: {
mimeType: refImage.mimetype,
data: refImage.buffer.toString("base64"),
},
});
}
}
// Add text prompt
contentParts.push({
text: prompt,
});
const contents = [
{
role: "user" as const,
parts: contentParts,
},
];
try {
const contentParts: any[] = [];
if (referenceImages && referenceImages.length > 0) {
for (const refImage of referenceImages) {
contentParts.push({
inlineData: {
mimeType: refImage.mimetype,
data: refImage.buffer.toString("base64"),
},
});
}
}
contentParts.push({
text: prompt,
});
const contents = [
{
role: "user" as const,
parts: contentParts,
},
];
const response = await this.ai.models.generateContent({
model,
config,
model: this.primaryModel,
config: { responseModalities: ["IMAGE", "TEXT"] },
contents,
});
// Parse response
if (
response.candidates &&
response.candidates[0] &&
response.candidates[0].content
!response.candidates ||
!response.candidates[0] ||
!response.candidates[0].content
) {
const content = response.candidates[0].content;
let generatedDescription = "";
let uploadResult: UploadResult | null = null;
throw new Error("No response received from Gemini AI");
}
for (let index = 0; index < (content.parts?.length || 0); index++) {
const part = content.parts?.[index];
if (!part) continue;
const content = response.candidates[0].content;
let generatedDescription: string | undefined;
let imageData: { buffer: Buffer; mimeType: string } | null = null;
if (part.inlineData) {
const fileExtension = mime.getExtension(
part.inlineData.mimeType || "",
);
const finalFilename = `${filename}.${fileExtension}`;
const contentType =
part.inlineData.mimeType || `image/${fileExtension}`;
const buffer = Buffer.from(part.inlineData.data || "", "base64");
const storageService = StorageFactory.getInstance();
const result = (await storageService).uploadFile(
orgId,
projectId,
"generated",
finalFilename,
buffer,
contentType,
);
uploadResult = await result;
} else if (part.text) {
generatedDescription = part.text;
}
}
if (uploadResult && uploadResult.success) {
return {
success: true,
filename: uploadResult.filename,
filepath: uploadResult.path,
url: uploadResult.url,
description: generatedDescription,
model: modelName,
};
// Extract image data and description from response
for (const part of content.parts || []) {
if (part.inlineData) {
const buffer = Buffer.from(part.inlineData.data || "", "base64");
const mimeType = part.inlineData.mimeType || "image/png";
imageData = { buffer, mimeType };
} else if (part.text) {
generatedDescription = part.text;
}
}
if (!imageData) {
throw new Error("No image data received from Gemini AI");
}
const fileExtension = mime.getExtension(imageData.mimeType) || "png";
return {
success: false,
model: modelName,
error: "No image data received from API",
buffer: imageData.buffer,
mimeType: imageData.mimeType,
fileExtension,
...(generatedDescription && { description: generatedDescription }),
};
} catch (error) {
return {
success: false,
model: modelName,
error: error instanceof Error ? error.message : "Generation failed",
};
// Re-throw with clear error message
if (error instanceof Error) {
throw new Error(`Gemini AI generation failed: ${error.message}`);
}
throw new Error("Gemini AI generation failed: Unknown error");
}
}

View File

@ -1,5 +1,5 @@
import { Client as MinioClient } from 'minio';
import { StorageService, FileMetadata, UploadResult } from './StorageService';
import { Client as MinioClient } from "minio";
import { StorageService, FileMetadata, UploadResult } from "./StorageService";
export class MinioStorageService implements StorageService {
private client: MinioClient;
@ -11,13 +11,13 @@ export class MinioStorageService implements StorageService {
accessKey: string,
secretKey: string,
useSSL: boolean = false,
bucketName: string = 'banatie',
publicUrl?: string
bucketName: string = "banatie",
publicUrl?: string,
) {
// Parse endpoint to separate hostname and port
const cleanEndpoint = endpoint.replace(/^https?:\/\//, '');
const [hostname, portStr] = cleanEndpoint.split(':');
const port = portStr ? parseInt(portStr, 10) : (useSSL ? 443 : 9000);
const cleanEndpoint = endpoint.replace(/^https?:\/\//, "");
const [hostname, portStr] = cleanEndpoint.split(":");
const port = portStr ? parseInt(portStr, 10) : useSSL ? 443 : 9000;
if (!hostname) {
throw new Error(`Invalid MinIO endpoint: ${endpoint}`);
@ -25,20 +25,20 @@ export class MinioStorageService implements StorageService {
this.client = new MinioClient({
endPoint: hostname,
port: port,
port,
useSSL,
accessKey,
secretKey
secretKey,
});
this.bucketName = bucketName;
this.publicUrl = publicUrl || `${useSSL ? 'https' : 'http'}://${endpoint}`;
this.publicUrl = publicUrl || `${useSSL ? "https" : "http"}://${endpoint}`;
}
private getFilePath(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string
category: "uploads" | "generated" | "references",
filename: string,
): string {
// Simplified path without date folder for now
return `${orgId}/${projectId}/${category}/${filename}`;
@ -50,11 +50,11 @@ export class MinioStorageService implements StorageService {
const timestamp = Date.now();
const random = Math.random().toString(36).substring(2, 8);
const ext = sanitized.includes('.')
? sanitized.substring(sanitized.lastIndexOf('.'))
: '';
const name = sanitized.includes('.')
? sanitized.substring(0, sanitized.lastIndexOf('.'))
const ext = sanitized.includes(".")
? sanitized.substring(sanitized.lastIndexOf("."))
: "";
const name = sanitized.includes(".")
? sanitized.substring(0, sanitized.lastIndexOf("."))
: sanitized;
return `${name}-${timestamp}-${random}${ext}`;
@ -63,49 +63,70 @@ export class MinioStorageService implements StorageService {
private sanitizeFilename(filename: string): string {
// Remove dangerous characters and path traversal attempts
return filename
.replace(/[<>:"/\\|?*\x00-\x1f]/g, '') // Remove dangerous chars
.replace(/\.\./g, '') // Remove path traversal
.replace(/^\.+/, '') // Remove leading dots
.replace(/[<>:"/\\|?*\x00-\x1f]/g, "") // Remove dangerous chars
.replace(/\.\./g, "") // Remove path traversal
.replace(/^\.+/, "") // Remove leading dots
.trim()
.substring(0, 255); // Limit length
}
private validateFilePath(orgId: string, projectId: string, category: string, filename: string): void {
private validateFilePath(
orgId: string,
projectId: string,
category: string,
filename: string,
): void {
// Validate orgId
if (!orgId || !/^[a-zA-Z0-9_-]+$/.test(orgId) || orgId.length > 50) {
throw new Error('Invalid organization ID: must be alphanumeric with dashes/underscores, max 50 chars');
throw new Error(
"Invalid organization ID: must be alphanumeric with dashes/underscores, max 50 chars",
);
}
// Validate projectId
if (!projectId || !/^[a-zA-Z0-9_-]+$/.test(projectId) || projectId.length > 50) {
throw new Error('Invalid project ID: must be alphanumeric with dashes/underscores, max 50 chars');
if (
!projectId ||
!/^[a-zA-Z0-9_-]+$/.test(projectId) ||
projectId.length > 50
) {
throw new Error(
"Invalid project ID: must be alphanumeric with dashes/underscores, max 50 chars",
);
}
// Validate category
if (!['uploads', 'generated', 'references'].includes(category)) {
throw new Error('Invalid category: must be uploads, generated, or references');
if (!["uploads", "generated", "references"].includes(category)) {
throw new Error(
"Invalid category: must be uploads, generated, or references",
);
}
// Validate filename
if (!filename || filename.length === 0 || filename.length > 255) {
throw new Error('Invalid filename: must be 1-255 characters');
throw new Error("Invalid filename: must be 1-255 characters");
}
// Check for path traversal and dangerous patterns
if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
throw new Error('Invalid characters in filename: path traversal not allowed');
if (
filename.includes("..") ||
filename.includes("/") ||
filename.includes("\\")
) {
throw new Error(
"Invalid characters in filename: path traversal not allowed",
);
}
// Prevent null bytes and control characters
if (/[\x00-\x1f]/.test(filename)) {
throw new Error('Invalid filename: control characters not allowed');
throw new Error("Invalid filename: control characters not allowed");
}
}
async createBucket(): Promise<void> {
const exists = await this.client.bucketExists(this.bucketName);
if (!exists) {
await this.client.makeBucket(this.bucketName, 'us-east-1');
await this.client.makeBucket(this.bucketName, "us-east-1");
console.log(`Created bucket: ${this.bucketName}`);
}
@ -120,20 +141,20 @@ export class MinioStorageService implements StorageService {
async uploadFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
category: "uploads" | "generated" | "references",
filename: string,
buffer: Buffer,
contentType: string
contentType: string,
): Promise<UploadResult> {
// Validate inputs first
this.validateFilePath(orgId, projectId, category, filename);
if (!buffer || buffer.length === 0) {
throw new Error('Buffer cannot be empty');
throw new Error("Buffer cannot be empty");
}
if (!contentType || contentType.trim().length === 0) {
throw new Error('Content type is required');
throw new Error("Content type is required");
}
// Ensure bucket exists
@ -141,15 +162,20 @@ export class MinioStorageService implements StorageService {
// Generate unique filename to avoid conflicts
const uniqueFilename = this.generateUniqueFilename(filename);
const filePath = this.getFilePath(orgId, projectId, category, uniqueFilename);
const filePath = this.getFilePath(
orgId,
projectId,
category,
uniqueFilename,
);
const metadata = {
'Content-Type': contentType,
'X-Amz-Meta-Original-Name': filename,
'X-Amz-Meta-Category': category,
'X-Amz-Meta-Project': projectId,
'X-Amz-Meta-Organization': orgId,
'X-Amz-Meta-Upload-Time': new Date().toISOString()
"Content-Type": contentType,
"X-Amz-Meta-Original-Name": filename,
"X-Amz-Meta-Category": category,
"X-Amz-Meta-Project": projectId,
"X-Amz-Meta-Organization": orgId,
"X-Amz-Meta-Upload-Time": new Date().toISOString(),
};
console.log(`Uploading file to: ${this.bucketName}/${filePath}`);
@ -159,7 +185,7 @@ export class MinioStorageService implements StorageService {
filePath,
buffer,
buffer.length,
metadata
metadata,
);
const url = this.getPublicUrl(orgId, projectId, category, uniqueFilename);
@ -172,15 +198,15 @@ export class MinioStorageService implements StorageService {
path: filePath,
url,
size: buffer.length,
contentType
contentType,
};
}
async downloadFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string
category: "uploads" | "generated" | "references",
filename: string,
): Promise<Buffer> {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
@ -189,18 +215,18 @@ export class MinioStorageService implements StorageService {
return new Promise((resolve, reject) => {
const chunks: Buffer[] = [];
stream.on('data', (chunk) => chunks.push(chunk));
stream.on('end', () => resolve(Buffer.concat(chunks)));
stream.on('error', reject);
stream.on("data", (chunk) => chunks.push(chunk));
stream.on("end", () => resolve(Buffer.concat(chunks)));
stream.on("error", reject);
});
}
async streamFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string
): Promise<import('stream').Readable> {
category: "uploads" | "generated" | "references",
filename: string,
): Promise<import("stream").Readable> {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
@ -211,8 +237,8 @@ export class MinioStorageService implements StorageService {
async deleteFile(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string
category: "uploads" | "generated" | "references",
filename: string,
): Promise<void> {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
@ -222,52 +248,61 @@ export class MinioStorageService implements StorageService {
getPublicUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string
category: "uploads" | "generated" | "references",
filename: string,
): string {
this.validateFilePath(orgId, projectId, category, filename);
// Production-ready: Return API URL for presigned URL access
const apiBaseUrl = process.env['API_BASE_URL'] || 'http://localhost:3000';
const apiBaseUrl = process.env["API_BASE_URL"] || "http://localhost:3000";
return `${apiBaseUrl}/api/images/${orgId}/${projectId}/${category}/${filename}`;
}
async getPresignedUploadUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
category: "uploads" | "generated" | "references",
filename: string,
expirySeconds: number,
contentType: string
contentType: string,
): Promise<string> {
this.validateFilePath(orgId, projectId, category, filename);
if (!contentType || contentType.trim().length === 0) {
throw new Error('Content type is required for presigned upload URL');
throw new Error("Content type is required for presigned upload URL");
}
const filePath = this.getFilePath(orgId, projectId, category, filename);
return await this.client.presignedPutObject(this.bucketName, filePath, expirySeconds);
return await this.client.presignedPutObject(
this.bucketName,
filePath,
expirySeconds,
);
}
async getPresignedDownloadUrl(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
category: "uploads" | "generated" | "references",
filename: string,
expirySeconds: number = 86400 // 24 hours default
expirySeconds: number = 86400, // 24 hours default
): Promise<string> {
this.validateFilePath(orgId, projectId, category, filename);
const filePath = this.getFilePath(orgId, projectId, category, filename);
const presignedUrl = await this.client.presignedGetObject(this.bucketName, filePath, expirySeconds);
const presignedUrl = await this.client.presignedGetObject(
this.bucketName,
filePath,
expirySeconds,
);
// Replace internal Docker hostname with public URL if configured
if (this.publicUrl) {
const clientEndpoint = this.client.host + (this.client.port ? `:${this.client.port}` : '');
const publicEndpoint = this.publicUrl.replace(/^https?:\/\//, '');
const clientEndpoint =
this.client.host + (this.client.port ? `:${this.client.port}` : "");
const publicEndpoint = this.publicUrl.replace(/^https?:\/\//, "");
return presignedUrl.replace(
`${this.client.protocol}//${clientEndpoint}`,
this.publicUrl
this.publicUrl,
);
}
@ -277,24 +312,32 @@ export class MinioStorageService implements StorageService {
async listProjectFiles(
orgId: string,
projectId: string,
category?: 'uploads' | 'generated' | 'references'
category?: "uploads" | "generated" | "references",
): Promise<FileMetadata[]> {
const prefix = category ? `${orgId}/${projectId}/${category}/` : `${orgId}/${projectId}/`;
const prefix = category
? `${orgId}/${projectId}/${category}/`
: `${orgId}/${projectId}/`;
const files: FileMetadata[] = [];
return new Promise((resolve, reject) => {
const stream = this.client.listObjects(this.bucketName, prefix, true);
stream.on('data', async (obj) => {
stream.on("data", async (obj) => {
try {
if (!obj.name) return;
const metadata = await this.client.statObject(this.bucketName, obj.name);
const metadata = await this.client.statObject(
this.bucketName,
obj.name,
);
const pathParts = obj.name.split('/');
const pathParts = obj.name.split("/");
const filename = pathParts[pathParts.length - 1];
const categoryFromPath = pathParts[2] as 'uploads' | 'generated' | 'references';
const categoryFromPath = pathParts[2] as
| "uploads"
| "generated"
| "references";
if (!filename || !categoryFromPath) {
return;
@ -303,28 +346,35 @@ export class MinioStorageService implements StorageService {
files.push({
key: `${this.bucketName}/${obj.name}`,
filename,
contentType: metadata.metaData?.['content-type'] || 'application/octet-stream',
contentType:
metadata.metaData?.["content-type"] || "application/octet-stream",
size: obj.size || 0,
url: this.getPublicUrl(orgId, projectId, categoryFromPath, filename),
createdAt: obj.lastModified || new Date()
url: this.getPublicUrl(
orgId,
projectId,
categoryFromPath,
filename,
),
createdAt: obj.lastModified || new Date(),
});
} catch (error) {
}
} catch (error) {}
});
stream.on('end', () => resolve(files));
stream.on('error', reject);
stream.on("end", () => resolve(files));
stream.on("error", reject);
});
}
parseKey(key: string): {
orgId: string;
projectId: string;
category: 'uploads' | 'generated' | 'references';
category: "uploads" | "generated" | "references";
filename: string;
} | null {
try {
const match = key.match(/^banatie\/([^/]+)\/([^/]+)\/(uploads|generated|references)\/[^/]+\/(.+)$/);
const match = key.match(
/^banatie\/([^/]+)\/([^/]+)\/(uploads|generated|references)\/[^/]+\/(.+)$/,
);
if (!match) {
return null;
@ -339,20 +389,19 @@ export class MinioStorageService implements StorageService {
return {
orgId,
projectId,
category: category as 'uploads' | 'generated' | 'references',
filename
category: category as "uploads" | "generated" | "references",
filename,
};
} catch {
return null;
}
}
async fileExists(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
filename: string
category: "uploads" | "generated" | "references",
filename: string,
): Promise<boolean> {
try {
this.validateFilePath(orgId, projectId, category, filename);
@ -367,10 +416,10 @@ export class MinioStorageService implements StorageService {
async listFiles(
orgId: string,
projectId: string,
category: 'uploads' | 'generated' | 'references',
prefix?: string
category: "uploads" | "generated" | "references",
prefix?: string,
): Promise<FileMetadata[]> {
this.validateFilePath(orgId, projectId, category, 'dummy.txt');
this.validateFilePath(orgId, projectId, category, "dummy.txt");
const basePath = `${orgId}/${projectId}/${category}/`;
const searchPrefix = prefix ? `${basePath}${prefix}` : basePath;
@ -378,33 +427,40 @@ export class MinioStorageService implements StorageService {
const files: FileMetadata[] = [];
return new Promise((resolve, reject) => {
const stream = this.client.listObjects(this.bucketName, searchPrefix, true);
const stream = this.client.listObjects(
this.bucketName,
searchPrefix,
true,
);
stream.on('data', async (obj) => {
stream.on("data", async (obj) => {
if (!obj.name || !obj.size) return;
try {
const pathParts = obj.name.split('/');
const pathParts = obj.name.split("/");
const filename = pathParts[pathParts.length - 1];
if (!filename) return;
const metadata = await this.client.statObject(this.bucketName, obj.name);
const metadata = await this.client.statObject(
this.bucketName,
obj.name,
);
files.push({
filename,
size: obj.size,
contentType: metadata.metaData?.['content-type'] || 'application/octet-stream',
contentType:
metadata.metaData?.["content-type"] || "application/octet-stream",
lastModified: obj.lastModified || new Date(),
etag: metadata.etag,
path: obj.name
path: obj.name,
});
} catch (error) {
}
} catch (error) {}
});
stream.on('end', () => resolve(files));
stream.on('error', reject);
stream.on("end", () => resolve(files));
stream.on("error", reject);
});
}
}

View File

@ -1,5 +1,5 @@
import { StorageService } from './StorageService';
import { MinioStorageService } from './MinioStorageService';
import { StorageService } from "./StorageService";
import { MinioStorageService } from "./MinioStorageService";
export class StorageFactory {
private static instance: StorageService | null = null;
@ -30,32 +30,30 @@ export class StorageFactory {
try {
this.instance = this.createStorageService();
} catch (error) {
throw new Error('Storage service unavailable. Please check MinIO configuration.');
throw new Error(
"Storage service unavailable. Please check MinIO configuration.",
);
}
}
return this.instance;
}
private static async createStorageServiceWithRetry(): Promise<StorageService> {
const maxRetries = 3;
const baseDelay = 1000; // 1 second
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
const service = this.createStorageService();
await service.bucketExists();
return service;
} catch (error) {
if (attempt === maxRetries) {
throw new Error(
`Failed to initialize storage service after ${maxRetries} attempts. ` +
`Last error: ${error instanceof Error ? error.message : 'Unknown error'}`
`Last error: ${error instanceof Error ? error.message : "Unknown error"}`,
);
}
@ -64,40 +62,39 @@ export class StorageFactory {
}
}
throw new Error('Unexpected error in storage service creation');
throw new Error("Unexpected error in storage service creation");
}
private static sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
return new Promise((resolve) => setTimeout(resolve, ms));
}
private static createStorageService(): StorageService {
const storageType = process.env['STORAGE_TYPE'] || 'minio';
const storageType = process.env["STORAGE_TYPE"] || "minio";
try {
switch (storageType.toLowerCase()) {
case 'minio': {
const endpoint = process.env['MINIO_ENDPOINT'];
const accessKey = process.env['MINIO_ACCESS_KEY'];
const secretKey = process.env['MINIO_SECRET_KEY'];
const useSSL = process.env['MINIO_USE_SSL'] === 'true';
const bucketName = process.env['MINIO_BUCKET_NAME'] || 'banatie';
const publicUrl = process.env['MINIO_PUBLIC_URL'];
case "minio": {
const endpoint = process.env["MINIO_ENDPOINT"];
const accessKey = process.env["MINIO_ACCESS_KEY"];
const secretKey = process.env["MINIO_SECRET_KEY"];
const useSSL = process.env["MINIO_USE_SSL"] === "true";
const bucketName = process.env["MINIO_BUCKET_NAME"] || "banatie";
const publicUrl = process.env["MINIO_PUBLIC_URL"];
if (!endpoint || !accessKey || !secretKey) {
throw new Error(
'MinIO configuration missing. Required: MINIO_ENDPOINT, MINIO_ACCESS_KEY, MINIO_SECRET_KEY'
"MinIO configuration missing. Required: MINIO_ENDPOINT, MINIO_ACCESS_KEY, MINIO_SECRET_KEY",
);
}
return new MinioStorageService(
endpoint,
accessKey,
secretKey,
useSSL,
bucketName,
publicUrl
publicUrl,
);
}

View File

@ -77,6 +77,16 @@ export interface ImageGenerationResult {
description?: string;
model: string;
error?: string;
errorType?: "generation" | "storage"; // Distinguish between generation and storage errors
generatedImageData?: GeneratedImageData; // Available when generation succeeds but storage fails
}
// Intermediate result after image generation, before storage
export interface GeneratedImageData {
buffer: Buffer;
mimeType: string;
fileExtension: string;
description?: string;
}
// Logging types
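
With `errorType` in place, a caller can distinguish a wasted generation from a successful generation that merely failed to persist, and recover accordingly. A hedged sketch of such a caller (import paths and the `retryUpload` helper are hypothetical, not part of this commit):

```ts
import { ImageGenService } from "./services/ImageGenService";
import type { GeneratedImageData } from "./types/api";

// Hypothetical helper that re-attempts the MinIO upload later
declare function retryUpload(data: GeneratedImageData): Promise<void>;

async function generateHero(service: ImageGenService): Promise<void> {
  const result = await service.generateImage({ prompt: "a banana", filename: "hero" });

  if (result.success) {
    console.log(`Image stored at ${result.url}`);
  } else if (result.errorType === "storage" && result.generatedImageData) {
    // Generation succeeded; only persistence failed. Retrying the upload
    // avoids paying for another Gemini call.
    await retryUpload(result.generatedImageData);
  } else {
    // errorType === "generation": nothing usable was produced
    console.error(`Generation failed on ${result.model}: ${result.error}`);
  }
}
```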

View File

@ -21,8 +21,27 @@ services:
condition: service_healthy
environment:
- NODE_ENV=development
env_file:
- .env.docker
- DATABASE_URL=${DATABASE_URL}
- GEMINI_API_KEY=${GEMINI_API_KEY}
- STORAGE_TYPE=${STORAGE_TYPE}
- MINIO_ENDPOINT=${MINIO_ENDPOINT}
- MINIO_ACCESS_KEY=${MINIO_ACCESS_KEY}
- MINIO_SECRET_KEY=${MINIO_SECRET_KEY}
- MINIO_USE_SSL=${MINIO_USE_SSL}
- MINIO_BUCKET_NAME=${MINIO_BUCKET_NAME}
- MINIO_PUBLIC_URL=${MINIO_PUBLIC_URL}
- API_BASE_URL=${API_BASE_URL}
- DEFAULT_ORG_ID=${DEFAULT_ORG_ID}
- DEFAULT_PROJECT_ID=${DEFAULT_PROJECT_ID}
- DEFAULT_USER_ID=${DEFAULT_USER_ID}
- PRESIGNED_URL_EXPIRY=${PRESIGNED_URL_EXPIRY}
- MAX_FILE_SIZE=${MAX_FILE_SIZE}
- MAX_FILES=${MAX_FILES}
- RESULTS_DIR=${RESULTS_DIR}
- UPLOADS_DIR=${UPLOADS_DIR}
- LOG_LEVEL=${LOG_LEVEL}
- PORT=${PORT}
- CORS_ORIGIN=${CORS_ORIGIN}
restart: unless-stopped
postgres: