Compare commits

5 Commits: 3c922c861b ... 23c1d33adb

| Author | SHA1 | Date |
|---|---|---|
|  | 23c1d33adb |  |
|  | a8220921cb |  |
|  | ba85b076ad |  |
|  | f938c63cf6 |  |
|  | bf96452f1b |  |
**.env.docker** (31 changed lines)

```diff
@@ -1,8 +1,7 @@
 # Application Configuration
 NODE_ENV=development
 PORT=3000
-LOG_LEVEL=info
-API_BASE_URL=http://localhost:3000
+LOG_LEVEL=debug
 
 # CORS Configuration
 CORS_ORIGIN=*
@@ -10,37 +9,31 @@ CORS_ORIGIN=*
 # Database Configuration
 DB_HOST=postgres
 DB_PORT=5432
-DB_NAME=banatie
+DB_NAME=banatie_db
 DB_USER=banatie_user
-DB_PASSWORD=banatie_secure_password
+DB_PASSWORD=development_password
 
-# MinIO Storage Configuration (SNMD)
-MINIO_ROOT_USER=banatie_admin
-MINIO_ROOT_PASSWORD=banatie_storage_secure_key_2024
+# MinIO Storage Configuration
 STORAGE_TYPE=minio
 MINIO_ENDPOINT=minio:9000
-MINIO_ACCESS_KEY=banatie_service
-MINIO_SECRET_KEY=banatie_service_key_2024
+MINIO_ACCESS_KEY=minioadmin
+MINIO_SECRET_KEY=minioadmin
 MINIO_USE_SSL=false
-MINIO_BUCKET_NAME=banatie
+MINIO_BUCKET_PREFIX=banatie
 MINIO_PUBLIC_URL=http://localhost:9000
 
-# AI Service Configuration
+# AI Service Configuration (using the existing API key from .env)
 GEMINI_API_KEY=AIzaSyBaOt9JMPGKA3811FL-ssf1n5Hh9Jauly8
 
 # File Upload Configuration
 MAX_FILE_SIZE=5242880
 MAX_FILES=3
 
-# Multi-tenancy Configuration (Production-Ready Names)
-DEFAULT_ORG_ID=default
-DEFAULT_PROJECT_ID=main
-DEFAULT_USER_ID=system
+# Multi-tenancy Configuration
+DEFAULT_ORG_ID=demo
+DEFAULT_USER_ID=guest
 
-# Presigned URL Configuration
-PRESIGNED_URL_EXPIRY=86400 # 24 hours
-
-# Directory Configuration
+# Directory Configuration (for Docker containers)
 RESULTS_DIR=/app/results
 UPLOADS_DIR=/app/uploads/temp
```
**.gitignore**

```diff
@@ -72,10 +72,18 @@ jspm_packages/
 .env
 
 # Generated images and uploads
-data/storage/
 results/
 uploads/
 
 # Temporary files
 temp/
 tmp/
+
+# Docker data directories
+data/
+.env.docker
+.env.production
+
+# Docker volumes (persistent data)
+postgres-data/
+minio-data/
```
**(documentation file)**

```diff
@@ -8,8 +8,6 @@ Banatie is a REST API service for AI-powered image generation using the Gemini F
 
 ## Development Commands
 
-use `docker compose` command for using docker-compose service (v3 version)
-
 ### Core Development
 - `pnpm dev` - Start development server with auto-reload using tsx
 - `pnpm start` - Start production server (runs build first)
```
**docker-compose.yml**

```diff
@@ -1,17 +1,18 @@
+version: '3.8'
 
 services:
   app:
     build:
       context: .
       target: development
-    container_name: banatie-app
+    container_name: banatie-app-dev
     ports:
       - "3000:3000"
     volumes:
-      - ./src:/app/src
-      - ./logs:/app/logs
+      - ./src:/app/src # Hot reload for development
+      - ./logs:/app/logs # Persistent logs
     networks:
-      - banatie-network
+      - banatie-dev
     depends_on:
       postgres:
         condition: service_healthy
@@ -25,49 +26,40 @@ services:
 
   postgres:
     image: postgres:15-alpine
-    container_name: banatie-postgres
+    container_name: banatie-postgres-dev
     ports:
-      - "5434:5432"
+      - "5434:5432" # Avoid conflicts with other PostgreSQL instances
     volumes:
       - ./data/postgres:/var/lib/postgresql/data
       - ./scripts/init-db.sql:/docker-entrypoint-initdb.d/01-init.sql
     networks:
-      - banatie-network
+      - banatie-dev
     environment:
-      POSTGRES_DB: banatie
+      POSTGRES_DB: banatie_db
       POSTGRES_USER: banatie_user
-      POSTGRES_PASSWORD: banatie_secure_password
+      POSTGRES_PASSWORD: development_password
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready -U banatie_user -d banatie"]
+      test: ["CMD-SHELL", "pg_isready -U banatie_user -d banatie_db"]
       interval: 30s
       timeout: 10s
      retries: 3
      start_period: 40s
    restart: unless-stopped
 
-  # SNMD MinIO Setup - Production Ready
   minio:
-    image: quay.io/minio/minio:latest
-    container_name: banatie-storage
+    image: minio/minio:latest
+    container_name: banatie-minio-dev
     ports:
       - "9000:9000" # S3 API
-      - "9001:9001" # Console
+      - "9001:9001" # Web Console
     volumes:
-      # SNMD: 4 drives for full S3 compatibility and erasure coding
-      - ./data/storage/drive1:/data1
-      - ./data/storage/drive2:/data2
-      - ./data/storage/drive3:/data3
-      - ./data/storage/drive4:/data4
+      - ./data/minio:/data
     networks:
-      - banatie-network
+      - banatie-dev
     environment:
-      MINIO_ROOT_USER: ${MINIO_ROOT_USER}
-      MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD}
-      MINIO_BROWSER_REDIRECT_URL: http://localhost:9001
-      MINIO_SERVER_URL: http://localhost:9000
-      MINIO_DOMAIN: localhost
-    # CRITICAL: SNMD command for full S3 compatibility
-    command: server /data{1...4} --console-address ":9001"
+      MINIO_ROOT_USER: minioadmin
+      MINIO_ROOT_PASSWORD: minioadmin
+    command: server /data --console-address ":9001"
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
       interval: 30s
@@ -76,37 +68,40 @@ services:
       start_period: 40s
     restart: unless-stopped
 
-  storage-init:
+  minio-init:
     image: minio/mc:latest
-    container_name: banatie-storage-init
+    container_name: banatie-minio-init
     networks:
-      - banatie-network
+      - banatie-dev
     depends_on:
       minio:
         condition: service_healthy
     entrypoint: >
       /bin/sh -c "
       echo 'Setting up MinIO alias...';
-      mc alias set storage http://minio:9000 ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD};
+      mc alias set minio http://minio:9000 minioadmin minioadmin;
 
-      echo 'Creating main bucket...';
-      mc mb --ignore-existing storage/banatie;
+      echo 'Creating demo bucket...';
+      mc mb --ignore-existing minio/banatie-demo;
 
-      echo 'Creating service user...';
-      mc admin user add storage banatie_service banatie_service_key_2024;
+      echo 'Setting up public read policy for generated images...';
+      mc anonymous set download minio/banatie-demo/users/guest/generated;
 
-      echo 'Attaching readwrite policy to service user...';
-      mc admin policy attach storage readwrite --user=banatie_service;
+      echo 'Creating banatie service user...';
+      mc admin user add minio banatie-user banatie-password;
 
-      echo 'Setting up lifecycle policy...';
+      echo 'Attaching readwrite policy to banatie user...';
+      mc admin policy attach minio readwrite --user=banatie-user;
+
+      echo 'Creating lifecycle policy for temp files (7 days retention)...';
       cat > /tmp/lifecycle.json << EOF
       {
         \"Rules\": [
           {
-            \"ID\": \"temp-cleanup\",
+            \"ID\": \"temp-files-cleanup\",
             \"Status\": \"Enabled\",
             \"Filter\": {
-              \"Prefix\": \"temp/\"
+              \"Prefix\": \"users/\"
             },
             \"Expiration\": {
               \"Days\": 7
@@ -115,22 +110,20 @@ services:
           }
         ]
       }
       EOF
-      mc ilm import storage/banatie < /tmp/lifecycle.json;
+      mc ilm import minio/banatie-demo < /tmp/lifecycle.json;
 
-      echo 'Storage initialization completed!';
-      echo 'Bucket: banatie';
-      echo 'Using presigned URLs for secure access';
-      echo 'SNMD mode: Full S3 compatibility enabled';
+      echo 'MinIO initialization completed successfully!';
       exit 0;
       "
     restart: "no"
 
 networks:
-  banatie-network:
+  banatie-dev:
     driver: bridge
+    name: banatie-dev-network
 
 volumes:
   postgres-data:
     driver: local
-  storage-data:
+  minio-data:
     driver: local
```
**(.http request file)**

```diff
@@ -1,11 +1,5 @@
 @base = http://localhost:3000
 
-
-### Health
-
-GET {{base}}/health
-
-
 ### Info
 
 GET {{base}}/api/info
@@ -36,8 +30,15 @@ POST {{base}}/api/text-to-image
 Content-Type: application/json
 
 {
-  "prompt": "A majestic eagle soaring over snow-capped mountains",
-  "filename": "test-eagle"
+  "prompt": "banana steampunk. lots of steampunk machines and mechanisms devoted to bananas and running on bananas. a banana extravaganza",
+  "filename": "banatie-party",
+  "autoEnhance": true,
+  "enhancementOptions": {
+    "imageStyle": "photorealistic",
+    "aspectRatio": "landscape",
+    "mood": "peaceful",
+    "lighting": "golden hour"
+  }
 }
 
 
```
**minio-setup.md** (removed, 265 lines)

# MinIO Setup Technical Specification

## Project Status

Starting MinIO integration from scratch. The previous implementation had compatibility issues with bucket policies in SNSD mode; the new implementation uses an SNMD (Single Node Multi Drive) configuration with presigned URLs for reliable file access.
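
A minimal sketch of the presigned-URL approach (illustrative only; the credentials are the development placeholders used later in this spec):

```typescript
import { Client } from 'minio';

// With presigned URLs, no bucket policy is needed: each download gets a
// short-lived signed URL, which behaves the same in SNSD and SNMD mode.
const client = new Client({
  endPoint: 'localhost',
  port: 9000,
  useSSL: false,
  accessKey: 'banatie_service',          // development credentials
  secretKey: 'banatie_service_key_2024',
});

// 86400 s = 24 h, matching PRESIGNED_URL_EXPIRY below
async function signedDownloadUrl(objectKey: string): Promise<string> {
  return client.presignedGetObject('banatie', objectKey, 86400);
}
```
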
## Architecture Overview

### Storage Strategy
- **Mode**: SNMD (4 virtual drives) for full S3 compatibility
- **Access Method**: Presigned URLs only (no bucket policies)
- **Bucket Structure**: Single bucket `banatie` with path-based organization
- **File Organization**: `orgId/projectId/category/year-month/filename` (key builder sketched below)
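
For illustration, a minimal key builder matching this layout (the storage service shown later in this compare also appends a timestamp-plus-random suffix to the filename before building the key):

```typescript
// Builds keys like: default/main/generated/2024-03/test-eagle.png
function objectKey(
  orgId: string,
  projectId: string,
  category: 'uploads' | 'generated' | 'references',
  filename: string,
): string {
  const now = new Date();
  // Zero-padded month keeps keys lexicographically sortable by date
  const yearMonth = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, '0')}`;
  return `${orgId}/${projectId}/${category}/${yearMonth}/${filename}`;
}
```
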
### Technology Stack
- MinIO latest (`quay.io/minio/minio:latest`)
- Docker Compose for orchestration
- PostgreSQL for application data
- Express.js API with TypeScript

## Configuration Files Status

### Completed Files
- `docker-compose.yml` - SNMD configuration with 4 virtual drives
- `.env.docker` - Environment variables for development
- `src/services/MinioStorageService.ts` - Updated service implementation
- `src/services/StorageFactory.ts` - Service factory configuration
- `src/routes/images.ts` - Presigned URL endpoints

### Integration Requirements

#### 1. Update Application Router
Add the images router to the main application in `src/app.ts`:

```typescript
import { imagesRouter } from './routes/images';

// Add to routes section
app.use('/api', imagesRouter);
```

#### 2. Environment Variables Update
Update the existing `.env` file with the MinIO configuration:

```bash
# Add to existing .env file
MINIO_ROOT_USER=banatie_admin
MINIO_ROOT_PASSWORD=banatie_storage_secure_key_2024
STORAGE_TYPE=minio
MINIO_ENDPOINT=minio:9000
MINIO_ACCESS_KEY=banatie_service
MINIO_SECRET_KEY=banatie_service_key_2024
MINIO_USE_SSL=false
MINIO_BUCKET_NAME=banatie
MINIO_PUBLIC_URL=http://localhost:9000
API_BASE_URL=http://localhost:3000
DEFAULT_ORG_ID=default
DEFAULT_PROJECT_ID=main
DEFAULT_USER_ID=system
PRESIGNED_URL_EXPIRY=86400
```
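
A sketch of the corresponding service-side reads, using the same fallbacks as the application code:

```typescript
// Bracket lookups match the codebase's strict TypeScript style;
// the fallbacks mirror the values listed above.
const orgId = process.env['DEFAULT_ORG_ID'] || 'default';
const projectId = process.env['DEFAULT_PROJECT_ID'] || 'main';
const userId = process.env['DEFAULT_USER_ID'] || 'system';
const presignedUrlExpiry = parseInt(process.env['PRESIGNED_URL_EXPIRY'] || '86400', 10);
```
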
#### 3. Database Script Update
Update `scripts/init-db.sql` to use the new database name `banatie` instead of the previous naming.

#### 4. Service Dependencies Update
Update the existing image generation services to use the new storage configuration:

```typescript
// In ImageGenService.ts or similar
const storageService = StorageFactory.getInstance();
const uploadResult = await storageService.uploadFile(
  orgId,
  projectId,
  'generated',
  filename,
  buffer,
  'image/png'
);
// Use uploadResult.url (returns API URL for presigned access)
```

## Setup Instructions

### 1. Directory Structure
Create the required directories:

```bash
mkdir -p data/storage/{drive1,drive2,drive3,drive4}
mkdir -p data/postgres
```

### 2. Service Startup

```bash
# Start all services
docker-compose up -d

# Verify services are healthy
docker-compose ps

# Check MinIO logs
docker logs banatie-storage

# Check initialization logs
docker logs banatie-storage-init
```

### 3. Verification Steps

#### MinIO Console Access
- URL: http://localhost:9001
- Username: banatie_admin
- Password: banatie_storage_secure_key_2024

#### Test Presigned URL Generation

```bash
# Test image upload endpoint
curl -X POST http://localhost:3000/api/upload \
  -F "files=@test.png" \
  -F "category=generated"

# Test presigned URL access
curl -I "http://localhost:3000/api/images/default/main/generated/test-image.png"
# Should return 302 redirect to presigned URL
```

#### Verify SNMD Mode

```bash
# Check MinIO is in erasure coding mode
docker exec banatie-storage mc admin info local
# Should show 4 drives and erasure coding information
```

## Integration Testing

### Required Tests
1. **Storage Service Initialization**
   - Verify StorageFactory creates MinioStorageService
   - Confirm bucket creation and accessibility

2. **File Upload/Download Cycle**
   - Upload file via StorageService
   - Generate presigned URL
   - Verify file accessibility via presigned URL

3. **API Endpoint Testing**
   - Test `/api/images/:orgId/:projectId/:category/:filename`
   - Verify 302 redirect to presigned URL
   - Test fallback direct streaming

4. **Error Handling**
   - Test invalid file paths
   - Test expired presigned URLs
   - Test MinIO connection failures

### Test Script Template

```bash
#!/bin/bash
# test-minio-integration.sh

echo "Testing MinIO Integration"

# Test 1: Upload file
UPLOAD_RESPONSE=$(curl -s -X POST http://localhost:3000/api/upload \
  -F "files=@test.png" \
  -F "category=generated")

echo "Upload Response: $UPLOAD_RESPONSE"

# Extract URL from response
FILE_URL=$(echo "$UPLOAD_RESPONSE" | jq -r '.files[0].url')

# Test 2: Access via presigned URL
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" "$FILE_URL")

if [ "$HTTP_CODE" = "302" ]; then
  echo "SUCCESS: Presigned URL redirect working"
else
  echo "FAILURE: Expected 302, got $HTTP_CODE"
fi
```

## Production Considerations

### Environment Differences
Development and production use identical configuration with different values:

```bash
# Production .env differences
API_BASE_URL=https://api.yourdomain.com
MINIO_PUBLIC_URL=https://storage.yourdomain.com:9000
MINIO_USE_SSL=true
```

### Security Notes
- All passwords use environment variables (no hardcoded values)
- Presigned URLs expire after 24 hours by default
- Service user has minimal required permissions
- MinIO admin access is separate from application access

### Scaling Path
- The current SNMD setup supports development and small production loads
- Can migrate to a distributed MinIO cluster when needed
- The presigned URL architecture remains unchanged during scaling

## Troubleshooting

### Common Issues
1. **403 errors**: Check presigned URL generation and MinIO service user permissions
2. **404 errors**: Verify file paths and bucket configuration
3. **Connection errors**: Confirm MinIO service health and network connectivity

### Debug Commands

```bash
# Check MinIO health
docker exec banatie-storage mc admin info local

# List bucket contents
docker exec banatie-storage mc ls storage/banatie --recursive

# Check service logs
docker logs banatie-storage
docker logs banatie-app
```

### Recovery Procedures

```bash
# Reset MinIO data (development only)
docker-compose down
rm -rf data/storage/*
docker-compose up -d

# Recreate bucket structure
docker exec banatie-storage mc mb storage/banatie
```

## Implementation Priority

### Phase 1 (Immediate)
1. Update app.ts with the images router
2. Update the environment configuration
3. Test basic upload/download functionality

### Phase 2 (Next)
1. Update the existing image generation services
2. Implement comprehensive error handling
3. Add integration tests

### Phase 3 (Future)
1. Add monitoring and logging
2. Implement file cleanup policies
3. Add CDN integration capability

## Acceptance Criteria

Integration is complete when:
- [ ] All services start successfully via docker-compose
- [ ] MinIO operates in SNMD mode with 4 drives
- [ ] Image upload returns the API URL format
- [ ] API URLs redirect to working presigned URLs
- [ ] Generated images are accessible via presigned URLs
- [ ] Error handling provides meaningful responses
- [ ] Integration tests pass consistently

## Notes

This implementation prioritizes simplicity and reliability over advanced features. The presigned URL approach ensures consistent behavior across different MinIO configurations and provides a foundation for future enhancements without requiring architectural changes.
**scripts/init-db.sql**

```diff
@@ -1,35 +1,21 @@
 -- Banatie Database Initialization Script
--- This script creates the initial database schema for the Banatie image generation service
+-- This script creates the database schema for the Banatie image generation service
 
--- Enable UUID extension
+-- Enable UUID extension for generating UUIDs
 CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
 
--- Organizations table
-CREATE TABLE IF NOT EXISTS organizations (
+-- Organizations table - for multi-tenant support
+CREATE TABLE organizations (
   id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
   name VARCHAR(255) NOT NULL,
   slug VARCHAR(100) UNIQUE NOT NULL,
-  description TEXT,
   settings JSONB DEFAULT '{}',
   created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
   updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
 );
 
--- Projects table (within organizations)
-CREATE TABLE IF NOT EXISTS projects (
-  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
-  organization_id UUID REFERENCES organizations(id) ON DELETE CASCADE,
-  name VARCHAR(255) NOT NULL,
-  slug VARCHAR(100) NOT NULL,
-  description TEXT,
-  settings JSONB DEFAULT '{}',
-  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
-  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
-  UNIQUE(organization_id, slug)
-);
-
--- Users table
-CREATE TABLE IF NOT EXISTS users (
+-- Users table - users within organizations
+CREATE TABLE users (
   id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
   organization_id UUID REFERENCES organizations(id) ON DELETE CASCADE,
   username VARCHAR(100) NOT NULL,
@@ -41,52 +27,52 @@ CREATE TABLE IF NOT EXISTS users (
   UNIQUE(organization_id, username)
 );
 
--- Image metadata table
-CREATE TABLE IF NOT EXISTS images (
+-- Images table - metadata for generated and uploaded images
+CREATE TABLE images (
   id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
-  organization_id UUID REFERENCES organizations(id) ON DELETE CASCADE,
-  project_id UUID REFERENCES projects(id) ON DELETE CASCADE,
   user_id UUID REFERENCES users(id) ON DELETE CASCADE,
   filename VARCHAR(255) NOT NULL,
-  original_filename VARCHAR(255),
-  file_path VARCHAR(500) NOT NULL, -- Path in MinIO
-  category VARCHAR(50) NOT NULL CHECK (category IN ('uploads', 'generated', 'references')),
+  file_path VARCHAR(500), -- Legacy: local file path (for backward compatibility)
+  minio_key VARCHAR(500), -- MinIO object key
+  url VARCHAR(1000), -- Public or presigned URL to access the image
   original_prompt TEXT,
   enhanced_prompt TEXT,
   model_used VARCHAR(100),
   file_size BIGINT,
   content_type VARCHAR(100),
+  category VARCHAR(50) DEFAULT 'generated', -- 'generated', 'references', 'temp'
   metadata JSONB DEFAULT '{}',
   created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
   updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
 );
 
--- Upload sessions table (for tracking multi-part uploads)
-CREATE TABLE IF NOT EXISTS upload_sessions (
+-- Sessions table - for tracking user upload sessions
+CREATE TABLE upload_sessions (
   id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
-  organization_id UUID REFERENCES organizations(id) ON DELETE CASCADE,
-  project_id UUID REFERENCES projects(id) ON DELETE CASCADE,
   user_id UUID REFERENCES users(id) ON DELETE CASCADE,
   session_data JSONB NOT NULL,
   expires_at TIMESTAMP WITH TIME ZONE,
   created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
 );
 
--- Create indexes for better performance
-CREATE INDEX IF NOT EXISTS idx_organizations_slug ON organizations(slug);
-CREATE INDEX IF NOT EXISTS idx_projects_org_id ON projects(organization_id);
-CREATE INDEX IF NOT EXISTS idx_projects_org_slug ON projects(organization_id, slug);
-CREATE INDEX IF NOT EXISTS idx_users_org_id ON users(organization_id);
-CREATE INDEX IF NOT EXISTS idx_users_org_username ON users(organization_id, username);
-CREATE INDEX IF NOT EXISTS idx_images_org_id ON images(organization_id);
-CREATE INDEX IF NOT EXISTS idx_images_project_id ON images(project_id);
-CREATE INDEX IF NOT EXISTS idx_images_user_id ON images(user_id);
-CREATE INDEX IF NOT EXISTS idx_images_category ON images(category);
-CREATE INDEX IF NOT EXISTS idx_images_created_at ON images(created_at);
-CREATE INDEX IF NOT EXISTS idx_upload_sessions_user_id ON upload_sessions(user_id);
-CREATE INDEX IF NOT EXISTS idx_upload_sessions_expires_at ON upload_sessions(expires_at);
+-- Create indexes for better query performance
+CREATE INDEX idx_users_org_id ON users(organization_id);
+CREATE INDEX idx_users_username ON users(username);
+CREATE INDEX idx_images_user_id ON images(user_id);
+CREATE INDEX idx_images_created_at ON images(created_at);
+CREATE INDEX idx_images_category ON images(category);
+CREATE INDEX idx_images_minio_key ON images(minio_key);
+CREATE INDEX idx_upload_sessions_user_id ON upload_sessions(user_id);
+CREATE INDEX idx_upload_sessions_expires_at ON upload_sessions(expires_at);
 
--- Function to update the updated_at timestamp
+-- Insert demo organization and user for development/testing
+INSERT INTO organizations (id, name, slug, settings) VALUES
+  ('00000000-0000-0000-0000-000000000001', 'Demo Organization', 'demo', '{"description": "Default demo organization for testing"}');
+
+INSERT INTO users (id, organization_id, username, email, role, settings) VALUES
+  ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001', 'guest', 'guest@demo.banatie.app', 'user', '{"description": "Default guest user for testing"}');
+
+-- Create a function to update updated_at timestamp
 CREATE OR REPLACE FUNCTION update_updated_at_column()
 RETURNS TRIGGER AS $$
 BEGIN
@@ -96,34 +82,22 @@ END;
 $$ language 'plpgsql';
 
 -- Create triggers to automatically update updated_at
-CREATE TRIGGER update_organizations_updated_at BEFORE UPDATE ON organizations FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
-CREATE TRIGGER update_projects_updated_at BEFORE UPDATE ON projects FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
-CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
-CREATE TRIGGER update_images_updated_at BEFORE UPDATE ON images FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
-
--- Insert default organization and project
-INSERT INTO organizations (id, name, slug, description) VALUES
-  ('00000000-0000-0000-0000-000000000001', 'Default Organization', 'default', 'Default organization for development and testing')
-  ON CONFLICT (slug) DO NOTHING;
-
-INSERT INTO projects (id, organization_id, name, slug, description) VALUES
-  ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001', 'Main Project', 'main', 'Main project for image generation')
-  ON CONFLICT (organization_id, slug) DO NOTHING;
-
--- Insert system user
-INSERT INTO users (id, organization_id, username, role) VALUES
-  ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001', 'system', 'admin')
-  ON CONFLICT (organization_id, username) DO NOTHING;
-
--- Insert demo organization for development
-INSERT INTO organizations (id, name, slug, description) VALUES
-  ('00000000-0000-0000-0000-000000000002', 'Demo Organization', 'demo', 'Demo organization for testing and development')
-  ON CONFLICT (slug) DO NOTHING;
-
-INSERT INTO projects (id, organization_id, name, slug, description) VALUES
-  ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002', 'Sandbox Project', 'sandbox', 'Sandbox project for testing features')
-  ON CONFLICT (organization_id, slug) DO NOTHING;
-
-INSERT INTO users (id, organization_id, username, role) VALUES
-  ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002', 'guest', 'user')
-  ON CONFLICT (organization_id, username) DO NOTHING;
+CREATE TRIGGER update_organizations_updated_at
+  BEFORE UPDATE ON organizations
+  FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+
+CREATE TRIGGER update_users_updated_at
+  BEFORE UPDATE ON users
+  FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+
+CREATE TRIGGER update_images_updated_at
+  BEFORE UPDATE ON images
+  FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
+
+-- Display initialization completion message
+DO $$
+BEGIN
+  RAISE NOTICE 'Banatie database initialization completed successfully!';
+  RAISE NOTICE 'Created tables: organizations, users, images, upload_sessions';
+  RAISE NOTICE 'Created demo organization (id: 00000000-0000-0000-0000-000000000001) with guest user';
+END $$;
```
**src/app.ts**

```diff
@@ -5,7 +5,6 @@ import { Config } from './types/api';
 import { generateRouter } from './routes/generate';
 import { enhanceRouter } from './routes/enhance';
 import { textToImageRouter } from './routes/textToImage';
-import { imagesRouter } from './routes/images';
 import { errorHandler, notFoundHandler } from './middleware/errorHandler';
 
 // Load environment variables
@@ -83,7 +82,6 @@ export const createApp = (): Application => {
   app.use('/api', generateRouter);
   app.use('/api', enhanceRouter);
   app.use('/api', textToImageRouter);
-  app.use('/api', imagesRouter);
 
   // Error handling middleware (must be last)
   app.use(notFoundHandler);
```
**src/routes/images.ts** (removed, 138 lines)

```typescript
import { Router, Request, Response } from 'express';
import { StorageFactory } from '../services/StorageFactory';
import { asyncHandler } from '../middleware/errorHandler';

export const imagesRouter = Router();

/**
 * GET /api/images/:orgId/:projectId/:category/:filename
 * Serves images via presigned URLs (redirect approach)
 */
imagesRouter.get(
  '/images/:orgId/:projectId/:category/:filename',
  asyncHandler(async (req: Request, res: Response) => {
    const { orgId, projectId, category, filename } = req.params;

    // Validate category
    if (!['uploads', 'generated', 'references'].includes(category)) {
      return res.status(400).json({
        success: false,
        message: 'Invalid category'
      });
    }

    const storageService = StorageFactory.getInstance();

    try {
      // Check if file exists first (fast check)
      const exists = await storageService.fileExists(
        orgId,
        projectId,
        category as 'uploads' | 'generated' | 'references',
        filename
      );

      if (!exists) {
        return res.status(404).json({
          success: false,
          message: 'File not found'
        });
      }

      // Determine content type from filename
      const ext = filename.toLowerCase().split('.').pop();
      const contentType = {
        'png': 'image/png',
        'jpg': 'image/jpeg',
        'jpeg': 'image/jpeg',
        'gif': 'image/gif',
        'webp': 'image/webp',
        'svg': 'image/svg+xml'
      }[ext || ''] || 'application/octet-stream';

      // Set headers for optimal caching and performance
      res.setHeader('Content-Type', contentType);
      res.setHeader('Cache-Control', 'public, max-age=86400, immutable'); // 24 hours + immutable
      res.setHeader('ETag', `"${orgId}-${projectId}-${filename}"`); // Simple ETag

      // Handle conditional requests (304 Not Modified)
      const ifNoneMatch = req.headers['if-none-match'];
      if (ifNoneMatch === `"${orgId}-${projectId}-${filename}"`) {
        return res.status(304).end(); // Not Modified
      }

      // Stream the file directly through our API (memory efficient)
      const fileStream = await storageService.streamFile(
        orgId,
        projectId,
        category as 'uploads' | 'generated' | 'references',
        filename
      );

      // Handle stream errors
      fileStream.on('error', (streamError) => {
        console.error('Stream error:', streamError);
        if (!res.headersSent) {
          res.status(500).json({
            success: false,
            message: 'Error streaming file'
          });
        }
      });

      // Stream the file without loading into memory
      fileStream.pipe(res);

    } catch (error) {
      console.error('Failed to stream file:', error);
      return res.status(404).json({
        success: false,
        message: 'File not found'
      });
    }
  })
);

/**
 * GET /api/images/url/:orgId/:projectId/:category/:filename
 * Returns a presigned URL instead of redirecting
 */
imagesRouter.get(
  '/images/url/:orgId/:projectId/:category/:filename',
  asyncHandler(async (req: Request, res: Response) => {
    const { orgId, projectId, category, filename } = req.params;
    const { expiry = '3600' } = req.query; // Default 1 hour

    if (!['uploads', 'generated', 'references'].includes(category)) {
      return res.status(400).json({
        success: false,
        message: 'Invalid category'
      });
    }

    const storageService = StorageFactory.getInstance();

    try {
      const presignedUrl = await storageService.getPresignedDownloadUrl(
        orgId,
        projectId,
        category as 'uploads' | 'generated' | 'references',
        filename,
        parseInt(expiry as string, 10)
      );

      return res.json({
        success: true,
        url: presignedUrl,
        expiresIn: parseInt(expiry as string, 10)
      });

    } catch (error) {
      console.error('Failed to generate presigned URL:', error);
      return res.status(404).json({
        success: false,
        message: 'File not found or access denied'
      });
    }
  })
);
```
**src/routes/textToImage.ts**

```diff
@@ -82,7 +82,6 @@ textToImageRouter.post(
       data: {
         filename: result.filename!,
         filepath: result.filepath!,
-        ...(result.url && { url: result.url }),
         ...(result.description && { description: result.description }),
         model: result.model,
         generatedAt: timestamp,
```
**src/services/ImageGenService.ts**

```diff
@@ -1,13 +1,13 @@
 import { GoogleGenAI } from "@google/genai";
 // eslint-disable-next-line @typescript-eslint/no-var-requires
 const mime = require("mime") as any;
+import fs from "fs";
 import path from "path";
 import {
   ImageGenerationOptions,
   ImageGenerationResult,
   ReferenceImage,
 } from "../types/api";
-import { StorageFactory } from "./StorageFactory";
 
 export class ImageGenService {
   private ai: GoogleGenAI;
@@ -27,16 +27,11 @@ export class ImageGenService {
   async generateImage(
     options: ImageGenerationOptions,
   ): Promise<ImageGenerationResult> {
-    const { prompt, filename, referenceImages, orgId, projectId, userId } = options;
+    const { prompt, filename, referenceImages } = options;
     const timestamp = new Date().toISOString();
 
-    // Use default values if not provided
-    const finalOrgId = orgId || process.env['DEFAULT_ORG_ID'] || 'default';
-    const finalProjectId = projectId || process.env['DEFAULT_PROJECT_ID'] || 'main';
-    const finalUserId = userId || process.env['DEFAULT_USER_ID'] || 'system';
-
     console.log(
-      `[${timestamp}] Starting image generation: "${prompt.substring(0, 50)}..." for ${finalOrgId}/${finalProjectId}`,
+      `[${timestamp}] Starting image generation: "${prompt.substring(0, 50)}..."`,
     );
 
     try {
@@ -46,9 +41,6 @@
         config: { responseModalities: ["IMAGE", "TEXT"] },
         prompt,
         filename,
-        orgId: finalOrgId,
-        projectId: finalProjectId,
-        userId: finalUserId,
         ...(referenceImages && { referenceImages }),
         modelName: "Nano Banana",
       });
@@ -67,9 +59,6 @@
         config: { responseModalities: ["IMAGE"] },
         prompt,
         filename: `${filename}_fallback`,
-        orgId: finalOrgId,
-        projectId: finalProjectId,
-        userId: finalUserId,
         ...(referenceImages && { referenceImages }),
         modelName: "Imagen 4",
       });
@@ -95,13 +84,10 @@
     config: { responseModalities: string[] };
     prompt: string;
     filename: string;
-    orgId: string;
-    projectId: string;
-    userId: string;
     referenceImages?: ReferenceImage[];
     modelName: string;
   }): Promise<ImageGenerationResult> {
-    const { model, config, prompt, filename, orgId, projectId, userId, referenceImages, modelName } =
+    const { model, config, prompt, filename, referenceImages, modelName } =
       params;
 
     try {
@@ -157,7 +143,7 @@
     ) {
       const content = response.candidates[0].content;
       let generatedDescription = "";
-      let uploadResult = null;
+      let savedImagePath = "";
 
       for (let index = 0; index < (content.parts?.length || 0); index++) {
         const part = content.parts?.[index];
@@ -168,28 +154,16 @@
             part.inlineData.mimeType || "",
           );
           const finalFilename = `${filename}.${fileExtension}`;
-          const contentType = part.inlineData.mimeType || `image/${fileExtension}`;
+          const filepath = path.join("./results", finalFilename);
 
           console.log(
-            `[${new Date().toISOString()}] Uploading image to MinIO: ${finalFilename}`,
+            `[${new Date().toISOString()}] Saving image: ${finalFilename}`,
           );
 
           const buffer = Buffer.from(part.inlineData.data || "", "base64");
+          await this.saveImageFile(filepath, buffer);
 
-          // Upload to MinIO storage
-          const storageService = StorageFactory.getInstance();
-          uploadResult = await storageService.uploadFile(
-            orgId,
-            projectId,
-            'generated',
-            finalFilename,
-            buffer,
-            contentType
-          );
-
-          console.log(
-            `[${new Date().toISOString()}] Image uploaded successfully: ${uploadResult.path}`,
-          );
+          savedImagePath = filepath;
         } else if (part.text) {
           generatedDescription = part.text;
           console.log(
@@ -198,12 +172,11 @@
         }
       }
 
-      if (uploadResult && uploadResult.success) {
+      if (savedImagePath) {
         return {
           success: true,
-          filename: uploadResult.filename,
-          filepath: uploadResult.path,
-          url: uploadResult.url,
+          filename: path.basename(savedImagePath),
+          filepath: savedImagePath,
           description: generatedDescription,
           model: modelName,
         };
@@ -228,6 +201,33 @@
       }
     }
 
+  /**
+   * Save image buffer to file system
+   */
+  private async saveImageFile(filepath: string, buffer: Buffer): Promise<void> {
+    return new Promise((resolve, reject) => {
+      // Ensure the results directory exists
+      const dir = path.dirname(filepath);
+      if (!fs.existsSync(dir)) {
+        fs.mkdirSync(dir, { recursive: true });
+      }
+
+      fs.writeFile(filepath, buffer, (err) => {
+        if (err) {
+          console.error(
+            `[${new Date().toISOString()}] Error saving file ${filepath}:`,
+            err,
+          );
+          reject(err);
+        } else {
+          console.log(
+            `[${new Date().toISOString()}] File saved successfully: ${filepath}`,
+          );
+          resolve();
+        }
+      });
+    });
+  }
 
   /**
    * Validate reference images
```
**src/services/MinioStorageService.ts** (removed, 417 lines; listing truncated in this view)

```typescript
import { Client as MinioClient } from 'minio';
import { StorageService, FileMetadata, UploadResult } from './StorageService';

export class MinioStorageService implements StorageService {
  private client: MinioClient;
  private bucketName: string;
  private publicUrl: string;

  constructor(
    endpoint: string,
    accessKey: string,
    secretKey: string,
    useSSL: boolean = false,
    bucketName: string = 'banatie',
    publicUrl?: string
  ) {
    // Parse endpoint to separate hostname and port
    const cleanEndpoint = endpoint.replace(/^https?:\/\//, '');
    const [hostname, portStr] = cleanEndpoint.split(':');
    const port = portStr ? parseInt(portStr, 10) : (useSSL ? 443 : 9000);

    if (!hostname) {
      throw new Error(`Invalid MinIO endpoint: ${endpoint}`);
    }

    this.client = new MinioClient({
      endPoint: hostname,
      port: port,
      useSSL,
      accessKey,
      secretKey
    });
    this.bucketName = bucketName;
    this.publicUrl = publicUrl || `${useSSL ? 'https' : 'http'}://${endpoint}`;
  }

  private getFilePath(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): string {
    const now = new Date();
    const year = now.getFullYear();
    const month = String(now.getMonth() + 1).padStart(2, '0');

    return `${orgId}/${projectId}/${category}/${year}-${month}/${filename}`;
  }

  private generateUniqueFilename(originalFilename: string): string {
    // Sanitize filename first
    const sanitized = this.sanitizeFilename(originalFilename);

    const timestamp = Date.now();
    const random = Math.random().toString(36).substring(2, 8);
    const ext = sanitized.includes('.')
      ? sanitized.substring(sanitized.lastIndexOf('.'))
      : '';
    const name = sanitized.includes('.')
      ? sanitized.substring(0, sanitized.lastIndexOf('.'))
      : sanitized;

    return `${name}-${timestamp}-${random}${ext}`;
  }

  private sanitizeFilename(filename: string): string {
    // Remove dangerous characters and path traversal attempts
    return filename
      .replace(/[<>:"/\\|?*\x00-\x1f]/g, '') // Remove dangerous chars
      .replace(/\.\./g, '') // Remove path traversal
      .replace(/^\.+/, '') // Remove leading dots
      .trim()
      .substring(0, 255); // Limit length
  }

  private validateFilePath(orgId: string, projectId: string, category: string, filename: string): void {
    // Validate orgId
    if (!orgId || !/^[a-zA-Z0-9_-]+$/.test(orgId) || orgId.length > 50) {
      throw new Error('Invalid organization ID: must be alphanumeric with dashes/underscores, max 50 chars');
    }

    // Validate projectId
    if (!projectId || !/^[a-zA-Z0-9_-]+$/.test(projectId) || projectId.length > 50) {
      throw new Error('Invalid project ID: must be alphanumeric with dashes/underscores, max 50 chars');
    }

    // Validate category
    if (!['uploads', 'generated', 'references'].includes(category)) {
      throw new Error('Invalid category: must be uploads, generated, or references');
    }

    // Validate filename
    if (!filename || filename.length === 0 || filename.length > 255) {
      throw new Error('Invalid filename: must be 1-255 characters');
    }

    // Check for path traversal and dangerous patterns
    if (filename.includes('..') || filename.includes('/') || filename.includes('\\')) {
      throw new Error('Invalid characters in filename: path traversal not allowed');
    }

    // Prevent null bytes and control characters
    if (/[\x00-\x1f]/.test(filename)) {
      throw new Error('Invalid filename: control characters not allowed');
    }
  }

  async createBucket(): Promise<void> {
    const exists = await this.client.bucketExists(this.bucketName);
    if (!exists) {
      await this.client.makeBucket(this.bucketName, 'us-east-1');
      console.log(`Created bucket: ${this.bucketName}`);
    }

    // Note: With SNMD and presigned URLs, we don't need bucket policies
    console.log(`Bucket ${this.bucketName} ready for presigned URL access`);
  }

  async bucketExists(): Promise<boolean> {
    return await this.client.bucketExists(this.bucketName);
  }

  async uploadFile(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string,
    buffer: Buffer,
    contentType: string
  ): Promise<UploadResult> {
    // Validate inputs first
    this.validateFilePath(orgId, projectId, category, filename);

    if (!buffer || buffer.length === 0) {
      throw new Error('Buffer cannot be empty');
    }

    if (!contentType || contentType.trim().length === 0) {
      throw new Error('Content type is required');
    }

    // Ensure bucket exists
    await this.createBucket();

    // Generate unique filename to avoid conflicts
    const uniqueFilename = this.generateUniqueFilename(filename);
    const filePath = this.getFilePath(orgId, projectId, category, uniqueFilename);

    const metadata = {
      'Content-Type': contentType,
      'X-Amz-Meta-Original-Name': filename,
      'X-Amz-Meta-Category': category,
      'X-Amz-Meta-Project': projectId,
      'X-Amz-Meta-Organization': orgId,
      'X-Amz-Meta-Upload-Time': new Date().toISOString()
    };

    console.log(`Uploading file to: ${this.bucketName}/${filePath}`);

    const result = await this.client.putObject(
      this.bucketName,
      filePath,
      buffer,
      buffer.length,
      metadata
    );

    const url = this.getPublicUrl(orgId, projectId, category, uniqueFilename);

    console.log(`Generated API URL: ${url}`);

    return {
      success: true,
      filename: uniqueFilename,
      path: filePath,
      url,
      size: buffer.length,
      contentType
    };
  }

  async downloadFile(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): Promise<Buffer> {
    this.validateFilePath(orgId, projectId, category, filename);
    const filePath = this.getFilePath(orgId, projectId, category, filename);

    const stream = await this.client.getObject(this.bucketName, filePath);

    return new Promise((resolve, reject) => {
      const chunks: Buffer[] = [];
      stream.on('data', (chunk) => chunks.push(chunk));
      stream.on('end', () => resolve(Buffer.concat(chunks)));
      stream.on('error', reject);
    });
  }

  async streamFile(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): Promise<import('stream').Readable> {
    this.validateFilePath(orgId, projectId, category, filename);
    const filePath = this.getFilePath(orgId, projectId, category, filename);

    // Return the stream directly without buffering - memory efficient!
    return await this.client.getObject(this.bucketName, filePath);
  }

  async deleteFile(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): Promise<void> {
    this.validateFilePath(orgId, projectId, category, filename);
    const filePath = this.getFilePath(orgId, projectId, category, filename);
    await this.client.removeObject(this.bucketName, filePath);
  }

  getPublicUrl(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): string {
    this.validateFilePath(orgId, projectId, category, filename);
    // Production-ready: Return API URL for presigned URL access
    const apiBaseUrl = process.env['API_BASE_URL'] || 'http://localhost:3000';
    return `${apiBaseUrl}/api/images/${orgId}/${projectId}/${category}/${filename}`;
  }

  async getPresignedUploadUrl(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string,
    expirySeconds: number,
    contentType: string
  ): Promise<string> {
    this.validateFilePath(orgId, projectId, category, filename);

    if (!contentType || contentType.trim().length === 0) {
      throw new Error('Content type is required for presigned upload URL');
    }

    const filePath = this.getFilePath(orgId, projectId, category, filename);
    return await this.client.presignedPutObject(this.bucketName, filePath, expirySeconds);
  }

  async getPresignedDownloadUrl(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string,
    expirySeconds: number = 86400 // 24 hours default
  ): Promise<string> {
    this.validateFilePath(orgId, projectId, category, filename);
    const filePath = this.getFilePath(orgId, projectId, category, filename);
    const presignedUrl = await this.client.presignedGetObject(this.bucketName, filePath, expirySeconds);

    // Replace internal Docker hostname with public URL if configured
    if (this.publicUrl) {
      const clientEndpoint = this.client.host + (this.client.port ? `:${this.client.port}` : '');
      const publicEndpoint = this.publicUrl.replace(/^https?:\/\//, '');

      return presignedUrl.replace(
        `${this.client.protocol}//${clientEndpoint}`,
        this.publicUrl
      );
    }

    return presignedUrl;
  }

  async listProjectFiles(
    orgId: string,
    projectId: string,
    category?: 'uploads' | 'generated' | 'references'
  ): Promise<FileMetadata[]> {
    const prefix = category ? `${orgId}/${projectId}/${category}/` : `${orgId}/${projectId}/`;

    const files: FileMetadata[] = [];

    return new Promise((resolve, reject) => {
      const stream = this.client.listObjects(this.bucketName, prefix, true);

      stream.on('data', async (obj) => {
        try {
          if (!obj.name) return;

          const metadata = await this.client.statObject(this.bucketName, obj.name);

          const pathParts = obj.name.split('/');
          const filename = pathParts[pathParts.length - 1];
          const categoryFromPath = pathParts[2] as 'uploads' | 'generated' | 'references';

          if (!filename || !categoryFromPath) {
            return;
          }

          files.push({
            key: `${this.bucketName}/${obj.name}`,
            filename,
            contentType: metadata.metaData?.['content-type'] || 'application/octet-stream',
            size: obj.size || 0,
            url: this.getPublicUrl(orgId, projectId, categoryFromPath, filename),
            createdAt: obj.lastModified || new Date()
          });
        } catch (error) {
          console.error(`Error processing file ${obj.name}:`, error);
        }
      });

      stream.on('end', () => resolve(files));
      stream.on('error', reject);
    });
  }

  parseKey(key: string): {
    orgId: string;
    projectId: string;
    category: 'uploads' | 'generated' | 'references';
    filename: string;
  } | null {
    try {
      // Key format: banatie/orgId/projectId/category/year-month/filename
      const match = key.match(/^banatie\/([^/]+)\/([^/]+)\/(uploads|generated|references)\/[^/]+\/(.+)$/);

      if (!match) {
```
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const [, orgId, projectId, category, filename] = match;
|
|
||||||
|
|
||||||
if (!orgId || !projectId || !category || !filename) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
orgId,
|
|
||||||
projectId,
|
|
||||||
category: category as 'uploads' | 'generated' | 'references',
|
|
||||||
filename
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
  // Methods required by the StorageService interface

  async fileExists(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): Promise<boolean> {
    try {
      this.validateFilePath(orgId, projectId, category, filename);
      const filePath = this.getFilePath(orgId, projectId, category, filename);
      await this.client.statObject(this.bucketName, filePath);
      return true;
    } catch (error) {
      return false;
    }
  }

  async listFiles(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    prefix?: string
  ): Promise<FileMetadata[]> {
    this.validateFilePath(orgId, projectId, category, 'dummy.txt'); // Validate path components

    const basePath = `${orgId}/${projectId}/${category}/`;
    const searchPrefix = prefix ? `${basePath}${prefix}` : basePath;

    const files: FileMetadata[] = [];

    return new Promise((resolve, reject) => {
      const stream = this.client.listObjects(this.bucketName, searchPrefix, true);

      stream.on('data', async (obj) => {
        if (!obj.name || !obj.size) return;

        try {
          const pathParts = obj.name.split('/');
          const filename = pathParts[pathParts.length - 1];

          if (!filename) return;

          const metadata = await this.client.statObject(this.bucketName, obj.name);

          files.push({
            filename,
            size: obj.size,
            contentType: metadata.metaData?.['content-type'] || 'application/octet-stream',
            lastModified: obj.lastModified || new Date(),
            etag: metadata.etag,
            path: obj.name
          });
        } catch (error) {
          console.error(`Error processing file ${obj.name}:`, error);
        }
      });

      stream.on('end', () => resolve(files));
      stream.on('error', reject);
    });
  }
}
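For orientation, here is a minimal usage sketch of the service above. The endpoint, credentials, and tenant IDs are placeholders, and the constructor argument order is assumed to match the `new MinioStorageService(...)` call in the StorageFactory below.

import { MinioStorageService } from './MinioStorageService';

async function demo(): Promise<void> {
  // Hypothetical local configuration; mirrors the .env.docker defaults.
  const storage = new MinioStorageService(
    'localhost:9000',        // endpoint (assumed host:port form)
    'minioadmin',            // access key
    'minioadmin',            // secret key
    false,                   // useSSL
    'banatie',               // bucket name
    'http://localhost:9000'  // public URL used to rewrite presigned links
  );

  // Objects land under orgId/projectId/category/... inside the bucket.
  const result = await storage.uploadFile(
    'demo',                  // orgId (placeholder)
    'main',                  // projectId (placeholder)
    'uploads',               // category
    'hello.txt',
    Buffer.from('hello, bucket'),
    'text/plain'
  );
  console.log(result.url);   // API URL served via /api/images/...

  // Short-lived direct link to the same object (1 hour).
  const link = await storage.getPresignedDownloadUrl(
    'demo', 'main', 'uploads', result.filename, 3600
  );
  console.log(link);
}

demo().catch(console.error);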
@ -1,131 +0,0 @@
import { StorageService } from './StorageService';
import { MinioStorageService } from './MinioStorageService';

export class StorageFactory {
  private static instance: StorageService | null = null;
  private static initializationPromise: Promise<StorageService> | null = null;

  static async getInstance(): Promise<StorageService> {
    if (this.instance) {
      return this.instance;
    }

    if (this.initializationPromise) {
      return await this.initializationPromise;
    }

    this.initializationPromise = this.createStorageServiceWithRetry();

    try {
      this.instance = await this.initializationPromise;
      return this.instance;
    } catch (error) {
      this.initializationPromise = null;
      throw error;
    }
  }

  // Synchronous version for backward compatibility (with graceful degradation)
  static getInstanceSync(): StorageService {
    if (!this.instance) {
      try {
        this.instance = this.createStorageService();
      } catch (error) {
        console.error('Failed to create storage service:', error);
        throw new Error('Storage service unavailable. Please check MinIO configuration.');
      }
    }
    return this.instance;
  }

  private static async createStorageServiceWithRetry(): Promise<StorageService> {
    const maxRetries = 3;
    const baseDelay = 1000; // 1 second

    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        console.log(`Attempting to create storage service (attempt ${attempt}/${maxRetries})`);

        const service = this.createStorageService();

        // Test the connection by checking if bucket exists
        await service.bucketExists();

        console.log('Storage service created successfully');
        return service;
      } catch (error) {
        console.error(`Storage service creation attempt ${attempt} failed:`, error);

        if (attempt === maxRetries) {
          throw new Error(
            `Failed to initialize storage service after ${maxRetries} attempts. ` +
            `Last error: ${error instanceof Error ? error.message : 'Unknown error'}`
          );
        }

        // Exponential backoff
        const delay = baseDelay * Math.pow(2, attempt - 1);
        console.log(`Waiting ${delay}ms before retry...`);
        await this.sleep(delay);
      }
    }

    throw new Error('Unexpected error in storage service creation');
  }

  private static sleep(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms));
  }

  private static createStorageService(): StorageService {
    const storageType = process.env['STORAGE_TYPE'] || 'minio';

    try {
      switch (storageType.toLowerCase()) {
        case 'minio': {
          const endpoint = process.env['MINIO_ENDPOINT'];
          const accessKey = process.env['MINIO_ACCESS_KEY'];
          const secretKey = process.env['MINIO_SECRET_KEY'];
          const useSSL = process.env['MINIO_USE_SSL'] === 'true';
          const bucketName = process.env['MINIO_BUCKET_NAME'] || 'banatie';
          const publicUrl = process.env['MINIO_PUBLIC_URL'];

          if (!endpoint || !accessKey || !secretKey) {
            throw new Error(
              'MinIO configuration missing. Required: MINIO_ENDPOINT, MINIO_ACCESS_KEY, MINIO_SECRET_KEY'
            );
          }

          console.log('Initializing MinIO Storage Service:');
          console.log(`  Endpoint: ${endpoint}`);
          console.log(`  Bucket: ${bucketName}`);
          console.log(`  SSL: ${useSSL}`);
          console.log(`  Public URL: ${publicUrl}`);

          return new MinioStorageService(
            endpoint,
            accessKey,
            secretKey,
            useSSL,
            bucketName,
            publicUrl
          );
        }

        default:
          throw new Error(`Unsupported storage type: ${storageType}`);
      }
    } catch (error) {
      console.error('Error creating storage service:', error);
      throw error;
    }
  }

  // Reset instance for testing
  static resetInstance(): void {
    this.instance = null;
    this.initializationPromise = null;
  }
}
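The factory above caches a single StorageService and shares one initialization promise, so concurrent callers during startup trigger exactly one MinIO probe (with 1s/2s/4s backoff). A sketch of typical consumption, assuming the module is wired as shown; the handler name and file arguments are placeholders:

import { StorageFactory } from './StorageFactory';

export async function imageHandler(): Promise<void> {
  // All concurrent callers await the same initialization promise,
  // so bucketExists() is probed once per process.
  const storage = await StorageFactory.getInstance();

  const exists = await storage.fileExists('demo', 'main', 'uploads', 'hello.txt');
  console.log(`hello.txt exists: ${exists}`);
}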
@ -1,154 +0,0 @@
import { Readable } from 'stream';

export interface FileMetadata {
  filename: string;
  size: number;
  contentType: string;
  lastModified: Date;
  etag?: string;
  path: string;
}

export interface UploadResult {
  success: boolean;
  filename: string;
  path: string;
  url: string; // API URL for accessing the file
  size: number;
  contentType: string;
  error?: string;
}

export interface StorageService {
  /**
   * Create the main bucket if it doesn't exist
   */
  createBucket(): Promise<void>;

  /**
   * Check if the main bucket exists
   */
  bucketExists(): Promise<boolean>;

  /**
   * Upload a file to storage
   * @param orgId Organization ID
   * @param projectId Project ID
   * @param category File category (uploads, generated, references)
   * @param filename Original filename
   * @param buffer File buffer
   * @param contentType MIME type
   */
  uploadFile(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string,
    buffer: Buffer,
    contentType: string
  ): Promise<UploadResult>;

  /**
   * Download a file from storage
   * @param orgId Organization ID
   * @param projectId Project ID
   * @param category File category
   * @param filename Filename to download
   */
  downloadFile(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): Promise<Buffer>;

  /**
   * Stream a file from storage (memory efficient)
   * @param orgId Organization ID
   * @param projectId Project ID
   * @param category File category
   * @param filename Filename to stream
   */
  streamFile(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): Promise<Readable>;

  /**
   * Generate a presigned URL for downloading a file
   * @param orgId Organization ID
   * @param projectId Project ID
   * @param category File category
   * @param filename Filename
   * @param expirySeconds URL expiry time in seconds
   */
  getPresignedDownloadUrl(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string,
    expirySeconds: number
  ): Promise<string>;

  /**
   * Generate a presigned URL for uploading a file
   * @param orgId Organization ID
   * @param projectId Project ID
   * @param category File category
   * @param filename Filename
   * @param expirySeconds URL expiry time in seconds
   * @param contentType MIME type
   */
  getPresignedUploadUrl(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string,
    expirySeconds: number,
    contentType: string
  ): Promise<string>;

  /**
   * List files in a specific path
   * @param orgId Organization ID
   * @param projectId Project ID
   * @param category File category
   * @param prefix Optional prefix to filter files
   */
  listFiles(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    prefix?: string
  ): Promise<FileMetadata[]>;

  /**
   * Delete a file from storage
   * @param orgId Organization ID
   * @param projectId Project ID
   * @param category File category
   * @param filename Filename to delete
   */
  deleteFile(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): Promise<void>;

  /**
   * Check if a file exists
   * @param orgId Organization ID
   * @param projectId Project ID
   * @param category File category
   * @param filename Filename to check
   */
  fileExists(
    orgId: string,
    projectId: string,
    category: 'uploads' | 'generated' | 'references',
    filename: string
  ): Promise<boolean>;
}
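Because handlers depend only on this interface, the MinIO backend can be swapped without touching callers. A sketch of an interface-typed consumer; the function name and fixed filename are illustrative, not from this repo:

import type { StorageService, UploadResult } from './StorageService';

// Persist a generated image without knowing which backend is active.
async function saveGeneratedImage(
  storage: StorageService,
  orgId: string,
  projectId: string,
  png: Buffer
): Promise<UploadResult> {
  return storage.uploadFile(orgId, projectId, 'generated', 'result.png', png, 'image/png');
}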
@ -32,7 +32,6 @@ export interface GenerateImageResponse {
   data?: {
     filename: string;
     filepath: string;
-    url?: string; // API URL for accessing the image
     description?: string;
     model: string;
     generatedAt: string;
@ -58,9 +57,6 @@ export interface ImageGenerationOptions {
   prompt: string;
   filename: string;
   referenceImages?: ReferenceImage[];
-  orgId?: string;
-  projectId?: string;
-  userId?: string;
 }

 export interface ReferenceImage {
@ -73,7 +69,6 @@ export interface ImageGenerationResult {
   success: boolean;
   filename?: string;
   filepath?: string;
-  url?: string; // API URL for accessing the image
   description?: string;
   model: string;
   error?: string;
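After these removals, ImageGenerationOptions no longer carries tenancy fields or a stored URL. Inferring from the hunks above, a caller now passes only the generation inputs; the import path and values below are hypothetical:

import type { ImageGenerationOptions } from './types'; // path assumed

const options: ImageGenerationOptions = {
  prompt: 'A banana wearing a tie, studio lighting',
  filename: 'banana-tie.png'
  // referenceImages remains optional; orgId/projectId/userId were removed
};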