// NeahStable/lib/s3.ts

import { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand, ListObjectsV2Command } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

// S3 Configuration
// Uses existing MINIO_* environment variables that are shared across the application
export const S3_CONFIG = {
  // Remove trailing slash from endpoint if present
  endpoint: (process.env.MINIO_S3_UPLOAD_BUCKET_URL || process.env.S3_ENDPOINT || 'https://dome-api.slm-lab.net').replace(/\/$/, ''),
  region: process.env.MINIO_AWS_REGION || process.env.S3_REGION || 'us-east-1',
  // Use MINIO_AWS_S3_UPLOAD_BUCKET_NAME for the pages bucket (shared with other parts of the app)
  bucket: process.env.MINIO_AWS_S3_UPLOAD_BUCKET_NAME || process.env.S3_BUCKET || 'pages',
  accessKey: process.env.MINIO_ACCESS_KEY || process.env.S3_ACCESS_KEY || '',
  secretKey: process.env.MINIO_SECRET_KEY || process.env.S3_SECRET_KEY || ''
};
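
// Example environment configuration (illustrative sketch only: the variable names
// come from the fallbacks above, but the hostname and credential values are
// placeholders, not values used by this project):
//
//   MINIO_S3_UPLOAD_BUCKET_URL=https://minio.example.com
//   MINIO_AWS_REGION=us-east-1
//   MINIO_AWS_S3_UPLOAD_BUCKET_NAME=pages
//   MINIO_ACCESS_KEY=<access key>
//   MINIO_SECRET_KEY=<secret key>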

// Validate required S3 configuration
if (!S3_CONFIG.accessKey || !S3_CONFIG.secretKey) {
  console.error('⚠️ S3 credentials are missing! Please set MINIO_ACCESS_KEY and MINIO_SECRET_KEY environment variables.');
  if (process.env.NODE_ENV === 'production') {
    throw new Error('S3 credentials are required in production environment');
  }
}

// Initialize S3 client for MinIO
export const s3Client = new S3Client({
  region: S3_CONFIG.region,
  endpoint: S3_CONFIG.endpoint,
  credentials: {
    accessKeyId: S3_CONFIG.accessKey,
    secretAccessKey: S3_CONFIG.secretKey
  },
  forcePathStyle: true // Required for MinIO
});

/**
 * Upload a file to S3
 * Similar implementation to mission-uploads.ts, which works correctly with MinIO
 */
export async function putObject(
  key: string,
  content: string | Buffer,
  contentType?: string
): Promise<{ key: string; url?: string }> {
  // Convert string to Buffer consistently, exactly like mission-uploads.ts
  // This ensures the same encoding and buffer handling that works for mission attachments
  // For empty strings, use an empty buffer instead of Buffer.from('') to avoid MinIO issues
  let buffer: Buffer;
  if (typeof content === 'string') {
    buffer = content.length > 0
      ? Buffer.from(content, 'utf-8')
      : Buffer.alloc(0);
  } else if (Buffer.isBuffer(content)) {
    buffer = content;
  } else {
    buffer = Buffer.from(content);
  }
  // Use the same pattern as mission-uploads.ts: direct s3Client.send with PutObjectCommand
  await s3Client.send(new PutObjectCommand({
    Bucket: S3_CONFIG.bucket,
    Key: key,
    Body: buffer,
    ContentType: contentType || 'text/plain',
  }));
  return { key };
}
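
// Illustrative usage of putObject (the key and content below are made-up
// examples, not a layout required by this function):
//
//   const { key } = await putObject('user-abc123/notes/todo.md', '# Todo', 'text/markdown');
//   // The body is stored UTF-8 encoded in S3_CONFIG.bucket under the given key.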

/**
 * Get object content from S3
 */
export async function getObjectContent(key: string): Promise<string | null> {
  try {
    const command = new GetObjectCommand({
      Bucket: S3_CONFIG.bucket,
      Key: key,
    });
    const response = await s3Client.send(command);
    if (!response.Body) {
      return null;
    }
    // Collect the streamed response body into a single buffer, then decode as UTF-8
    const chunks: Uint8Array[] = [];
    for await (const chunk of response.Body as any) {
      chunks.push(chunk);
    }
    const buffer = Buffer.concat(chunks);
    return buffer.toString('utf-8');
  } catch (error) {
    console.error('Error getting object content:', error);
    return null;
  }
}
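
// Illustrative usage of getObjectContent (hypothetical key): returns the object
// body decoded as UTF-8, or null if the object is missing or the request fails:
//
//   const text = await getObjectContent('user-abc123/notes/todo.md');
//   if (text !== null) {
//     console.log(`Fetched ${text.length} characters`);
//   }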

/**
 * Delete an object from S3
 */
export async function deleteObject(key: string): Promise<void> {
  const command = new DeleteObjectCommand({
    Bucket: S3_CONFIG.bucket,
    Key: key,
  });
  await s3Client.send(command);
}
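
// Illustrative usage of deleteObject (hypothetical key). Unlike getObjectContent,
// errors are not caught here and will propagate to the caller:
//
//   await deleteObject('user-abc123/notes/todo.md');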

/**
 * List objects for a user in a specific folder
 */
export async function listUserObjects(userId: string, folder: string): Promise<Array<{ key: string; name: string; size?: number; lastModified?: Date }>> {
  // Normalize folder name to lowercase for consistency
  const normalizedFolder = folder.toLowerCase();
  const prefix = `user-${userId}/${normalizedFolder}/`;
  // Enable debug logging only in development
  const isDebug = process.env.NODE_ENV === 'development';
  if (isDebug) {
    console.log(`[listUserObjects] Listing objects with prefix: ${prefix} in bucket: ${S3_CONFIG.bucket}`);
  }
  // Note: a single ListObjectsV2 call returns at most 1,000 keys; pagination is not handled here
  const command = new ListObjectsV2Command({
    Bucket: S3_CONFIG.bucket,
    Prefix: prefix,
    // Don't use Delimiter so we get all objects, not just "folders"
  });
  const response = await s3Client.send(command);
  const objects = response.Contents || [];
  if (isDebug) {
    console.log(`[listUserObjects] Found ${objects.length} raw objects for prefix ${prefix}`);
  }
  // Filter out:
  // - Objects that are "folders" (end with /)
  // - Placeholder files
  // - Objects that don't match our prefix
  const filtered = objects
    .filter(obj => {
      if (!obj.Key) {
        return false;
      }
      // Exclude folder markers (end with /)
      if (obj.Key.endsWith('/')) {
        return false;
      }
      // Exclude placeholder files
      if (obj.Key.includes('.placeholder')) {
        return false;
      }
      // Ensure it matches our prefix
      if (!obj.Key.startsWith(prefix)) {
        return false;
      }
      // Additional check: ensure it's actually in the folder (not a subfolder)
      const keyWithoutPrefix = obj.Key.substring(prefix.length);
      // If there's another / in the remaining path, it's in a subfolder, so skip it
      if (keyWithoutPrefix.includes('/')) {
        return false;
      }
      return true;
    })
    .map(obj => ({
      key: obj.Key!,
      name: obj.Key!.split('/').pop() || obj.Key!,
      size: obj.Size,
      lastModified: obj.LastModified
    }));
  if (isDebug) {
    console.log(`[listUserObjects] Returning ${filtered.length} filtered objects`);
  }
  return filtered;
}
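
// Illustrative usage of listUserObjects (hypothetical user id). Keys live under
// the `user-<userId>/<folder>/` prefix; folder markers, .placeholder files and
// anything in nested subfolders are filtered out:
//
//   const notes = await listUserObjects('abc123', 'Notes');
//   // e.g. [{ key: 'user-abc123/notes/todo.md', name: 'todo.md', size: 42, lastModified: ... }]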

/**
 * Get public URL for an object
 * Simple URL construction for MinIO/S3 objects
 */
export function getPublicUrl(filePath: string, bucket?: string): string {
  if (!filePath) return '';
  if (filePath.startsWith('http')) return filePath; // Already a full URL
  // Remove leading slash if present
  const cleanPath = filePath.startsWith('/') ? filePath.substring(1) : filePath;
  const endpoint = S3_CONFIG.endpoint?.replace(/\/$/, ''); // Remove trailing slash if present
  const bucketName = bucket || S3_CONFIG.bucket;
  // Return the original path if no endpoint is configured
  if (!endpoint) return cleanPath;
  // Construct and return the full URL
  return `${endpoint}/${bucketName}/${cleanPath}`;
}
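
// Illustrative usage of getPublicUrl (endpoint and paths are placeholders).
// Assuming S3_CONFIG.endpoint is 'https://minio.example.com' and the bucket is 'pages':
//
//   getPublicUrl('user-abc123/notes/todo.md');
//   // -> 'https://minio.example.com/pages/user-abc123/notes/todo.md'
//
//   getPublicUrl('https://cdn.example.com/logo.png');
//   // -> returned unchanged, since it is already a full URL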

/**
 * Create standard folder structure for a user
 * In S3/MinIO, folders don't need to be explicitly created: they exist as prefixes.
 * We only create a placeholder file if the folder doesn't exist or is empty.
 */
export async function createUserFolderStructure(userId: string): Promise<void> {
  const folders = ['notes', 'diary', 'health', 'contacts'];
  for (const folder of folders) {
    try {
      // Check whether the folder already exists by listing its objects
      const existingFiles = await listUserObjects(userId, folder);
      // If the folder already has files, skip creating a placeholder
      if (existingFiles.length > 0) {
        console.log(`Folder user-${userId}/${folder}/ already exists with ${existingFiles.length} file(s), skipping creation`);
        continue;
      }
      // Check whether a placeholder already exists
      const placeholderKey = `user-${userId}/${folder}/.placeholder`;
      const placeholderExists = await getObjectContent(placeholderKey);
      if (placeholderExists) {
        console.log(`Folder user-${userId}/${folder}/ already initialized, skipping`);
        continue;
      }
      // The folder is empty and no placeholder exists, so create one
      const placeholderContent = `Folder initialized at ${new Date().toISOString()}`;
      await putObject(placeholderKey, placeholderContent, 'text/plain');
      console.log(`Initialized folder: user-${userId}/${folder}/`);
    } catch (error) {
      console.error(`Error initializing folder ${folder} for user ${userId}:`, error);
      // Continue with other folders even if one fails
    }
  }
}
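
// Illustrative usage of createUserFolderStructure (hypothetical user id), e.g.
// called once after a user account is created:
//
//   await createUserFolderStructure('abc123');
//   // Ensures user-abc123/notes/, diary/, health/ and contacts/ each contain at
//   // least a .placeholder object, without overwriting existing content.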

/**
 * Upload a mission file (logo or attachment) via the /api/missions/upload route
 */
export async function uploadMissionFile({
  missionId,
  file,
  type, // 'logo' or 'attachment'
}: {
  missionId: string;
  file: File;
  type: 'logo' | 'attachment';
}): Promise<{ success: boolean; data?: any; error?: string }> {
  const formData = new FormData();
  formData.append('missionId', missionId);
  formData.append('type', type);
  formData.append('file', file);
  const res = await fetch('/api/missions/upload', {
    method: 'POST',
    body: formData,
  });
  if (!res.ok) {
    const err = await res.json().catch(() => ({}));
    return { success: false, error: err.error || 'Upload failed' };
  }
  const data = await res.json();
  return { success: true, data };
}
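
// Illustrative client-side usage of uploadMissionFile (the mission id and File
// object are placeholders; this helper assumes a browser context with fetch and FormData):
//
//   const result = await uploadMissionFile({ missionId: 'mission-42', file, type: 'logo' });
//   if (!result.success) {
//     console.error(result.error);
//   }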