W n8n attention vm

This commit is contained in:
alma 2025-05-24 12:36:21 +02:00
parent ce10c2c33c
commit 6d226f7dd7
4 changed files with 6 additions and 757 deletions

@@ -2,16 +2,15 @@ import { NextRequest, NextResponse } from 'next/server';
 import { getServerSession } from 'next-auth';
 import { authOptions } from '@/app/api/auth/options';
 import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3';
-import { S3_CONFIG } from '@/lib/s3';
 import { NoSuchKey } from '@aws-sdk/client-s3';

 // Initialize S3 client
 const s3Client = new S3Client({
-  region: S3_CONFIG.region,
-  endpoint: S3_CONFIG.endpoint,
+  region: process.env.MINIO_AWS_REGION,
+  endpoint: process.env.MINIO_S3_UPLOAD_BUCKET_URL,
   credentials: {
-    accessKeyId: S3_CONFIG.accessKey || '',
-    secretAccessKey: S3_CONFIG.secretKey || ''
+    accessKeyId: process.env.MINIO_ACCESS_KEY || '',
+    secretAccessKey: process.env.MINIO_SECRET_KEY || ''
   },
   forcePathStyle: true // Required for MinIO
 });
@@ -36,7 +35,7 @@ export async function GET(
     console.log('Full Minio path:', minioPath);
     const command = new GetObjectCommand({
-      Bucket: process.env.MINIO_BUCKET_NAME || 'missions',
+      Bucket: process.env.MINIO_AWS_S3_UPLOAD_BUCKET_NAME || 'pages',
       Key: minioPath,
     });

@@ -1,9 +1,5 @@
-import { s3Client, putObject, generatePresignedUrl, S3_CONFIG, deleteObject } from '@/lib/s3';
-import { PutObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';
-import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
 /**
- * Utilities for mission-related file uploads using Minio
+ * Utilities for mission-related file paths
  */

 // Generate the mission logo path in Minio
@@ -16,247 +12,4 @@ export function getMissionLogoPath(userId: string, missionId: string, fileExtens
export function getMissionAttachmentPath(userId: string, missionId: string, filename: string): string {
// Simplify path to match pages bucket structure
return `${missionId}/attachments/${filename}`;
}
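// Illustrative usage (hypothetical IDs, not part of the original diff): with the
// simplified pages-bucket layout above, attachment keys are scoped by mission only.
//   getMissionAttachmentPath('user-1', 'mission-42', 'spec.pdf')
//   // => 'mission-42/attachments/spec.pdf'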
// Upload mission logo to Minio
export async function uploadMissionLogo(
userId: string,
missionId: string,
file: File
): Promise<{ filePath: string }> {
try {
console.log('=== Starting logo upload process ===');
console.log('Upload params:', { userId, missionId, fileName: file.name, fileSize: file.size, fileType: file.type });
// Get file extension
const fileExtension = file.name.substring(file.name.lastIndexOf('.'));
console.log('File extension:', fileExtension);
// Create file path
const filePath = getMissionLogoPath(userId, missionId, fileExtension);
console.log('Generated file path:', filePath);
// Convert file to ArrayBuffer
console.log('Converting file to buffer...');
const arrayBuffer = await file.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
console.log('Buffer created, size:', buffer.length);
// Upload to Minio using the pages bucket instead of missions bucket
console.log('Creating S3 command with bucket:', S3_CONFIG.bucket);
console.log('S3 config:', {
endpoint: S3_CONFIG.endpoint || 'MISSING!',
region: S3_CONFIG.region || 'MISSING!',
bucket: S3_CONFIG.bucket || 'MISSING!',
hasAccessKey: !!S3_CONFIG.accessKey || 'MISSING!',
hasSecretKey: !!S3_CONFIG.secretKey || 'MISSING!'
});
// Log the full path being used
console.log('FULL S3 PATH:', `${S3_CONFIG.endpoint}/${S3_CONFIG.bucket}/${filePath}`);
const command = new PutObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: filePath,
Body: buffer,
ContentType: file.type,
// Add ACL for public read access
ACL: 'public-read',
});
console.log('Sending upload command to S3/Minio...');
console.log('Command details:', {
Bucket: command.input.Bucket,
Key: command.input.Key,
ContentType: command.input.ContentType,
ACL: command.input.ACL,
ContentLength: buffer.length
});
try {
const result = await s3Client.send(command);
console.log('Upload successful, result:', result);
} catch (uploadError) {
console.error('S3 upload error details:', uploadError);
console.error('Error name:', (uploadError as any).name);
console.error('Error message:', (uploadError as any).message);
if ((uploadError as any).$metadata) {
console.error('Error metadata:', (uploadError as any).$metadata);
}
throw uploadError;
}
console.log('Upload complete, returning file path:', filePath);
return { filePath };
} catch (error) {
console.error('Error uploading mission logo:', error);
throw new Error('Failed to upload mission logo');
}
}
// Upload mission attachment to Minio
export async function uploadMissionAttachment(
userId: string,
missionId: string,
file: File
): Promise<{
filename: string,
filePath: string,
fileType: string,
fileSize: number
}> {
try {
console.log('=== Starting attachment upload process ===');
console.log('Upload params:', { userId, missionId, fileName: file.name, fileSize: file.size, fileType: file.type });
// Create file path
const filePath = getMissionAttachmentPath(userId, missionId, file.name);
console.log('Generated file path:', filePath);
// Convert file to ArrayBuffer
console.log('Converting file to buffer...');
const arrayBuffer = await file.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
console.log('Buffer created, size:', buffer.length);
// Log the full path being used
console.log('FULL S3 PATH:', `${S3_CONFIG.endpoint}/${S3_CONFIG.bucket}/${filePath}`);
// Upload to Minio using pages bucket
const command = new PutObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: filePath,
Body: buffer,
ContentType: file.type,
// Add ACL for public read access
ACL: 'public-read',
});
console.log('Sending upload command to S3/Minio...');
console.log('Command details:', {
Bucket: command.input.Bucket,
Key: command.input.Key,
ContentType: command.input.ContentType,
ACL: command.input.ACL,
ContentLength: buffer.length
});
try {
const result = await s3Client.send(command);
console.log('Upload successful, result:', result);
} catch (uploadError) {
console.error('S3 upload error details:', uploadError);
console.error('Error name:', (uploadError as any).name);
console.error('Error message:', (uploadError as any).message);
if ((uploadError as any).$metadata) {
console.error('Error metadata:', (uploadError as any).$metadata);
}
throw uploadError;
}
return {
filename: file.name,
filePath,
fileType: file.type,
fileSize: file.size,
};
} catch (error) {
console.error('Error uploading mission attachment:', error);
throw new Error('Failed to upload mission attachment');
}
}
// Generate presigned URL for missions bucket
async function generateMissionPresignedUrl(key: string, expiresIn = 3600): Promise<string> {
try {
const command = new PutObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: key
});
return await getSignedUrl(s3Client, command, { expiresIn });
} catch (error) {
console.error('Error generating presigned URL for missions bucket:', error);
throw error;
}
}
// Generate presigned URL for direct browser upload of mission logo
export async function generateMissionLogoUploadUrl(
userId: string,
missionId: string,
fileExtension: string,
expiresIn = 3600
): Promise<{
uploadUrl: string,
filePath: string
}> {
try {
const filePath = getMissionLogoPath(userId, missionId, fileExtension);
const uploadUrl = await generateMissionPresignedUrl(filePath, expiresIn);
return { uploadUrl, filePath };
} catch (error) {
console.error('Error generating mission logo upload URL:', error);
throw new Error('Failed to generate upload URL for mission logo');
}
}
// Generate presigned URL for direct browser upload of mission attachment
export async function generateMissionAttachmentUploadUrl(
userId: string,
missionId: string,
filename: string,
expiresIn = 3600
): Promise<{
uploadUrl: string,
filePath: string
}> {
try {
const filePath = getMissionAttachmentPath(userId, missionId, filename);
const uploadUrl = await generateMissionPresignedUrl(filePath, expiresIn);
return { uploadUrl, filePath };
} catch (error) {
console.error('Error generating mission attachment upload URL:', error);
throw new Error('Failed to generate upload URL for mission attachment');
}
}
// Delete object from missions bucket
async function deleteMissionObject(key: string): Promise<boolean> {
try {
const command = new DeleteObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: key
});
await s3Client.send(command);
return true;
} catch (error) {
console.error('Error deleting mission object:', error);
throw error;
}
}
// Delete mission attachment from Minio
export async function deleteMissionAttachment(filePath: string): Promise<boolean> {
try {
await deleteMissionObject(filePath);
return true;
} catch (error) {
console.error('Error deleting mission attachment:', error);
throw new Error('Failed to delete mission attachment');
}
}
// Delete mission logo from Minio
export async function deleteMissionLogo(filePath: string): Promise<boolean> {
try {
await deleteMissionObject(filePath);
return true;
} catch (error) {
console.error('Error deleting mission logo:', error);
throw new Error('Failed to delete mission logo');
}
}

lib/s3.ts

@@ -1,343 +0,0 @@
import { S3Client, ListObjectsV2Command, GetObjectCommand, PutObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
/**
* S3/MinIO Configuration
*
* IMPORTANT: Set these environment variables in your .env file:
*
* MINIO_S3_UPLOAD_BUCKET_URL=https://your-minio-instance.com/
* MINIO_AWS_REGION=your-region
* MINIO_AWS_S3_UPLOAD_BUCKET_NAME=your-bucket-name
* MINIO_ACCESS_KEY=your-access-key
* MINIO_SECRET_KEY=your-secret-key
*
* Alternative credentials (fallback):
* AWS_ACCESS_KEY_ID=your-aws-access-key
* AWS_SECRET_ACCESS_KEY=your-aws-secret-key
*/
export const S3_CONFIG = {
endpoint: process.env.MINIO_S3_UPLOAD_BUCKET_URL,
region: process.env.MINIO_AWS_REGION,
bucket: process.env.MINIO_AWS_S3_UPLOAD_BUCKET_NAME,
missionsBucket: process.env.MINIO_MISSIONS_BUCKET || 'missions',
accessKey: process.env.MINIO_ACCESS_KEY || process.env.AWS_ACCESS_KEY_ID,
secretKey: process.env.MINIO_SECRET_KEY || process.env.AWS_SECRET_ACCESS_KEY,
}
// Initialize S3 client with standard configuration
const s3Config = {
region: S3_CONFIG.region,
endpoint: S3_CONFIG.endpoint,
forcePathStyle: true, // Required for MinIO
};
// Add credentials if available
if (S3_CONFIG.accessKey && S3_CONFIG.secretKey) {
Object.assign(s3Config, {
credentials: {
accessKeyId: S3_CONFIG.accessKey,
secretAccessKey: S3_CONFIG.secretKey
}
});
}
// Create S3 client
export const s3Client = new S3Client(s3Config);
// Check Minio connection on startup
(async () => {
try {
console.log('Testing Minio/S3 connection...');
// Try a simple operation to test the connection
const command = new ListObjectsV2Command({
Bucket: S3_CONFIG.bucket,
MaxKeys: 1
});
const response = await s3Client.send(command);
console.log('Minio/S3 connection successful! Bucket exists and is accessible.');
console.log(`Bucket details: ${S3_CONFIG.bucket}, contains ${response.KeyCount || 0} objects`);
} catch (error) {
console.error('CRITICAL ERROR: Failed to connect to Minio/S3 server!');
console.error('File uploads will fail until this is resolved.');
console.error('Error details:', error);
console.error('Please check your S3/Minio configuration and server status.');
}
})();
// Check for required environment variables
if (!S3_CONFIG.endpoint || !S3_CONFIG.bucket) {
console.error('ERROR: Missing required S3 environment variables!');
console.error('Please make sure your .env file contains:');
console.error('- MINIO_S3_UPLOAD_BUCKET_URL');
console.error('- MINIO_AWS_S3_UPLOAD_BUCKET_NAME');
console.error('- MINIO_ACCESS_KEY or AWS_ACCESS_KEY_ID');
console.error('- MINIO_SECRET_KEY or AWS_SECRET_ACCESS_KEY');
}
// Log configuration for debugging (without exposing credentials)
console.log('S3 Configuration:', {
endpoint: S3_CONFIG.endpoint || 'MISSING!',
region: S3_CONFIG.region || 'MISSING!',
bucket: S3_CONFIG.bucket || 'MISSING!',
hasAccessKey: !!S3_CONFIG.accessKey || 'MISSING!',
hasSecretKey: !!S3_CONFIG.secretKey || 'MISSING!',
});
// Helper functions for S3 operations
// List objects in a "folder" for a specific user
export async function listUserObjects(userId: string, folder: string) {
try {
// Remove the 'pages/' prefix since it's already the bucket name
const prefix = `user-${userId}/${folder}/`;
console.log(`Listing objects with prefix: ${prefix}`);
const command = new ListObjectsV2Command({
Bucket: S3_CONFIG.bucket,
Prefix: prefix,
Delimiter: '/'
});
const response = await s3Client.send(command);
// Transform S3 objects to match the expected format for the frontend
// This ensures compatibility with existing NextCloud based components
return response.Contents?.map(item => ({
id: item.Key,
title: item.Key?.split('/').pop()?.replace('.md', '') || '',
lastModified: item.LastModified?.toISOString(),
size: item.Size,
type: 'file',
mime: item.Key?.endsWith('.md') ? 'text/markdown' : 'application/octet-stream',
etag: item.ETag
}))
// Filter out placeholder files and empty directory markers
.filter(item => !item.title.startsWith('.placeholder') && item.title !== '')
|| [];
} catch (error) {
console.error('Error listing objects:', error);
throw error;
}
}
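// Illustrative result shape for the transform above (hypothetical key and values,
// not from the original source):
//   await listUserObjects('7', 'notes')
//   // => [{ id: 'user-7/notes/todo.md', title: 'todo',
//   //       lastModified: '2025-05-24T10:00:00.000Z', size: 42,
//   //       type: 'file', mime: 'text/markdown', etag: '"abc123"' }]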
// Get object content
export async function getObjectContent(key: string) {
try {
const command = new GetObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: key
});
const response = await s3Client.send(command);
// Convert the stream to string
return await response.Body?.transformToString();
} catch (error) {
console.error('Error getting object content:', error);
throw error;
}
}
// Put object (create or update a file)
export async function putObject(key: string, content: string | Buffer, contentType?: string) {
try {
console.log(`Attempting to upload to S3/Minio: ${key}`);
if (!S3_CONFIG.bucket) {
throw new Error('S3 bucket name is not configured');
}
const command = new PutObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: key,
Body: content,
ContentType: contentType || (key.endsWith('.md') ? 'text/markdown' : 'text/plain')
});
console.log(`S3 PutObject request prepared for ${key}`);
const response = await s3Client.send(command);
console.log(`S3 PutObject successful for ${key}, ETag: ${response.ETag}`);
return {
id: key,
title: key.split('/').pop()?.replace('.md', '') || '',
lastModified: new Date().toISOString(),
size: typeof content === 'string' ? Buffer.byteLength(content) : content.length,
type: 'file',
mime: contentType || (key.endsWith('.md') ? 'text/markdown' : 'text/plain'),
etag: response.ETag
};
} catch (error) {
console.error(`Error putting object to S3/Minio (${key}):`, error);
// Check for specific S3 errors
if ((error as any)?.name === 'NoSuchBucket') {
console.error(`Bucket "${S3_CONFIG.bucket}" does not exist. Please create it first.`);
}
throw error;
}
}
// Delete object
export async function deleteObject(key: string) {
try {
const command = new DeleteObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: key
});
await s3Client.send(command);
return true;
} catch (error) {
console.error('Error deleting object:', error);
throw error;
}
}
// Create folder structure (In S3, folders are just prefix notations)
export async function createUserFolderStructure(userId: string) {
try {
console.log(`Creating folder structure for user: ${userId}`);
// Define standard folders - use lowercase only for simplicity and consistency
const folders = ['notes', 'diary', 'health', 'contacts'];
// Create folders with placeholders
const results = [];
for (const folder of folders) {
try {
// Create the folder path (just a prefix in S3)
// Remove the 'pages/' prefix since it's already the bucket name
const key = `user-${userId}/${folder}/`;
console.log(`Creating folder: ${key}`);
await putObject(key, '', 'application/x-directory');
// Create a placeholder file to ensure the folder exists and is visible
const placeholderKey = `user-${userId}/${folder}/.placeholder`;
await putObject(placeholderKey, 'Folder placeholder', 'text/plain');
results.push(folder);
} catch (error) {
console.error(`Error creating folder ${folder}:`, error);
}
}
console.log(`Successfully created ${results.length} folders for user ${userId}: ${results.join(', ')}`);
return true;
} catch (error) {
console.error('Error creating folder structure:', error);
throw error;
}
}
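// For illustration (hypothetical user id, not from the original source),
// createUserFolderStructure('7') creates keys such as:
//   user-7/notes/             (zero-byte 'application/x-directory' marker)
//   user-7/notes/.placeholder (plain-text file so the prefix stays visible)
// and likewise for diary/, health/ and contacts/.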
// Generate pre-signed URL for direct browser upload (optional feature)
export async function generatePresignedUrl(key: string, expiresIn = 3600) {
try {
const command = new PutObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: key
});
return await getSignedUrl(s3Client, command, { expiresIn });
} catch (error) {
console.error('Error generating presigned URL:', error);
throw error;
}
}
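// A minimal client-side sketch of the direct browser upload that this presigned URL
// enables (an assumed usage pattern with a hypothetical helper name, not part of the
// original file): the browser PUTs the file body straight to MinIO, bypassing the
// Next.js server entirely.
async function uploadViaPresignedUrl(uploadUrl: string, file: File): Promise<void> {
  // The presigned URL already encodes bucket, key and signature; a plain PUT is enough.
  const res = await fetch(uploadUrl, { method: 'PUT', body: file });
  if (!res.ok) {
    throw new Error(`Direct upload failed with status ${res.status}`);
  }
}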
// Generate a public URL for a file stored in Minio/S3
export function getPublicUrl(filePath: string, bucketName?: string): string {
if (!filePath) return '';
if (filePath.startsWith('http')) return filePath; // Already a full URL
console.log('Generating public URL for:', filePath);
// Remove leading slash if present
const cleanPath = filePath.startsWith('/') ? filePath.substring(1) : filePath;
// Special handling for paths that start with 'pages/'
if (cleanPath.startsWith('pages/')) {
// For paths with pages/ prefix, use a different URL format
const minioBaseUrl = process.env.NEXT_PUBLIC_MINIO_BASE_URL || process.env.MINIO_PUBLIC_URL;
if (minioBaseUrl) {
const trimmedBaseUrl = minioBaseUrl.replace(/\/$/, ''); // Remove trailing slash if present
const publicUrl = `${trimmedBaseUrl}/${cleanPath}`;
console.log('Generated special public URL for pages path:', publicUrl);
return publicUrl;
}
}
// Determine which bucket to use
const bucket = bucketName || S3_CONFIG.bucket;
// Construct the full URL using the standard approach
const endpoint = S3_CONFIG.endpoint?.replace(/\/$/, ''); // Remove trailing slash if present
console.log('S3 Config for URL generation:', {
endpoint,
bucket,
cleanPath
});
// Return original path if no endpoint is configured
if (!endpoint) {
console.warn('No S3/Minio endpoint configured, returning original path');
return cleanPath;
}
// Construct and return the full URL
const publicUrl = `${endpoint}/${bucket}/${cleanPath}`;
console.log('Generated public URL:', publicUrl);
return publicUrl;
}
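// Illustrative output (hypothetical endpoint and bucket, not from the original source),
// assuming MINIO_S3_UPLOAD_BUCKET_URL=https://minio.example.com and bucket 'pages':
//   getPublicUrl('mission-42/attachments/spec.pdf')
//   // => 'https://minio.example.com/pages/mission-42/attachments/spec.pdf'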
// Test Minio connection - can be called from browser console
export async function testMinioConnection() {
console.log('=== Testing Minio Connection ===');
try {
console.log('S3 Configuration:', {
endpoint: S3_CONFIG.endpoint || 'MISSING!',
region: S3_CONFIG.region || 'MISSING!',
bucket: S3_CONFIG.bucket || 'MISSING!',
hasAccessKey: !!S3_CONFIG.accessKey || 'MISSING!',
hasSecretKey: !!S3_CONFIG.secretKey || 'MISSING!',
});
// Try a simple operation
console.log('Attempting to list objects...');
const command = new ListObjectsV2Command({
Bucket: S3_CONFIG.bucket || '',
MaxKeys: 5
});
const response = await s3Client.send(command);
console.log('Connection successful!');
console.log('Response:', response);
const files = response.Contents || [];
console.log(`Found ${files.length} files in bucket ${S3_CONFIG.bucket}:`);
files.forEach((file, index) => {
console.log(` ${index + 1}. ${file.Key} (${file.Size} bytes)`);
});
console.log('=== Test completed successfully ===');
return { success: true, files: files.map(f => f.Key) };
} catch (error) {
console.error('=== Test failed ===');
console.error('Error details:', error);
return { success: false, error };
}
}
// Make testMinioConnection available globally if in browser
if (typeof window !== 'undefined') {
(window as any).testMinioConnection = testMinioConnection;
// Also expose the getPublicUrl function
(window as any).getMinioUrl = getPublicUrl;
console.log('Minio test utilities available. Run window.testMinioConnection() to test Minio connection.');
}

@@ -1,160 +0,0 @@
/**
* Test Minio Upload Script
*
* Usage:
* node scripts/test-minio-upload.js
*
* This script tests connectivity to Minio by uploading a simple test file
* and then trying to retrieve it. It uses the same S3 client configuration
* as the main application.
*/
require('dotenv').config();
const { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand } = require('@aws-sdk/client-s3');
// Configuration for S3/Minio from environment variables
const S3_CONFIG = {
endpoint: process.env.MINIO_S3_UPLOAD_BUCKET_URL,
region: process.env.MINIO_AWS_REGION,
bucket: process.env.MINIO_AWS_S3_UPLOAD_BUCKET_NAME,
accessKey: process.env.MINIO_ACCESS_KEY || process.env.AWS_ACCESS_KEY_ID,
secretKey: process.env.MINIO_SECRET_KEY || process.env.AWS_SECRET_ACCESS_KEY,
};
// Initialize S3 client with standard configuration
const s3Config = {
region: S3_CONFIG.region,
endpoint: S3_CONFIG.endpoint,
forcePathStyle: true, // Required for MinIO
};
// Add credentials if available
if (S3_CONFIG.accessKey && S3_CONFIG.secretKey) {
Object.assign(s3Config, {
credentials: {
accessKeyId: S3_CONFIG.accessKey,
secretAccessKey: S3_CONFIG.secretKey
}
});
}
// Create S3 client
const s3Client = new S3Client(s3Config);
/**
* Generate a public URL for a file stored in Minio/S3
*/
function getPublicUrl(filePath) {
if (!filePath) return '';
if (filePath.startsWith('http')) return filePath; // Already a full URL
// Remove leading slash if present
const cleanPath = filePath.startsWith('/') ? filePath.substring(1) : filePath;
// Construct the full URL
const endpoint = S3_CONFIG.endpoint?.replace(/\/$/, ''); // Remove trailing slash if present
const bucket = S3_CONFIG.bucket;
// Return original path if no endpoint is configured
if (!endpoint) {
console.warn('No S3/Minio endpoint configured, returning original path');
return cleanPath;
}
// Construct and return the full URL
return `${endpoint}/${bucket}/${cleanPath}`;
}
/**
* Test Minio connection by uploading and retrieving a test file
*/
async function testMinioUpload() {
// Generate a unique test file path
const testKey = `test/upload-test-${Date.now()}.txt`;
const testContent = 'This is a test file created at ' + new Date().toISOString();
console.log('=== Testing Minio Upload ===');
console.log('S3 Configuration:');
console.log(' Endpoint:', S3_CONFIG.endpoint || 'MISSING!');
console.log(' Region:', S3_CONFIG.region || 'MISSING!');
console.log(' Bucket:', S3_CONFIG.bucket || 'MISSING!');
console.log(' Has Access Key:', !!S3_CONFIG.accessKey || 'MISSING!');
console.log(' Has Secret Key:', !!S3_CONFIG.secretKey || 'MISSING!');
try {
// 1. Upload test file
console.log('\nStep 1: Uploading test file...');
console.log(' File path:', testKey);
console.log(' Content:', testContent);
const uploadCommand = new PutObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: testKey,
Body: testContent,
ContentType: 'text/plain'
});
const uploadResult = await s3Client.send(uploadCommand);
console.log(' Upload successful!');
console.log(' ETag:', uploadResult.ETag);
// 2. Generate public URL
const publicUrl = getPublicUrl(testKey);
console.log('\nStep 2: Generated public URL:');
console.log(' URL:', publicUrl);
// 3. Download the file to verify it was uploaded correctly
console.log('\nStep 3: Downloading test file to verify...');
const downloadCommand = new GetObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: testKey
});
const downloadResult = await s3Client.send(downloadCommand);
const downloadedContent = await downloadResult.Body.transformToString();
console.log(' Download successful!');
console.log(' Content:', downloadedContent);
if (downloadedContent === testContent) {
console.log(' Content verification: ✓ Matches original');
} else {
console.log(' Content verification: ✗ Does not match original');
console.log(' Expected:', testContent);
console.log(' Received:', downloadedContent);
}
// 4. Cleanup - Delete the test file
console.log('\nStep 4: Cleaning up - deleting test file...');
const deleteCommand = new DeleteObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: testKey
});
await s3Client.send(deleteCommand);
console.log(' Deletion successful!');
console.log('\n=== Test completed successfully ===');
console.log('Minio is properly configured and accessible.');
console.log('File uploads should be working correctly.');
} catch (error) {
console.error('\n=== Test failed ===');
console.error('Error details:', error);
console.error('\nPossible issues:');
console.error('1. Minio server is not running or not accessible');
console.error('2. Incorrect endpoint URL');
console.error('3. Invalid credentials');
console.error('4. Bucket does not exist or is not accessible');
console.error('5. Network connectivity issues');
console.error('\nEnvironment variables to check:');
console.error('- MINIO_S3_UPLOAD_BUCKET_URL');
console.error('- MINIO_AWS_S3_UPLOAD_BUCKET_NAME');
console.error('- MINIO_ACCESS_KEY / AWS_ACCESS_KEY_ID');
console.error('- MINIO_SECRET_KEY / AWS_SECRET_ACCESS_KEY');
}
}
// Run the test
testMinioUpload();