'use server';

import { getImapConnection, getEmails, getEmailContent } from './email-service';
import {
  cacheEmailList,
  cacheEmailContent,
  cacheImapSession,
  getCachedEmailList,
  getRedisClient,
  warmupRedisCache
} from '@/lib/redis';

// Keep track of ongoing prefetch operations to prevent duplicates
const prefetchInProgress = new Map<string, boolean>();
const lastPrefetchTime = new Map<string, number>();
const PREFETCH_COOLDOWN_MS = 30000; // 30 seconds between prefetch operations

// Track recent refreshes to prevent infinite loops
const recentRefreshes = new Map<string, number>();
const COOLDOWN_PERIOD = 60000; // 60 seconds cooldown between refreshes
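
// Note: the Maps above live in module scope, so the in-progress/cooldown state is
// kept per server process; it is not shared across instances and resets on restart.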

/**
 * Check if we should prefetch for a user based on cooldown
 */
function shouldPrefetch(userId: string, key: string = 'general'): boolean {
  const prefetchKey = `${userId}:${key}`;

  // Check if prefetch is already in progress
  if (prefetchInProgress.get(prefetchKey)) {
    console.log(`Prefetch already in progress for ${prefetchKey}`);
    return false;
  }

  // Check cooldown
  const lastTime = lastPrefetchTime.get(prefetchKey) || 0;
  const now = Date.now();

  if (now - lastTime < PREFETCH_COOLDOWN_MS) {
    console.log(`Prefetch cooldown active for ${prefetchKey}, last was ${Math.round((now - lastTime)/1000)}s ago`);
    return false;
  }

  // Mark as in progress and update last time
  prefetchInProgress.set(prefetchKey, true);
  lastPrefetchTime.set(prefetchKey, now);
  return true;
}

/**
 * Mark prefetch as completed
 */
function markPrefetchCompleted(userId: string, key: string = 'general'): void {
  const prefetchKey = `${userId}:${key}`;
  prefetchInProgress.set(prefetchKey, false);
}

/**
 * Get cached emails with timeout to ensure fast UI response
 * If cache access takes longer than timeout, return null to use regular IMAP fetch
 */
export async function getCachedEmailsWithTimeout(
  userId: string,
  folder: string,
  page: number,
  perPage: number,
  timeoutMs: number = 100,
  accountId?: string
): Promise<any | null> {
  // Skip cache if accountId is 'loading-account'
  if (accountId === 'loading-account') {
    console.log(`Skipping cache for loading account`);
    return null;
  }

  // CRITICAL FIX: Proper folder and account ID normalization
  // This is critical for consistent cache keys
  let effectiveAccountId: string;
  let normalizedFolder: string;

  // First, handle the folder format
  if (folder.includes(':')) {
    // Extract parts if folder already has a prefix
    const parts = folder.split(':');
    const folderAccountId = parts[0];
    normalizedFolder = parts[1];

    // CRITICAL FIX: If explicit accountId is provided, it ALWAYS takes precedence
    // This ensures account switching works correctly
    if (accountId) {
      console.log(`[getCachedEmailsWithTimeout] Using provided accountId (${accountId}) over folder prefix (${folderAccountId})`);
      effectiveAccountId = accountId;
    } else {
      effectiveAccountId = folderAccountId;
    }
  } else {
    // No folder prefix, use the folder name as is
    normalizedFolder = folder;
    effectiveAccountId = accountId || 'default';
  }

  // Log the normalization for debugging
  console.log(`[getCachedEmailsWithTimeout] Normalized: folder=${normalizedFolder}, accountId=${effectiveAccountId} (from ${folder})`);

  return new Promise((resolve) => {
    const timeoutId = setTimeout(() => {
      console.log(`Cache access timeout for ${userId}:${effectiveAccountId}:${normalizedFolder}:${page}:${perPage}`);
      resolve(null);
    }, timeoutMs);

    // CRITICAL FIX: Use the normalized parameters consistently
    // This ensures we're looking up the right cache entries
    getCachedEmailList(userId, effectiveAccountId, normalizedFolder, page, perPage)
      .then(result => {
        clearTimeout(timeoutId);
        if (result) {
          console.log(`[getCachedEmailsWithTimeout] Cache hit for ${userId}:${effectiveAccountId}:${normalizedFolder}:${page}:${perPage}`);
          resolve(result);
        } else {
          console.log(`[getCachedEmailsWithTimeout] Cache miss for ${userId}:${effectiveAccountId}:${normalizedFolder}:${page}:${perPage}`);
          resolve(null);
        }
      })
      .catch(err => {
        clearTimeout(timeoutId);
        console.error('[getCachedEmailsWithTimeout] Error accessing cache:', err);
        resolve(null);
      });
  });
}
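
// Illustrative sketch (not part of the original module): one way a caller could pair
// the timeout-bounded cache read above with the regular IMAP fetch as its fallback.
// The function name `getEmailsCacheFirst` is an assumption for illustration only;
// `getEmails` is the existing service call imported at the top of this file.
export async function getEmailsCacheFirst(
  userId: string,
  folder: string,
  page: number,
  perPage: number,
  accountId?: string
): Promise<any> {
  // Try the Redis cache first, but never block the caller for more than ~100ms
  const cached = await getCachedEmailsWithTimeout(userId, folder, page, perPage, 100, accountId);
  if (cached) {
    return cached;
  }

  // Cache miss or timeout: fall back to a normal IMAP fetch
  return getEmails(userId, folder, page, perPage, accountId);
}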

/**
 * Refresh emails in background without blocking UI
 * This allows the UI to show cached data immediately while refreshing in background
 * NOTE: the actual refresh is currently disabled (the function returns early) to prevent infinite refresh loops
 */
export async function refreshEmailsInBackground(
  userId: string,
  folder: string,
  page: number,
  perPage: number,
  accountId?: string
): Promise<void> {
  try {
    // Log the raw inputs; the folder may still carry an accountId prefix at this point
    console.log(`[refreshEmailsInBackground] Requested refresh: folder=${folder}, accountId=${accountId}`);

    // Create a unique key for this refresh request
    const refreshKey = `${userId}:refresh:${accountId || 'default'}:${folder}:${page}`;

    // Check if this exact refresh was done recently - PREVENT INFINITE LOOPS
    const lastRefreshed = recentRefreshes.get(refreshKey);
    const now = Date.now();

    if (lastRefreshed && now - lastRefreshed < COOLDOWN_PERIOD) {
      console.log(`Refresh cooldown active for ${refreshKey}, last was ${Math.floor((now - lastRefreshed)/1000)}s ago`);
      return; // Skip if we refreshed this exact data recently
    }

    // Update the refresh timestamp
    recentRefreshes.set(refreshKey, now);

    // Prune old entries from the map (keep only recent ones)
    for (const [key, timestamp] of recentRefreshes.entries()) {
      if (now - timestamp > COOLDOWN_PERIOD) {
        recentRefreshes.delete(key);
      }
    }

    // CRITICAL FIX: background refresh is disabled for now to prevent infinite refresh loops
    console.log(`[refreshEmailsInBackground] DISABLED to prevent infinite loops`);
    return;

    // Real implementation of refresh would be here
  } catch (error) {
    console.error('Error in refreshEmailsInBackground:', error);
  }
}

/**
 * Prefetch basic email data for faster initial loading
 * This function should be called when a user logs in
 */
export async function prefetchUserEmailData(userId: string): Promise<void> {
  // Skip if already in progress or in cooldown
  if (!shouldPrefetch(userId)) {
    return;
  }

  console.log(`Starting email prefetch for user ${userId}`);
  const startTime = Date.now();

  try {
    // Connect to IMAP server
    const client = await getImapConnection(userId);

    // 1. Prefetch mailbox list
    const mailboxes = await client.list();
    const mailboxPaths = mailboxes.map(mailbox => mailbox.path);

    // Cache mailbox list in session data
    await cacheImapSession(userId, {
      lastActive: Date.now(),
      mailboxes: mailboxPaths
    });

    console.log(`Prefetched ${mailboxPaths.length} folders for user ${userId}`);

    // 2. Prefetch email lists for important folders
    const importantFolders = [
      'INBOX',
      mailboxPaths.find(path => path.toLowerCase().includes('sent')) || 'Sent',
      mailboxPaths.find(path => path.toLowerCase().includes('draft')) || 'Drafts'
    ].filter(Boolean);

    // Fetch first page of each important folder
    for (const folder of importantFolders) {
      try {
        console.log(`Prefetching emails for ${folder}`);
        const emailList = await getEmails(userId, folder, 1, 20);
        console.log(`Prefetched ${emailList.emails.length} emails for ${folder}`);
      } catch (error) {
        console.error(`Error prefetching emails for folder ${folder}:`, error);
        // Continue with other folders even if one fails
      }
    }

    // 3. Prefetch content of recent unread emails in INBOX
    try {
      // Get the list again (it's already cached so this will be fast)
      const inboxList = await getEmails(userId, 'INBOX', 1, 20);

      // Prefetch content for up to 5 recent unread emails
      const unreadEmails = inboxList.emails
        .filter(email => !email.flags.seen)
        .slice(0, 5);

      if (unreadEmails.length > 0) {
        console.log(`Prefetching content for ${unreadEmails.length} unread emails`);

        // Fetch content in parallel for speed
        await Promise.allSettled(
          unreadEmails.map(email =>
            getEmailContent(userId, email.id, 'INBOX')
              .catch(err => console.error(`Error prefetching email ${email.id}:`, err))
          )
        );

        console.log(`Completed prefetching content for unread emails`);
      }
    } catch (error) {
      console.error('Error prefetching unread email content:', error);
    }

    const duration = (Date.now() - startTime) / 1000;
    console.log(`Email prefetch completed for user ${userId} in ${duration.toFixed(2)}s`);
  } catch (error) {
    console.error('Error during email prefetch:', error);
  } finally {
    markPrefetchCompleted(userId);
  }
}
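
// Illustrative sketch (not part of the original module): the prefetch above is meant
// to be fired when a user logs in, without awaiting it, so the login request is not
// blocked while IMAP folders are warmed up. The name `warmCacheAfterLogin` is a
// hypothetical example of such a call site.
export async function warmCacheAfterLogin(userId: string): Promise<void> {
  // Intentionally not awaited: errors are caught and logged inside
  // prefetchUserEmailData, and shouldPrefetch() deduplicates concurrent runs.
  void prefetchUserEmailData(userId);
}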

/**
 * Prefetch a specific folder's emails
 * This can be used when the user navigates to a folder to preload more pages
 */
export async function prefetchFolderEmails(
  userId: string,
  folder: string,
  pages: number = 3,
  startPage: number = 1,
  accountId?: string
): Promise<void> {
  // Extract the account ID from the folder prefix if one is present
  const folderAccountId = folder.includes(':') ? folder.split(':')[0] : undefined;

  // An explicitly provided accountId always takes precedence over the folder prefix,
  // matching the normalization in getCachedEmailsWithTimeout
  const effectiveAccountId = accountId || folderAccountId || 'default';

  // Normalize folder name by removing account prefix if present
  const normalizedFolder = folder.includes(':') ? folder.split(':')[1] : folder;

  const prefetchKey = `folder:${normalizedFolder}:${startPage}:${effectiveAccountId}`;

  // Skip if already in progress or in cooldown
  if (!shouldPrefetch(userId, prefetchKey)) {
    return;
  }

  try {
    console.log(`Prefetching ${pages} pages of emails for folder ${normalizedFolder} starting from page ${startPage} for account ${effectiveAccountId}`);

    // Calculate the range of pages to prefetch
    const pagesToFetch = Array.from(
      { length: pages },
      (_, i) => startPage + i
    );

    console.log(`Will prefetch pages: ${pagesToFetch.join(', ')}`);

    // Fetch multiple pages in parallel
    await Promise.allSettled(
      pagesToFetch.map(page =>
        getEmails(userId, normalizedFolder, page, 20, effectiveAccountId)
          .then(result => {
            console.log(`Successfully prefetched and cached page ${page} of ${normalizedFolder} with ${result.emails.length} emails for account ${effectiveAccountId}`);
            return result;
          })
          .catch(err => {
            console.error(`Error prefetching page ${page} of ${normalizedFolder} for account ${effectiveAccountId}:`, err);
            return null;
          })
      )
    );

    console.log(`Completed prefetching ${pages} pages for ${normalizedFolder} in account ${effectiveAccountId}`);
  } catch (error) {
    console.error(`Error during folder prefetch:`, error);
  } finally {
    markPrefetchCompleted(userId, prefetchKey);
  }
}
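
// Illustrative sketch (not part of the original module): when the UI lands on a page
// of a folder, the following pages can be preloaded so pagination feels instant.
// The name `prefetchNextPages` and the choice of two pages are assumptions.
export async function prefetchNextPages(
  userId: string,
  folder: string,
  currentPage: number,
  accountId?: string
): Promise<void> {
  // Preload the two pages after the one currently on screen; shouldPrefetch()
  // inside prefetchFolderEmails enforces the 30-second cooldown per folder/page key.
  await prefetchFolderEmails(userId, folder, 2, currentPage + 1, accountId);
}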