// NOTE(review): removed extraction artifact — duplicated file metadata
// ("221 lines / 7.3 KiB / TypeScript") that is not valid TypeScript.
'use server';
|
|
|
|
import { getImapConnection, getEmails, getEmailContent } from './email-service';
|
|
import {
|
|
cacheEmailList,
|
|
cacheEmailContent,
|
|
cacheImapSession,
|
|
getCachedEmailList,
|
|
getRedisClient,
|
|
warmupRedisCache
|
|
} from '@/lib/redis';
|
|
|
|
/**
|
|
* Get cached emails with timeout to ensure fast UI response
|
|
* If cache access takes longer than timeout, return null to use regular IMAP fetch
|
|
*/
|
|
export async function getCachedEmailsWithTimeout(
|
|
userId: string,
|
|
folder: string,
|
|
page: number,
|
|
perPage: number,
|
|
timeoutMs: number = 200
|
|
): Promise<any | null> {
|
|
return new Promise((resolve) => {
|
|
const timeoutId = setTimeout(() => {
|
|
console.log(`Cache access timeout for ${userId}:${folder}:${page}:${perPage}`);
|
|
resolve(null);
|
|
}, timeoutMs);
|
|
|
|
getCachedEmailList(userId, folder, page, perPage)
|
|
.then(result => {
|
|
clearTimeout(timeoutId);
|
|
if (result) {
|
|
console.log(`Using cached data for ${userId}:${folder}:${page}:${perPage}`);
|
|
|
|
// Validate and normalize the data structure
|
|
if (typeof result === 'object') {
|
|
// Make sure we have an emails array
|
|
if (!result.emails && Array.isArray(result)) {
|
|
// If result is an array, convert to proper structure
|
|
resolve({ emails: result });
|
|
} else if (!result.emails) {
|
|
// If no emails property, add empty array
|
|
resolve({ ...result, emails: [] });
|
|
} else {
|
|
// Normal case, return as is
|
|
resolve(result);
|
|
}
|
|
} else {
|
|
// Invalid data, return null
|
|
console.warn('Invalid cached data format:', result);
|
|
resolve(null);
|
|
}
|
|
} else {
|
|
resolve(null);
|
|
}
|
|
})
|
|
.catch(err => {
|
|
clearTimeout(timeoutId);
|
|
console.error('Error accessing cache:', err);
|
|
resolve(null);
|
|
});
|
|
});
|
|
}
|
|
|
|
/**
|
|
* Refresh emails in background without blocking UI
|
|
* This allows the UI to show cached data immediately while refreshing in background
|
|
*/
|
|
export async function refreshEmailsInBackground(
|
|
userId: string,
|
|
folder: string = 'INBOX',
|
|
page: number = 1,
|
|
perPage: number = 20
|
|
): Promise<void> {
|
|
// Track ongoing refreshes to avoid duplicates
|
|
const refreshKey = `${userId}:${folder}:${page}`;
|
|
|
|
// Use a higher priority for inbox and small page numbers
|
|
const priority = folder === 'INBOX' && page <= 2 ? 100 : 300;
|
|
|
|
// Use setTimeout to ensure this runs after current execution context
|
|
setTimeout(async () => {
|
|
try {
|
|
console.log(`Background refresh for ${userId}:${folder}:${page}:${perPage}`);
|
|
const freshData = await getEmails(userId, folder, page, perPage);
|
|
console.log(`Background refresh completed for ${userId}:${folder}:${page} with ${freshData.emails.length} emails`);
|
|
|
|
// If it's the inbox and there's a next page, prefetch that too
|
|
if (folder === 'INBOX' && page === 1) {
|
|
setTimeout(() => {
|
|
refreshEmailsInBackground(userId, folder, 2, perPage);
|
|
}, 500);
|
|
}
|
|
} catch (error) {
|
|
console.error('Background refresh error:', error);
|
|
}
|
|
}, priority);
|
|
}
|
|
|
|
/**
|
|
* Prefetch basic email data for faster initial loading
|
|
* This function should be called when a user logs in
|
|
*/
|
|
export async function prefetchUserEmailData(userId: string): Promise<void> {
|
|
console.log(`Starting email prefetch for user ${userId}`);
|
|
const startTime = Date.now();
|
|
|
|
try {
|
|
// Connect to IMAP server
|
|
const client = await getImapConnection(userId);
|
|
|
|
// 1. Prefetch mailbox list
|
|
const mailboxes = await client.list();
|
|
const mailboxPaths = mailboxes.map(mailbox => mailbox.path);
|
|
|
|
// Cache mailbox list in session data
|
|
await cacheImapSession(userId, {
|
|
lastActive: Date.now(),
|
|
mailboxes: mailboxPaths
|
|
});
|
|
|
|
console.log(`Prefetched ${mailboxPaths.length} folders for user ${userId}`);
|
|
|
|
// 2. Prefetch email lists for important folders
|
|
const importantFolders = [
|
|
'INBOX',
|
|
mailboxPaths.find(path => path.toLowerCase().includes('sent')) || 'Sent',
|
|
mailboxPaths.find(path => path.toLowerCase().includes('draft')) || 'Drafts'
|
|
].filter(Boolean);
|
|
|
|
// Fetch first page of each important folder
|
|
for (const folder of importantFolders) {
|
|
try {
|
|
console.log(`Prefetching emails for ${folder}`);
|
|
const emailList = await getEmails(userId, folder, 1, 20);
|
|
console.log(`Prefetched ${emailList.emails.length} emails for ${folder}`);
|
|
} catch (error) {
|
|
console.error(`Error prefetching emails for folder ${folder}:`, error);
|
|
// Continue with other folders even if one fails
|
|
}
|
|
}
|
|
|
|
// 3. Prefetch content of recent unread emails in INBOX
|
|
try {
|
|
// Get the list again (it's already cached so this will be fast)
|
|
const inboxList = await getEmails(userId, 'INBOX', 1, 20);
|
|
|
|
// Prefetch content for up to 5 recent unread emails
|
|
const unreadEmails = inboxList.emails
|
|
.filter(email => !email.flags.seen)
|
|
.slice(0, 5);
|
|
|
|
if (unreadEmails.length > 0) {
|
|
console.log(`Prefetching content for ${unreadEmails.length} unread emails`);
|
|
|
|
// Fetch content in parallel for speed
|
|
await Promise.allSettled(
|
|
unreadEmails.map(email =>
|
|
getEmailContent(userId, email.id, 'INBOX')
|
|
.catch(err => console.error(`Error prefetching email ${email.id}:`, err))
|
|
)
|
|
);
|
|
|
|
console.log(`Completed prefetching content for unread emails`);
|
|
}
|
|
} catch (error) {
|
|
console.error('Error prefetching unread email content:', error);
|
|
}
|
|
|
|
const duration = (Date.now() - startTime) / 1000;
|
|
console.log(`Email prefetch completed for user ${userId} in ${duration.toFixed(2)}s`);
|
|
} catch (error) {
|
|
console.error('Error during email prefetch:', error);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Prefetch a specific folder's emails
|
|
* This can be used when the user navigates to a folder to preload more pages
|
|
*/
|
|
export async function prefetchFolderEmails(
|
|
userId: string,
|
|
folder: string,
|
|
pages: number = 3,
|
|
startPage: number = 1
|
|
): Promise<void> {
|
|
try {
|
|
console.log(`Prefetching ${pages} pages of emails for folder ${folder} starting from page ${startPage}`);
|
|
|
|
// Calculate the range of pages to prefetch
|
|
const pagesToFetch = Array.from(
|
|
{ length: pages },
|
|
(_, i) => startPage + i
|
|
);
|
|
|
|
console.log(`Will prefetch pages: ${pagesToFetch.join(', ')}`);
|
|
|
|
// Fetch multiple pages in parallel, but with a slight delay between them to avoid overwhelming the server
|
|
for (let i = 0; i < pagesToFetch.length; i++) {
|
|
const page = pagesToFetch[i];
|
|
|
|
// Use a delay for all but the first page
|
|
setTimeout(() => {
|
|
getEmails(userId, folder, page, 20)
|
|
.then(result => {
|
|
console.log(`Successfully prefetched and cached page ${page} of ${folder} with ${result.emails.length} emails`);
|
|
return result;
|
|
})
|
|
.catch(err => {
|
|
console.error(`Error prefetching page ${page} of ${folder}:`, err);
|
|
return null;
|
|
});
|
|
}, i * 300); // Stagger prefetches with 300ms between them
|
|
}
|
|
|
|
console.log(`Scheduled prefetching for ${pages} pages of ${folder}`);
|
|
} catch (error) {
|
|
console.error(`Error prefetching folder ${folder}:`, error);
|
|
}
|
|
}
|