// Neah/lib/services/prefetch-service.ts

'use server';
import { getImapConnection, getEmails, getEmailContent } from './email-service';
import {
cacheEmailList,
cacheEmailContent,
cacheImapSession,
getCachedEmailList,
getRedisClient,
warmupRedisCache
} from '@/lib/redis';
/**
 * Get cached emails with timeout to ensure fast UI response.
 * If cache access takes longer than `timeoutMs`, resolves null so the
 * caller falls back to a regular IMAP fetch. The late cache result (if
 * any) is simply discarded — the promise is already settled.
 */
export async function getCachedEmailsWithTimeout(
  userId: string,
  folder: string,
  page: number,
  perPage: number,
  timeoutMs: number = 200
): Promise<any | null> {
  // Coerce whatever shape the cache hands back into `{ emails: [...] }`,
  // or null when the payload is not an object at all.
  const normalize = (cached: any): any | null => {
    if (typeof cached !== 'object') {
      console.warn('Invalid cached data format:', cached);
      return null;
    }
    if (!cached.emails && Array.isArray(cached)) {
      // Bare array — wrap it in the expected envelope.
      return { emails: cached };
    }
    if (!cached.emails) {
      // Object without an emails list — add an empty one.
      return { ...cached, emails: [] };
    }
    return cached;
  };

  return new Promise((resolve) => {
    const timer = setTimeout(() => {
      console.log(`Cache access timeout for ${userId}:${folder}:${page}:${perPage}`);
      resolve(null);
    }, timeoutMs);

    getCachedEmailList(userId, folder, page, perPage)
      .then((cached) => {
        clearTimeout(timer);
        if (!cached) {
          resolve(null);
          return;
        }
        console.log(`Using cached data for ${userId}:${folder}:${page}:${perPage}`);
        resolve(normalize(cached));
      })
      .catch((err) => {
        clearTimeout(timer);
        console.error('Error accessing cache:', err);
        resolve(null);
      });
  });
}
/**
 * Refresh emails in background without blocking UI.
 * This allows the UI to show cached data immediately while refreshing in
 * background; the work is deferred with setTimeout so the caller returns
 * right away.
 */
// Timestamp (ms) of the last completed refresh, keyed by
// `${userId}:${folder}:${page}`. Module scope because this is server-side
// code ('use server') — the previous implementation stored these on
// `window`, which does not exist in Node, so the throttle check threw on
// every invocation and refreshes were never deduplicated.
const recentRefreshes = new Map<string, number>();

export async function refreshEmailsInBackground(
  userId: string,
  folder: string = 'INBOX',
  page: number = 1,
  perPage: number = 20
): Promise<void> {
  // INBOX pages 1-2 are what the user is most likely looking at, so they
  // get a short scheduling delay; other folders/pages can wait longer.
  const delayMs = folder.toUpperCase() === 'INBOX' && page <= 2 ? 100 : 500;

  // Defer past the current execution context so the caller is never blocked.
  setTimeout(async () => {
    try {
      // Skip if we've recently refreshed this data — the same data does
      // not need refreshing too frequently.
      const refreshKey = `${userId}:${folder}:${page}`;
      const lastRefreshed = recentRefreshes.get(refreshKey) ?? 0;
      const now = Date.now();
      // Throttle window: 30 seconds for INBOX, 2 minutes for other folders.
      const minInterval = folder.toUpperCase() === 'INBOX' ? 30000 : 120000;
      if (now - lastRefreshed < minInterval) {
        console.log(`Skipping refresh for ${folder}:${page} - last refreshed ${Math.round((now - lastRefreshed)/1000)}s ago`);
        return;
      }

      console.log(`Background refresh for ${userId}:${folder}:${page}:${perPage}`);
      const freshData = await getEmails(userId, folder, page, perPage);
      console.log(`Background refresh completed for ${userId}:${folder}:${page} with ${freshData.emails.length} emails`);

      // Only mark as refreshed on success, so a failed fetch is retried
      // the next time the caller asks.
      recentRefreshes.set(refreshKey, now);

      // For INBOX first page only, warm page 2 shortly afterwards.
      if (folder.toUpperCase() === 'INBOX' && page === 1) {
        setTimeout(() => {
          refreshEmailsInBackground(userId, folder, 2, perPage);
        }, 1000);
      }
    } catch (error) {
      // Best-effort background work: log and move on.
      console.error('Background refresh error:', error);
    }
  }, delayMs);
}
/**
 * Prefetch basic email data for faster initial loading.
 * This function should be called when a user logs in. Runs three phases
 * in order: mailbox list, first page of important folders, then bodies
 * of recent unread INBOX messages. All failures are logged, never thrown.
 */
export async function prefetchUserEmailData(userId: string): Promise<void> {
  console.log(`Starting email prefetch for user ${userId}`);
  const startedAt = Date.now();
  try {
    const client = await getImapConnection(userId);

    // Phase 1: mailbox list, cached into the IMAP session record.
    const mailboxPaths = (await client.list()).map((box) => box.path);
    await cacheImapSession(userId, {
      lastActive: Date.now(),
      mailboxes: mailboxPaths
    });
    console.log(`Prefetched ${mailboxPaths.length} folders for user ${userId}`);

    // Phase 2: first page of each high-value folder, sequentially.
    const findFolder = (needle: string, fallback: string) =>
      mailboxPaths.find((path) => path.toLowerCase().includes(needle)) || fallback;
    const importantFolders = [
      'INBOX',
      findFolder('sent', 'Sent'),
      findFolder('draft', 'Drafts')
    ].filter(Boolean);

    for (const folder of importantFolders) {
      try {
        console.log(`Prefetching emails for ${folder}`);
        const emailList = await getEmails(userId, folder, 1, 20);
        console.log(`Prefetched ${emailList.emails.length} emails for ${folder}`);
      } catch (error) {
        // One bad folder must not abort the rest of the prefetch.
        console.error(`Error prefetching emails for folder ${folder}:`, error);
      }
    }

    // Phase 3: bodies of up to 5 recent unread INBOX emails, in parallel.
    try {
      // Re-fetching the list is cheap here — it was just cached above.
      const inboxList = await getEmails(userId, 'INBOX', 1, 20);
      const unreadEmails = inboxList.emails
        .filter((email) => !email.flags.seen)
        .slice(0, 5);

      if (unreadEmails.length > 0) {
        console.log(`Prefetching content for ${unreadEmails.length} unread emails`);
        // allSettled + per-item catch: one failed body never rejects the batch.
        await Promise.allSettled(
          unreadEmails.map((email) =>
            getEmailContent(userId, email.id, 'INBOX')
              .catch((err) => console.error(`Error prefetching email ${email.id}:`, err))
          )
        );
        console.log(`Completed prefetching content for unread emails`);
      }
    } catch (error) {
      console.error('Error prefetching unread email content:', error);
    }

    const duration = (Date.now() - startedAt) / 1000;
    console.log(`Email prefetch completed for user ${userId} in ${duration.toFixed(2)}s`);
  } catch (error) {
    console.error('Error during email prefetch:', error);
  }
}
/**
 * Prefetch a specific folder's emails.
 * This can be used when the user navigates to a folder to preload more
 * pages. Pages are fetched sequentially with small delays; the function
 * itself returns as soon as the first fetch is scheduled.
 */
// Timestamp (ms) of the last prefetch, keyed by `${userId}:${folder}:${page}`.
// Module scope because this is server-side code ('use server') — the
// previous implementation stored these on `window`, which does not exist
// in Node, so every page's prefetch threw before `getEmails` was reached
// and nothing was ever prefetched.
const recentPrefetches = new Map<string, number>();

export async function prefetchFolderEmails(
  userId: string,
  folder: string,
  pages: number = 3,
  startPage: number = 1
): Promise<void> {
  try {
    console.log(`Prefetching ${pages} pages of emails for folder ${folder} starting from page ${startPage}`);

    // Hard cap on pages per call to limit server load.
    const maxPages = 3;
    const actualPages = Math.min(pages, maxPages);
    const pagesToFetch = Array.from({ length: actualPages }, (_, i) => startPage + i);
    console.log(`Will prefetch pages: ${pagesToFetch.join(', ')}`);

    // Fetch pages sequentially with delays to avoid overwhelming the
    // server; the first (most important) page is fetched immediately.
    const fetchPage = async (pageIndex: number): Promise<void> => {
      if (pageIndex >= pagesToFetch.length) return;
      const page = pagesToFetch[pageIndex];
      try {
        // Skip pages prefetched less than a minute ago.
        const cacheKey = `${userId}:${folder}:${page}`;
        const lastPrefetched = recentPrefetches.get(cacheKey) ?? 0;
        const now = Date.now();
        if (now - lastPrefetched < 60000) {
          console.log(`Skipping prefetch for ${folder}:${page} - prefetched ${Math.round((now - lastPrefetched)/1000)}s ago`);
          // Continue with the next page.
          setTimeout(() => fetchPage(pageIndex + 1), 100);
          return;
        }

        console.log(`Prefetching page ${page} of ${folder}`);
        const result = await getEmails(userId, folder, page, 20);
        console.log(`Successfully prefetched page ${page} of ${folder} with ${result.emails.length} emails`);

        // Only mark as prefetched on success.
        recentPrefetches.set(cacheKey, now);
        setTimeout(() => fetchPage(pageIndex + 1), 500);
      } catch (err) {
        console.error(`Error prefetching page ${page} of ${folder}:`, err);
        // Keep going — a later page may still succeed.
        setTimeout(() => fetchPage(pageIndex + 1), 1000);
      }
    };

    // Fire-and-forget: intentionally not awaited so navigation stays snappy.
    void fetchPage(0);
  } catch (error) {
    console.error(`Error prefetching folder ${folder}:`, error);
  }
}