diff --git a/app/courrier/page.tsx b/app/courrier/page.tsx
index f7c7b628..0d086d1b 100644
--- a/app/courrier/page.tsx
+++ b/app/courrier/page.tsx
@@ -151,9 +151,36 @@ export default function CourrierPage() {
       setLoading(true);

       // First check if Redis is ready before making API calls
-      const redisStatus = await fetch('/api/redis/status')
-        .then(res => res.json())
-        .catch(() => ({ ready: false }));
+      // Use a cache mechanism to reduce frequency of Redis status checks
+      const redisCheckCacheKey = 'neah_redis_status_check';
+      const cachedRedisCheck = localStorage.getItem(redisCheckCacheKey);
+      let redisStatus = { ready: false };
+
+      if (cachedRedisCheck) {
+        try {
+          const { status, timestamp } = JSON.parse(cachedRedisCheck);
+          // Only use cache if it's less than 2 minutes old
+          if (Date.now() - timestamp < 2 * 60 * 1000) {
+            redisStatus = status;
+            console.log('Using cached Redis status check');
+          }
+        } catch (e) {
+          // Invalid JSON in cache, ignore and fetch fresh status
+        }
+      }
+
+      // Only check Redis status if we don't have a recent cached result
+      if (!redisStatus.ready) {
+        redisStatus = await fetch('/api/redis/status')
+          .then(res => res.json())
+          .catch(() => ({ ready: false }));
+
+        // Cache the result
+        localStorage.setItem(redisCheckCacheKey, JSON.stringify({
+          status: redisStatus,
+          timestamp: Date.now()
+        }));
+      }

       if (!isMounted) return;

@@ -373,7 +400,8 @@ export default function CourrierPage() {
   return (
     <>
-      <RedisCacheStatus />
+      {/* Only render RedisCacheStatus in development mode to avoid unnecessary status checks */}
+      {process.env.NODE_ENV === 'development' && <RedisCacheStatus />}

       {/* Main layout */}

diff --git a/components/email/EmailList.tsx b/components/email/EmailList.tsx
index 4ecf373f..26df0b10 100644
--- a/components/email/EmailList.tsx
+++ b/components/email/EmailList.tsx
@@ -44,10 +44,20 @@ export default function EmailList({
   const [scrollPosition, setScrollPosition] = useState(0);
   const [searchQuery, setSearchQuery] = useState('');
   const [isLoadingMore, setIsLoadingMore] = useState(false);
+  const [lastLoadTime, setLastLoadTime] = useState(0);
   const scrollRef = useRef(null);
   const scrollTimeoutRef = useRef(null);
+  const loadMoreTimeoutRef = useRef(null);
   const prevEmailsLengthRef = useRef(emails.length);

+  // Clear any pending timeouts on unmount
+  useEffect(() => {
+    return () => {
+      if (scrollTimeoutRef.current) clearTimeout(scrollTimeoutRef.current);
+      if (loadMoreTimeoutRef.current) clearTimeout(loadMoreTimeoutRef.current);
+    };
+  }, []);
+
   // Debounced scroll handler for better performance
   const handleScroll = useCallback((event: React.UIEvent) => {
     const target = event.target as HTMLDivElement;
@@ -61,11 +71,18 @@ export default function EmailList({
       clearTimeout(scrollTimeoutRef.current);
     }

-    // If near bottom (within 200px) and more emails are available, load more
+    // If near bottom (within 300px) and more emails are available, load more
     // Added additional checks to prevent loading loop
-    const isNearBottom = scrollHeight - scrollTop - clientHeight < 300; // Increased detection area
-    if (isNearBottom && hasMoreEmails && !isLoading && !isLoadingMore) {
+    const isNearBottom = scrollHeight - scrollTop - clientHeight < 300;
+
+    // Don't trigger load if we're already loading or if the last load was too recent (throttle)
+    const now = Date.now();
+    const timeSinceLastLoad = now - lastLoadTime;
+    const tooSoonToLoadAgain = timeSinceLastLoad < 2000; // 2 seconds throttle
+
+    if (isNearBottom && hasMoreEmails && !isLoading && !isLoadingMore && !tooSoonToLoadAgain) {
       setIsLoadingMore(true);
+      setLastLoadTime(now);

       // Use timeout to debounce load requests
       scrollTimeoutRef.current = setTimeout(() => {
@@ -76,13 +93,15 @@ export default function EmailList({
         onLoadMore();

         // Reset loading state after a delay
-        setTimeout(() => {
+        if (loadMoreTimeoutRef.current) clearTimeout(loadMoreTimeoutRef.current);
+        loadMoreTimeoutRef.current = setTimeout(() => {
+          loadMoreTimeoutRef.current = null;
           console.log('Resetting loading more state after timeout');
           setIsLoadingMore(false);
-        }, 3000); // Increased from 1500ms to 3000ms to allow more time for loading
-      }, 300); // Increased from 200ms to 300ms for better debouncing
+        }, 2000); // Reduced from 3000ms to 2000ms to avoid long loading states
+      }, 200); // Reduced from 300ms to 200ms for better responsiveness
     }
-  }, [hasMoreEmails, isLoading, isLoadingMore, onLoadMore]);
+  }, [hasMoreEmails, isLoading, isLoadingMore, onLoadMore, lastLoadTime]);

   // Restore scroll position when emails are loaded
   useEffect(() => {
@@ -93,8 +112,13 @@ export default function EmailList({
     // 4. We're not in the middle of a loading operation
     if (emails.length > prevEmailsLengthRef.current &&
         scrollRef.current &&
-        scrollPosition > 0 &&
-        !isLoading) {
+        scrollPosition > 0) {
+      // If emails have been loaded, force reset the loading state
+      if (isLoadingMore) {
+        console.log('Emails loaded, resetting loading state');
+        setIsLoadingMore(false);
+      }
+
       // Use requestAnimationFrame to ensure the DOM has updated
       requestAnimationFrame(() => {
         if (scrollRef.current) {
@@ -106,7 +130,7 @@ export default function EmailList({

     // Always update the reference for next comparison
     prevEmailsLengthRef.current = emails.length;
-  }, [emails.length, scrollPosition, isLoading]);
+  }, [emails.length, scrollPosition, isLoadingMore]);

   // Add safety mechanism to reset loading state if we get stuck
   useEffect(() => {
@@ -116,13 +140,13 @@ export default function EmailList({
       setIsLoadingMore(false);
     }

-    // Add a timeout-based safety mechanism
+    // Add a timeout-based safety mechanism - reduced from 5000ms to 3000ms
    const safetyTimeout = setTimeout(() => {
      if (isLoadingMore) {
        console.log('Safety timeout: Resetting stuck loading state');
        setIsLoadingMore(false);
      }
-    }, 5000);
+    }, 3000);

    return () => clearTimeout(safetyTimeout);
  }, [emails.length, isLoadingMore]);
@@ -279,7 +303,7 @@ export default function EmailList({
      {/* Loading indicator */}
      {(isLoading || isLoadingMore) && (
-
+
          Loading more emails...
@@ -290,6 +314,7 @@ export default function EmailList({
          onClick={() => {
            console.log('Manual load more triggered');
+           setLastLoadTime(Date.now());
            onLoadMore();
          }}
          className="w-full py-2 text-gray-500 hover:bg-gray-100 text-sm"

diff --git a/hooks/use-courrier.ts b/hooks/use-courrier.ts
index 8b27d615..62855076 100644
--- a/hooks/use-courrier.ts
+++ b/hooks/use-courrier.ts
@@ -1,4 +1,4 @@
-import { useState, useCallback, useEffect } from 'react';
+import { useState, useCallback, useEffect, useRef } from 'react';
 import { useSession } from 'next-auth/react';
 import { useToast } from './use-toast';
 import { formatEmailForReplyOrForward } from '@/lib/utils/email-formatter';
@@ -52,6 +52,14 @@ export interface EmailData {

 export type MailFolder = string;

+// Near the top of the file, before the useCourrier hook
+interface EmailResponse {
+  emails: Email[];
+  total: number;
+  totalPages: number;
+  hasMore: boolean;
+}
+
 // Hook for managing email operations
 export const useCourrier = () => {
   // State for email data
@@ -71,178 +79,70 @@ export const useCourrier = () => {
   const [perPage, setPerPage] = useState(20);
   const [totalEmails, setTotalEmails] = useState(0);
   const [totalPages, setTotalPages] = useState(0);
+  const [hasMore, setHasMore] = useState(false);

   // Auth and notifications
   const { data: session } = useSession();
   const { toast } = useToast();

+  // Add the missing refs
+  const loadingRequestsRef = useRef<Set<string>>(new Set());
+  const loadMoreRef = useRef(0);
+
   // Load emails from the server
-  const loadEmails = useCallback(async (isLoadMore = false) => {
-    if (!session?.user?.id) return;
-
-    console.log(`Loading emails for folder ${currentFolder}, page ${page}, isLoadMore: ${isLoadMore}`);
-
-    // If already loading, don't trigger multiple simultaneous requests
-    if (isLoading) {
-      console.log('Skipping loadEmails - already loading');
-      return;
-    }
-
-    setIsLoading(true);
-    setError(null);
-
-    // Keep reference to the current page for this request
-    const currentRequestPage = page;
-    const requestStartTime = Date.now();
-
-    try {
-      // First try Redis cache with low timeout
-      const cachedEmails = await getCachedEmailsWithTimeout(session.user.id, currentFolder, currentRequestPage, perPage, 200);
-      if (cachedEmails) {
-        // Ensure cached data has emails array property
-        if (Array.isArray(cachedEmails.emails)) {
-          if (isLoadMore) {
-            // When loading more, always append to the existing list
-            setEmails(prevEmails => {
-              // Create a Set of existing email IDs to avoid duplicates
-              const existingIds = new Set(prevEmails.map(email => email.id));
-              // Filter out any duplicates before appending
-              const newEmails = cachedEmails.emails.filter((email: Email) => !existingIds.has(email.id));
-
-              // Log pagination info
-              console.log(`Added ${newEmails.length} cached emails from page ${currentRequestPage} to existing ${prevEmails.length} emails (${Date.now() - requestStartTime}ms)`);
-
-              // Combine emails and sort them by date (newest first)
-              const combinedEmails = [...prevEmails, ...newEmails];
-              return combinedEmails.sort((a: Email, b: Email) => new Date(b.date).getTime() - new Date(a.date).getTime());
-            });
-          } else {
-            // For initial load, replace emails
-            console.log(`Setting ${cachedEmails.emails.length} cached emails for page ${currentRequestPage} (${Date.now() - requestStartTime}ms)`);
-            // Ensure emails are sorted by date (newest first)
-            setEmails(cachedEmails.emails.sort((a: Email, b: Email) => new Date(b.date).getTime() - new Date(a.date).getTime()));
-          }
-
-          // Set pagination info from cache if available
-          if (cachedEmails.totalEmails) setTotalEmails(cachedEmails.totalEmails);
-          if (cachedEmails.totalPages) setTotalPages(cachedEmails.totalPages);
-
-          // Update available mailboxes if provided
-          if (cachedEmails.mailboxes && cachedEmails.mailboxes.length > 0) {
-            setMailboxes(cachedEmails.mailboxes);
-          }
-        } else if (Array.isArray(cachedEmails)) {
-          // Direct array response
-          if (isLoadMore) {
-            setEmails(prevEmails => {
-              // Create a Set of existing email IDs to avoid duplicates
-              const existingIds = new Set(prevEmails.map(email => email.id));
-              // Filter out any duplicates before appending
-              const newEmails = cachedEmails.filter((email: Email) => !existingIds.has(email.id));
-
-              // Log pagination info
-              console.log(`Added ${newEmails.length} cached emails from page ${currentRequestPage} to existing ${prevEmails.length} emails`);
-
-              // Combine emails and sort them by date (newest first)
-              const combinedEmails = [...prevEmails, ...newEmails];
-              return combinedEmails.sort((a: Email, b: Email) => new Date(b.date).getTime() - new Date(a.date).getTime());
-            });
-          } else {
-            // For initial load, replace emails
-            console.log(`Setting ${cachedEmails.length} cached emails for page ${currentRequestPage}`);
-            // Ensure emails are sorted by date (newest first)
-            setEmails(cachedEmails.sort((a: Email, b: Email) => new Date(b.date).getTime() - new Date(a.date).getTime()));
-          }
-        } else {
-          console.warn('Invalid cache format:', cachedEmails);
-        }
-
-        setIsLoading(false);
-
-        // Still refresh in background for fresh data
-        refreshEmailsInBackground(session.user.id, currentFolder, currentRequestPage, perPage).catch(err => {
-          console.error('Background refresh error:', err);
-        });
+  const loadEmails = useCallback(
+    async (folder = currentFolder, pageToLoad = page, resetList = true, isInitial = false) => {
+      if (!session?.user?.id || isLoading) return;
+
+      // Track this request to avoid duplicates
+      const requestKey = `${folder}_${pageToLoad}`;
+      if (loadingRequestsRef.current.has(requestKey)) {
+        console.log(`Skipping duplicate request for ${requestKey}`);
        return;
      }
+      loadingRequestsRef.current.add(requestKey);
+
+      setIsLoading(true);

-      // Build query params
-      const queryParams = new URLSearchParams({
-        folder: currentFolder,
-        page: currentRequestPage.toString(),
-        perPage: perPage.toString()
-      });
-
-      if (searchQuery) {
-        queryParams.set('search', searchQuery);
-      }
-
-      // Fetch emails from API
-      const response = await fetch(`/api/courrier?${queryParams.toString()}`);
-
-      if (!response.ok) {
-        const errorData = await response.json();
-        throw new Error(errorData.error || 'Failed to fetch emails');
-      }
-
-      const data: EmailListResult = await response.json();
-
-      // Update state with the fetched data
-      if (isLoadMore) {
-        setEmails(prev => {
-          // Create a Set of existing email IDs to avoid duplicates
-          const existingIds = new Set(prev.map(email => email.id));
-          // Filter out any duplicates before appending
-          const newEmails = data.emails.filter((email: Email) => !existingIds.has(email.id));
-
-          // Log pagination info
-          console.log(`Added ${newEmails.length} fetched emails from page ${currentRequestPage} to existing ${prev.length} emails (${Date.now() - requestStartTime}ms)`);
-
-          // Combine emails and sort them by date (newest first)
-          const combinedEmails = [...prev, ...newEmails];
-          return combinedEmails.sort((a: Email, b: Email) => new Date(b.date).getTime() - new Date(a.date).getTime());
-        });
-      } else {
-        // Ensure we always set an array even if API returns invalid data
-        console.log(`Setting ${data.emails?.length || 0} fetched emails for page ${currentRequestPage}`);
-        // Ensure emails are sorted by date (newest first)
-        if (Array.isArray(data.emails)) {
-          setEmails(data.emails.sort((a: Email, b: Email) => new Date(b.date).getTime() - new Date(a.date).getTime()));
+      try {
+        // Get emails for the current folder
+        const response = await getEmails(session.user.id, folder, pageToLoad);
+
+        // Update state based on response
+        if (resetList) {
+          setEmails(response.emails);
        } else {
-          setEmails([]);
+          setEmails(prev => [...prev, ...response.emails]);
        }
+
+        setTotalEmails(response.total);
+        setTotalPages(response.totalPages);
+        setHasMore(response.hasMore);
+        setPage(pageToLoad);
+
+        if (folder !== currentFolder) {
+          setCurrentFolder(folder);
+        }
+
+        // Clear errors
+        setError(null);
+      } catch (error) {
+        console.error('Error loading emails:', error);
+        setError(error instanceof Error ? error.message : 'Failed to load emails');
+
+        toast({
+          variant: "destructive",
+          title: "Error",
+          description: "Failed to load emails"
+        });
+      } finally {
+        setIsLoading(false);
+        // Clear the loading request tracker
+        loadingRequestsRef.current.delete(requestKey);
      }
-
-      setTotalEmails(data.totalEmails);
-      setTotalPages(data.totalPages);
-
-      // Update available mailboxes if provided
-      if (data.mailboxes && data.mailboxes.length > 0) {
-        setMailboxes(data.mailboxes);
-      }
-
-      // Clear selection if not loading more
-      if (!isLoadMore) {
-        setSelectedEmail(null);
-        setSelectedEmailIds([]);
-      }
-    } catch (err) {
-      console.error(`Error loading emails for page ${currentRequestPage}:`, err);
-      // Set emails to empty array on error to prevent runtime issues
-      if (!isLoadMore) {
-        setEmails([]);
-      }
-      setError(err instanceof Error ? err.message : 'Failed to load emails');
-      toast({
-        variant: "destructive",
-        title: "Error",
-        description: err instanceof Error ? err.message : 'Failed to load emails'
-      });
-    } finally {
-      console.log(`Completed loading emails for page ${currentRequestPage} (${Date.now() - requestStartTime}ms)`);
-      setIsLoading(false);
-    }
-  }, [currentFolder, page, perPage, searchQuery, session?.user?.id, toast]);
+    },
+    [session?.user?.id, currentFolder, page, isLoading, toast]
+  );

   // Load emails when folder or page changes
   useEffect(() => {
@@ -252,7 +152,7 @@ export const useCourrier = () => {

     // Add a small delay to prevent rapid consecutive loads
     const loadTimer = setTimeout(() => {
-      loadEmails(isLoadingMore);
+      loadEmails(currentFolder, page, false, false);
     }, 50);

     // If we're loading the first page, publish an event to reset scroll position
@@ -551,6 +451,75 @@ export const useCourrier = () => {
     return formatEmailForReplyOrForward(email, type);
   }, []);

+  /**
+   * Fetches emails from the API
+   */
+  const getEmails = async (userId: string, folder: string, page: number): Promise<EmailResponse> => {
+    // Build query params
+    const queryParams = new URLSearchParams({
+      folder: folder,
+      page: page.toString(),
+      perPage: perPage.toString()
+    });
+
+    if (searchQuery) {
+      queryParams.set('search', searchQuery);
+    }
+
+    // Fetch emails from API
+    const response = await fetch(`/api/courrier?${queryParams.toString()}`);
+
+    if (!response.ok) {
+      const errorData = await response.json();
+      throw new Error(errorData.error || 'Failed to fetch emails');
+    }
+
+    const data = await response.json();
+    return {
+      emails: Array.isArray(data.emails) ? data.emails : [],
+      total: data.totalEmails || 0,
+      totalPages: data.totalPages || 0,
+      hasMore: data.totalPages > page
+    };
+  };
+
+  /**
+   * Prefetches emails for a specific folder
+   */
+  const prefetchFolderEmails = async (userId: string, folder: string, startPage: number, endPage: number) => {
+    try {
+      for (let p = startPage; p <= endPage; p++) {
+        await getEmails(userId, folder, p);
+        // Add small delay between requests
+        if (p < endPage) await new Promise(r => setTimeout(r, 500));
+      }
+    } catch (error) {
+      console.error("Error prefetching emails:", error);
+    }
+  };
+
+  // Update loadMoreEmails
+  const loadMoreEmails = useCallback(async () => {
+    if (isLoading || !hasMore || !session) {
+      return;
+    }
+
+    // Don't allow loading more if we've loaded too recently
+    const now = Date.now();
+    const lastLoadTime = loadMoreRef.current || 0;
+    if (now - lastLoadTime < 1000) { // Throttle to once per second
+      console.log('Throttling loadMoreEmails - too many requests');
+      return;
+    }
+
+    // Track when we last attempted to load more
+    loadMoreRef.current = now;
+
+    // Load the next page
+    console.log(`Loading more emails for ${currentFolder}, page ${page + 1}`);
+    return loadEmails(currentFolder, page + 1, false, false);
+  }, [isLoading, hasMore, session, currentFolder, page, loadEmails]);
+
   // Return all the functionality and state values
   return {
     // Data
@@ -568,6 +537,7 @@ export const useCourrier = () => {
     perPage,
     totalEmails,
     totalPages,
+    hasMore,

     // Functions
     loadEmails,
@@ -584,5 +554,6 @@ export const useCourrier = () => {
     setPage,
     setPerPage,
     setSearchQuery,
+    loadMoreEmails,
   };
 };
\ No newline at end of file
diff --git a/lib/services/prefetch-service.ts b/lib/services/prefetch-service.ts
index 3dfd4b9f..779a6000 100644
--- a/lib/services/prefetch-service.ts
+++ b/lib/services/prefetch-service.ts
@@ -76,21 +76,37 @@ export async function refreshEmailsInBackground(
   // Track ongoing refreshes to avoid duplicates
   const refreshKey = `${userId}:${folder}:${page}`;

-  // Use a higher priority for inbox and small page numbers
-  const priority = folder === 'INBOX' && page <= 2 ? 100 : 300;
+  // Only use small timeouts for INBOX, other folders can wait longer
+  const priority = folder.toUpperCase() === 'INBOX' && page <= 2 ? 100 : 500;

   // Use setTimeout to ensure this runs after current execution context
   setTimeout(async () => {
     try {
+      // Skip if we've recently refreshed this data (use a module-scope cache)
+      // We don't need to refresh the same data too frequently
+      const cacheKey = `${userId}:${folder}:${page}:refreshed`;
+      const lastRefreshed = (window as any)[cacheKey] || 0;
+      const now = Date.now();
+
+      // Don't refresh if it's been less than 30 seconds for inbox, 2 minutes for other folders
+      const minInterval = folder.toUpperCase() === 'INBOX' ? 30000 : 120000;
+      if (now - lastRefreshed < minInterval) {
+        console.log(`Skipping refresh for ${folder}:${page} - last refreshed ${Math.round((now - lastRefreshed)/1000)}s ago`);
+        return;
+      }
+
       console.log(`Background refresh for ${userId}:${folder}:${page}:${perPage}`);
       const freshData = await getEmails(userId, folder, page, perPage);
       console.log(`Background refresh completed for ${userId}:${folder}:${page} with ${freshData.emails.length} emails`);

-      // If it's the inbox and there's a next page, prefetch that too
-      if (folder === 'INBOX' && page === 1) {
+      // Mark as refreshed
+      (window as any)[cacheKey] = now;
+
+      // For inbox first page only, prefetch page 2 but with a longer delay
+      if (folder.toUpperCase() === 'INBOX' && page === 1) {
         setTimeout(() => {
           refreshEmailsInBackground(userId, folder, 2, perPage);
-        }, 500);
+        }, 1000);
       }
     } catch (error) {
       console.error('Background refresh error:', error);
@@ -188,33 +204,57 @@ export async function prefetchFolderEmails(
   try {
     console.log(`Prefetching ${pages} pages of emails for folder ${folder} starting from page ${startPage}`);

+    // Limit the number of pages to prefetch to reduce server load
+    const maxPages = 3;
+    const actualPages = Math.min(pages, maxPages);
+
     // Calculate the range of pages to prefetch
     const pagesToFetch = Array.from(
-      { length: pages },
+      { length: actualPages },
       (_, i) => startPage + i
     );

     console.log(`Will prefetch pages: ${pagesToFetch.join(', ')}`);

-    // Fetch multiple pages in parallel, but with a slight delay between them to avoid overwhelming the server
-    for (let i = 0; i < pagesToFetch.length; i++) {
-      const page = pagesToFetch[i];
+    // Fetch pages sequentially with delays to avoid overwhelming the server
+    // Focus on the first page first, which is most important
+    const fetchPage = async (pageIndex: number) => {
+      if (pageIndex >= pagesToFetch.length) return;

-      // Use a delay for all but the first page
-      setTimeout(() => {
-        getEmails(userId, folder, page, 20)
-          .then(result => {
-            console.log(`Successfully prefetched and cached page ${page} of ${folder} with ${result.emails.length} emails`);
-            return result;
-          })
-          .catch(err => {
-            console.error(`Error prefetching page ${page} of ${folder}:`, err);
-            return null;
-          });
-      }, i * 300); // Stagger prefetches with 300ms between them
-    }
+      const page = pagesToFetch[pageIndex];
+
+      try {
+        // Skip if we've recently prefetched this page
+        const cacheKey = `${userId}:${folder}:${page}:prefetched`;
+        const lastPrefetched = (window as any)[cacheKey] || 0;
+        const now = Date.now();
+
+        // Don't prefetch if it's been less than 1 minute
+        if (now - lastPrefetched < 60000) {
+          console.log(`Skipping prefetch for ${folder}:${page} - prefetched ${Math.round((now - lastPrefetched)/1000)}s ago`);
+          // Continue with next page
+          setTimeout(() => fetchPage(pageIndex + 1), 100);
+          return;
+        }
+
+        console.log(`Prefetching page ${page} of ${folder}`);
+        const result = await getEmails(userId, folder, page, 20);
+        console.log(`Successfully prefetched page ${page} of ${folder} with ${result.emails.length} emails`);
+
+        // Mark as prefetched
+        (window as any)[cacheKey] = now;
+
+        // Fetch next page with delay
+        setTimeout(() => fetchPage(pageIndex + 1), 500);
+      } catch (err) {
+        console.error(`Error prefetching page ${page} of ${folder}:`, err);
+        // Try next page anyway after a longer delay
+        setTimeout(() => fetchPage(pageIndex + 1), 1000);
+      }
+    };

-    console.log(`Scheduled prefetching for ${pages} pages of ${folder}`);
+    // Start fetching the first page
+    fetchPage(0);
   } catch (error) {
     console.error(`Error prefetching folder ${folder}:`, error);
   }
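The page.tsx hunk replaces a per-load fetch of `/api/redis/status` with a localStorage cache that expires after two minutes. Below is a minimal standalone sketch of that pattern; `getRedisStatusCached` is an illustrative name (the patch inlines the logic in the page component), while the endpoint, cache key, and TTL are taken from the diff.

```ts
// Sketch only: cached Redis status check, behaviourally equivalent to the hunk above.
interface RedisStatus {
  ready: boolean;
}

const REDIS_STATUS_CACHE_KEY = 'neah_redis_status_check';
const REDIS_STATUS_TTL_MS = 2 * 60 * 1000; // 2 minutes, as in the diff

export async function getRedisStatusCached(): Promise<RedisStatus> {
  // 1. Use a fresh cached "ready" result if one exists.
  const cached = localStorage.getItem(REDIS_STATUS_CACHE_KEY);
  if (cached) {
    try {
      const { status, timestamp } = JSON.parse(cached) as { status: RedisStatus; timestamp: number };
      if (status.ready && Date.now() - timestamp < REDIS_STATUS_TTL_MS) {
        return status;
      }
    } catch {
      // Corrupt cache entry: fall through and fetch a fresh status.
    }
  }

  // 2. Otherwise hit the API and cache whatever comes back.
  const status: RedisStatus = await fetch('/api/redis/status')
    .then(res => res.json())
    .catch(() => ({ ready: false }));

  localStorage.setItem(REDIS_STATUS_CACHE_KEY, JSON.stringify({ status, timestamp: Date.now() }));
  return status;
}
```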
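The EmailList.tsx changes combine three guards against the "load more" loop: a 300 px near-bottom check, a 2-second throttle keyed on the last load time, and a short debounce before `onLoadMore` fires. A compact sketch of that guard as a reusable hook follows; `useNearBottomLoader` and its option names are hypothetical, the numeric defaults mirror the diff.

```ts
import { useCallback, useRef } from 'react';

// Sketch of the throttle-plus-debounce scroll guard, assuming the values used in the diff.
export function useNearBottomLoader(
  onLoadMore: () => void,
  { thresholdPx = 300, throttleMs = 2000, debounceMs = 200 } = {}
) {
  const lastLoadRef = useRef(0);
  const debounceRef = useRef<ReturnType<typeof setTimeout> | null>(null);

  return useCallback(
    (el: HTMLElement) => {
      // Only react when the scroll position is within thresholdPx of the bottom.
      const nearBottom = el.scrollHeight - el.scrollTop - el.clientHeight < thresholdPx;
      const now = Date.now();
      if (!nearBottom || now - lastLoadRef.current < throttleMs) return;

      // Record the attempt immediately (throttle), then debounce the actual call.
      lastLoadRef.current = now;
      if (debounceRef.current) clearTimeout(debounceRef.current);
      debounceRef.current = setTimeout(onLoadMore, debounceMs);
    },
    [onLoadMore, thresholdPx, throttleMs, debounceMs]
  );
}
```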
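In use-courrier.ts, `loadEmails` now tracks in-flight requests in a ref keyed by `${folder}_${pageToLoad}` and normalises the API payload into the new `EmailResponse` shape, with `hasMore` derived from `totalPages > page`. The sketch below shows the same dedupe-and-normalise idea outside the hook; `fetchEmailPage` and the module-level `inFlight` set are illustrative, the query parameters and field mapping follow the diff.

```ts
// Sketch only: deduplicated fetch of one page of emails from /api/courrier.
interface EmailPage {
  emails: unknown[];
  total: number;
  totalPages: number;
  hasMore: boolean;
}

const inFlight = new Set<string>();

export async function fetchEmailPage(folder: string, page: number, perPage = 20): Promise<EmailPage | null> {
  const key = `${folder}_${page}`;
  if (inFlight.has(key)) return null; // A request for this folder/page is already running.
  inFlight.add(key);
  try {
    const params = new URLSearchParams({ folder, page: String(page), perPage: String(perPage) });
    const res = await fetch(`/api/courrier?${params.toString()}`);
    if (!res.ok) throw new Error('Failed to fetch emails');
    const data = await res.json();
    // Normalise the raw payload into the EmailResponse-style shape used by the hook.
    return {
      emails: Array.isArray(data.emails) ? data.emails : [],
      total: data.totalEmails || 0,
      totalPages: data.totalPages || 0,
      hasMore: (data.totalPages || 0) > page,
    };
  } finally {
    inFlight.delete(key);
  }
}
```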
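The prefetch-service hunks throttle background refreshes and prefetches by storing "last refreshed" timestamps on `(window as any)`, which exists only in the browser and is invisible to TypeScript. A module-level `Map` gives equivalent throttling without the global; this is an alternative sketch, not what the patch does, and `shouldRefresh`/`markRefreshed` are made-up names. The 30 s / 120 s intervals match the diff.

```ts
// Alternative sketch to the (window as any)[cacheKey] timestamps: a module-level Map.
const lastRefreshAt = new Map<string, number>();

const makeKey = (userId: string, folder: string, page: number): string =>
  `${userId}:${folder}:${page}`;

export function shouldRefresh(userId: string, folder: string, page: number): boolean {
  // Same intervals as the diff: 30 s for INBOX, 2 minutes for other folders.
  const minIntervalMs = folder.toUpperCase() === 'INBOX' ? 30_000 : 120_000;
  const last = lastRefreshAt.get(makeKey(userId, folder, page)) ?? 0;
  return Date.now() - last >= minIntervalMs;
}

export function markRefreshed(userId: string, folder: string, page: number): void {
  lastRefreshAt.set(makeKey(userId, folder, page), Date.now());
}
```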