courrier: restore compose flow for multiple accounts

This commit is contained in:
alma 2025-04-30 14:59:53 +02:00
parent aabf043b60
commit 60cb617d7f

View File

@ -15,6 +15,8 @@ import {
} from '@/lib/services/prefetch-service';
import { Email, EmailData } from './use-courrier';
import { formatEmailForReplyOrForward } from '@/lib/utils/email-formatter';
import { createClient } from 'redis';
import { createImapConnection } from '@/lib/services/imap-service';
// Add a global dispatcher for compatibility with older code
// This is a temporary solution until we fully migrate to the reducer pattern
@ -25,6 +27,45 @@ declare global {
}
}
// Create a dedicated Redis service module
// NOTE(review): the client object is created at module load, but no network
// connection is opened here — connect() is deferred to getRedisClient().
const redisClient = createClient({
url: process.env.REDIS_URL,
socket: {
// Linear backoff between reconnect attempts: 50ms per retry, capped at 1s.
reconnectStrategy: (retries) => Math.min(retries * 50, 1000)
}
});
// Lazily establish one shared Redis connection. The in-flight promise is
// cached so concurrent callers share a single connect() attempt; on failure
// the cache is cleared so a later call can retry from scratch.
let clientPromise = null;

/**
 * Resolve to the connected shared Redis client, connecting on first use.
 * @returns {Promise<object>} the connected client created above
 */
export async function getRedisClient() {
  if (clientPromise === null) {
    const connecting = redisClient.connect().then(() => redisClient);
    // Clear the cache on failure so the next caller retries; the rejection
    // itself still propagates to whoever awaited the returned promise.
    connecting.catch(() => {
      clientPromise = null;
    });
    clientPromise = connecting;
  }
  return clientPromise;
}
// Server-side IMAP connection pool, keyed by `${userId}:${accountId}`.
// The map stores connection *promises* (not resolved connections) so that
// concurrent callers for the same key share one in-flight connection
// attempt. The original stored resolved connections, which let two
// concurrent cache misses each open a connection — the second `set`
// overwrote the first entry and leaked that IMAP connection.
const imapConnections = new Map();

/**
 * Return a live IMAP connection for the given user/account pair, reusing a
 * pooled connection when it is still alive and reconnecting otherwise.
 *
 * @param {string} userId    user that owns the mail account
 * @param {string} accountId mail account identifier
 * @returns {Promise<object>} connection as produced by createImapConnection
 * @throws propagates connection errors from createImapConnection
 */
async function getImapConnection(userId, accountId) {
  const key = `${userId}:${accountId}`;
  const pooled = imapConnections.get(key);
  if (pooled) {
    try {
      const conn = await pooled;
      // Reuse only if the pooled connection is still alive.
      if (conn.isConnected()) {
        return conn;
      }
    } catch {
      // A previously pooled attempt failed; fall through and reconnect.
    }
    imapConnections.delete(key);
  }
  // Publish the pending attempt synchronously so concurrent callers for the
  // same key join this attempt instead of opening their own connection.
  const pending = createImapConnection(userId, accountId);
  imapConnections.set(key, pending);
  try {
    return await pending;
  } catch (err) {
    // Never leave a failed attempt in the pool.
    imapConnections.delete(key);
    throw err;
  }
}
export const useEmailState = () => {
const [state, dispatch] = useReducer(emailReducer, initialState);
const { data: session } = useSession();
@ -622,75 +663,16 @@ export const useEmailState = () => {
// ---- Interior of the fetchUnreadCounts useCallback; its opening
// `useCallback(async () => {` sits above this excerpt. ----
// Don't fetch if we're already fetching
if (state.isLoadingUnreadCounts) return;
// Skip fetching if an email was viewed recently (within last 2 seconds)
// Implement exponential backoff with higher thresholds
// Current implementation does this but needs tuning
const now = Date.now();
const lastViewedTimestamp = (window as any).__lastViewedEmailTimestamp || 0;
// NOTE(review): the next two `if` lines look like the removed (-) and added
// (+) sides of a diff with the markers stripped — only one should exist in
// the real file; as rendered here the braces do not balance. Confirm against
// the repository.
if (lastViewedTimestamp && now - lastViewedTimestamp < 2000) {
if (lastViewedTimestamp && now - lastViewedTimestamp < 5000) { // Increase from 2000ms
return;
}
// Reset failure tracking if it's been more than 1 minute since last failure
// FIXME(review): __unreadCountFailures is incremented as a *count* below,
// but here it is subtracted from Date.now() as if it were a *timestamp*.
// `now - count > 60000` is effectively always true, so the counter resets
// on nearly every call. Track the count and the last-failure time in two
// separate fields.
if ((window as any).__unreadCountFailures && now - (window as any).__unreadCountFailures > 60000) {
(window as any).__unreadCountFailures = 0;
}
// Exponential backoff for failures
if ((window as any).__unreadCountFailures > 0) {
const backoffMs = Math.min(30000, 1000 * Math.pow(2, (window as any).__unreadCountFailures - 1));
// FIXME(review): same count-vs-timestamp confusion as above — this compares
// `now - failureCount` to backoffMs, which never gates anything in practice.
if ((window as any).__unreadCountFailures && now - (window as any).__unreadCountFailures < backoffMs) {
return;
}
}
try {
dispatch({ type: 'SET_LOADING_UNREAD_COUNTS', payload: true });
const timeBeforeCall = performance.now();
logEmailOp('FETCH_UNREAD', 'Fetching unread counts from API');
const response = await fetch('/api/courrier/unread-counts', {
method: 'GET',
headers: { 'Content-Type': 'application/json' }
});
if (!response.ok) {
// If request failed, increment failure count but cap it
// FIXME(review): precedence bug — `x || 0 + 1` parses as `x || (0 + 1)`,
// so once set the counter never advances past its current value (and a
// fresh failure yields 1 forever). Intended: `(x || 0) + 1`, capped at 10.
(window as any).__unreadCountFailures = Math.min((window as any).__unreadCountFailures || 0 + 1, 10);
const failures = (window as any).__unreadCountFailures;
if (failures > 3) {
// After 3 failures, slow down requests with exponential backoff
const backoffTime = Math.min(Math.pow(2, failures - 3) * 1000, 30000); // Max 30 seconds
logEmailOp('FETCH_UNREAD', `API failure #${failures}, backing off for ${backoffTime}ms`);
// Schedule next attempt with backoff
// Only one retry timer is kept alive at a time.
if ((window as any).__failureBackoffTimer) {
clearTimeout((window as any).__failureBackoffTimer);
}
// NOTE(review): this schedules a self-retry that bypasses React's
// lifecycle; the timer is not cleared on unmount — verify callers.
(window as any).__failureBackoffTimer = setTimeout(() => {
fetchUnreadCounts();
}, backoffTime);
// Thrown into the catch below, which only logs it.
throw new Error(`Failed to fetch unread counts: ${response.status}`);
}
} else {
// Reset failure counter on success
(window as any).__unreadCountFailures = 0;
const data = await response.json();
const timeAfterCall = performance.now();
logEmailOp('FETCH_UNREAD', `Received unread counts in ${(timeAfterCall - timeBeforeCall).toFixed(2)}ms`, data);
// Guard against a non-object payload before dispatching.
if (data && typeof data === 'object') {
dispatch({ type: 'SET_UNREAD_COUNTS', payload: data });
}
}
} catch (error) {
console.error('Error fetching unread counts:', error);
} finally {
dispatch({ type: 'SET_LOADING_UNREAD_COUNTS', payload: false });
}
// Cache unread counts with longer TTL (30-60 minutes)
// Implement batch fetching for all folders at once
}, [dispatch, session?.user, state.isLoadingUnreadCounts, logEmailOp]);
// Calculate and update unread counts
@ -800,4 +782,12 @@ export const useEmailState = () => {
fetchUnreadCounts,
viewEmail
};
};
};
/**
 * Store one page of emails in Redis for fast re-reads.
 *
 * @param {string} userId    owner of the mailbox
 * @param {string} folder    IMAP folder name
 * @param {string} accountId mail account identifier
 * @param {number} page      page number used when fetching
 * @param {number} perPage   page size used when fetching
 * @param {Array}  emails    serializable email objects to cache
 */
async function cacheEmails(userId, folder, accountId, page, perPage, emails) {
  const redis = await getRedisClient();
  const cacheKey = [userId, accountId, folder, page, perPage].join(':');
  // 15-minute TTL (900s) keeps pages reasonably fresh without re-hitting IMAP.
  const TTL_SECONDS = 900;
  await redis.setEx(cacheKey, TTL_SECONDS, JSON.stringify(emails));
}