Foundation

alma 2026-01-16 22:02:57 +01:00
parent c4267a0eeb
commit 7c01525bac
6 changed files with 47 additions and 29 deletions

View File

@@ -117,7 +117,7 @@ export async function GET(req: NextRequest) {
// Exclude "Privée"/"Default" calendars that are not actively synced
if (isPrivateOrDefault && !hasActiveSync) {
logger.debug('[CALENDAR] Filtering out calendar without active sync', {
- calendarId: cal.id,
+ calendarIdHash: Buffer.from(cal.id).toString('base64').slice(0, 12),
calendarName: cal.name,
syncEnabled: cal.syncConfig?.syncEnabled,
hasMailCredential: !!cal.syncConfig?.mailCredential,
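
The same encode-and-truncate pattern recurs at every call site this commit touches, so a shared helper would cut the repetition. Below is a minimal sketch, not part of this commit (the name logIdHash is hypothetical); it also swaps in a real SHA-256 digest, since Buffer.from(id).toString('base64') only re-encodes the identifier — the first 12 base64 characters still expose the first 9 bytes of the raw id — whereas a cryptographic hash is non-reversible while staying deterministic enough to correlate log lines for the same user.

import { createHash } from 'crypto';

// Hypothetical shared helper (not in this commit): derive a short, stable,
// non-reversible pseudonym for an identifier before logging it.
export function logIdHash(id: string): string {
  // SHA-256 then base64url keeps the value grep-friendly; 12 characters of
  // a 256-bit digest are plenty to tell users apart in logs.
  return createHash('sha256').update(id).digest('base64url').slice(0, 12);
}

// Usage, mirroring the call sites touched here:
// logger.debug('[CALENDAR] Filtering out calendar without active sync', {
//   calendarIdHash: logIdHash(cal.id),
// });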

View File

@@ -37,7 +37,9 @@ export async function GET(request: Request) {
const cachedCounts = await redis.get(UNREAD_COUNTS_CACHE_KEY(userId));
if (cachedCounts) {
// Use cached results if available
- logger.debug('[UNREAD_API] Using cached unread counts', { userId });
+ logger.debug('[UNREAD_API] Using cached unread counts', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+ });
// If the cache is about to expire, schedule a background refresh
const ttl = await redis.ttl(UNREAD_COUNTS_CACHE_KEY(userId));
@@ -52,12 +54,14 @@ export async function GET(request: Request) {
);
if (lockAcquired) {
- logger.debug('[UNREAD_API] Scheduling background refresh', { userId });
+ logger.debug('[UNREAD_API] Scheduling background refresh', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+ });
// Fire-and-forget: the promise chain runs the refresh in the background
setTimeout(() => {
refreshUnreadCounts(userId, redis)
.catch(err => logger.error('[UNREAD_API] Background refresh error', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
error: err instanceof Error ? err.message : String(err)
}))
.finally(() => {
@@ -71,7 +75,9 @@ export async function GET(request: Request) {
return NextResponse.json(JSON.parse(cachedCounts));
}
- logger.debug('[UNREAD_API] Cache miss, fetching unread counts', { userId });
+ logger.debug('[UNREAD_API] Cache miss, fetching unread counts', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+ });
// Try to acquire lock to prevent parallel refreshes
const lockAcquired = await redis.set(
@@ -83,7 +89,9 @@ export async function GET(request: Request) {
);
if (!lockAcquired) {
- logger.debug('[UNREAD_API] Another process is refreshing unread counts', { userId });
+ logger.debug('[UNREAD_API] Another process is refreshing unread counts', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+ });
// Return empty counts with short cache time if we can't acquire lock
// The next request will likely get cached data
@@ -123,7 +131,9 @@ export async function GET(request: Request) {
*/
async function refreshUnreadCounts(userId: string, redis: any): Promise<void> {
try {
- logger.debug('[UNREAD_API] Background refresh started', { userId });
+ logger.debug('[UNREAD_API] Background refresh started', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+ });
const unreadCounts = await fetchUnreadCounts(userId);
// Save to cache
@@ -134,10 +144,12 @@ async function refreshUnreadCounts(userId: string, redis: any): Promise<void> {
UNREAD_COUNTS_CACHE_TTL
);
- logger.debug('[UNREAD_API] Background refresh completed', { userId });
+ logger.debug('[UNREAD_API] Background refresh completed', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+ });
} catch (error) {
logger.error('[UNREAD_API] Background refresh failed', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
error: error instanceof Error ? error.message : String(error)
});
throw error;
@@ -157,7 +169,10 @@ async function fetchUnreadCounts(userId: string): Promise<Record<string, Record<
}
});
- logger.debug('[UNREAD_API] Found accounts', { userId, count: accounts.length });
+ logger.debug('[UNREAD_API] Found accounts', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+   count: accounts.length,
+ });
if (accounts.length === 0) {
return { default: {} };
@@ -172,9 +187,8 @@ async function fetchUnreadCounts(userId: string): Promise<Record<string, Record<
try {
// Get IMAP connection for this account
logger.debug('[UNREAD_API] Processing account', {
-   userId,
-   accountId,
-   email: account.email.substring(0, 5) + '***'
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+   accountIdHash: Buffer.from(accountId).toString('base64').slice(0, 12),
});
const client = await getImapConnection(userId, accountId);
unreadCounts[accountId] = {};
@@ -213,7 +227,7 @@ async function fetchUnreadCounts(userId: string): Promise<Record<string, Record<
}
} catch (folderError) {
logger.error('[UNREAD_API] Error getting unread count for folder', {
- accountId,
+ accountIdHash: Buffer.from(accountId).toString('base64').slice(0, 12),
folder,
error: folderError instanceof Error ? folderError.message : String(folderError)
});
@@ -224,7 +238,7 @@ async function fetchUnreadCounts(userId: string): Promise<Record<string, Record<
// Don't close the connection - let the connection pool handle it
} catch (accountError) {
logger.error('[UNREAD_API] Error processing account', {
- accountId,
+ accountIdHash: Buffer.from(accountId).toString('base64').slice(0, 12),
error: accountError instanceof Error ? accountError.message : String(accountError)
});
}
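
Stepping back, this route implements a stale-while-revalidate cache: serve whatever Redis holds, and when the entry nears expiry, let exactly one process rebuild it in the background behind a Redis NX lock. The following condensed sketch shows that control flow; only UNREAD_COUNTS_CACHE_KEY, refreshUnreadCounts, and the logger's module appear in the diff, while the lock key, TTL values, and ioredis-style set signature are assumptions:

// Condensed sketch of this route's flow — not the actual module.
const REFRESH_TTL_THRESHOLD = 60;   // assumed: refresh when < 60 s remain
const REFRESH_LOCK_TTL = 30;        // assumed: lock expires even if refresh dies

async function getUnreadCountsCached(userId: string, redis: any) {
  const key = UNREAD_COUNTS_CACHE_KEY(userId);
  const cached = await redis.get(key);
  if (cached) {
    // Near expiry? Schedule a background rebuild, guarded by NX so only
    // one process refreshes at a time, and answer from cache right away.
    if ((await redis.ttl(key)) < REFRESH_TTL_THRESHOLD) {
      const lock = await redis.set(`${key}:lock`, '1', 'EX', REFRESH_LOCK_TTL, 'NX');
      if (lock) setTimeout(() => { refreshUnreadCounts(userId, redis).catch(() => {}); }, 0);
    }
    return JSON.parse(cached);
  }
  // Cache miss: the same NX lock keeps parallel requests off the IMAP servers.
  const lock = await redis.set(`${key}:lock`, '1', 'EX', REFRESH_LOCK_TTL, 'NX');
  if (!lock) return {};             // another process is refreshing; serve empty counts
  await refreshUnreadCounts(userId, redis);
  return JSON.parse(await redis.get(key));
}

Returning empty counts when the lock is held (as the route's own comment explains) keeps request latency flat at the cost of a briefly empty badge; the next request will likely hit the freshly rebuilt cache.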

View File

@@ -45,7 +45,7 @@ export async function POST(request: Request) {
await registry.recordCount(session.user.id, source, count, items);
logger.debug('[NOTIFICATIONS_UPDATE] Count updated', {
- userId: session.user.id,
+ userIdHash: Buffer.from(session.user.id).toString('base64').slice(0, 12),
source,
count,
itemsCount: items?.length || 0,

View File

@@ -67,7 +67,7 @@ export async function GET(request: Request) {
const cachedMessages = await getCachedMessagesData(session.user.id);
if (cachedMessages) {
logger.debug("[ROCKET_CHAT] Using cached messages data", {
- userId: session.user.id,
+ emailHash: Buffer.from(session.user.email.toLowerCase()).toString('base64').slice(0, 12),
});
return NextResponse.json(cachedMessages);
}

View File

@@ -102,7 +102,7 @@ export async function GET(request: Request) {
const tokenData = await createTokenResponse.json();
logger.debug('[ROCKET_CHAT_USER_TOKEN] Token created', {
- userId: currentUser._id,
+ emailHash: Buffer.from(session.user.email.toLowerCase()).toString('base64').slice(0, 12),
hasAuthToken: !!tokenData.data?.authToken,
tokenLength: tokenData.data?.authToken?.length,
});

View File

@@ -56,7 +56,7 @@ export class NotificationRegistry {
}
} catch (error) {
logger.error('[NOTIFICATION_REGISTRY] Error reading cache', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
error: error instanceof Error ? error.message : String(error),
});
}
@@ -116,7 +116,7 @@ export class NotificationRegistry {
);
logger.debug('[NOTIFICATION_REGISTRY] Count updated', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
source,
count,
totalUnread: currentCount.unread,
@@ -124,7 +124,7 @@ export class NotificationRegistry {
});
} catch (error) {
logger.error('[NOTIFICATION_REGISTRY] Error updating cache', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
error: error instanceof Error ? error.message : String(error),
});
}
@@ -142,13 +142,13 @@ export class NotificationRegistry {
);
logger.debug('[NOTIFICATION_REGISTRY] Items stored', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
source,
itemsCount: items.length,
});
} catch (error) {
logger.error('[NOTIFICATION_REGISTRY] Error storing items', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
source,
error: error instanceof Error ? error.message : String(error),
});
@@ -168,7 +168,7 @@ export class NotificationRegistry {
if (cached) {
const count = JSON.parse(cached);
logger.debug('[NOTIFICATION_REGISTRY] Count retrieved from cache', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
totalUnread: count.unread,
sources: Object.keys(count.sources),
});
@@ -176,13 +176,15 @@ export class NotificationRegistry {
}
} catch (error) {
logger.error('[NOTIFICATION_REGISTRY] Error reading cache', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
error: error instanceof Error ? error.message : String(error),
});
}
// If no cache, return empty count
- logger.debug('[NOTIFICATION_REGISTRY] No cache found, returning empty count', { userId });
+ logger.debug('[NOTIFICATION_REGISTRY] No cache found, returning empty count', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+ });
return {
total: 0,
unread: 0,
@@ -247,7 +249,7 @@ export class NotificationRegistry {
allItems.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
logger.debug('[NOTIFICATION_REGISTRY] Notifications retrieved', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
total: allItems.length,
limit,
returned: Math.min(allItems.length, limit),
@@ -274,10 +276,12 @@ export class NotificationRegistry {
await redis.del(itemsKey);
}
- logger.debug('[NOTIFICATION_REGISTRY] Cache invalidated', { userId });
+ logger.debug('[NOTIFICATION_REGISTRY] Cache invalidated', {
+   userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
+ });
} catch (error) {
logger.error('[NOTIFICATION_REGISTRY] Error invalidating cache', {
- userId,
+ userIdHash: Buffer.from(userId).toString('base64').slice(0, 12),
error: error instanceof Error ? error.message : String(error),
});
}
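
One practical consequence of the encode-and-truncate scheme when reading these logs: the pseudonym is deterministic, so the same user produces the same userIdHash across [UNREAD_API], [NOTIFICATIONS_UPDATE], and [NOTIFICATION_REGISTRY] lines, and you can precompute it for a known id while debugging. A small example, using an illustrative id rather than a real one:

// Debugging aid: compute the logged pseudonym for a known id to grep for it.
const pseudonym = Buffer.from('user_123').toString('base64').slice(0, 12);
// 'dXNlcl8xMjM=' — note that for short ids the entire base64 encoding fits
// within 12 characters, so this "hash" is trivially reversible; a digest
// such as the SHA-256 sketch earlier avoids that.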