Refactor Logging BIG

alma 2026-01-07 11:04:19 +01:00
parent 9ee441773c
commit fbb8eac162
6 changed files with 150 additions and 65 deletions

View File

@ -3,6 +3,7 @@ import { getServerSession } from "next-auth/next";
import { authOptions } from "@/app/api/auth/options";
import { prisma } from "@/lib/prisma";
import { getCachedCalendarData, cacheCalendarData } from "@/lib/redis";
import { logger } from "@/lib/logger";
/**
* Handles the GET request to retrieve calendars for the authenticated user.
@ -34,13 +35,18 @@ export async function GET(req: NextRequest) {
if (!forceRefresh) {
const cachedData = await getCachedCalendarData(session.user.id);
if (cachedData) {
console.log(`Using cached calendar data for user ${session.user.id}`);
logger.debug('[CALENDAR] Using cached calendar data', {
userId: session.user.id,
calendarCount: cachedData.length,
});
return NextResponse.json(cachedData);
}
}
// If no cache or forcing refresh, fetch from database
console.log(`Fetching calendar data from database for user ${session.user.id}`);
logger.debug('[CALENDAR] Fetching calendar data from database', {
userId: session.user.id,
});
const calendars = await prisma.calendar.findMany({
where: {
userId: session.user.id,
@ -57,14 +63,19 @@ export async function GET(req: NextRequest) {
},
});
console.log("Fetched calendars with events:", calendars);
logger.debug('[CALENDAR] Fetched calendars with events', {
userId: session.user.id,
count: calendars.length,
});
// Cache the results
await cacheCalendarData(session.user.id, calendars);
return NextResponse.json(calendars);
} catch (error) {
console.error("Erreur lors de la récupération des calendriers:", error);
logger.error('[CALENDAR] Erreur lors de la récupération des calendriers', {
error: error instanceof Error ? error.message : String(error),
});
return NextResponse.json({ error: "Erreur serveur" }, { status: 500 });
}
}
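Every file touched by this commit imports logger from @/lib/logger, which is not one of the six files shown in this diff. For reference, a minimal sketch of such a module is given below, assuming a level-gated wrapper around console whose debug level is silenced in production; the actual implementation in the repository may differ.

// lib/logger.ts: illustrative sketch only; the real @/lib/logger may differ.
type LogMeta = Record<string, unknown>;

const isProduction = process.env.NODE_ENV === 'production';

export const logger = {
  debug(message: string, meta?: LogMeta) {
    // Debug output is development-only noise, so keep it out of production logs.
    if (!isProduction) console.debug(message, meta ?? {});
  },
  warn(message: string, meta?: LogMeta) {
    console.warn(message, meta ?? {});
  },
  error(message: string, meta?: LogMeta) {
    console.error(message, meta ?? {});
  },
};

This matches the call shape used throughout the commit: a static, tag-prefixed message plus an optional structured metadata object, for example logger.debug('[CALENDAR] Using cached calendar data', { userId }).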

View File

@ -5,6 +5,7 @@ import { getMailboxes } from '@/lib/services/email-service';
import { getRedisClient } from '@/lib/redis';
import { getImapConnection } from '@/lib/services/email-service';
import { prisma } from '@/lib/prisma';
import { logger } from '@/lib/logger';
import bcrypt from 'bcryptjs';
// Define extended MailCredentials type
@ -94,16 +95,16 @@ export async function GET() {
// Get Redis connection first to ensure it's available
const redis = getRedisClient();
if (!redis) {
console.error('Redis connection failed');
logger.error('[COURRIER_SESSION] Redis connection failed');
return NextResponse.json({ error: 'Redis connection failed' }, { status: 500 });
}
// Get session with detailed logging
console.log('Attempting to get server session...');
logger.debug('[COURRIER_SESSION] Attempting to get server session');
const session = await getServerSession(authOptions);
if (!session) {
console.error('No session found');
logger.error('[COURRIER_SESSION] No session found');
return NextResponse.json({
authenticated: false,
error: 'No session found'
@ -111,7 +112,7 @@ export async function GET() {
}
if (!session.user) {
console.error('No user in session');
logger.error('[COURRIER_SESSION] No user in session');
return NextResponse.json({
authenticated: false,
error: 'No user in session'
@ -119,24 +120,26 @@ export async function GET() {
}
if (!session.user.id) {
console.error('No user ID in session');
logger.error('[COURRIER_SESSION] No user ID in session');
return NextResponse.json({
authenticated: false,
error: 'No user ID in session'
}, { status: 401 });
}
console.log('Session validated successfully:', {
logger.debug('[COURRIER_SESSION] Session validated successfully', {
userId: session.user.id,
email: session.user.email,
name: session.user.name
hasEmail: !!session.user.email,
hasName: !!session.user.name,
});
// Ensure user exists in database (create if missing)
try {
await ensureUserExists(session);
} catch (error) {
console.error(`Error ensuring user exists:`, error);
logger.error('[COURRIER_SESSION] Error ensuring user exists', {
error: error instanceof Error ? error.message : String(error),
});
return NextResponse.json({
authenticated: true,
hasEmailCredentials: false,
@ -146,14 +149,16 @@ export async function GET() {
}
// Get user with their accounts
console.log('Fetching user with ID:', session.user.id);
logger.debug('[COURRIER_SESSION] Fetching user with ID', {
userId: session.user.id,
});
const user = await prisma.user.findUnique({
where: { id: session.user.id },
include: { mailCredentials: true }
});
if (!user) {
console.error('User not found in database after creation attempt');
logger.error('[COURRIER_SESSION] User not found in database after creation attempt');
return NextResponse.json({
authenticated: true,
hasEmailCredentials: false,
@ -164,7 +169,9 @@ export async function GET() {
// Get all accounts for the user
const accounts = (user.mailCredentials || []) as MailCredentials[];
if (accounts.length === 0) {
console.log('No email accounts found for user:', session.user.id);
logger.debug('[COURRIER_SESSION] No email accounts found for user', {
userId: session.user.id,
});
return NextResponse.json({
authenticated: true,
hasEmailCredentials: false,
@ -173,7 +180,11 @@ export async function GET() {
});
}
console.log(`Found ${accounts.length} accounts for user:`, accounts.map(a => a.email));
logger.debug('[COURRIER_SESSION] Found accounts for user', {
userId: session.user.id,
count: accounts.length,
emails: accounts.map(a => a.email),
});
// Fetch folders for each account
const accountsWithFolders = await Promise.all(
@ -184,7 +195,9 @@ export async function GET() {
// Try to get folders from Redis cache first
const cachedFolders = await redis.get(cacheKey);
if (cachedFolders) {
console.log(`Using cached folders for account ${account.email}`);
logger.debug('[COURRIER_SESSION] Using cached folders for account', {
email: account.email,
});
return {
id: account.id,
email: account.email,
@ -195,10 +208,14 @@ export async function GET() {
}
// If not in cache, fetch from IMAP
console.log(`Fetching folders from IMAP for account ${account.email}`);
logger.debug('[COURRIER_SESSION] Fetching folders from IMAP for account', {
email: account.email,
});
const client = await getImapConnection(user.id, account.id);
if (!client) {
console.warn(`Failed to get IMAP connection for account ${account.email}`);
logger.warn('[COURRIER_SESSION] Failed to get IMAP connection for account', {
email: account.email,
});
return {
id: account.id,
email: account.email,
@ -209,7 +226,10 @@ export async function GET() {
}
const folders = await getMailboxes(client);
console.log(`Fetched ${folders.length} folders for account ${account.email}`);
logger.debug('[COURRIER_SESSION] Fetched folders for account', {
email: account.email,
count: folders.length,
});
// Cache the folders in Redis
await redis.set(
@ -227,7 +247,10 @@ export async function GET() {
folders
};
} catch (error) {
console.error(`Error fetching folders for account ${account.id}:`, error);
logger.error('[COURRIER_SESSION] Error fetching folders for account', {
accountId: account.id,
error: error instanceof Error ? error.message : String(error),
});
return {
id: account.id,
email: account.email,
@ -245,7 +268,9 @@ export async function GET() {
allAccounts: accountsWithFolders
});
} catch (error) {
console.error('Error in session route:', error);
logger.error('[COURRIER_SESSION] Error in session route', {
error: error instanceof Error ? error.message : String(error),
});
return NextResponse.json(
{
authenticated: false,

View File

@ -2,6 +2,7 @@ import { getServerSession } from "next-auth/next";
import { authOptions } from "@/app/api/auth/options";
import { NextResponse } from "next/server";
import { createDolibarrUser, checkDolibarrUserExists, deleteDolibarrUser } from "@/lib/dolibarr-api";
import { logger } from "@/lib/logger";
export async function GET() {
const session = await getServerSession(authOptions);
@ -10,9 +11,10 @@ export async function GET() {
return NextResponse.json({ error: "Non autorisé" }, { status: 401 });
}
console.log("Session:", {
accessToken: session.accessToken?.substring(0, 20) + "...",
user: session.user,
logger.debug('[USERS] Session info', {
hasAccessToken: !!session.accessToken,
userId: session.user.id,
email: session.user.email,
});
try {
@ -33,14 +35,16 @@ export async function GET() {
);
const tokenData = await tokenResponse.json();
console.log("Token response:", {
logger.debug('[USERS] Token response summary', {
ok: tokenResponse.ok,
status: tokenResponse.status,
data: tokenData.access_token ? "Token received" : tokenData,
hasAccessToken: !!tokenData.access_token,
});
if (!tokenResponse.ok) {
console.error("Failed to get token:", tokenData);
logger.error('[USERS] Failed to get token', {
status: tokenResponse.status,
});
return NextResponse.json([getCurrentUser(session)]);
}
@ -61,12 +65,9 @@ export async function GET() {
}
const users = await usersResponse.json();
console.log("Raw users data:", users.map((u: any) => ({
id: u.id,
username: u.username,
realm: u.realm,
serviceAccountClientId: u.serviceAccountClientId,
})));
logger.debug('[USERS] Raw users data summary', {
count: users.length,
});
// Filter out service accounts and users from other realms
const filteredUsers = users.filter((user: any) =>
@ -74,7 +75,9 @@ export async function GET() {
(!user.realm || user.realm === process.env.KEYCLOAK_REALM) // Only users from our realm
);
console.log("Filtered users count:", filteredUsers.length);
logger.debug('[USERS] Filtered users count', {
count: filteredUsers.length,
});
// Fetch roles for each user
const usersWithRoles = await Promise.all(filteredUsers.map(async (user: any) => {
@ -98,7 +101,10 @@ export async function GET() {
!['offline_access', 'uma_authorization'].includes(role.name)
)
.map((role: any) => role.name);
console.log(`Roles for user ${user.username}:`, roles);
logger.debug('[USERS] Roles for user', {
username: user.username,
roles,
});
}
return {
@ -125,16 +131,17 @@ export async function GET() {
};
}
}));
console.log("Final users data:", usersWithRoles.map(u => ({
username: u.username,
roles: u.roles,
})));
logger.debug('[USERS] Final users data summary', {
count: usersWithRoles.length,
});
return NextResponse.json(usersWithRoles);
} catch (error) {
console.error("Error:", error);
logger.error('[USERS] Error in GET route', {
error: error instanceof Error ? error.message : String(error),
});
return NextResponse.json([getCurrentUser(session)]);
}
}
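The tokenResponse handled above comes from a request to Keycloak's token endpoint that falls between the hunks shown here. Purely as an illustration, a client-credentials request against that endpoint could look like the sketch below; the grant type and every environment variable except KEYCLOAK_REALM are assumptions, not taken from this route.

// Hypothetical sketch of a Keycloak client-credentials token request; names other
// than KEYCLOAK_REALM are assumptions and may not match the actual route code.
async function getKeycloakAdminToken(baseUrl: string): Promise<string | null> {
  const tokenResponse = await fetch(
    `${baseUrl}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
      body: new URLSearchParams({
        grant_type: 'client_credentials',
        client_id: process.env.KEYCLOAK_CLIENT_ID ?? '',
        client_secret: process.env.KEYCLOAK_CLIENT_SECRET ?? '',
      }),
    }
  );
  const tokenData = await tokenResponse.json();
  return tokenResponse.ok ? tokenData.access_token : null;
}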

View File

@ -15,7 +15,8 @@ export const prisma =
url: env.DATABASE_URL
}
},
log: ['query'],
// Only enable query logging in development to avoid noisy prisma:query output
log: process.env.NODE_ENV === 'production' ? [] : ['query'],
})
if (process.env.NODE_ENV !== 'production') {
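Prisma's log option also accepts 'info', 'warn', and 'error' alongside 'query', so a variant of this change could keep warning and error logs in every environment while restricting query logging to development. The snippet below is a hypothetical alternative, not what this commit does:

// Hypothetical variant: warn/error logs everywhere, query logging only in development.
import { PrismaClient } from '@prisma/client';

const logLevels: ('query' | 'warn' | 'error')[] =
  process.env.NODE_ENV === 'production'
    ? ['warn', 'error']
    : ['query', 'warn', 'error'];

export const prismaClientWithLogs = new PrismaClient({ log: logLevels });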

View File

@ -698,7 +698,8 @@ export async function cacheMessagesData(
try {
await redis.set(key, JSON.stringify(data), 'EX', TTL.MESSAGES);
console.log(`Messages data cached for user ${userId}`);
// Debug-only; safe to be silent in production
// logger.debug('[REDIS] Messages data cached', { userId });
} catch (error) {
console.error(`Error caching messages data for user ${userId}:`, error);
}

View File

@ -9,6 +9,7 @@ import {
getRedisClient,
warmupRedisCache
} from '@/lib/redis';
import { logger } from '@/lib/logger';
// Keep track of ongoing prefetch operations to prevent duplicates
const prefetchInProgress = new Map<string, boolean>();
@ -27,7 +28,7 @@ function shouldPrefetch(userId: string, key: string = 'general'): boolean {
// Check if prefetch is already in progress
if (prefetchInProgress.get(prefetchKey)) {
console.log(`Prefetch already in progress for ${prefetchKey}`);
logger.debug('[PREFETCH] Prefetch already in progress', { prefetchKey });
return false;
}
@ -36,7 +37,10 @@ function shouldPrefetch(userId: string, key: string = 'general'): boolean {
const now = Date.now();
if (now - lastTime < PREFETCH_COOLDOWN_MS) {
console.log(`Prefetch cooldown active for ${prefetchKey}, last was ${Math.round((now - lastTime)/1000)}s ago`);
logger.debug('[PREFETCH] Prefetch cooldown active', {
prefetchKey,
secondsSinceLast: Math.round((now - lastTime) / 1000),
});
return false;
}
@ -68,7 +72,7 @@ export async function getCachedEmailsWithTimeout(
): Promise<any | null> {
// Skip cache if accountId is 'loading-account'
if (accountId === 'loading-account') {
console.log(`Skipping cache for loading account`);
logger.debug('[PREFETCH] Skipping cache for loading account');
return null;
}
@ -78,7 +82,7 @@ export async function getCachedEmailsWithTimeout(
let normalizedFolder: string;
// First, handle the folder format
if (folder.includes(':')) {
if (folder.includes(':')) {
// Extract parts if folder already has a prefix
const parts = folder.split(':');
const folderAccountId = parts[0];
@ -86,24 +90,37 @@ export async function getCachedEmailsWithTimeout(
// CRITICAL FIX: If explicit accountId is provided, it ALWAYS takes precedence
// This ensures account switching works correctly
if (accountId) {
console.log(`[getCachedEmailsWithTimeout] Using provided accountId (${accountId}) over folder prefix (${folderAccountId})`);
effectiveAccountId = accountId;
} else {
effectiveAccountId = folderAccountId;
}
if (accountId) {
logger.debug('[PREFETCH] Using provided accountId over folder prefix', {
accountId,
folderAccountId,
});
effectiveAccountId = accountId;
} else {
effectiveAccountId = folderAccountId;
}
} else {
// No folder prefix, use the folder name as is
normalizedFolder = folder;
effectiveAccountId = accountId || 'default';
}
// Log the normalization for debugging
console.log(`[getCachedEmailsWithTimeout] Normalized: folder=${normalizedFolder}, accountId=${effectiveAccountId} (from ${folder})`);
// Log the normalization for debugging (dev only)
logger.debug('[PREFETCH] Normalized cache key', {
folder: normalizedFolder,
accountId: effectiveAccountId,
originalFolder: folder,
});
return new Promise((resolve) => {
const timeoutId = setTimeout(() => {
console.log(`Cache access timeout for ${userId}:${effectiveAccountId}:${normalizedFolder}:${page}:${perPage}`);
logger.debug('[PREFETCH] Cache access timeout', {
userId,
accountId: effectiveAccountId,
folder: normalizedFolder,
page,
perPage,
});
resolve(null);
}, timeoutMs);
@ -113,16 +130,30 @@ export async function getCachedEmailsWithTimeout(
.then(result => {
clearTimeout(timeoutId);
if (result) {
console.log(`[getCachedEmailsWithTimeout] Cache hit for ${userId}:${effectiveAccountId}:${normalizedFolder}:${page}:${perPage}`);
logger.debug('[PREFETCH] Cache hit for emails', {
userId,
accountId: effectiveAccountId,
folder: normalizedFolder,
page,
perPage,
});
resolve(result);
} else {
console.log(`[getCachedEmailsWithTimeout] Cache miss for ${userId}:${effectiveAccountId}:${normalizedFolder}:${page}:${perPage}`);
logger.debug('[PREFETCH] Cache miss for emails', {
userId,
accountId: effectiveAccountId,
folder: normalizedFolder,
page,
perPage,
});
resolve(null);
}
})
.catch(err => {
clearTimeout(timeoutId);
console.error('[getCachedEmailsWithTimeout] Error accessing cache:', err);
logger.error('[PREFETCH] Error accessing email cache', {
error: err instanceof Error ? err.message : String(err),
});
resolve(null);
});
});
@ -141,7 +172,11 @@ export async function refreshEmailsInBackground(
): Promise<void> {
try {
// First check if folder has the accountId prefix
console.log(`[refreshEmailsInBackground] Normalized: folder=${folder}, accountId=${accountId} (from ${folder})`);
logger.debug('[PREFETCH] refreshEmailsInBackground called', {
userId,
folder,
accountId,
});
// Create a unique key for this refresh request
const refreshKey = `${userId}:refresh:${accountId || 'default'}:${folder}:${page}`;
@ -151,7 +186,10 @@ export async function refreshEmailsInBackground(
const now = Date.now();
if (lastRefreshed && now - lastRefreshed < COOLDOWN_PERIOD) {
console.log(`Prefetch cooldown active for ${refreshKey}, last was ${Math.floor((now - lastRefreshed)/1000)}s ago`);
logger.debug('[PREFETCH] Background refresh cooldown active', {
refreshKey,
secondsSinceLast: Math.floor((now - lastRefreshed) / 1000),
});
return; // Skip if we refreshed this exact data recently
}
@ -166,12 +204,14 @@ export async function refreshEmailsInBackground(
}
// CRITICAL FIX: Prevent any background refresh by immediately returning
console.log(`[refreshEmailsInBackground] DISABLED to prevent infinite loops`);
logger.debug('[PREFETCH] Background refresh disabled');
return;
// Real implementation of refresh would be here
} catch (error) {
console.error('Error in refreshEmailsInBackground:', error);
logger.error('[PREFETCH] Error in refreshEmailsInBackground', {
error: error instanceof Error ? error.message : String(error),
});
}
}
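The timeout handling in getCachedEmailsWithTimeout above is a soft-timeout race: the cache lookup is wrapped in a promise that resolves to null when the lookup is too slow or throws, so a slow or failing cache is treated the same as a cache miss. A generic helper expressing the same pattern is sketched below; the name withSoftTimeout is an assumption for illustration and does not appear in this commit.

// Illustrative sketch of the soft-timeout pattern; not part of this commit.
export function withSoftTimeout<T>(work: Promise<T>, timeoutMs: number): Promise<T | null> {
  return new Promise((resolve) => {
    const timeoutId = setTimeout(() => resolve(null), timeoutMs);
    work
      .then((result) => {
        clearTimeout(timeoutId);
        resolve(result);
      })
      .catch(() => {
        clearTimeout(timeoutId);
        // A failed lookup counts as a miss rather than an error.
        resolve(null);
      });
  });
}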