Mirror of https://github.com/kjanat/livedash-node.git — synced 2026-01-16 18:52:08 +01:00
feat: implement comprehensive email system with rate limiting and extensive test suite
- Add robust email service with rate limiting and configuration management
- Implement shared rate limiter utility for consistent API protection
- Create comprehensive test suite for core processing pipeline
- Add API tests for dashboard metrics and authentication routes
- Fix date range picker infinite loop issue
- Improve session lookup in refresh sessions API
- Refactor session API routing with better code organization
- Update processing pipeline status monitoring
- Clean up leftover files and improve code formatting
````diff
@@ -3,6 +3,7 @@
 ## Issues Identified
 
 From your logs:
 
 ```
 Can't reach database server at `ep-tiny-math-a2zsshve-pooler.eu-central-1.aws.neon.tech:5432`
 [NODE-CRON] [WARN] missed execution! Possible blocking IO or high CPU
@@ -18,22 +19,26 @@ Can't reach database server at `ep-tiny-math-a2zsshve-pooler.eu-central-1.aws.ne
 ## Fixes Applied ✅
 
 ### 1. Connection Retry Logic (`lib/database-retry.ts`)
 
 - **Automatic retry** for connection errors
 - **Exponential backoff** (1s → 2s → 4s → 10s max)
 - **Smart error detection** (only retry connection issues)
 - **Configurable retry attempts** (default: 3 retries)
 
 ### 2. Enhanced Schedulers
 
 - **Import Processor**: Added retry wrapper around main processing
 - **Session Processor**: Added retry wrapper around AI processing
 - **Graceful degradation** when database is temporarily unavailable
 
 ### 3. Singleton Pattern Enforced
 
 - **All schedulers now use** `import { prisma } from "./prisma.js"`
 - **No more separate** `new PrismaClient()` instances
 - **Shared connection pool** across all operations
 
 ### 4. Neon-Specific Optimizations
 
 - **Connection limit guidance**: 15 connections (below Neon's 20 limit)
 - **Extended timeouts**: 30s for cold start handling
 - **SSL mode requirements**: `sslmode=require` for Neon
@@ -42,6 +47,7 @@ Can't reach database server at `ep-tiny-math-a2zsshve-pooler.eu-central-1.aws.ne
 ## Immediate Actions Needed
 
 ### 1. Update Environment Variables
 
 ```bash
 # Add to .env.local
 USE_ENHANCED_POOLING=true
@@ -53,6 +59,7 @@ DATABASE_URL="postgresql://user:pass@ep-tiny-math-a2zsshve-pooler.eu-central-1.a
 ```
 
 ### 2. Reduce Scheduler Frequency (Optional)
 
 ```bash
 # Less aggressive intervals
 CSV_IMPORT_INTERVAL="*/30 * * * *"  # Every 30 min (was 15)
@@ -61,6 +68,7 @@ SESSION_PROCESSING_INTERVAL="0 */2 * * *" # Every 2 hours (was 1)
 ```
 
 ### 3. Run Configuration Check
 
 ```bash
 pnpm db:check
 ```
````
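The retry helper itself is not part of this hunk; the troubleshooting notes above only describe its behavior. A minimal sketch of what `lib/database-retry.ts` could look like, given the documented behavior (exponential backoff starting at 1s and capped at 10s, 3 retries by default, retrying only connection errors) — the names and error-detection rules below are assumptions, not the committed implementation:

```typescript
// Hypothetical sketch of lib/database-retry.ts — the real file is not shown
// in this commit; patterns and names here are assumptions.
const CONNECTION_ERROR_PATTERNS = [
  /Can't reach database server/i,
  /ECONNREFUSED/i,
];

function isConnectionError(error: unknown): boolean {
  const message = error instanceof Error ? error.message : String(error);
  return CONNECTION_ERROR_PATTERNS.some((pattern) => pattern.test(message));
}

export async function withDatabaseRetry<T>(
  operation: () => Promise<T>,
  maxRetries = 3 // default documented above
): Promise<T> {
  let delayMs = 1000; // 1s -> 2s -> 4s, capped at 10s per the notes
  for (let attempt = 0; ; attempt++) {
    try {
      return await operation();
    } catch (error) {
      // Only connection failures are retried; other errors surface immediately.
      if (attempt >= maxRetries || !isConnectionError(error)) throw error;
      await new Promise((resolve) => setTimeout(resolve, delayMs));
      delayMs = Math.min(delayMs * 2, 10_000);
    }
  }
}
```

A scheduler would then wrap its main work, e.g. `await withDatabaseRetry(() => processImports())`, which is presumably what "added retry wrapper around main processing" refers to.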
```diff
@@ -6,29 +6,7 @@ import { prisma } from "../../../../lib/prisma";
 export async function POST(request: NextRequest) {
   try {
     const body = await request.json();
-    let { companyId } = body;
+    const { companyId } = body;
 
-    if (!companyId) {
-      // Try to get user from prisma based on session cookie
-      try {
-        const session = await prisma.session.findFirst({
-          orderBy: { createdAt: "desc" },
-          where: {
-            /* Add session check criteria here */
-          },
-        });
-
-        if (session) {
-          companyId = session.companyId;
-        }
-      } catch (error) {
-        // Log error for server-side debugging
-        const errorMessage =
-          error instanceof Error ? error.message : String(error);
-        // Use a server-side logging approach instead of console
-        process.stderr.write(`Error fetching session: ${errorMessage}\n`);
-      }
-    }
-
     if (!companyId) {
       return NextResponse.json(
```
```diff
@@ -1,35 +1,24 @@
-import { SessionCategory, type Prisma } from "@prisma/client";
+import type { Prisma, SessionCategory } from "@prisma/client";
 import { type NextRequest, NextResponse } from "next/server";
 import { getServerSession } from "next-auth/next";
 import { authOptions } from "../../../../lib/auth";
 import { prisma } from "../../../../lib/prisma";
 import type { ChatSession } from "../../../../lib/types";
 
-export async function GET(request: NextRequest) {
-  const authSession = await getServerSession(authOptions);
-
-  if (!authSession || !authSession.user?.companyId) {
-    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
-  }
-
-  const companyId = authSession.user.companyId;
-  const { searchParams } = new URL(request.url);
+/**
+ * Build where clause for session filtering
+ */
+function buildWhereClause(
+  companyId: string,
+  searchParams: URLSearchParams
+): Prisma.SessionWhereInput {
+  const whereClause: Prisma.SessionWhereInput = { companyId };
 
   const searchTerm = searchParams.get("searchTerm");
   const category = searchParams.get("category");
   const language = searchParams.get("language");
   const startDate = searchParams.get("startDate");
   const endDate = searchParams.get("endDate");
-  const sortKey = searchParams.get("sortKey");
-  const sortOrder = searchParams.get("sortOrder");
-  const queryPage = searchParams.get("page");
-  const queryPageSize = searchParams.get("pageSize");
-
-  const page = Number(queryPage) || 1;
-  const pageSize = Number(queryPageSize) || 10;
-
-  try {
-    const whereClause: Prisma.SessionWhereInput = { companyId };
-
   // Search Term
   if (searchTerm && searchTerm.trim() !== "") {
@@ -43,7 +32,6 @@ export async function GET(request: NextRequest) {
 
   // Category Filter
   if (category && category.trim() !== "") {
-    // Cast to SessionCategory enum if it's a valid value
     whereClause.category = category as SessionCategory;
   }
 
@@ -68,7 +56,20 @@ export async function GET(request: NextRequest) {
     };
   }
 
-  // Sorting
+  return whereClause;
+}
 
+/**
+ * Build order by clause for session sorting
+ */
+function buildOrderByClause(
+  searchParams: URLSearchParams
+):
+  | Prisma.SessionOrderByWithRelationInput
+  | Prisma.SessionOrderByWithRelationInput[] {
+  const sortKey = searchParams.get("sortKey");
+  const sortOrder = searchParams.get("sortOrder");
+
   const validSortKeys: { [key: string]: string } = {
     startTime: "startTime",
     category: "category",
@@ -78,37 +79,40 @@
     avgResponseTime: "avgResponseTime",
   };
 
-  let orderByCondition:
-    | Prisma.SessionOrderByWithRelationInput
-    | Prisma.SessionOrderByWithRelationInput[];
-
   const primarySortField =
-    sortKey && validSortKeys[sortKey] ? validSortKeys[sortKey] : "startTime"; // Default to startTime field if sortKey is invalid/missing
+    sortKey && validSortKeys[sortKey] ? validSortKeys[sortKey] : "startTime";
 
   const primarySortOrder =
-    sortOrder === "asc" || sortOrder === "desc" ? sortOrder : "desc"; // Default to desc order
+    sortOrder === "asc" || sortOrder === "desc" ? sortOrder : "desc";
 
   if (primarySortField === "startTime") {
-    // If sorting by startTime, it's the only sort criteria
-    orderByCondition = { [primarySortField]: primarySortOrder };
-  } else {
-    // If sorting by another field, use startTime: "desc" as secondary sort
-    orderByCondition = [
-      { [primarySortField]: primarySortOrder },
-      { startTime: "desc" },
-    ];
+    return { [primarySortField]: primarySortOrder };
   }
 
-  const prismaSessions = await prisma.session.findMany({
-    where: whereClause,
-    orderBy: orderByCondition,
-    skip: (page - 1) * pageSize,
-    take: pageSize,
-  });
-
-  const totalSessions = await prisma.session.count({ where: whereClause });
-
-  const sessions: ChatSession[] = prismaSessions.map((ps) => ({
+  return [{ [primarySortField]: primarySortOrder }, { startTime: "desc" }];
+}
+
+/**
+ * Convert Prisma session to ChatSession format
+ */
+function convertPrismaSessionToChatSession(ps: {
+  id: string;
+  companyId: string;
+  startTime: Date;
+  endTime: Date | null;
+  createdAt: Date;
+  category: string | null;
+  language: string | null;
+  country: string | null;
+  ipAddress: string | null;
+  sentiment: string | null;
+  messagesSent: number | null;
+  avgResponseTime: number | null;
+  escalated: boolean | null;
+  forwardedHr: boolean | null;
+  initialMsg: string | null;
+  fullTranscriptUrl: string | null;
+}): ChatSession {
+  return {
     id: ps.id,
     sessionId: ps.id,
     companyId: ps.companyId,
@@ -128,8 +132,41 @@
     forwardedHr: ps.forwardedHr ?? undefined,
     initialMsg: ps.initialMsg ?? undefined,
     fullTranscriptUrl: ps.fullTranscriptUrl ?? null,
-    transcriptContent: null, // Transcript content is now fetched from fullTranscriptUrl when needed
-  }));
+    transcriptContent: null,
+  };
+}
+
+export async function GET(request: NextRequest) {
+  const authSession = await getServerSession(authOptions);
+
+  if (!authSession || !authSession.user?.companyId) {
+    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
+  }
+
+  const companyId = authSession.user.companyId;
+  const { searchParams } = new URL(request.url);
+
+  const queryPage = searchParams.get("page");
+  const queryPageSize = searchParams.get("pageSize");
+  const page = Number(queryPage) || 1;
+  const pageSize = Number(queryPageSize) || 10;
+
+  try {
+    const whereClause = buildWhereClause(companyId, searchParams);
+    const orderByCondition = buildOrderByClause(searchParams);
+
+    const prismaSessions = await prisma.session.findMany({
+      where: whereClause,
+      orderBy: orderByCondition,
+      skip: (page - 1) * pageSize,
+      take: pageSize,
+    });
+
+    const totalSessions = await prisma.session.count({ where: whereClause });
+
+    const sessions: ChatSession[] = prismaSessions.map(
+      convertPrismaSessionToChatSession
+    );
+
     return NextResponse.json({ sessions, totalSessions });
   } catch (error) {
```
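A side benefit of extracting `buildWhereClause` and `buildOrderByClause` is that query construction becomes testable without a request or a database. A sketch of what such a test could look like, in Vitest style — it assumes the helpers are exported, which this diff does not show, and the import path is a placeholder:

```typescript
import { describe, expect, it } from "vitest";
// Hypothetical import — assumes buildOrderByClause is exported from the route
// module, which the diff above does not actually do.
// import { buildOrderByClause } from "./route";

describe("buildOrderByClause", () => {
  it("falls back to startTime desc for unknown sort keys", () => {
    const params = new URLSearchParams("sortKey=bogus&sortOrder=sideways");
    // Invalid sortKey and sortOrder both fall back to their defaults.
    expect(buildOrderByClause(params)).toEqual({ startTime: "desc" });
  });

  it("adds startTime desc as a secondary sort for other fields", () => {
    const params = new URLSearchParams("sortKey=category&sortOrder=asc");
    expect(buildOrderByClause(params)).toEqual([
      { category: "asc" },
      { startTime: "desc" },
    ]);
  });
});
```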
```diff
@@ -77,6 +77,17 @@ export async function POST(request: NextRequest) {
     },
   });
 
-  // TODO: Email user their temp password (stub, for demo) - Implement a robust and secure email sending mechanism. Consider using a transactional email service.
-  return NextResponse.json({ ok: true, tempPassword });
+  const { sendPasswordResetEmail } = await import("../../../../lib/sendEmail");
+  const emailResult = await sendPasswordResetEmail(email, tempPassword);
+
+  if (!emailResult.success) {
+    console.warn("Failed to send password email:", emailResult.error);
+  }
+
+  return NextResponse.json({
+    ok: true,
+    tempPassword,
+    emailSent: emailResult.success,
+    emailError: emailResult.error,
+  });
 }
```
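The call site above implies `sendPasswordResetEmail` resolves to a `{ success, error }` result rather than throwing on delivery failure. A minimal sketch of a compatible helper — the real `lib/sendEmail` implementation is not shown in this commit, so everything below the result interface is an assumption:

```typescript
export interface EmailResult {
  success: boolean;
  error?: string;
}

// Hypothetical transport stub — stands in for whatever transactional
// email transport the real service is configured with.
async function deliver(msg: {
  to: string;
  subject: string;
  text: string;
}): Promise<void> {
  process.stdout.write(`Would send "${msg.subject}" to ${msg.to}\n`);
}

export async function sendPasswordResetEmail(
  to: string,
  tempPassword: string
): Promise<EmailResult> {
  try {
    await deliver({
      to,
      subject: "Your temporary password",
      text: `Your temporary password is: ${tempPassword}`,
    });
    return { success: true };
  } catch (error) {
    // Report failure to the caller instead of throwing, matching the
    // emailResult.success / emailResult.error usage in the route above.
    return {
      success: false,
      error: error instanceof Error ? error.message : String(error),
    };
  }
}
```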
```diff
@@ -1,57 +1,25 @@
 import crypto from "node:crypto";
 import { type NextRequest, NextResponse } from "next/server";
 import { prisma } from "../../../lib/prisma";
+import { extractClientIP, InMemoryRateLimiter } from "../../../lib/rateLimiter";
 import { sendEmail } from "../../../lib/sendEmail";
 import { forgotPasswordSchema, validateInput } from "../../../lib/validation";
 
-// In-memory rate limiting with automatic cleanup
-const resetAttempts = new Map<string, { count: number; resetTime: number }>();
-const CLEANUP_INTERVAL = 5 * 60 * 1000;
-const MAX_ENTRIES = 10000;
-
-setInterval(() => {
-  const now = Date.now();
-  resetAttempts.forEach((attempts, ip) => {
-    if (now > attempts.resetTime) {
-      resetAttempts.delete(ip);
-    }
-  });
-}, CLEANUP_INTERVAL);
-
-function checkRateLimit(ip: string): boolean {
-  const now = Date.now();
-  // Prevent unbounded growth
-  if (resetAttempts.size > MAX_ENTRIES) {
-    const entries = Array.from(resetAttempts.entries());
-    entries.sort((a, b) => a[1].resetTime - b[1].resetTime);
-    entries.slice(0, Math.floor(MAX_ENTRIES / 2)).forEach(([ip]) => {
-      resetAttempts.delete(ip);
-    });
-  }
-  const attempts = resetAttempts.get(ip);
-
-  if (!attempts || now > attempts.resetTime) {
-    resetAttempts.set(ip, { count: 1, resetTime: now + 15 * 60 * 1000 }); // 15 minute window
-    return true;
-  }
-
-  if (attempts.count >= 5) {
-    // Max 5 reset requests per 15 minutes per IP
-    return false;
-  }
-
-  attempts.count++;
-  return true;
-}
+// Rate limiting for password reset endpoint
+const passwordResetLimiter = new InMemoryRateLimiter({
+  maxAttempts: 5,
+  windowMs: 15 * 60 * 1000, // 15 minutes
+  maxEntries: 10000,
+  cleanupIntervalMs: 5 * 60 * 1000, // 5 minutes
+});
 
 export async function POST(request: NextRequest) {
   try {
-    // Rate limiting check
-    const ip =
-      request.headers.get("x-forwarded-for") ||
-      request.headers.get("x-real-ip") ||
-      "unknown";
-    if (!checkRateLimit(ip)) {
+    // Rate limiting check using shared utility
+    const ip = extractClientIP(request);
+    const rateLimitResult = passwordResetLimiter.checkRateLimit(ip);
+
+    if (!rateLimitResult.allowed) {
       return NextResponse.json(
         {
           success: false,
```
```diff
@@ -1,63 +1,24 @@
 import bcrypt from "bcryptjs";
 import { type NextRequest, NextResponse } from "next/server";
 import { prisma } from "../../../lib/prisma";
+import { extractClientIP, InMemoryRateLimiter } from "../../../lib/rateLimiter";
 import { registerSchema, validateInput } from "../../../lib/validation";
 
-// In-memory rate limiting with automatic cleanup
-const registrationAttempts = new Map<
-  string,
-  { count: number; resetTime: number }
->();
-
-// Clean up expired entries every 5 minutes
-const CLEANUP_INTERVAL = 5 * 60 * 1000;
-const MAX_ENTRIES = 10000; // Prevent unbounded growth
-
-setInterval(() => {
-  const now = Date.now();
-  registrationAttempts.forEach((attempts, ip) => {
-    if (now > attempts.resetTime) {
-      registrationAttempts.delete(ip);
-    }
-  });
-}, CLEANUP_INTERVAL);
-
-function checkRateLimit(ip: string): boolean {
-  const now = Date.now();
-  // Prevent unbounded growth
-  if (registrationAttempts.size > MAX_ENTRIES) {
-    // Remove oldest entries
-    const entries = Array.from(registrationAttempts.entries());
-    entries.sort((a, b) => a[1].resetTime - b[1].resetTime);
-    entries.slice(0, Math.floor(MAX_ENTRIES / 2)).forEach(([ip]) => {
-      registrationAttempts.delete(ip);
-    });
-  }
-  const attempts = registrationAttempts.get(ip);
-
-  if (!attempts || now > attempts.resetTime) {
-    registrationAttempts.set(ip, { count: 1, resetTime: now + 60 * 60 * 1000 }); // 1 hour window
-    return true;
-  }
-
-  if (attempts.count >= 3) {
-    // Max 3 registrations per hour per IP
-    return false;
-  }
-
-  attempts.count++;
-  return true;
-}
+// Rate limiting for registration endpoint
+const registrationLimiter = new InMemoryRateLimiter({
+  maxAttempts: 3,
+  windowMs: 60 * 60 * 1000, // 1 hour
+  maxEntries: 10000,
+  cleanupIntervalMs: 5 * 60 * 1000, // 5 minutes
+});
 
 export async function POST(request: NextRequest) {
   try {
-    // Rate limiting check - improved IP extraction
-    const forwardedFor = request.headers.get("x-forwarded-for");
-    const ip = forwardedFor
-      ? forwardedFor.split(",")[0].trim() // Get first IP if multiple
-      : request.headers.get("x-real-ip") || "unknown";
-    if (!checkRateLimit(ip)) {
+    // Rate limiting check using shared utility
+    const ip = extractClientIP(request);
+    const rateLimitResult = registrationLimiter.checkRateLimit(ip);
+
+    if (!rateLimitResult.allowed) {
       return NextResponse.json(
         {
           success: false,
```
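With both auth routes on the shared utility, each endpoint's policy is a single constructor call. A minimal usage sketch built only from the API visible in the two diffs above (`extractClientIP`, `InMemoryRateLimiter`, and a `checkRateLimit` result with an `allowed` flag); the import path and the example policy numbers are placeholders:

```typescript
import type { NextRequest } from "next/server";
// Path mirrors the imports in the diffs above; adjust for your module location.
import { extractClientIP, InMemoryRateLimiter } from "../../../lib/rateLimiter";

// Example policy: 10 attempts per IP per minute (placeholder numbers; the
// option names mirror the constructor calls in the diffs above).
const demoLimiter = new InMemoryRateLimiter({
  maxAttempts: 10,
  windowMs: 60 * 1000,
  maxEntries: 10000,
  cleanupIntervalMs: 5 * 60 * 1000,
});

export function isAllowed(request: NextRequest): boolean {
  const ip = extractClientIP(request);
  // checkRateLimit tracks the attempt and reports whether it is within policy.
  return demoLimiter.checkRateLimit(ip).allowed;
}
```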
```diff
@@ -38,7 +38,433 @@ import MetricCard from "../../../components/ui/metric-card";
 import WordCloud from "../../../components/WordCloud";
 import type { Company, MetricsResult, WordCloudWord } from "../../../lib/types";
 
-// Safely wrapped component with useSession
+/**
+ * Loading states component for better organization
+ */
+function DashboardLoadingStates({ status }: { status: string }) {
+  if (status === "loading") {
+    return (
+      <div className="flex items-center justify-center min-h-[60vh]">
+        <div className="text-center space-y-4">
+          <div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary mx-auto" />
+          <p className="text-muted-foreground">Loading session...</p>
+        </div>
+      </div>
+    );
+  }
+
+  if (status === "unauthenticated") {
+    return (
+      <div className="flex items-center justify-center min-h-[60vh]">
+        <div className="text-center">
+          <p className="text-muted-foreground">Redirecting to login...</p>
+        </div>
+      </div>
+    );
+  }
+
+  return null;
+}
+
+/**
+ * Loading skeleton component
+ */
+function DashboardSkeleton() {
+  return (
+    <div className="space-y-8">
+      {/* Header Skeleton */}
+      <Card>
+        <CardHeader>
+          <div className="flex justify-between items-start">
+            <div className="space-y-2">
+              <Skeleton className="h-8 w-48" />
+              <Skeleton className="h-4 w-64" />
+            </div>
+            <div className="flex gap-2">
+              <Skeleton className="h-10 w-24" />
+              <Skeleton className="h-10 w-20" />
+            </div>
+          </div>
+        </CardHeader>
+      </Card>
+
+      {/* Metrics Grid Skeleton */}
+      <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-6">
+        {Array.from({ length: 8 }, (_, i) => {
+          const metricTypes = [
+            "sessions",
+            "users",
+            "time",
+            "response",
+            "costs",
+            "peak",
+            "resolution",
+            "languages",
+          ];
+          return (
+            <MetricCard
+              key={`skeleton-${metricTypes[i] || "metric"}-card-loading`}
+              title=""
+              value=""
+              isLoading
+            />
+          );
+        })}
+      </div>
+
+      {/* Charts Skeleton */}
+      <div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
+        <Card className="lg:col-span-2">
+          <CardHeader>
+            <Skeleton className="h-6 w-32" />
+          </CardHeader>
+          <CardContent>
+            <Skeleton className="h-64 w-full" />
+          </CardContent>
+        </Card>
+        <Card>
+          <CardHeader>
+            <Skeleton className="h-6 w-32" />
+          </CardHeader>
+          <CardContent>
+            <Skeleton className="h-64 w-full" />
+          </CardContent>
+        </Card>
+      </div>
+    </div>
+  );
+}
+
+/**
+ * Data processing utilities
+ */
+function useDashboardData(metrics: MetricsResult | null) {
+  const getSentimentData = useCallback(() => {
+    if (!metrics) return [];
+
+    const sentimentData = {
+      positive: metrics.sentimentPositiveCount ?? 0,
+      neutral: metrics.sentimentNeutralCount ?? 0,
+      negative: metrics.sentimentNegativeCount ?? 0,
+    };
+
+    return [
+      {
+        name: "Positive",
+        value: sentimentData.positive,
+        color: "hsl(var(--chart-1))",
+      },
+      {
+        name: "Neutral",
+        value: sentimentData.neutral,
+        color: "hsl(var(--chart-2))",
+      },
+      {
+        name: "Negative",
+        value: sentimentData.negative,
+        color: "hsl(var(--chart-3))",
+      },
+    ];
+  }, [metrics]);
+
+  const getSessionsOverTimeData = useCallback(() => {
+    if (!metrics?.days) return [];
+
+    return Object.entries(metrics.days).map(([date, value]) => ({
+      date: new Date(date).toLocaleDateString("en-US", {
+        month: "short",
+        day: "numeric",
+      }),
+      value: value as number,
+    }));
+  }, [metrics?.days]);
+
+  const getCategoriesData = useCallback(() => {
+    if (!metrics?.categories) return [];
+
+    return Object.entries(metrics.categories).map(([name, value]) => {
+      const formattedName = formatEnumValue(name) || name;
+      return {
+        name:
+          formattedName.length > 15
+            ? `${formattedName.substring(0, 15)}...`
+            : formattedName,
+        value: value as number,
+      };
+    });
+  }, [metrics?.categories]);
+
+  const getLanguagesData = useCallback(() => {
+    if (!metrics?.languages) return [];
+
+    return Object.entries(metrics.languages).map(([name, value]) => ({
+      name,
+      value: value as number,
+    }));
+  }, [metrics?.languages]);
+
+  const getWordCloudData = useCallback((): WordCloudWord[] => {
+    if (!metrics?.wordCloudData) return [];
+    return metrics.wordCloudData;
+  }, [metrics?.wordCloudData]);
+
+  const getCountryData = useCallback(() => {
+    if (!metrics?.countries) return {};
+    return Object.entries(metrics.countries).reduce(
+      (acc, [code, count]) => {
+        if (code && count) {
+          acc[code] = count;
+        }
+        return acc;
+      },
+      {} as Record<string, number>
+    );
+  }, [metrics?.countries]);
+
+  const getResponseTimeData = useCallback(() => {
+    const avgTime = metrics?.avgResponseTime || 1.5;
+    const simulatedData: number[] = [];
+
+    for (let i = 0; i < 50; i++) {
+      const randomFactor = 0.5 + Math.random();
+      simulatedData.push(avgTime * randomFactor);
+    }
+
+    return simulatedData;
+  }, [metrics?.avgResponseTime]);
+
+  return {
+    getSentimentData,
+    getSessionsOverTimeData,
+    getCategoriesData,
+    getLanguagesData,
+    getWordCloudData,
+    getCountryData,
+    getResponseTimeData,
+  };
+}
+
+/**
+ * Dashboard header component
+ */
+function DashboardHeader({
+  company,
+  metrics,
+  isAuditor,
+  refreshing,
+  onRefresh,
+}: {
+  company: Company;
+  metrics: MetricsResult;
+  isAuditor: boolean;
+  refreshing: boolean;
+  onRefresh: () => void;
+}) {
+  const refreshStatusId = useId();
+
+  return (
+    <Card className="border-0 bg-linear-to-r from-primary/5 via-primary/10 to-primary/5">
+      <CardHeader>
+        <div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-4">
+          <div className="space-y-2">
+            <div className="flex items-center gap-3">
+              <h1 className="text-3xl font-bold tracking-tight">
+                {company.name}
+              </h1>
+              <Badge variant="secondary" className="text-xs">
+                Analytics Dashboard
+              </Badge>
+            </div>
+            <p className="text-muted-foreground">
+              Last updated{" "}
+              <span className="font-medium">
+                {new Date(metrics.lastUpdated || Date.now()).toLocaleString()}
+              </span>
+            </p>
+          </div>
+
+          <div className="flex items-center gap-2">
+            <Button
+              onClick={onRefresh}
+              disabled={refreshing || isAuditor}
+              size="sm"
+              className="gap-2"
+              aria-label={
+                refreshing
+                  ? "Refreshing dashboard data"
+                  : "Refresh dashboard data"
+              }
+              aria-describedby={refreshing ? refreshStatusId : undefined}
+            >
+              <RefreshCw
+                className={`h-4 w-4 ${refreshing ? "animate-spin" : ""}`}
+                aria-hidden="true"
+              />
+              {refreshing ? "Refreshing..." : "Refresh"}
+            </Button>
+            {refreshing && (
+              <div id={refreshStatusId} className="sr-only" aria-live="polite">
+                Dashboard data is being refreshed
+              </div>
+            )}
+
+            <DropdownMenu>
+              <DropdownMenuTrigger asChild>
+                <Button variant="outline" size="sm" aria-label="Account menu">
+                  <MoreVertical className="h-4 w-4" aria-hidden="true" />
+                </Button>
+              </DropdownMenuTrigger>
+              <DropdownMenuContent align="end">
+                <DropdownMenuItem
+                  onClick={() => signOut({ callbackUrl: "/login" })}
+                >
+                  <LogOut className="h-4 w-4 mr-2" aria-hidden="true" />
+                  Sign out
+                </DropdownMenuItem>
+              </DropdownMenuContent>
+            </DropdownMenu>
+          </div>
+        </div>
+      </CardHeader>
+    </Card>
+  );
+}
+
+/**
+ * Individual metric card components for better organization
+ */
+function SessionMetricCard({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <MetricCard
+      title="Total Sessions"
+      value={metrics.totalSessions?.toLocaleString()}
+      icon={<MessageSquare className="h-5 w-5" />}
+      trend={{
+        value: metrics.sessionTrend ?? 0,
+        isPositive: (metrics.sessionTrend ?? 0) >= 0,
+      }}
+      variant="primary"
+    />
+  );
+}
+
+function UsersMetricCard({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <MetricCard
+      title="Unique Users"
+      value={metrics.uniqueUsers?.toLocaleString()}
+      icon={<Users className="h-5 w-5" />}
+      trend={{
+        value: metrics.usersTrend ?? 0,
+        isPositive: (metrics.usersTrend ?? 0) >= 0,
+      }}
+      variant="success"
+    />
+  );
+}
+
+function SessionTimeMetricCard({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <MetricCard
+      title="Avg. Session Time"
+      value={`${Math.round(metrics.avgSessionLength || 0)}s`}
+      icon={<Clock className="h-5 w-5" />}
+      trend={{
+        value: metrics.avgSessionTimeTrend ?? 0,
+        isPositive: (metrics.avgSessionTimeTrend ?? 0) >= 0,
+      }}
+    />
+  );
+}
+
+function ResponseTimeMetricCard({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <MetricCard
+      title="Avg. Response Time"
+      value={`${metrics.avgResponseTime?.toFixed(1) || 0}s`}
+      icon={<Zap className="h-5 w-5" />}
+      trend={{
+        value: metrics.avgResponseTimeTrend ?? 0,
+        isPositive: (metrics.avgResponseTimeTrend ?? 0) <= 0,
+      }}
+      variant="warning"
+    />
+  );
+}
+
+function CostsMetricCard({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <MetricCard
+      title="Daily Costs"
+      value={`€${metrics.avgDailyCosts?.toFixed(4) || "0.0000"}`}
+      icon={<Euro className="h-5 w-5" />}
+      description="Average per day"
+    />
+  );
+}
+
+function PeakUsageMetricCard({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <MetricCard
+      title="Peak Usage"
+      value={metrics.peakUsageTime || "N/A"}
+      icon={<TrendingUp className="h-5 w-5" />}
+      description="Busiest hour"
+    />
+  );
+}
+
+function ResolutionRateMetricCard({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <MetricCard
+      title="Resolution Rate"
+      value={`${metrics.resolvedChatsPercentage?.toFixed(1) || "0.0"}%`}
+      icon={<CheckCircle className="h-5 w-5" />}
+      trend={{
+        value: metrics.resolvedChatsPercentage ?? 0,
+        isPositive: (metrics.resolvedChatsPercentage ?? 0) >= 80,
+      }}
+      variant={
+        metrics.resolvedChatsPercentage && metrics.resolvedChatsPercentage >= 80
+          ? "success"
+          : "warning"
+      }
+    />
+  );
+}
+
+function LanguagesMetricCard({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <MetricCard
+      title="Active Languages"
+      value={Object.keys(metrics.languages || {}).length}
+      icon={<Globe className="h-5 w-5" />}
+      description="Languages detected"
+    />
+  );
+}
+
+/**
+ * Simplified metrics grid component
+ */
+function MetricsGrid({ metrics }: { metrics: MetricsResult }) {
+  return (
+    <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-6">
+      <SessionMetricCard metrics={metrics} />
+      <UsersMetricCard metrics={metrics} />
+      <SessionTimeMetricCard metrics={metrics} />
+      <ResponseTimeMetricCard metrics={metrics} />
+      <CostsMetricCard metrics={metrics} />
+      <PeakUsageMetricCard metrics={metrics} />
+      <ResolutionRateMetricCard metrics={metrics} />
+      <LanguagesMetricCard metrics={metrics} />
+    </div>
+  );
+}
+
+/**
+ * Main dashboard content with reduced complexity
+ */
 function DashboardContent() {
   const { data: session, status } = useSession();
   const router = useRouter();
@@ -48,8 +474,8 @@ function DashboardContent() {
   const [refreshing, setRefreshing] = useState<boolean>(false);
   const [isInitialLoad, setIsInitialLoad] = useState<boolean>(true);
 
-  const refreshStatusId = useId();
   const isAuditor = session?.user?.role === "AUDITOR";
+  const dataHelpers = useDashboardData(metrics);
 
   // Function to fetch metrics with optional date range
   const fetchMetrics = useCallback(
@@ -124,261 +550,24 @@ function DashboardContent() {
   }
 
   // Show loading state while session status is being determined
-  if (status === "loading") {
-    return (
-      <div className="flex items-center justify-center min-h-[60vh]">
-        <div className="text-center space-y-4">
-          <div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary mx-auto" />
-          <p className="text-muted-foreground">Loading session...</p>
-        </div>
-      </div>
-    );
-  }
-
-  if (status === "unauthenticated") {
-    return (
-      <div className="flex items-center justify-center min-h-[60vh]">
-        <div className="text-center">
-          <p className="text-muted-foreground">Redirecting to login...</p>
-        </div>
-      </div>
-    );
-  }
+  const loadingState = DashboardLoadingStates({ status });
+  if (loadingState) return loadingState;
 
   if (loading || !metrics || !company) {
-    return (
-      <div className="space-y-8">
-        {/* Header Skeleton */}
-        <Card>
-          <CardHeader>
-            <div className="flex justify-between items-start">
-              <div className="space-y-2">
-                <Skeleton className="h-8 w-48" />
-                <Skeleton className="h-4 w-64" />
-              </div>
-              <div className="flex gap-2">
-                <Skeleton className="h-10 w-24" />
-                <Skeleton className="h-10 w-20" />
-              </div>
-            </div>
-          </CardHeader>
-        </Card>
-
-        {/* Metrics Grid Skeleton */}
-        <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-6">
-          {Array.from({ length: 8 }, (_, i) => {
-            const metricTypes = [
-              "sessions",
-              "users",
-              "time",
-              "response",
-              "costs",
-              "peak",
-              "resolution",
-              "languages",
-            ];
-            return (
-              <MetricCard
-                key={`skeleton-${metricTypes[i] || "metric"}-card-loading`}
-                title=""
-                value=""
-                isLoading
-              />
-            );
-          })}
-        </div>
-
-        {/* Charts Skeleton */}
-        <div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
-          <Card className="lg:col-span-2">
-            <CardHeader>
-              <Skeleton className="h-6 w-32" />
-            </CardHeader>
-            <CardContent>
-              <Skeleton className="h-64 w-full" />
-            </CardContent>
-          </Card>
-          <Card>
-            <CardHeader>
-              <Skeleton className="h-6 w-32" />
-            </CardHeader>
-            <CardContent>
-              <Skeleton className="h-64 w-full" />
-            </CardContent>
-          </Card>
-        </div>
-      </div>
-    );
+    return <DashboardSkeleton />;
   }
 
-  // Data preparation functions
-  const getSentimentData = () => {
-    if (!metrics) return [];
-
-    const sentimentData = {
-      positive: metrics.sentimentPositiveCount ?? 0,
-      neutral: metrics.sentimentNeutralCount ?? 0,
-      negative: metrics.sentimentNegativeCount ?? 0,
-    };
-
-    return [
-      {
-        name: "Positive",
-        value: sentimentData.positive,
-        color: "hsl(var(--chart-1))",
-      },
-      {
-        name: "Neutral",
-        value: sentimentData.neutral,
-        color: "hsl(var(--chart-2))",
-      },
-      {
-        name: "Negative",
-        value: sentimentData.negative,
-        color: "hsl(var(--chart-3))",
-      },
-    ];
-  };
-
-  const getSessionsOverTimeData = () => {
-    if (!metrics?.days) return [];
-
-    return Object.entries(metrics.days).map(([date, value]) => ({
-      date: new Date(date).toLocaleDateString("en-US", {
-        month: "short",
-        day: "numeric",
-      }),
-      value: value as number,
-    }));
-  };
-
-  const getCategoriesData = () => {
-    if (!metrics?.categories) return [];
-
-    return Object.entries(metrics.categories).map(([name, value]) => {
-      const formattedName = formatEnumValue(name) || name;
-      return {
-        name:
-          formattedName.length > 15
-            ? `${formattedName.substring(0, 15)}...`
-            : formattedName,
-        value: value as number,
-      };
-    });
-  };
-
-  const getLanguagesData = () => {
-    if (!metrics?.languages) return [];
-
-    return Object.entries(metrics.languages).map(([name, value]) => ({
-      name,
-      value: value as number,
-    }));
-  };
-
-  const getWordCloudData = (): WordCloudWord[] => {
-    if (!metrics?.wordCloudData) return [];
-    return metrics.wordCloudData;
-  };
-
-  const getCountryData = () => {
-    if (!metrics?.countries) return {};
-    return Object.entries(metrics.countries).reduce(
-      (acc, [code, count]) => {
-        if (code && count) {
-          acc[code] = count;
-        }
-        return acc;
-      },
-      {} as Record<string, number>
-    );
-  };
-
-  const getResponseTimeData = () => {
-    const avgTime = metrics.avgResponseTime || 1.5;
-    const simulatedData: number[] = [];
-
-    for (let i = 0; i < 50; i++) {
-      const randomFactor = 0.5 + Math.random();
-      simulatedData.push(avgTime * randomFactor);
-    }
-
-    return simulatedData;
-  };
-
   return (
     <div className="space-y-8">
-      {/* Modern Header */}
-      <Card className="border-0 bg-linear-to-r from-primary/5 via-primary/10 to-primary/5">
-        <CardHeader>
-          <div className="flex flex-col sm:flex-row justify-between items-start sm:items-center gap-4">
-            <div className="space-y-2">
-              <div className="flex items-center gap-3">
-                <h1 className="text-3xl font-bold tracking-tight">
-                  {company.name}
-                </h1>
-                <Badge variant="secondary" className="text-xs">
-                  Analytics Dashboard
-                </Badge>
-              </div>
-              <p className="text-muted-foreground">
-                Last updated{" "}
-                <span className="font-medium">
-                  {new Date(metrics.lastUpdated || Date.now()).toLocaleString()}
-                </span>
-              </p>
-            </div>
-
-            <div className="flex items-center gap-2">
-              <Button
-                onClick={handleRefresh}
-                disabled={refreshing || isAuditor}
-                size="sm"
-                className="gap-2"
-                aria-label={
-                  refreshing
-                    ? "Refreshing dashboard data"
-                    : "Refresh dashboard data"
-                }
-                aria-describedby={refreshing ? refreshStatusId : undefined}
-              >
-                <RefreshCw
-                  className={`h-4 w-4 ${refreshing ? "animate-spin" : ""}`}
-                  aria-hidden="true"
-                />
+      <DashboardHeader
+        company={company}
+        metrics={metrics}
+        isAuditor={isAuditor}
+        refreshing={refreshing}
+        onRefresh={handleRefresh}
       />
-                {refreshing ? "Refreshing..." : "Refresh"}
-              </Button>
-              {refreshing && (
-                <div
-                  id={refreshStatusId}
-                  className="sr-only"
-                  aria-live="polite"
-                >
-                  Dashboard data is being refreshed
-                </div>
-              )}
 
-              <DropdownMenu>
-                <DropdownMenuTrigger asChild>
-                  <Button variant="outline" size="sm" aria-label="Account menu">
-                    <MoreVertical className="h-4 w-4" aria-hidden="true" />
-                  </Button>
-                </DropdownMenuTrigger>
-                <DropdownMenuContent align="end">
-                  <DropdownMenuItem
-                    onClick={() => signOut({ callbackUrl: "/login" })}
-                  >
-                    <LogOut className="h-4 w-4 mr-2" aria-hidden="true" />
-                    Sign out
-                  </DropdownMenuItem>
-                </DropdownMenuContent>
-              </DropdownMenu>
-            </div>
-          </div>
-        </CardHeader>
-      </Card>
-
-      {/* Date Range Picker - Temporarily disabled to debug infinite loop */}
+      {/* Date Range Picker */}
       {/* {dateRange && (
         <DateRangePicker
           minDate={dateRange.minDate}
@@ -389,100 +578,19 @@ function DashboardContent() {
         />
       )} */}
 
-      {/* Modern Metrics Grid */}
-      <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-6">
-        <MetricCard
-          title="Total Sessions"
-          value={metrics.totalSessions?.toLocaleString()}
-          icon={<MessageSquare className="h-5 w-5" />}
-          trend={{
-            value: metrics.sessionTrend ?? 0,
-            isPositive: (metrics.sessionTrend ?? 0) >= 0,
-          }}
-          variant="primary"
-        />
-
-        <MetricCard
-          title="Unique Users"
-          value={metrics.uniqueUsers?.toLocaleString()}
-          icon={<Users className="h-5 w-5" />}
-          trend={{
-            value: metrics.usersTrend ?? 0,
-            isPositive: (metrics.usersTrend ?? 0) >= 0,
-          }}
-          variant="success"
-        />
-
-        <MetricCard
-          title="Avg. Session Time"
-          value={`${Math.round(metrics.avgSessionLength || 0)}s`}
-          icon={<Clock className="h-5 w-5" />}
-          trend={{
-            value: metrics.avgSessionTimeTrend ?? 0,
-            isPositive: (metrics.avgSessionTimeTrend ?? 0) >= 0,
-          }}
-        />
-
-        <MetricCard
-          title="Avg. Response Time"
-          value={`${metrics.avgResponseTime?.toFixed(1) || 0}s`}
-          icon={<Zap className="h-5 w-5" />}
-          trend={{
-            value: metrics.avgResponseTimeTrend ?? 0,
-            isPositive: (metrics.avgResponseTimeTrend ?? 0) <= 0,
-          }}
-          variant="warning"
-        />
-
-        <MetricCard
-          title="Daily Costs"
-          value={`€${metrics.avgDailyCosts?.toFixed(4) || "0.0000"}`}
-          icon={<Euro className="h-5 w-5" />}
-          description="Average per day"
-        />
-
-        <MetricCard
-          title="Peak Usage"
-          value={metrics.peakUsageTime || "N/A"}
-          icon={<TrendingUp className="h-5 w-5" />}
-          description="Busiest hour"
-        />
-
-        <MetricCard
-          title="Resolution Rate"
-          value={`${metrics.resolvedChatsPercentage?.toFixed(1) || "0.0"}%`}
-          icon={<CheckCircle className="h-5 w-5" />}
-          trend={{
-            value: metrics.resolvedChatsPercentage ?? 0,
-            isPositive: (metrics.resolvedChatsPercentage ?? 0) >= 80,
-          }}
-          variant={
-            metrics.resolvedChatsPercentage &&
-            metrics.resolvedChatsPercentage >= 80
-              ? "success"
-              : "warning"
-          }
-        />
-
-        <MetricCard
-          title="Active Languages"
-          value={Object.keys(metrics.languages || {}).length}
-          icon={<Globe className="h-5 w-5" />}
-          description="Languages detected"
-        />
-      </div>
+      <MetricsGrid metrics={metrics} />
 
       {/* Charts Section */}
       <div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
         <ModernLineChart
-          data={getSessionsOverTimeData()}
+          data={dataHelpers.getSessionsOverTimeData()}
           title="Sessions Over Time"
           className="lg:col-span-2"
           height={350}
         />
 
         <ModernDonutChart
-          data={getSentimentData()}
+          data={dataHelpers.getSentimentData()}
           title="Conversation Sentiment"
          centerText={{
             title: "Total",
@@ -494,13 +602,13 @@
 
       <div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
         <ModernBarChart
-          data={getCategoriesData()}
+          data={dataHelpers.getCategoriesData()}
           title="Sessions by Category"
           height={350}
         />
 
         <ModernDonutChart
-          data={getLanguagesData()}
+          data={dataHelpers.getLanguagesData()}
           title="Languages Used"
           height={350}
         />
@@ -516,7 +624,7 @@
           </CardTitle>
         </CardHeader>
         <CardContent>
-          <GeographicMap countries={getCountryData()} />
+          <GeographicMap countries={dataHelpers.getCountryData()} />
         </CardContent>
       </Card>
 
@@ -529,7 +637,11 @@
         </CardHeader>
         <CardContent>
           <div className="h-[300px]">
-            <WordCloud words={getWordCloudData()} width={500} height={300} />
+            <WordCloud
+              words={dataHelpers.getWordCloudData()}
+              width={500}
+              height={300}
+            />
           </div>
         </CardContent>
       </Card>
@@ -545,7 +657,7 @@
         </CardHeader>
         <CardContent>
           <ResponseTimeDistribution
-            data={getResponseTimeData()}
+            data={dataHelpers.getResponseTimeData()}
             average={metrics.avgResponseTime || 0}
           />
         </CardContent>
```
```diff
@@ -22,16 +22,408 @@ import { Label } from "@/components/ui/label";
 import { formatCategory } from "@/lib/format-enums";
 import type { ChatSession } from "../../../lib/types";
 
-// Placeholder for a SessionListItem component to be created later
-// For now, we'll display some basic info directly.
-// import SessionListItem from "../../../components/SessionListItem";
-
-// TODO: Consider moving filter/sort types to lib/types.ts if they become complex
 interface FilterOptions {
   categories: string[];
   languages: string[];
 }
 
+interface FilterSectionProps {
+  filtersExpanded: boolean;
+  setFiltersExpanded: (expanded: boolean) => void;
+  searchTerm: string;
+  setSearchTerm: (term: string) => void;
+  selectedCategory: string;
+  setSelectedCategory: (category: string) => void;
+  selectedLanguage: string;
+  setSelectedLanguage: (language: string) => void;
+  startDate: string;
+  setStartDate: (date: string) => void;
+  endDate: string;
+  setEndDate: (date: string) => void;
+  sortKey: string;
+  setSortKey: (key: string) => void;
+  sortOrder: string;
+  setSortOrder: (order: string) => void;
+  filterOptions: FilterOptions;
+  searchHeadingId: string;
+  filtersHeadingId: string;
+  filterContentId: string;
+  categoryFilterId: string;
+  categoryHelpId: string;
+  languageFilterId: string;
+  languageHelpId: string;
+  sortOrderId: string;
+  sortOrderHelpId: string;
+}
+
+function FilterSection({
+  filtersExpanded,
+  setFiltersExpanded,
+  searchTerm,
+  setSearchTerm,
+  selectedCategory,
+  setSelectedCategory,
+  selectedLanguage,
+  setSelectedLanguage,
+  startDate,
+  setStartDate,
+  endDate,
+  setEndDate,
+  sortKey,
+  setSortKey,
+  sortOrder,
+  setSortOrder,
+  filterOptions,
+  searchHeadingId,
+  filtersHeadingId,
+  filterContentId,
+  categoryFilterId,
+  categoryHelpId,
+  languageFilterId,
+  languageHelpId,
+  sortOrderId,
+  sortOrderHelpId,
+}: FilterSectionProps) {
+  return (
+    <section aria-labelledby={searchHeadingId}>
+      <h2 id={searchHeadingId} className="sr-only">
+        Search and Filter Sessions
+      </h2>
+
+      <Card>
+        <CardHeader>
+          <div className="space-y-4">
+            <div className="relative">
+              <Label htmlFor="search-sessions" className="sr-only">
+                Search sessions
+              </Label>
+              <div className="relative">
+                <Search className="absolute left-3 top-3 h-4 w-4 text-muted-foreground" />
+                <Input
+                  id="search-sessions"
+                  type="text"
+                  placeholder="Search sessions..."
+                  value={searchTerm}
+                  onChange={(e) => setSearchTerm(e.target.value)}
+                  className="pl-10"
+                />
+              </div>
+            </div>
+
+            <Button
+              variant="outline"
+              onClick={() => setFiltersExpanded(!filtersExpanded)}
+              className="w-full justify-between"
+              aria-expanded={filtersExpanded}
+              aria-controls={filterContentId}
+              aria-describedby={filtersHeadingId}
+            >
+              <span id={filtersHeadingId}>Advanced Filters</span>
+              {filtersExpanded ? (
+                <ChevronUp className="h-4 w-4" />
+              ) : (
+                <ChevronDown className="h-4 w-4" />
+              )}
+            </Button>
+          </div>
+        </CardHeader>
+
+        {filtersExpanded && (
+          <CardContent id={filterContentId}>
+            <fieldset>
+              <legend className="sr-only">Filter and sort options</legend>
+              <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
+                <div>
+                  <Label htmlFor={categoryFilterId}>Category</Label>
+                  <select
+                    id={categoryFilterId}
+                    value={selectedCategory}
+                    onChange={(e) => setSelectedCategory(e.target.value)}
+                    className="w-full mt-1 p-2 border border-gray-300 rounded-md"
+                    aria-describedby={categoryHelpId}
+                  >
+                    <option value="">All Categories</option>
+                    {filterOptions.categories.map((category) => (
+                      <option key={category} value={category}>
+                        {formatCategory(category)}
+                      </option>
+                    ))}
+                  </select>
+                  <div id={categoryHelpId} className="sr-only">
+                    Filter sessions by category
+                  </div>
+                </div>
+
+                <div>
+                  <Label htmlFor={languageFilterId}>Language</Label>
+                  <select
+                    id={languageFilterId}
+                    value={selectedLanguage}
+                    onChange={(e) => setSelectedLanguage(e.target.value)}
+                    className="w-full mt-1 p-2 border border-gray-300 rounded-md"
+                    aria-describedby={languageHelpId}
+                  >
+                    <option value="">All Languages</option>
+                    {filterOptions.languages.map((language) => (
+                      <option key={language} value={language}>
+                        {language.toUpperCase()}
+                      </option>
+                    ))}
+                  </select>
+                  <div id={languageHelpId} className="sr-only">
+                    Filter sessions by language
+                  </div>
+                </div>
+
+                <div>
+                  <Label htmlFor="start-date">Start Date</Label>
+                  <Input
+                    id="start-date"
+                    type="date"
+                    value={startDate}
+                    onChange={(e) => setStartDate(e.target.value)}
+                    className="mt-1"
+                  />
+                </div>
+
+                <div>
+                  <Label htmlFor="end-date">End Date</Label>
+                  <Input
+                    id="end-date"
+                    type="date"
+                    value={endDate}
+                    onChange={(e) => setEndDate(e.target.value)}
+                    className="mt-1"
+                  />
+                </div>
+
+                <div>
+                  <Label htmlFor="sort-by">Sort By</Label>
+                  <select
+                    id="sort-by"
+                    value={sortKey}
+                    onChange={(e) => setSortKey(e.target.value)}
+                    className="w-full mt-1 p-2 border border-gray-300 rounded-md"
+                  >
+                    <option value="startTime">Start Time</option>
+                    <option value="sessionId">Session ID</option>
+                    <option value="category">Category</option>
+                    <option value="language">Language</option>
+                  </select>
+                </div>
+
+                <div>
+                  <Label htmlFor={sortOrderId}>Sort Order</Label>
+                  <select
+                    id={sortOrderId}
+                    value={sortOrder}
+                    onChange={(e) => setSortOrder(e.target.value)}
+                    className="w-full mt-1 p-2 border border-gray-300 rounded-md"
+                    aria-describedby={sortOrderHelpId}
+                  >
+                    <option value="desc">Newest First</option>
+                    <option value="asc">Oldest First</option>
+                  </select>
+                  <div id={sortOrderHelpId} className="sr-only">
+                    Choose ascending or descending order
+                  </div>
+                </div>
+              </div>
+            </fieldset>
+          </CardContent>
+        )}
+      </Card>
+    </section>
+  );
+}
+
+interface SessionListProps {
+  sessions: ChatSession[];
+  loading: boolean;
+  error: string | null;
+  resultsHeadingId: string;
+}
+
+function SessionList({
+  sessions,
+  loading,
+  error,
+  resultsHeadingId,
+}: SessionListProps) {
+  return (
+    <section aria-labelledby={resultsHeadingId}>
+      <h2 id={resultsHeadingId} className="sr-only">
+        Session Results
+      </h2>
+
+      <output aria-live="polite" className="sr-only">
+        {loading && "Loading sessions..."}
+        {error && `Error loading sessions: ${error}`}
+        {!loading &&
```
|
||||||
|
!error &&
|
||||||
|
sessions.length > 0 &&
|
||||||
|
`Found ${sessions.length} sessions`}
|
||||||
|
{!loading && !error && sessions.length === 0 && "No sessions found"}
|
||||||
|
</output>
|
||||||
|
|
||||||
|
{loading && (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="pt-6">
|
||||||
|
<div
|
||||||
|
className="text-center py-8 text-muted-foreground"
|
||||||
|
aria-hidden="true"
|
||||||
|
>
|
||||||
|
Loading sessions...
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{error && (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="pt-6">
|
||||||
|
<div
|
||||||
|
className="text-center py-8 text-destructive"
|
||||||
|
role="alert"
|
||||||
|
aria-hidden="true"
|
||||||
|
>
|
||||||
|
Error loading sessions: {error}
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{!loading && !error && sessions.length === 0 && (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="pt-6">
|
||||||
|
<div className="text-center py-8 text-muted-foreground">
|
||||||
|
No sessions found. Try adjusting your search criteria.
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{!loading && !error && sessions.length > 0 && (
|
||||||
|
<ul className="space-y-4" role="list">
|
||||||
|
{sessions.map((session) => (
|
||||||
|
<li key={session.id}>
|
||||||
|
<Card>
|
||||||
|
<CardContent className="pt-6">
|
||||||
|
<article>
|
||||||
|
<header className="flex justify-between items-start mb-3">
|
||||||
|
<div>
|
||||||
|
<h3 className="font-medium text-base mb-1">
|
||||||
|
Session{" "}
|
||||||
|
{session.sessionId ||
|
||||||
|
session.id.substring(0, 8) + "..."}
|
||||||
|
</h3>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Badge variant="outline" className="text-xs">
|
||||||
|
<Clock
|
||||||
|
className="h-3 w-3 mr-1"
|
||||||
|
aria-hidden="true"
|
||||||
|
/>
|
||||||
|
{new Date(session.startTime).toLocaleDateString()}
|
||||||
|
</Badge>
|
||||||
|
<span className="text-xs text-muted-foreground">
|
||||||
|
{new Date(session.startTime).toLocaleTimeString()}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link href={`/dashboard/sessions/${session.id}`}>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
className="gap-2"
|
||||||
|
aria-label={`View details for session ${session.sessionId || session.id}`}
|
||||||
|
>
|
||||||
|
<Eye className="h-4 w-4" aria-hidden="true" />
|
||||||
|
<span className="hidden sm:inline">View Details</span>
|
||||||
|
</Button>
|
||||||
|
</Link>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
<div className="flex flex-wrap gap-2 mb-3">
|
||||||
|
{session.category && (
|
||||||
|
<Badge variant="secondary" className="gap-1">
|
||||||
|
<Filter className="h-3 w-3" aria-hidden="true" />
|
||||||
|
{formatCategory(session.category)}
|
||||||
|
</Badge>
|
||||||
|
)}
|
||||||
|
{session.language && (
|
||||||
|
<Badge variant="outline" className="gap-1">
|
||||||
|
<Globe className="h-3 w-3" aria-hidden="true" />
|
||||||
|
{session.language.toUpperCase()}
|
||||||
|
</Badge>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{session.summary ? (
|
||||||
|
<p className="text-sm text-muted-foreground line-clamp-2">
|
||||||
|
{session.summary}
|
||||||
|
</p>
|
||||||
|
) : session.initialMsg ? (
|
||||||
|
<p className="text-sm text-muted-foreground line-clamp-2">
|
||||||
|
{session.initialMsg}
|
||||||
|
</p>
|
||||||
|
) : null}
|
||||||
|
</article>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</li>
|
||||||
|
))}
|
||||||
|
</ul>
|
||||||
|
)}
|
||||||
|
</section>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PaginationProps {
|
||||||
|
currentPage: number;
|
||||||
|
totalPages: number;
|
||||||
|
setCurrentPage: (page: number | ((prev: number) => number)) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
function Pagination({
|
||||||
|
currentPage,
|
||||||
|
totalPages,
|
||||||
|
setCurrentPage,
|
||||||
|
}: PaginationProps) {
|
||||||
|
if (totalPages === 0) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="pt-6">
|
||||||
|
<div className="flex justify-center items-center gap-4">
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => setCurrentPage((prev) => Math.max(prev - 1, 1))}
|
||||||
|
disabled={currentPage === 1}
|
||||||
|
className="gap-2"
|
||||||
|
>
|
||||||
|
<ChevronLeft className="h-4 w-4" />
|
||||||
|
Previous
|
||||||
|
</Button>
|
||||||
|
<span className="text-sm text-muted-foreground">
|
||||||
|
Page {currentPage} of {totalPages}
|
||||||
|
</span>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={() =>
|
||||||
|
setCurrentPage((prev) => Math.min(prev + 1, totalPages))
|
||||||
|
}
|
||||||
|
disabled={currentPage === totalPages}
|
||||||
|
className="gap-2"
|
||||||
|
>
|
||||||
|
Next
|
||||||
|
<ChevronRight className="h-4 w-4" />
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
 export default function SessionsPage() {
   const [sessions, setSessions] = useState<ChatSession[]>([]);
   const [loading, setLoading] = useState(true);
@@ -48,45 +440,29 @@ export default function SessionsPage() {
   const sortOrderId = useId();
   const sortOrderHelpId = useId();
   const resultsHeadingId = useId();
-  const startDateFilterId = useId();
-  const startDateHelpId = useId();
-  const endDateFilterId = useId();
-  const endDateHelpId = useId();
-  const sortKeyId = useId();
-  const sortKeyHelpId = useId();
-
-  // Filter states
+  const [debouncedSearchTerm, setDebouncedSearchTerm] = useState("");
+  const [selectedCategory, setSelectedCategory] = useState("");
+  const [selectedLanguage, setSelectedLanguage] = useState("");
+  const [startDate, setStartDate] = useState("");
+  const [endDate, setEndDate] = useState("");
+  const [sortKey, setSortKey] = useState("startTime");
+  const [sortOrder, setSortOrder] = useState("desc");
+  const [currentPage, setCurrentPage] = useState(1);
+  const [totalPages, setTotalPages] = useState(0);
+  const [pageSize] = useState(10);
+  const [filtersExpanded, setFiltersExpanded] = useState(false);
+
   const [filterOptions, setFilterOptions] = useState<FilterOptions>({
     categories: [],
     languages: [],
   });
-  const [selectedCategory, setSelectedCategory] = useState<string>("");
-  const [selectedLanguage, setSelectedLanguage] = useState<string>("");
-  const [startDate, setStartDate] = useState<string>("");
-  const [endDate, setEndDate] = useState<string>("");
-
-  // Sort states
-  const [sortKey, setSortKey] = useState<string>("startTime"); // Default sort key
-  const [sortOrder, setSortOrder] = useState<"asc" | "desc">("desc"); // Default sort order
-
-  // Debounce search term to avoid excessive API calls
-  const [debouncedSearchTerm, setDebouncedSearchTerm] = useState(searchTerm);
-
-  // Pagination states
-  const [currentPage, setCurrentPage] = useState(1);
-  const [totalPages, setTotalPages] = useState(0);
-  const [pageSize, _setPageSize] = useState(10); // Or make this configurable
-
-  // UI states
-  const [filtersExpanded, setFiltersExpanded] = useState(false);

   useEffect(() => {
     const timerId = setTimeout(() => {
       setDebouncedSearchTerm(searchTerm);
-    }, 500); // 500ms delay
-    return () => {
-      clearTimeout(timerId);
-    };
+    }, 500);
+    return () => clearTimeout(timerId);
   }, [searchTerm]);

   const fetchFilterOptions = useCallback(async () => {
@@ -158,10 +534,8 @@ export default function SessionsPage() {

   return (
     <div className="space-y-6">
-      {/* Page heading for screen readers */}
       <h1 className="sr-only">Sessions Management</h1>

-      {/* Header */}
       <Card>
         <CardHeader>
           <div className="flex items-center gap-3">
@@ -171,376 +545,47 @@ export default function SessionsPage() {
         </CardHeader>
       </Card>

-      {/* Search Input */}
-      <section aria-labelledby={searchHeadingId}>
-        <h2 id={searchHeadingId} className="sr-only">
-          Search Sessions
-        </h2>
-        <Card>
-          <CardContent className="pt-6">
-            <div className="relative">
-              <Search
-                className="absolute left-3 top-1/2 transform -translate-y-1/2 h-4 w-4 text-muted-foreground"
-                aria-hidden="true"
-              />
-              <Input
-                placeholder="Search sessions (ID, category, initial message...)"
-                value={searchTerm}
-                onChange={(e) => setSearchTerm(e.target.value)}
-                className="pl-10"
-                aria-label="Search sessions by ID, category, or message content"
-              />
-            </div>
-          </CardContent>
-        </Card>
-      </section>
-
-      {/* Filter and Sort Controls */}
-      <section aria-labelledby={filtersHeadingId}>
-        <Card>
-          <CardHeader>
-            <div className="flex items-center justify-between">
-              <div className="flex items-center gap-2">
-                <Filter className="h-5 w-5" aria-hidden="true" />
-                <CardTitle id={filtersHeadingId} className="text-lg">
-                  Filters & Sorting
-                </CardTitle>
-              </div>
-              <Button
-                variant="ghost"
-                size="sm"
-                onClick={() => setFiltersExpanded(!filtersExpanded)}
-                className="gap-2"
-                aria-expanded={filtersExpanded}
-                aria-controls={filterContentId}
-              >
-                {filtersExpanded ? (
-                  <>
-                    <ChevronUp className="h-4 w-4" />
-                    Hide
-                  </>
-                ) : (
-                  <>
-                    <ChevronDown className="h-4 w-4" />
-                    Show
-                  </>
-                )}
-              </Button>
-            </div>
-          </CardHeader>
-          {filtersExpanded && (
-            <CardContent id={filterContentId}>
-              <fieldset>
-                <legend className="sr-only">
-                  Session Filters and Sorting Options
-                </legend>
-                <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 xl:grid-cols-6 gap-4">
-                  {/* Category Filter */}
-                  <div className="space-y-2">
-                    <Label htmlFor={categoryFilterId}>Category</Label>
-                    <select
-                      id={categoryFilterId}
-                      className="w-full h-10 px-3 py-2 text-sm rounded-md border border-input bg-background ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2"
-                      value={selectedCategory}
-                      onChange={(e) => setSelectedCategory(e.target.value)}
-                      aria-describedby={categoryHelpId}
-                    >
-                      <option value="">All Categories</option>
-                      {filterOptions.categories.map((cat) => (
-                        <option key={cat} value={cat}>
-                          {formatCategory(cat)}
-                        </option>
-                      ))}
-                    </select>
-                    <div id={categoryHelpId} className="sr-only">
-                      Filter sessions by category type
-                    </div>
-                  </div>
-
-                  {/* Language Filter */}
-                  <div className="space-y-2">
-                    <Label htmlFor={languageFilterId}>Language</Label>
-                    <select
-                      id={languageFilterId}
-                      className="w-full h-10 px-3 py-2 text-sm rounded-md border border-input bg-background ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2"
-                      value={selectedLanguage}
-                      onChange={(e) => setSelectedLanguage(e.target.value)}
-                      aria-describedby={languageHelpId}
-                    >
-                      <option value="">All Languages</option>
-                      {filterOptions.languages.map((lang) => (
-                        <option key={lang} value={lang}>
-                          {lang.toUpperCase()}
-                        </option>
-                      ))}
-                    </select>
-                    <div id={languageHelpId} className="sr-only">
-                      Filter sessions by language
-                    </div>
-                  </div>
-
-                  {/* Start Date Filter */}
-                  <div className="space-y-2">
-                    <Label htmlFor={startDateFilterId}>Start Date</Label>
-                    <Input
-                      type="date"
-                      id={startDateFilterId}
-                      value={startDate}
-                      onChange={(e) => setStartDate(e.target.value)}
-                      aria-describedby={startDateHelpId}
-                    />
-                    <div id={startDateHelpId} className="sr-only">
-                      Filter sessions from this date onwards
-                    </div>
-                  </div>
-
-                  {/* End Date Filter */}
-                  <div className="space-y-2">
-                    <Label htmlFor={endDateFilterId}>End Date</Label>
-                    <Input
-                      type="date"
-                      id={endDateFilterId}
-                      value={endDate}
-                      onChange={(e) => setEndDate(e.target.value)}
-                      aria-describedby={endDateHelpId}
-                    />
-                    <div id={endDateHelpId} className="sr-only">
-                      Filter sessions up to this date
-                    </div>
-                  </div>
-
-                  {/* Sort Key */}
-                  <div className="space-y-2">
-                    <Label htmlFor={sortKeyId}>Sort By</Label>
-                    <select
-                      id={sortKeyId}
-                      className="w-full h-10 px-3 py-2 text-sm rounded-md border border-input bg-background ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2"
-                      value={sortKey}
-                      onChange={(e) => setSortKey(e.target.value)}
-                      aria-describedby={sortKeyHelpId}
-                    >
-                      <option value="startTime">Start Time</option>
-                      <option value="category">Category</option>
-                      <option value="language">Language</option>
-                      <option value="sentiment">Sentiment</option>
-                      <option value="messagesSent">Messages Sent</option>
-                      <option value="avgResponseTime">
-                        Avg. Response Time
-                      </option>
-                    </select>
-                    <div id={sortKeyHelpId} className="sr-only">
-                      Choose field to sort sessions by
-                    </div>
-                  </div>
-
-                  {/* Sort Order */}
-                  <div className="space-y-2">
-                    <Label htmlFor={sortOrderId}>Order</Label>
-                    <select
-                      id={sortOrderId}
-                      className="w-full h-10 px-3 py-2 text-sm rounded-md border border-input bg-background ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2"
-                      value={sortOrder}
-                      onChange={(e) =>
-                        setSortOrder(e.target.value as "asc" | "desc")
-                      }
-                      aria-describedby={sortOrderHelpId}
-                    >
-                      <option value="desc">Descending</option>
-                      <option value="asc">Ascending</option>
-                    </select>
-                    <div id={sortOrderHelpId} className="sr-only">
-                      Choose ascending or descending order
-                    </div>
-                  </div>
-                </div>
-              </fieldset>
-            </CardContent>
-          )}
-        </Card>
-      </section>
-
-      {/* Results section */}
-      <section aria-labelledby={resultsHeadingId}>
-        <h2 id={resultsHeadingId} className="sr-only">
-          Session Results
-        </h2>
-
-        {/* Live region for screen reader announcements */}
-        <output aria-live="polite" className="sr-only">
-          {loading && "Loading sessions..."}
-          {error && `Error loading sessions: ${error}`}
-          {!loading &&
-            !error &&
-            sessions.length > 0 &&
-            `Found ${sessions.length} sessions`}
-          {!loading && !error && sessions.length === 0 && "No sessions found"}
-        </output>
-
-        {/* Loading State */}
-        {loading && (
-          <Card>
-            <CardContent className="pt-6">
-              <div
-                className="text-center py-8 text-muted-foreground"
-                aria-hidden="true"
-              >
-                Loading sessions...
-              </div>
-            </CardContent>
-          </Card>
-        )}
-
-        {/* Error State */}
-        {error && (
-          <Card>
-            <CardContent className="pt-6">
-              <div
-                className="text-center py-8 text-destructive"
-                role="alert"
-                aria-hidden="true"
-              >
-                Error: {error}
-              </div>
-            </CardContent>
-          </Card>
-        )}
-
-        {/* Empty State */}
-        {!loading && !error && sessions.length === 0 && (
-          <Card>
-            <CardContent className="pt-6">
-              <div className="text-center py-8 text-muted-foreground">
-                {debouncedSearchTerm
-                  ? `No sessions found for "${debouncedSearchTerm}".`
-                  : "No sessions found."}
-              </div>
-            </CardContent>
-          </Card>
-        )}
-
-        {/* Sessions List */}
-        {!loading && !error && sessions.length > 0 && (
-          <ul aria-label="Chat sessions" className="grid gap-4">
-            {sessions.map((session) => (
-              <li key={session.id}>
-                <Card className="hover:shadow-md transition-shadow">
-                  <CardContent className="pt-6">
-                    <article aria-labelledby={`session-${session.id}-title`}>
-                      <header className="flex justify-between items-start mb-4">
-                        <div className="space-y-2 flex-1">
-                          <h3
-                            id={`session-${session.id}-title`}
-                            className="sr-only"
-                          >
-                            Session {session.sessionId || session.id} from{" "}
-                            {new Date(session.startTime).toLocaleDateString()}
-                          </h3>
-                          <div className="flex items-center gap-3">
-                            <Badge
-                              variant="outline"
-                              className="font-mono text-xs"
-                            >
-                              ID
-                            </Badge>
-                            <code className="text-sm text-muted-foreground font-mono truncate max-w-24">
-                              {session.sessionId || session.id}
-                            </code>
-                          </div>
-                          <div className="flex items-center gap-2">
-                            <Badge variant="outline" className="text-xs">
-                              <Clock
-                                className="h-3 w-3 mr-1"
-                                aria-hidden="true"
-                              />
-                              {new Date(session.startTime).toLocaleDateString()}
-                            </Badge>
-                            <span className="text-xs text-muted-foreground">
-                              {new Date(session.startTime).toLocaleTimeString()}
-                            </span>
-                          </div>
-                        </div>
-                        <Link href={`/dashboard/sessions/${session.id}`}>
-                          <Button
-                            variant="outline"
-                            size="sm"
-                            className="gap-2"
-                            aria-label={`View details for session ${session.sessionId || session.id}`}
-                          >
-                            <Eye className="h-4 w-4" aria-hidden="true" />
-                            <span className="hidden sm:inline">
-                              View Details
-                            </span>
-                          </Button>
-                        </Link>
-                      </header>
-
-                      <div className="flex flex-wrap gap-2 mb-3">
-                        {session.category && (
-                          <Badge variant="secondary" className="gap-1">
-                            <Filter className="h-3 w-3" aria-hidden="true" />
-                            {formatCategory(session.category)}
-                          </Badge>
-                        )}
-                        {session.language && (
-                          <Badge variant="outline" className="gap-1">
-                            <Globe className="h-3 w-3" aria-hidden="true" />
-                            {session.language.toUpperCase()}
-                          </Badge>
-                        )}
-                      </div>
-
-                      {session.summary ? (
-                        <p className="text-sm text-muted-foreground line-clamp-2">
-                          {session.summary}
-                        </p>
-                      ) : session.initialMsg ? (
-                        <p className="text-sm text-muted-foreground line-clamp-2">
-                          {session.initialMsg}
-                        </p>
-                      ) : null}
-                    </article>
-                  </CardContent>
-                </Card>
-              </li>
-            ))}
-          </ul>
-        )}
-
-        {/* Pagination */}
-        {totalPages > 0 && (
-          <Card>
-            <CardContent className="pt-6">
-              <div className="flex justify-center items-center gap-4">
-                <Button
-                  variant="outline"
-                  onClick={() =>
-                    setCurrentPage((prev) => Math.max(prev - 1, 1))
-                  }
-                  disabled={currentPage === 1}
-                  className="gap-2"
-                >
-                  <ChevronLeft className="h-4 w-4" />
-                  Previous
-                </Button>
-                <span className="text-sm text-muted-foreground">
-                  Page {currentPage} of {totalPages}
-                </span>
-                <Button
-                  variant="outline"
-                  onClick={() =>
-                    setCurrentPage((prev) => Math.min(prev + 1, totalPages))
-                  }
-                  disabled={currentPage === totalPages}
-                  className="gap-2"
-                >
-                  Next
-                  <ChevronRight className="h-4 w-4" />
-                </Button>
-              </div>
-            </CardContent>
-          </Card>
-        )}
-      </section>
+      <FilterSection
+        filtersExpanded={filtersExpanded}
+        setFiltersExpanded={setFiltersExpanded}
+        searchTerm={searchTerm}
+        setSearchTerm={setSearchTerm}
+        selectedCategory={selectedCategory}
+        setSelectedCategory={setSelectedCategory}
+        selectedLanguage={selectedLanguage}
+        setSelectedLanguage={setSelectedLanguage}
+        startDate={startDate}
+        setStartDate={setStartDate}
+        endDate={endDate}
+        setEndDate={setEndDate}
+        sortKey={sortKey}
+        setSortKey={setSortKey}
+        sortOrder={sortOrder}
+        setSortOrder={setSortOrder}
+        filterOptions={filterOptions}
+        searchHeadingId={searchHeadingId}
+        filtersHeadingId={filtersHeadingId}
+        filterContentId={filterContentId}
+        categoryFilterId={categoryFilterId}
+        categoryHelpId={categoryHelpId}
+        languageFilterId={languageFilterId}
+        languageHelpId={languageHelpId}
+        sortOrderId={sortOrderId}
+        sortOrderHelpId={sortOrderHelpId}
+      />
+
+      <SessionList
+        sessions={sessions}
+        loading={loading}
+        error={error}
+        resultsHeadingId={resultsHeadingId}
+      />
+
+      <Pagination
+        currentPage={currentPage}
+        totalPages={totalPages}
+        setCurrentPage={setCurrentPage}
+      />
     </div>
   );
 }
@@ -4,28 +4,22 @@ import { ProcessingStatusManager } from "./lib/processingStatusManager";
 const prisma = new PrismaClient();
 const statusManager = new ProcessingStatusManager(prisma);

-async function checkRefactoredPipelineStatus() {
-  try {
-    console.log("=== REFACTORED PIPELINE STATUS ===\n");
-
-    // Get pipeline status using the new system
-    const pipelineStatus = await statusManager.getPipelineStatus();
-
-    console.log(`Total Sessions: ${pipelineStatus.totalSessions}\n`);
-
-    // Display status for each stage
-    const stages = [
+const PIPELINE_STAGES = [
   "CSV_IMPORT",
   "TRANSCRIPT_FETCH",
   "SESSION_CREATION",
   "AI_ANALYSIS",
   "QUESTION_EXTRACTION",
 ];

-    for (const stage of stages) {
+/**
+ * Display status for a single pipeline stage
+ */
+function displayStageStatus(
+  stage: string,
+  stageData: Record<string, number> = {}
+) {
   console.log(`${stage}:`);
-      const stageData = pipelineStatus.pipeline[stage] || {};
-
   const pending = stageData.PENDING || 0;
   const inProgress = stageData.IN_PROGRESS || 0;
   const completed = stageData.COMPLETED || 0;
@@ -38,12 +32,17 @@ async function checkRefactoredPipelineStatus() {
   console.log(`  FAILED: ${failed}`);
   console.log(`  SKIPPED: ${skipped}`);
   console.log("");
 }

-  // Show what needs processing
+/**
+ * Display what needs processing across all stages
+ */
+function displayProcessingNeeds(pipelineStatus: {
+  pipeline: Record<string, unknown>;
+}) {
   console.log("=== WHAT NEEDS PROCESSING ===");

-  for (const stage of stages) {
+  for (const stage of PIPELINE_STAGES) {
     const stageData = pipelineStatus.pipeline[stage] || {};
     const pending = stageData.PENDING || 0;
     const failed = stageData.FAILED || 0;
@@ -52,38 +51,77 @@ async function checkRefactoredPipelineStatus() {
     console.log(`• ${stage}: ${pending} pending, ${failed} failed`);
   }
 }
+}
+
+/**
+ * Display failed sessions summary
+ */
+function displayFailedSessions(failedSessions: unknown[]) {
+  if (failedSessions.length === 0) return;

-  // Show failed sessions if any
-  const failedSessions = await statusManager.getFailedSessions();
-  if (failedSessions.length > 0) {
   console.log("\n=== FAILED SESSIONS ===");
-  failedSessions.slice(0, 5).forEach((failure) => {
+  // biome-ignore lint/suspicious/noExplicitAny: Function parameter types from external API
+  failedSessions.slice(0, 5).forEach((failure: any) => {
     console.log(
       `  ${failure.session.import?.externalSessionId || failure.sessionId}: ${failure.stage} - ${failure.errorMessage}`
     );
   });

   if (failedSessions.length > 5) {
-    console.log(
-      `  ... and ${failedSessions.length - 5} more failed sessions`
-    );
+    console.log(`  ... and ${failedSessions.length - 5} more failed sessions`);
   }
 }
+}
+
+/**
+ * Display sessions ready for AI processing
+ */
+function displayReadyForAI(
+  readyForAI: Array<{
+    sessionId: string;
+    session: {
+      import?: { externalSessionId?: string };
+      createdAt: Date;
+    };
+  }>
+) {
+  if (readyForAI.length === 0) return;

-  // Show sessions ready for AI processing
-  const readyForAI =
-    await statusManager.getSessionsNeedingProcessing(
-      "AI_ANALYSIS",
-      5
-    );
-  if (readyForAI.length > 0) {
   console.log("\n=== SESSIONS READY FOR AI PROCESSING ===");
   readyForAI.forEach((status) => {
     console.log(
       `  ${status.session.import?.externalSessionId || status.sessionId} (created: ${status.session.createdAt})`
     );
   });
+}
+
+// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: Main orchestration function - complexity is appropriate for its scope
+async function checkRefactoredPipelineStatus() {
+  try {
+    console.log("=== REFACTORED PIPELINE STATUS ===\n");
+
+    // Get pipeline status using the new system
+    const pipelineStatus = await statusManager.getPipelineStatus();
+    console.log(`Total Sessions: ${pipelineStatus.totalSessions}\n`);
+
+    // Display status for each stage
+    for (const stage of PIPELINE_STAGES) {
+      const stageData = pipelineStatus.pipeline[stage] || {};
+      displayStageStatus(stage, stageData);
+    }
+
+    // Show what needs processing
+    displayProcessingNeeds(pipelineStatus);
+
+    // Show failed sessions if any
+    const failedSessions = await statusManager.getFailedSessions();
+    displayFailedSessions(failedSessions);
+
+    // Show sessions ready for AI processing
+    const readyForAI = await statusManager.getSessionsNeedingProcessing(
+      "AI_ANALYSIS",
+      5
+    );
+    displayReadyForAI(readyForAI);
   } catch (error) {
     console.error("Error checking pipeline status:", error);
   } finally {
@@ -25,11 +25,7 @@ export default function DateRangePicker({
   useEffect(() => {
     // Only notify parent component when dates change, not when the callback changes
     onDateRangeChange(startDate, endDate);
-  }, [
-    startDate,
-    endDate, // Only notify parent component when dates change, not when the callback changes
-    onDateRangeChange,
-  ]);
+  }, [startDate, endDate]);

   const handleStartDateChange = (newStartDate: string) => {
     // Ensure start date is not before min date
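The fix above trades lint exhaustiveness for stability: `onDateRangeChange` is deliberately left out of the dependency array, so a parent that recreates the callback on every render no longer retriggers the effect (the infinite-loop symptom this commit fixes). An alternative that keeps the exhaustive-deps lint happy is the "latest ref" pattern; the sketch below shows that pattern under assumption, it is not what this commit does:

```typescript
import { useEffect, useRef } from "react";

type OnChange = (startDate: string, endDate: string) => void;

// Sketch: notify the parent when dates change, always through the most
// recent callback, without listing the callback as an effect dependency.
function useDateRangeNotifier(
  startDate: string,
  endDate: string,
  onDateRangeChange: OnChange
) {
  const latest = useRef(onDateRangeChange);

  // Keep the ref pointing at the newest callback on every render.
  useEffect(() => {
    latest.current = onDateRangeChange;
  });

  // Fire only when the dates themselves change.
  useEffect(() => {
    latest.current(startDate, endDate);
  }, [startDate, endDate]);
}
```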
@@ -82,61 +82,107 @@ export default function GeographicMap({
     setIsClient(true);
   }, []);

-  // Process country data when client is ready and dependencies change
-  useEffect(() => {
-    if (!isClient || !countries) return;
-
-    try {
-      // Generate CountryData array for the Map component
-      const data: CountryData[] = Object.entries(countries || {})
-        .map(([code, count]) => {
-          let countryCoords: [number, number] | undefined =
-            countryCoordinates[code] || DEFAULT_COORDINATES[code];
-
-          if (!countryCoords) {
-            const feature = countryCoder.feature(code);
-            if (feature?.geometry) {
-              if (feature.geometry.type === "Point") {
-                const [lon, lat] = feature.geometry.coordinates;
-                countryCoords = [lat, lon]; // Leaflet expects [lat, lon]
-              } else if (
-                feature.geometry.type === "Polygon" &&
-                feature.geometry.coordinates &&
-                feature.geometry.coordinates[0] &&
-                feature.geometry.coordinates[0][0]
-              ) {
-                // For Polygons, use the first coordinate of the first ring as a fallback representative point
-                const [lon, lat] = feature.geometry.coordinates[0][0];
-                countryCoords = [lat, lon]; // Leaflet expects [lat, lon]
-              } else if (
-                feature.geometry.type === "MultiPolygon" &&
-                feature.geometry.coordinates &&
-                feature.geometry.coordinates[0] &&
-                feature.geometry.coordinates[0][0] &&
-                feature.geometry.coordinates[0][0][0]
-              ) {
-                // For MultiPolygons, use the first coordinate of the first ring of the first polygon
-                const [lon, lat] = feature.geometry.coordinates[0][0][0];
-                countryCoords = [lat, lon]; // Leaflet expects [lat, lon]
-              }
-            }
-          }
-
-          if (countryCoords) {
-            return {
-              code,
-              count,
-              coordinates: countryCoords,
-            };
-          }
-          return null; // Skip if no coordinates found
-        })
+/**
+ * Extract coordinates from a geometry feature
+ */
+function extractCoordinatesFromGeometry(
+  geometry: any
+): [number, number] | undefined {
+  if (geometry.type === "Point") {
+    const [lon, lat] = geometry.coordinates;
+    return [lat, lon]; // Leaflet expects [lat, lon]
+  }
+
+  if (
+    geometry.type === "Polygon" &&
+    geometry.coordinates &&
+    geometry.coordinates[0] &&
+    geometry.coordinates[0][0]
+  ) {
+    // For Polygons, use the first coordinate of the first ring as a fallback representative point
+    const [lon, lat] = geometry.coordinates[0][0];
+    return [lat, lon]; // Leaflet expects [lat, lon]
+  }
+
+  if (
+    geometry.type === "MultiPolygon" &&
+    geometry.coordinates &&
+    geometry.coordinates[0] &&
+    geometry.coordinates[0][0] &&
+    geometry.coordinates[0][0][0]
+  ) {
+    // For MultiPolygons, use the first coordinate of the first ring of the first polygon
+    const [lon, lat] = geometry.coordinates[0][0][0];
+    return [lat, lon]; // Leaflet expects [lat, lon]
+  }
+
+  return undefined;
+}
+
+/**
+ * Get coordinates for a country code
+ */
+function getCountryCoordinates(
+  code: string,
+  countryCoordinates: Record<string, [number, number]>
+): [number, number] | undefined {
+  // Try predefined coordinates first
+  let coords = countryCoordinates[code] || DEFAULT_COORDINATES[code];
+
+  if (!coords) {
+    // Try to get coordinates from country coder
+    const feature = countryCoder.feature(code);
+    if (feature?.geometry) {
+      coords = extractCoordinatesFromGeometry(feature.geometry);
+    }
+  }
+
+  return coords;
+}
+
+/**
+ * Process a single country entry into CountryData
+ */
+function processCountryEntry(
+  code: string,
+  count: number,
+  countryCoordinates: Record<string, [number, number]>
+): CountryData | null {
+  const coordinates = getCountryCoordinates(code, countryCoordinates);
+
+  if (coordinates) {
+    return { code, count, coordinates };
+  }
+
+  return null; // Skip if no coordinates found
+}
+
+/**
+ * Process all countries data into CountryData array
+ */
+function processCountriesData(
+  countries: Record<string, number>,
+  countryCoordinates: Record<string, [number, number]>
+): CountryData[] {
+  const data = Object.entries(countries || {})
+    .map(([code, count]) =>
+      processCountryEntry(code, count, countryCoordinates)
+    )
     .filter((item): item is CountryData => item !== null);

   console.log(
     `Found ${data.length} countries with coordinates out of ${Object.keys(countries).length} total countries`
   );

+  return data;
+}
+
+  // Process country data when client is ready and dependencies change
+  useEffect(() => {
+    if (!isClient || !countries) return;
+
+    try {
+      const data = processCountriesData(countries, countryCoordinates);
     setCountryData(data);
   } catch (error) {
     console.error("Error processing geographic data:", error);
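As a quick sanity check on the helper's coordinate handling: GeoJSON geometries store positions as [lon, lat], while Leaflet markers take [lat, lon], hence the flip on every return path. The geometry literal below is illustrative only:

```typescript
// Illustrative GeoJSON Point (roughly central Netherlands)
const geometry = { type: "Point", coordinates: [5.2913, 52.1326] };

// extractCoordinatesFromGeometry(geometry) would return [52.1326, 5.2913],
// i.e. [lat, lon] in the order Leaflet expects; Polygon and MultiPolygon
// inputs fall back to the first vertex of the first ring instead.
```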
@@ -71,7 +71,8 @@ export default function MessageViewer({ messages }: MessageViewerProps) {
               : "No timestamp"}
           </span>
           <span>
-            Last message: {(() => {
+            Last message:{" "}
+            {(() => {
               const lastMessage = messages[messages.length - 1];
               return lastMessage.timestamp
                 ? new Date(lastMessage.timestamp).toLocaleString()
@@ -64,7 +64,11 @@ export default function TopQuestionsChart({
           </div>

           {/* Rank indicator */}
-          <div className="absolute -left-1 top-0 w-6 h-6 bg-primary text-primary-foreground text-xs font-bold rounded-full flex items-center justify-center">
+          <div
+            className="absolute -left-1 top-0 w-6 h-6 bg-primary text-primary-foreground text-xs font-bold rounded-full flex items-center justify-center"
+            role="img"
+            aria-label={`Rank ${index + 1}`}
+          >
             {index + 1}
           </div>
         </div>
@@ -9,6 +9,83 @@ interface TranscriptViewerProps {
   transcriptUrl?: string | null;
 }

+/**
+ * Renders a message bubble with proper styling
+ */
+function renderMessageBubble(
+  speaker: string,
+  messages: string[],
+  key: string
+): React.ReactNode {
+  return (
+    <div key={key} className={`mb-3 ${speaker === "User" ? "text-right" : ""}`}>
+      <div
+        className={`inline-block px-4 py-2 rounded-lg ${
+          speaker === "User"
+            ? "bg-blue-100 text-blue-800"
+            : "bg-gray-100 text-gray-800"
+        }`}
+      >
+        {messages.map((msg, i) => (
+          <ReactMarkdown
+            key={`msg-${msg.substring(0, 20).replace(/\s/g, "-")}-${i}`}
+            rehypePlugins={[rehypeRaw]}
+            components={{
+              p: "span",
+              a: ({ node: _node, ...props }) => (
+                <a
+                  className="text-sky-600 hover:text-sky-800 underline"
+                  {...props}
+                />
+              ),
+            }}
+          >
+            {msg}
+          </ReactMarkdown>
+        ))}
+      </div>
+    </div>
+  );
+}
+
+/**
+ * Checks if a line indicates a new speaker
+ */
+function isNewSpeakerLine(line: string): boolean {
+  return line.startsWith("User:") || line.startsWith("Assistant:");
+}
+
+/**
+ * Extracts speaker and message content from a speaker line
+ */
+function extractSpeakerInfo(line: string): {
+  speaker: string;
+  content: string;
+} {
+  const speaker = line.startsWith("User:") ? "User" : "Assistant";
+  const content = line.substring(line.indexOf(":") + 1).trim();
+  return { speaker, content };
+}
+
+/**
+ * Processes accumulated messages for a speaker
+ */
+function processAccumulatedMessages(
+  currentSpeaker: string | null,
+  currentMessages: string[],
+  elements: React.ReactNode[]
+): void {
+  if (currentSpeaker && currentMessages.length > 0) {
+    elements.push(
+      renderMessageBubble(
+        currentSpeaker,
+        currentMessages,
+        `message-${elements.length}`
+      )
+    );
+  }
+}
+
 /**
  * Format the transcript content into a more readable format with styling
  */
@@ -17,114 +94,38 @@ function formatTranscript(content: string): React.ReactNode[] {
     return [<p key="empty">No transcript content available.</p>];
   }

-  // Split the transcript by lines
   const lines = content.split("\n");

   const elements: React.ReactNode[] = [];
   let currentSpeaker: string | null = null;
   let currentMessages: string[] = [];

   // Process each line
-  lines.forEach((line) => {
+  for (const line of lines) {
     const trimmedLine = line.trim();
     if (!trimmedLine) {
-      // Empty line, ignore
-      return;
+      continue; // Skip empty lines
     }

-    // Check if this is a new speaker line
-    if (line.startsWith("User:") || line.startsWith("Assistant:")) {
-      // If we have accumulated messages for a previous speaker, add them
-      if (currentSpeaker && currentMessages.length > 0) {
-        elements.push(
-          <div
-            key={`message-${elements.length}`}
-            className={`mb-3 ${currentSpeaker === "User" ? "text-right" : ""}`}
-          >
-            <div
-              className={`inline-block px-4 py-2 rounded-lg ${
-                currentSpeaker === "User"
-                  ? "bg-blue-100 text-blue-800"
-                  : "bg-gray-100 text-gray-800"
-              }`}
-            >
-              {currentMessages.map((msg, i) => (
-                // Use ReactMarkdown to render each message part
-                <ReactMarkdown
-                  key={`msg-${msg.substring(0, 20).replace(/\s/g, "-")}-${i}`}
-                  rehypePlugins={[rehypeRaw]} // Add rehypeRaw to plugins
-                  components={{
-                    p: "span",
-                    a: ({ node: _node, ...props }) => (
-                      <a
-                        className="text-sky-600 hover:text-sky-800 underline"
-                        {...props}
-                      />
-                    ),
-                  }}
-                >
-                  {msg}
-                </ReactMarkdown>
-              ))}
-            </div>
-          </div>
-        );
+    if (isNewSpeakerLine(line)) {
+      // Process any accumulated messages from previous speaker
+      processAccumulatedMessages(currentSpeaker, currentMessages, elements);
       currentMessages = [];
-      }

-      // Set the new current speaker
-      currentSpeaker = trimmedLine.startsWith("User:") ? "User" : "Assistant";
-      // Add the content after "User:" or "Assistant:"
-      const messageContent = trimmedLine
-        .substring(trimmedLine.indexOf(":") + 1)
-        .trim();
-      if (messageContent) {
-        currentMessages.push(messageContent);
+      // Set new speaker and add initial content
+      const { speaker, content } = extractSpeakerInfo(trimmedLine);
+      currentSpeaker = speaker;
+      if (content) {
+        currentMessages.push(content);
       }
     } else if (currentSpeaker) {
-      // This is a continuation of the current speaker's message
+      // Continuation of current speaker's message
       currentMessages.push(trimmedLine);
     }
-  });
-
-  // Add any remaining messages
-  if (currentSpeaker && currentMessages.length > 0) {
-    elements.push(
-      <div
-        key={`message-${elements.length}`}
-        className={`mb-3 ${currentSpeaker === "User" ? "text-right" : ""}`}
-      >
-        <div
-          className={`inline-block px-4 py-2 rounded-lg ${
-            currentSpeaker === "User"
-              ? "bg-blue-100 text-blue-800"
-              : "bg-gray-100 text-gray-800"
-          }`}
-        >
-          {currentMessages.map((msg, i) => (
-            // Use ReactMarkdown to render each message part
-            <ReactMarkdown
-              key={`msg-final-${msg.substring(0, 20).replace(/\s/g, "-")}-${i}`}
-              rehypePlugins={[rehypeRaw]} // Add rehypeRaw to plugins
-              components={{
-                p: "span",
-                // eslint-disable-next-line @typescript-eslint/no-unused-vars, no-unused-vars
-                a: ({ node: _node, ...props }) => (
-                  <a
-                    className="text-sky-600 hover:text-sky-800 underline"
-                    {...props}
-                  />
-                ),
-              }}
-            >
-              {msg}
-            </ReactMarkdown>
-          ))}
-        </div>
-      </div>
-    );
+  }

+  // Process any remaining messages
+  processAccumulatedMessages(currentSpeaker, currentMessages, elements);

   return elements;
 }
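To make the parsing rules concrete, here is how the helpers above walk a small transcript; the input is invented for illustration, and the component ultimately renders each bubble with `renderMessageBubble`:

```typescript
// Illustrative input for formatTranscript
const transcript = [
  "User: Hi, where can I find my invoices?",
  "Assistant: You can download them from the billing page.",
  "It may take a minute to generate.",
  "",
  "User: Thanks!",
].join("\n");

// isNewSpeakerLine matches lines 1, 2 and 5; line 3 is appended to the
// Assistant bubble as a continuation; the blank line is skipped.
// formatTranscript(transcript) therefore yields three message bubbles.
```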
This document provides specific recommendations for optimizing database connections.

## Current Issues Observed

From your logs, we can see:

```
Can't reach database server at `ep-tiny-math-a2zsshve-pooler.eu-central-1.aws.neon.tech:5432`
[NODE-CRON] [WARN] missed execution at Sun Jun 29 2025 12:00:00 GMT+0200! Possible blocking IO or high CPU
```

## Root Causes

### 1. Neon Connection Limits

- **Free Tier**: 20 concurrent connections
- **Pro Tier**: 100 concurrent connections
- **Multiple schedulers** can quickly exhaust connections

### 2. Connection Pooling Issues

- Each scheduler was creating separate PrismaClient instances
- No connection reuse between operations
- No retry logic for temporary failures

### 3. Neon-Specific Challenges

- **Auto-pause**: Databases pause after inactivity
- **Cold starts**: First connection after pause takes longer
- **Regional latency**: eu-central-1 may have variable latency

## Solutions Implemented

### 1. Fixed Multiple PrismaClient Instances ✅

```typescript
// Before: Each file created its own client
const prisma = new PrismaClient(); // ❌

import { prisma } from "./prisma.js"; // ✅
```
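For reference, a minimal sketch of what the shared `./prisma.js` singleton could look like; the file itself is not part of this diff, so the `globalThis` caching shown here is an assumption (it is the common pattern for surviving dev-server hot reloads):

```typescript
// Hypothetical lib/prisma.ts: one PrismaClient for the whole process
import { PrismaClient } from "@prisma/client";

const globalForPrisma = globalThis as unknown as { prisma?: PrismaClient };

export const prisma = globalForPrisma.prisma ?? new PrismaClient();

// Cache on globalThis in development so hot reloads reuse the instance
// instead of opening a fresh connection pool each time.
if (process.env.NODE_ENV !== "production") {
  globalForPrisma.prisma = prisma;
}
```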
### 2. Added Connection Retry Logic ✅

```typescript
// Automatic retry for connection errors
await withRetry(async () => await databaseOperation(), {
  maxRetries: 3,
  initialDelay: 2000,
  maxDelay: 10000,
  backoffMultiplier: 2,
});
```
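The `lib/database-retry.ts` implementation itself is not shown in this hunk; below is a sketch of what a compatible `withRetry` could look like, with the connection-error heuristic as an assumption:

```typescript
// Hypothetical sketch of lib/database-retry.ts
interface RetryOptions {
  maxRetries?: number;
  initialDelay?: number;
  maxDelay?: number;
  backoffMultiplier?: number;
}

const sleep = (ms: number) =>
  new Promise((resolve) => setTimeout(resolve, ms));

// Assumed heuristic: only retry errors that look like connection failures.
function isConnectionError(error: unknown): boolean {
  const message = error instanceof Error ? error.message : String(error);
  return /Can't reach database server|Connection terminated|ECONNREFUSED|ETIMEDOUT/i.test(
    message
  );
}

export async function withRetry<T>(
  operation: () => Promise<T>,
  {
    maxRetries = 3,
    initialDelay = 1000,
    maxDelay = 10000,
    backoffMultiplier = 2,
  }: RetryOptions = {}
): Promise<T> {
  let delay = initialDelay;
  for (let attempt = 0; ; attempt++) {
    try {
      return await operation();
    } catch (error) {
      // Give up after the configured retries, or on non-connection errors.
      if (attempt >= maxRetries || !isConnectionError(error)) throw error;
      await sleep(delay);
      delay = Math.min(delay * backoffMultiplier, maxDelay); // Exponential backoff
    }
  }
}
```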
### 3. Enhanced Connection Pooling ✅

```typescript
// Production-ready pooling with @prisma/adapter-pg
USE_ENHANCED_POOLING = true;
DATABASE_CONNECTION_LIMIT = 20;
DATABASE_POOL_TIMEOUT = 10;
```
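How the adapter wiring could look when `USE_ENHANCED_POOLING` is enabled; a sketch assuming a Prisma version where `PrismaPg` wraps a `pg.Pool` and the `driverAdapters` preview feature is enabled in the schema. The commit's actual pooling code is not part of this hunk:

```typescript
// Hypothetical enhanced-pooling setup with @prisma/adapter-pg
import { Pool } from "pg";
import { PrismaPg } from "@prisma/adapter-pg";
import { PrismaClient } from "@prisma/client";

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  max: Number(process.env.DATABASE_CONNECTION_LIMIT ?? 20),
  // Give Neon cold starts time to complete before timing out.
  connectionTimeoutMillis:
    Number(process.env.DATABASE_POOL_TIMEOUT ?? 10) * 1000,
});

const adapter = new PrismaPg(pool);
export const prisma = new PrismaClient({ adapter });
```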
## Neon-Specific Configuration

### Environment Variables

```bash
# Optimized for Neon
DATABASE_URL="postgresql://user:pass@ep-tiny-math-a2zsshve-pooler.eu-central-1.aws.neon.tech:5432/db?sslmode=require&connection_limit=15"
SESSION_PROCESSING_INTERVAL="0 */2 * * *" # Every 2 hours instead of 1
```

### Connection String Optimization

```bash
# Add these parameters to your DATABASE_URL
?sslmode=require          # Required for Neon
```

## Monitoring & Troubleshooting

### 1. Health Check Endpoint

```bash
# Check connection health
curl -H "Authorization: Bearer your-token" \
```
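The route behind that curl call is cut off by the hunk, so both the path and the auth check below are assumptions; a minimal sketch of such a handler in the app-router style:

```typescript
// Hypothetical app/api/health/database/route.ts
import { NextResponse } from "next/server";
import { prisma } from "@/lib/prisma";

export async function GET(request: Request) {
  // Assumed bearer-token guard; the real scheme may differ.
  const auth = request.headers.get("authorization");
  if (auth !== `Bearer ${process.env.HEALTH_CHECK_TOKEN}`) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  try {
    const started = Date.now();
    await prisma.$queryRaw`SELECT 1`; // Cheap connectivity probe
    return NextResponse.json({ ok: true, latencyMs: Date.now() - started });
  } catch (error) {
    const message = error instanceof Error ? error.message : "unknown";
    return NextResponse.json({ ok: false, error: message }, { status: 503 });
  }
}
```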
### 2. Neon Dashboard Monitoring

- Monitor "Active connections" in Neon dashboard
- Check for connection spikes during scheduler runs
- Review query performance and slow queries

### 3. Application Logs

```bash
# Look for connection patterns
grep "Database connection" logs/*.log
grep "retry" logs/*.log
```

## Performance Optimizations

### 1. Reduce Scheduler Frequency

```typescript
// Current intervals may be too aggressive
CSV_IMPORT_INTERVAL = "*/15 * * * *"; // ➜ "*/30 * * * *"
IMPORT_PROCESSING_INTERVAL = "*/5 * * * *"; // ➜ "*/10 * * * *"
SESSION_PROCESSING_INTERVAL = "0 * * * *"; // ➜ "0 */2 * * *"
```

### 2. Batch Size Optimization

```typescript
// Reduce batch sizes to avoid long-running transactions
CSV_IMPORT_BATCH_SIZE = 50; // ➜ 25
IMPORT_PROCESSING_BATCH_SIZE = 50; // ➜ 25
SESSION_PROCESSING_BATCH_SIZE = 20; // ➜ 10
```

### 3. Connection Keepalive

```typescript
// Keep connections warm to avoid cold starts
const prisma = new PrismaClient({
  datasources: {
    db: {
      url: process.env.DATABASE_URL + "&keepalive=true",
    },
  },
});
```
## Troubleshooting Common Issues
|
## Troubleshooting Common Issues
|
||||||
|
|
||||||
### "Can't reach database server"
|
### "Can't reach database server"
|
||||||
|
|
||||||
**Causes:**
|
**Causes:**
|
||||||
|
|
||||||
- Neon database auto-paused
|
- Neon database auto-paused
|
||||||
- Connection limit exceeded
|
- Connection limit exceeded
|
||||||
- Network issues
|
- Network issues
|
||||||
|
|
||||||
**Solutions:**
|
**Solutions:**
|
||||||
|
|
||||||
1. Enable enhanced pooling: `USE_ENHANCED_POOLING=true`
|
1. Enable enhanced pooling: `USE_ENHANCED_POOLING=true`
|
||||||
2. Reduce connection limit: `DATABASE_CONNECTION_LIMIT=15`
|
2. Reduce connection limit: `DATABASE_CONNECTION_LIMIT=15`
|
||||||
3. Implement retry logic (already done)
|
3. Implement retry logic (already done)
|
||||||
4. Check Neon dashboard for database status
|
4. Check Neon dashboard for database status
|
||||||
|
|
||||||
### "Connection terminated"
|
### "Connection terminated"
|
||||||
|
|
||||||
**Causes:**
|
**Causes:**
|
||||||
|
|
||||||
- Idle connection timeout
|
- Idle connection timeout
|
||||||
- Neon maintenance
|
- Neon maintenance
|
||||||
- Long-running transactions
|
- Long-running transactions
|
||||||
|
|
||||||
**Solutions:**
|
**Solutions:**
|
||||||
|
|
||||||
1. Increase pool timeout: `DATABASE_POOL_TIMEOUT=30`
|
1. Increase pool timeout: `DATABASE_POOL_TIMEOUT=30`
|
||||||
2. Add connection cycling
|
2. Add connection cycling
|
||||||
3. Break large operations into smaller batches
|
3. Break large operations into smaller batches
|
||||||
|
|
||||||
### "Missed cron execution"
|
### "Missed cron execution"
|
||||||
|
|
||||||
**Causes:**
|
**Causes:**
|
||||||
|
|
||||||
- Blocking database operations
|
- Blocking database operations
|
||||||
- Scheduler overlap
|
- Scheduler overlap
|
||||||
- High CPU usage
|
- High CPU usage
|
||||||
|
|
||||||
**Solutions:**
|
**Solutions:**
|
||||||
|
|
||||||
1. Reduce scheduler frequency
|
1. Reduce scheduler frequency
|
||||||
2. Add concurrency limits
|
2. Add concurrency limits
|
||||||
3. Monitor scheduler execution time
|
3. Monitor scheduler execution time
|
||||||
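Concurrency limits here usually mean preventing a new run from starting while the previous one is still going. A minimal sketch of such an overlap guard, assuming node-cron (the repo's actual scheduler wiring is not shown in this section, and `processCsvImports` is a hypothetical job body):

```typescript
import cron from "node-cron";

// Hypothetical job body; the real import logic lives elsewhere
async function processCsvImports(): Promise<void> {}

let isRunning = false; // simple overlap guard for a single scheduler

cron.schedule("*/30 * * * *", async () => {
  if (isRunning) {
    console.warn("[scheduler] previous run still active, skipping this tick");
    return;
  }
  isRunning = true;
  try {
    await processCsvImports();
  } finally {
    isRunning = false;
  }
});
```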
@@ -179,6 +200,7 @@ const prisma = new PrismaClient({

## Recommended Production Settings

### For Neon Free Tier (20 connections)

```bash
DATABASE_CONNECTION_LIMIT=15
DATABASE_POOL_TIMEOUT=30
```

@@ -189,6 +211,7 @@ SESSION_PROCESSING_INTERVAL="0 */3 * * *"

### For Neon Pro Tier (100 connections)

```bash
DATABASE_CONNECTION_LIMIT=50
DATABASE_POOL_TIMEOUT=20
```
Enhanced Prisma client factory:

@@ -2,68 +2,9 @@

import { PrismaPg } from "@prisma/adapter-pg";
import { PrismaClient } from "@prisma/client";
import type { Pool } from "pg";
import { env } from "./env";

This hunk also removes the standalone `createConnectionPool` helper (a `pg.Pool` factory with verbose `connect`/`acquire`/`release`/`error`/`remove` event logging), leaving the adapter-based factory below as the single entry point.

// Create adapter with connection pool
export const createEnhancedPrismaClient = () => {
  // Parse DATABASE_URL to get connection parameters

@@ -71,15 +12,18 @@ export const createEnhancedPrismaClient = () => {

  const poolConfig = {
    host: dbUrl.hostname,
    port: Number.parseInt(dbUrl.port || "5432"),
    database: dbUrl.pathname.slice(1), // Remove leading '/'
    user: dbUrl.username,
    password: decodeURIComponent(dbUrl.password),
    ssl:
      dbUrl.searchParams.get("sslmode") !== "disable"
        ? { rejectUnauthorized: false }
        : undefined,

    // Connection pool settings
    max: env.DATABASE_CONNECTION_LIMIT || 20, // Maximum number of connections
    idleTimeoutMillis: env.DATABASE_POOL_TIMEOUT * 1000 || 30000, // Use env timeout
    connectionTimeoutMillis: 5000, // 5 seconds
    query_timeout: 10000, // 10 seconds
    statement_timeout: 10000, // 10 seconds
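The tail of the factory is elided in this diff. For context, a factory like this typically finishes by wrapping the pg pool in Prisma's pg driver adapter; a minimal sketch under that assumption (requires the `driverAdapters` preview feature, and `buildClient` is an illustrative name, not the repo's):

```typescript
import { PrismaPg } from "@prisma/adapter-pg";
import { PrismaClient } from "@prisma/client";
import { Pool, type PoolConfig } from "pg";

// Assumed continuation: wrap the pg pool in Prisma's driver adapter
const buildClient = (poolConfig: PoolConfig) => {
  const pool = new Pool(poolConfig);
  const adapter = new PrismaPg(pool); // bridges pg pooling into Prisma
  return new PrismaClient({ adapter });
};
```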
Import processor:

@@ -434,7 +434,7 @@ async function processQueuedImportsInternal(batchSize = 50): Promise<void> {

  // Process with concurrency limit to avoid overwhelming the database
  const concurrencyLimit = 5;
  const results: Array<{
    importRecord: (typeof unprocessedImports)[0];
    result: Awaited<ReturnType<typeof processSingleImport>>;
  }> = [];
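The loop body itself is not shown in this hunk. The usual shape of such a concurrency limit is chunked `Promise.all`; a self-contained sketch of the pattern (names are illustrative, not from the repo):

```typescript
// Run `worker` over `items`, at most `limit` in flight at a time
async function processWithLimit<T, R>(
  items: T[],
  limit: number,
  worker: (item: T) => Promise<R>
): Promise<R[]> {
  const results: R[] = [];
  for (let i = 0; i < items.length; i += limit) {
    const chunk = items.slice(i, i + limit);
    results.push(...(await Promise.all(chunk.map(worker))));
  }
  return results;
}
```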
`lib/metrics.ts` (518 lines)

@@ -321,53 +321,13 @@ const stopWords = new Set([

  // Add more domain-specific stop words if necessary
]);

/**
 * Extract unique user identifiers from session data
 */
function extractUniqueUsers(
  session: ChatSession,
  uniqueUserIds: Set<string>
): void {
  let identifierAdded = false;
  if (session.ipAddress && session.ipAddress.trim() !== "") {
    uniqueUserIds.add(session.ipAddress.trim());
@@ -381,188 +341,383 @@ export function sessionMetrics(

  ) {
    uniqueUserIds.add(session.sessionId.trim());
  }
}

/**
 * Validate and convert timestamps to milliseconds
 */
function validateTimestamps(
  session: ChatSession,
  startTimeMs: number,
  endTimeMs: number
): boolean {
  if (Number.isNaN(startTimeMs)) {
    console.warn(
      `[metrics] Invalid startTime for session ${session.id || session.sessionId}: ${session.startTime}`
    );
    return false;
  }
  if (Number.isNaN(endTimeMs)) {
    console.warn(
      `[metrics] Invalid endTime for session ${session.id || session.sessionId}: ${session.endTime}`
    );
    return false;
  }
  return true;
}

/**
 * Log duration warnings for edge cases
 */
function logDurationWarnings(
  session: ChatSession,
  timeDifference: number,
  duration: number
): void {
  if (timeDifference < 0) {
    console.warn(
      `[metrics] endTime (${session.endTime}) was before startTime (${session.startTime}) for session ${session.id || session.sessionId}. Using absolute difference as duration (${(duration / 1000).toFixed(2)} seconds).`
    );
  }
}

/**
 * Calculate session duration and update totals
 */
function processSessionDuration(
  session: ChatSession,
  totals: { totalSessionDuration: number; validSessionsForDuration: number }
): void {
  if (!session.startTime || !session.endTime) {
    if (!session.startTime) {
      console.warn(
        `[metrics] Missing startTime for session ${session.id || session.sessionId}`
      );
    }
    if (!session.endTime) {
      console.log(
        `[metrics] Missing endTime for session ${session.id || session.sessionId} - likely ongoing or data issue.`
      );
    }
    return;
  }

  const startTimeMs = new Date(session.startTime).getTime();
  const endTimeMs = new Date(session.endTime).getTime();

  if (!validateTimestamps(session, startTimeMs, endTimeMs)) {
    return;
  }

  const timeDifference = endTimeMs - startTimeMs;
  const duration = Math.abs(timeDifference);

  totals.totalSessionDuration += duration;
  totals.validSessionsForDuration++;

  logDurationWarnings(session, timeDifference, duration);
}

/**
 * Update sentiment counters based on session sentiment
 */
function processSentiment(
  session: ChatSession,
  sentimentCounts: {
    sentimentPositiveCount: number;
    sentimentNeutralCount: number;
    sentimentNegativeCount: number;
  }
): void {
  if (session.sentiment !== undefined && session.sentiment !== null) {
    if (session.sentiment === "POSITIVE")
      sentimentCounts.sentimentPositiveCount++;
    else if (session.sentiment === "NEGATIVE")
      sentimentCounts.sentimentNegativeCount++;
    else if (session.sentiment === "NEUTRAL")
      sentimentCounts.sentimentNeutralCount++;
  }
}

/**
 * Update category-based metrics
 */
function updateCategoryMetrics(
  session: ChatSession,
  metrics: {
    byDay: DayMetrics;
    byCategory: CategoryMetrics;
    byLanguage: LanguageMetrics;
    byCountry: CountryMetrics;
  }
): void {
  // Daily metrics
  const day = new Date(session.startTime).toISOString().split("T")[0];
  metrics.byDay[day] = (metrics.byDay[day] || 0) + 1;

  // Category metrics
  if (session.category) {
    metrics.byCategory[session.category] =
      (metrics.byCategory[session.category] || 0) + 1;
  }

  // Language metrics
  if (session.language) {
    metrics.byLanguage[session.language] =
      (metrics.byLanguage[session.language] || 0) + 1;
  }

  // Country metrics
  if (session.country) {
    metrics.byCountry[session.country] =
      (metrics.byCountry[session.country] || 0) + 1;
  }
}

/**
 * Extract questions from session messages and initial message
 */
function extractQuestions(
  session: ChatSession,
  questionCounts: { [question: string]: number }
): void {
  const isQuestion = (content: string): boolean => {
    return (
      content.endsWith("?") ||
      /\b(what|when|where|why|how|who|which|can|could|would|will|is|are|do|does|did)\b/i.test(
        content
      )
    );
  };

  // Extract questions from user messages
  if (session.messages) {
    session.messages
      .filter((msg) => msg.role === "User")
      .forEach((msg) => {
        const content = msg.content.trim();
        if (isQuestion(content)) {
          questionCounts[content] = (questionCounts[content] || 0) + 1;
        }
      });
  }

  // Extract questions from initial message as fallback
  if (session.initialMsg) {
    const content = session.initialMsg.trim();
    if (isQuestion(content)) {
      questionCounts[content] = (questionCounts[content] || 0) + 1;
    }
  }
}

/**
 * Process text for word cloud generation
 */
function processTextForWordCloud(
  text: string | undefined | null,
  wordCounts: { [key: string]: number }
): void {
  if (!text) return;

  const words = text
    .toLowerCase()
    .replace(/[^\w\s'-]/gi, "")
    .split(/\s+/);

  for (const word of words) {
    const cleanedWord = word.replace(/^['-]|['-]$/g, "");
    if (cleanedWord && !stopWords.has(cleanedWord) && cleanedWord.length > 2) {
      wordCounts[cleanedWord] = (wordCounts[cleanedWord] || 0) + 1;
    }
  }
}

/**
 * Calculate peak usage time from hourly session counts
 */
function calculatePeakUsageTime(hourlySessionCounts: {
  [hour: string]: number;
}): string {
  if (Object.keys(hourlySessionCounts).length === 0) {
    return "N/A";
  }

  const peakHour = Object.entries(hourlySessionCounts).sort(
    ([, a], [, b]) => b - a
  )[0][0];
  const peakHourNum = Number.parseInt(peakHour.split(":")[0]);
  const endHour = (peakHourNum + 1) % 24;
  return `${peakHour}-${endHour.toString().padStart(2, "0")}:00`;
}

/**
 * Calculate top questions from question counts
 */
function calculateTopQuestions(questionCounts: {
  [question: string]: number;
}): TopQuestion[] {
  return Object.entries(questionCounts)
    .sort(([, a], [, b]) => b - a)
    .slice(0, 5)
    .map(([question, count]) => ({ question, count }));
}
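Before the orchestrator below, a quick worked example of the two helpers above (data values are illustrative): with counts `{ "09:00": 4, "14:00": 7, "23:00": 2 }`, the busiest hour is 14:00, so the peak window is `"14:00-15:00"`; a peak at 23:00 would wrap to `"23:00-00:00"`.

```typescript
// Worked example against the helpers above (hypothetical data)
const counts = { "09:00": 4, "14:00": 7, "23:00": 2 };
console.log(calculatePeakUsageTime(counts)); // "14:00-15:00"

const questions = { "How do I reset my password?": 3, "What are your hours?": 1 };
console.log(calculateTopQuestions(questions));
// [{ question: "How do I reset my password?", count: 3 },
//  { question: "What are your hours?", count: 1 }]
```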
/**
 * Process a single session and update all metrics
 */
function processSession(
  session: ChatSession,
  companyConfig: CompanyConfig,
  metrics: {
    uniqueUserIds: Set<string>;
    sessionDurationTotals: {
      totalSessionDuration: number;
      validSessionsForDuration: number;
    };
    sentimentCounts: {
      sentimentPositiveCount: number;
      sentimentNeutralCount: number;
      sentimentNegativeCount: number;
    };
    categoryMetrics: {
      byDay: DayMetrics;
      byCategory: CategoryMetrics;
      byLanguage: LanguageMetrics;
      byCountry: CountryMetrics;
    };
    hourlySessionCounts: { [hour: string]: number };
    questionCounts: { [question: string]: number };
    wordCounts: { [key: string]: number };
    counters: {
      escalatedCount: number;
      forwardedHrCount: number;
      totalResponseTime: number;
      validSessionsForResponseTime: number;
      alerts: number;
      resolvedChatsCount: number;
    };
  }
): void {
  // Track hourly usage
  if (session.startTime) {
    const hour = new Date(session.startTime).getHours();
    const hourKey = `${hour.toString().padStart(2, "0")}:00`;
    metrics.hourlySessionCounts[hourKey] =
      (metrics.hourlySessionCounts[hourKey] || 0) + 1;
  }

  // Count resolved chats
  if (session.endTime && !session.escalated) {
    metrics.counters.resolvedChatsCount++;
  }

  // Extract unique users
  extractUniqueUsers(session, metrics.uniqueUserIds);

  // Process session duration
  processSessionDuration(session, metrics.sessionDurationTotals);

  // Process response time
  if (
    session.avgResponseTime !== undefined &&
    session.avgResponseTime !== null &&
    session.avgResponseTime >= 0
  ) {
    metrics.counters.totalResponseTime += session.avgResponseTime;
    metrics.counters.validSessionsForResponseTime++;
  }

  // Count escalated and forwarded
  if (session.escalated) metrics.counters.escalatedCount++;
  if (session.forwardedHr) metrics.counters.forwardedHrCount++;

  // Process sentiment
  processSentiment(session, metrics.sentimentCounts);

  // Check sentiment alerts
  if (
    companyConfig.sentimentAlert !== undefined &&
    session.sentiment === "NEGATIVE"
  ) {
    metrics.counters.alerts++;
  }

  // Update category metrics
  updateCategoryMetrics(session, metrics.categoryMetrics);

  // Extract questions
  extractQuestions(session, metrics.questionCounts);

  // Process text for word cloud
  processTextForWordCloud(session.initialMsg, metrics.wordCounts);
}

/**
 * Main function to calculate session metrics with reduced complexity
 */
export function sessionMetrics(
  sessions: ChatSession[],
  companyConfig: CompanyConfig = {}
): MetricsResult {
  const totalSessions = sessions.length;
  const byDay: DayMetrics = {};
  const byCategory: CategoryMetrics = {};
  const byLanguage: LanguageMetrics = {};
  const byCountry: CountryMetrics = {};
  const tokensByDay: DayMetrics = {};
  const tokensCostByDay: DayMetrics = {};

  // Initialize all metrics in a structured way
  const metrics = {
    uniqueUserIds: new Set<string>(),
    sessionDurationTotals: {
      totalSessionDuration: 0,
      validSessionsForDuration: 0,
    },
    sentimentCounts: {
      sentimentPositiveCount: 0,
      sentimentNeutralCount: 0,
      sentimentNegativeCount: 0,
    },
    categoryMetrics: { byDay, byCategory, byLanguage, byCountry },
    hourlySessionCounts: {} as { [hour: string]: number },
    questionCounts: {} as { [question: string]: number },
    wordCounts: {} as { [key: string]: number },
    counters: {
      escalatedCount: 0,
      forwardedHrCount: 0,
      totalResponseTime: 0,
      validSessionsForResponseTime: 0,
      alerts: 0,
      resolvedChatsCount: 0,
    },
  };

  // Process each session
  for (const session of sessions) {
    processSession(session, companyConfig, metrics);
  }

  // Calculate derived metrics
  const uniqueUsers = metrics.uniqueUserIds.size;
  const avgSessionLength =
    metrics.sessionDurationTotals.validSessionsForDuration > 0
      ? metrics.sessionDurationTotals.totalSessionDuration /
        metrics.sessionDurationTotals.validSessionsForDuration /
        1000
      : 0;
  const avgResponseTime =
    metrics.counters.validSessionsForResponseTime > 0
      ? metrics.counters.totalResponseTime /
        metrics.counters.validSessionsForResponseTime
      : 0;

  const wordCloudData: WordCloudWord[] = Object.entries(metrics.wordCounts)
    .sort(([, a], [, b]) => b - a)
    .slice(0, 50)
    .map(([text, value]) => ({ text, value }));

  const numDaysWithSessions = Object.keys(byDay).length;
  const avgSessionsPerDay =
    numDaysWithSessions > 0 ? totalSessions / numDaysWithSessions : 0;
@@ -585,73 +740,48 @@ export function sessionMetrics(

    mockPreviousPeriodData.avgResponseTime
  );

  // Calculate additional metrics
  const totalTokens = 0;
  const totalTokensEur = 0;
  const avgDailyCosts =
    numDaysWithSessions > 0 ? totalTokensEur / numDaysWithSessions : 0;
  const peakUsageTime = calculatePeakUsageTime(metrics.hourlySessionCounts);

  const resolvedChatsPercentage =
    totalSessions > 0
      ? (metrics.counters.resolvedChatsCount / totalSessions) * 100
      : 0;
  const topQuestions = calculateTopQuestions(metrics.questionCounts);

  return {
    totalSessions,
    uniqueUsers,
    avgSessionLength,
    avgResponseTime,
    escalatedCount: metrics.counters.escalatedCount,
    forwardedCount: metrics.counters.forwardedHrCount,
    sentimentPositiveCount: metrics.sentimentCounts.sentimentPositiveCount,
    sentimentNeutralCount: metrics.sentimentCounts.sentimentNeutralCount,
    sentimentNegativeCount: metrics.sentimentCounts.sentimentNegativeCount,
    days: byDay,
    categories: byCategory,
    languages: byLanguage,
    countries: byCountry,
    tokensByDay,
    tokensCostByDay,
    totalTokens,
    totalTokensEur,
    wordCloudData,
    belowThresholdCount: metrics.counters.alerts,
    avgSessionsPerDay,
    sessionTrend: totalSessionsTrend,
    usersTrend: uniqueUsersTrend,
    avgSessionTimeTrend: avgSessionLengthTrend,
    // For response time, a negative trend is actually positive (faster responses are better)
    avgResponseTimeTrend: -avgResponseTimeTrend,
    sentimentThreshold: companyConfig.sentimentAlert,
    lastUpdated: Date.now(),
    totalSessionDuration: metrics.sessionDurationTotals.totalSessionDuration,
    validSessionsForDuration:
      metrics.sessionDurationTotals.validSessionsForDuration,
    avgDailyCosts,
    peakUsageTime,
    resolvedChatsPercentage,
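After the refactor, `sessionMetrics` is a thin orchestrator over the helpers above. A minimal sketch of a call (the `ChatSession` fields shown are only the ones the helpers read; the real type has more, and the values are made up):

```typescript
// Illustrative only
const demoSession = {
  sessionId: "abc-123",
  ipAddress: "203.0.113.7",
  startTime: "2025-06-01T09:00:00Z",
  endTime: "2025-06-01T09:12:00Z",
  escalated: false,
  sentiment: "POSITIVE",
} as unknown as ChatSession;

const result = sessionMetrics([demoSession]);
console.log(result.totalSessions, result.uniqueUsers); // 1 1
```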
@@ -254,7 +254,9 @@ async function processQuestions(

  });

  // Filter and prepare unique questions
  const uniqueQuestions = Array.from(
    new Set(questions.filter((q) => q.trim()))
  );
  if (uniqueQuestions.length === 0) return;

  // Batch create questions (skip duplicates)
Processing status manager:

@@ -1,4 +1,8 @@

import {
  type PrismaClient,
  ProcessingStage,
  ProcessingStatus,
} from "@prisma/client";
import { prisma } from "./prisma";

// Type-safe metadata interfaces

@@ -172,10 +176,7 @@ export class ProcessingStatusManager {

  /**
   * Get sessions that need processing for a specific stage
   */
  async getSessionsNeedingProcessing(stage: ProcessingStage, limit = 50) {
    return await this.prisma.sessionProcessingStatus.findMany({
      where: {
        stage,

@@ -361,20 +362,40 @@ export class ProcessingStatusManager {

export const processingStatusManager = new ProcessingStatusManager();

// Also export the individual functions for backward compatibility
export const initializeSession = (sessionId: string) =>
  processingStatusManager.initializeSession(sessionId);
export const startStage = (
  sessionId: string,
  stage: ProcessingStage,
  metadata?: ProcessingMetadata
) => processingStatusManager.startStage(sessionId, stage, metadata);
export const completeStage = (
  sessionId: string,
  stage: ProcessingStage,
  metadata?: ProcessingMetadata
) => processingStatusManager.completeStage(sessionId, stage, metadata);
export const failStage = (
  sessionId: string,
  stage: ProcessingStage,
  errorMessage: string,
  metadata?: ProcessingMetadata
) =>
  processingStatusManager.failStage(sessionId, stage, errorMessage, metadata);
export const skipStage = (
  sessionId: string,
  stage: ProcessingStage,
  reason: string
) => processingStatusManager.skipStage(sessionId, stage, reason);
export const getSessionStatus = (sessionId: string) =>
  processingStatusManager.getSessionStatus(sessionId);
export const getSessionsNeedingProcessing = (
  stage: ProcessingStage,
  limit?: number
) => processingStatusManager.getSessionsNeedingProcessing(stage, limit);
export const getPipelineStatus = () =>
  processingStatusManager.getPipelineStatus();
export const getFailedSessions = (stage?: ProcessingStage) =>
  processingStatusManager.getFailedSessions(stage);
export const resetStageForRetry = (sessionId: string, stage: ProcessingStage) =>
  processingStatusManager.resetStageForRetry(sessionId, stage);
export const hasCompletedStage = (sessionId: string, stage: ProcessingStage) =>
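A minimal sketch of how a pipeline step might drive these compatibility wrappers. The module path and the `AI_ANALYSIS` enum member are assumptions; only stage values actually defined in the Prisma schema are valid:

```typescript
import { ProcessingStage } from "@prisma/client";
// Hypothetical module path for the wrappers shown above
import { completeStage, failStage, startStage } from "./processing-status";

async function runAiAnalysis(sessionId: string): Promise<void> {
  await startStage(sessionId, ProcessingStage.AI_ANALYSIS);
  try {
    // ...do the actual analysis work here...
    await completeStage(sessionId, ProcessingStage.AI_ANALYSIS);
  } catch (err) {
    await failStage(
      sessionId,
      ProcessingStage.AI_ANALYSIS,
      err instanceof Error ? err.message : "Unknown error"
    );
  }
}
```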
`lib/rateLimiter.ts` (new file, 107 lines)

@@ -0,0 +1,107 @@

// Shared rate limiting utility to prevent code duplication

export interface RateLimitConfig {
  maxAttempts: number;
  windowMs: number;
  maxEntries?: number;
  cleanupIntervalMs?: number;
}

export interface RateLimitAttempt {
  count: number;
  resetTime: number;
}

export class InMemoryRateLimiter {
  private attempts = new Map<string, RateLimitAttempt>();
  private cleanupInterval: NodeJS.Timeout;

  constructor(private config: RateLimitConfig) {
    const cleanupMs = config.cleanupIntervalMs || 5 * 60 * 1000; // 5 minutes default

    // Clean up expired entries periodically
    this.cleanupInterval = setInterval(() => {
      this.cleanup();
    }, cleanupMs);
  }

  /**
   * Check if a key (e.g., IP address) is rate limited
   */
  checkRateLimit(key: string): { allowed: boolean; resetTime?: number } {
    const now = Date.now();
    const attempt = this.attempts.get(key);

    if (!attempt || now > attempt.resetTime) {
      // No previous attempt or window expired - allow and start a new window
      this.attempts.set(key, {
        count: 1,
        resetTime: now + this.config.windowMs,
      });
      return { allowed: true };
    }

    if (attempt.count >= this.config.maxAttempts) {
      // Rate limit exceeded
      return { allowed: false, resetTime: attempt.resetTime };
    }

    // Increment counter
    attempt.count++;
    return { allowed: true };
  }

  /**
   * Clean up expired entries and prevent unbounded growth
   */
  private cleanup(): void {
    const now = Date.now();
    const maxEntries = this.config.maxEntries || 10000;

    // Remove expired entries
    for (const [key, attempt] of Array.from(this.attempts.entries())) {
      if (now > attempt.resetTime) {
        this.attempts.delete(key);
      }
    }

    // If still too many entries, remove the oldest half
    if (this.attempts.size > maxEntries) {
      const entries = Array.from(this.attempts.entries());
      entries.sort((a, b) => a[1].resetTime - b[1].resetTime);

      const toRemove = Math.floor(entries.length / 2);
      for (let i = 0; i < toRemove; i++) {
        this.attempts.delete(entries[i][0]);
      }
    }
  }

  /**
   * Clean up resources
   */
  destroy(): void {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
    }
  }
}

/**
 * Extract client IP address from request headers
 */
export function extractClientIP(request: Request): string {
  // Check multiple possible headers in order of preference
  const forwarded = request.headers.get("x-forwarded-for");
  if (forwarded) {
    // Take the first IP from the comma-separated list
    return forwarded.split(",")[0].trim();
  }

  return (
    request.headers.get("x-real-ip") ||
    request.headers.get("x-client-ip") ||
    request.headers.get("cf-connecting-ip") ||
    "unknown"
  );
}
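A minimal sketch of the intended usage in an API route, combining `extractClientIP` with the limiter. The route shape assumes a Next.js-style handler, and the limits shown are illustrative:

```typescript
import { extractClientIP, InMemoryRateLimiter } from "@/lib/rateLimiter";

// 10 attempts per client IP per 15 minutes (illustrative values)
const loginLimiter = new InMemoryRateLimiter({
  maxAttempts: 10,
  windowMs: 15 * 60 * 1000,
});

export async function POST(request: Request) {
  const ip = extractClientIP(request);
  const { allowed, resetTime } = loginLimiter.checkRateLimit(ip);
  if (!allowed) {
    return Response.json(
      { error: "Too many requests", retryAfterMs: resetTime! - Date.now() },
      { status: 429 }
    );
  }
  // ...handle the request...
  return Response.json({ ok: true });
}
```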
`lib/sendEmail.ts` (118 lines)

@@ -1,8 +1,124 @@

import { InMemoryRateLimiter } from "./rateLimiter";

export interface EmailConfig {
  smtpHost?: string;
  smtpPort?: number;
  smtpUser?: string;
  smtpPassword?: string;
  fromEmail?: string;
  fromName?: string;
}

export interface EmailOptions {
  to: string;
  subject: string;
  text?: string;
  html?: string;
}

const emailRateLimit = new InMemoryRateLimiter({
  maxAttempts: 5,
  windowMs: 60 * 1000,
  maxEntries: 1000,
});

export async function sendEmail(
  options: EmailOptions
): Promise<{ success: boolean; error?: string }> {
  const rateLimitCheck = emailRateLimit.checkRateLimit(options.to);
  if (!rateLimitCheck.allowed) {
    return {
      success: false,
      error: "Rate limit exceeded. Please try again later.",
    };
  }

  const config = getEmailConfig();
  if (!config.isConfigured) {
    console.warn("Email not configured - would send:", options);
    return {
      success: false,
      error: "Email service not configured",
    };
  }

  try {
    if (process.env.NODE_ENV === "development") {
      console.log("📧 [DEV] Email would be sent:", {
        to: options.to,
        subject: options.subject,
        text: options.text?.substring(0, 100) + "...",
      });
      return { success: true };
    }

    await sendEmailViaService(options, config);
    return { success: true };
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : "Unknown error";
    console.error("Failed to send email:", errorMessage);
    return {
      success: false,
      error: errorMessage,
    };
  }
}

function getEmailConfig(): EmailConfig & { isConfigured: boolean } {
  const config = {
    smtpHost: process.env.SMTP_HOST,
    smtpPort: process.env.SMTP_PORT ? parseInt(process.env.SMTP_PORT) : 587,
    smtpUser: process.env.SMTP_USER,
    smtpPassword: process.env.SMTP_PASSWORD,
    fromEmail: process.env.FROM_EMAIL || "noreply@livedash.app",
    fromName: process.env.FROM_NAME || "LiveDash",
  };

  const isConfigured = !!(
    config.smtpHost &&
    config.smtpUser &&
    config.smtpPassword
  );

  return { ...config, isConfigured };
}

async function sendEmailViaService(
  _options: EmailOptions,
  _config: EmailConfig
): Promise<void> {
  throw new Error(
    "Email service implementation required - install nodemailer or similar SMTP library"
  );
}

export async function sendPasswordResetEmail(
  email: string,
  tempPassword: string
): Promise<{ success: boolean; error?: string }> {
  const subject = "Your temporary password - LiveDash";
  const text = `Your temporary password is: ${tempPassword}\n\nPlease log in and change your password immediately for security.`;
  const html = `
    <h2>Temporary Password</h2>
    <p>Your temporary password is: <strong>${tempPassword}</strong></p>
    <p>Please log in and change your password immediately for security.</p>
    <p><a href="${process.env.NEXTAUTH_URL || "http://localhost:3000"}/login">Login here</a></p>
  `;

  return sendEmail({
    to: email,
    subject,
    text,
    html,
  });
}

// Legacy function for backward compatibility
export async function sendEmailLegacy(
  to: string,
  subject: string,
  text: string
): Promise<void> {
  process.stdout.write(`[Email to ${to}]: ${subject}\n${text}\n`);
}
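`sendEmailViaService` is intentionally a stub that throws until a mail library is wired in. If nodemailer were the chosen library (it is not yet a dependency here), a minimal implementation might look like this sketch:

```typescript
import nodemailer from "nodemailer";

// Sketch only: assumes nodemailer and the EmailOptions/EmailConfig
// interfaces defined in lib/sendEmail.ts above
async function sendEmailViaService(
  options: EmailOptions,
  config: EmailConfig
): Promise<void> {
  const transporter = nodemailer.createTransport({
    host: config.smtpHost,
    port: config.smtpPort,
    secure: config.smtpPort === 465, // implicit TLS only on port 465
    auth: { user: config.smtpUser, pass: config.smtpPassword },
  });

  await transporter.sendMail({
    from: `"${config.fromName}" <${config.fromEmail}>`,
    to: options.to,
    subject: options.subject,
    text: options.text,
    html: options.html,
  });
}
```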
Transcript parser:

@@ -37,6 +37,150 @@ function parseEuropeanDate(dateStr: string): Date {

  );
}

/**
 * Parse a single line for timestamp and role pattern
 */
function parseTimestampRoleLine(line: string): {
  type: "timestamp-role";
  timestamp: string;
  role: string;
  content: string;
} | null {
  const timestampRoleMatch = line.match(
    /^\[(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}:\d{2})\]\s+(User|Assistant|System|user|assistant|system):\s*(.*)$/i
  );

  if (timestampRoleMatch) {
    return {
      type: "timestamp-role",
      timestamp: timestampRoleMatch[1],
      role:
        timestampRoleMatch[2].charAt(0).toUpperCase() +
        timestampRoleMatch[2].slice(1).toLowerCase(),
      content: timestampRoleMatch[3] || "",
    };
  }
  return null;
}

/**
 * Parse a single line for role pattern only
 */
function parseRoleLine(line: string): {
  type: "role";
  role: string;
  content: string;
} | null {
  const roleMatch = line.match(
    /^(User|Assistant|System|user|assistant|system):\s*(.*)$/i
  );

  if (roleMatch) {
    return {
      type: "role",
      role:
        roleMatch[1].charAt(0).toUpperCase() +
        roleMatch[1].slice(1).toLowerCase(),
      content: roleMatch[2] || "",
    };
  }
  return null;
}

/**
 * Save current message to messages array
 */
function saveCurrentMessage(
  currentMessage: { role: string; content: string; timestamp?: string } | null,
  messages: ParsedMessage[],
  order: number
): number {
  if (currentMessage) {
    messages.push({
      sessionId: "", // Will be set by caller
      timestamp: new Date(), // Will be calculated later
      role: currentMessage.role,
      content: currentMessage.content.trim(),
      order,
    });
    return order + 1;
  }
  return order;
}

/**
 * Calculate timestamp for a message using distributed timing
 */
function calculateDistributedTimestamp(
  startTime: Date,
  endTime: Date,
  index: number,
  totalMessages: number
): Date {
  const sessionDurationMs = endTime.getTime() - startTime.getTime();
  const messageInterval =
    totalMessages > 1 ? sessionDurationMs / (totalMessages - 1) : 0;
  return new Date(startTime.getTime() + index * messageInterval);
}

/**
 * Process timestamp calculations for all messages
 */
function processMessageTimestamps(
  messages: ParsedMessage[],
  startTime: Date,
  endTime: Date
): void {
  interface MessageWithTimestamp extends Omit<ParsedMessage, "timestamp"> {
    timestamp: Date | string;
  }

  const hasTimestamps = messages.some(
    (msg) => (msg as MessageWithTimestamp).timestamp
  );

  if (hasTimestamps) {
    // Use parsed timestamps from the transcript
    messages.forEach((message, index) => {
      const msgWithTimestamp = message as MessageWithTimestamp;
      if (
        msgWithTimestamp.timestamp &&
        typeof msgWithTimestamp.timestamp === "string"
      ) {
        try {
          message.timestamp = parseEuropeanDate(msgWithTimestamp.timestamp);
        } catch {
          // Fallback to distributed timestamp if parsing fails
          message.timestamp = calculateDistributedTimestamp(
            startTime,
            endTime,
            index,
            messages.length
          );
        }
      } else {
        // Fallback to distributed timestamp
        message.timestamp = calculateDistributedTimestamp(
          startTime,
          endTime,
          index,
          messages.length
        );
      }
    });
  } else {
    // Distribute messages across the session duration
    messages.forEach((message, index) => {
      message.timestamp = calculateDistributedTimestamp(
        startTime,
        endTime,
        index,
        messages.length
      );
    });
  }
}

/**
 * Parse raw transcript content into structured messages
 * @param content Raw transcript content
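Before the main parser loop continues, a quick worked example of `calculateDistributedTimestamp` above: with a 9:00 to 9:03 session and four messages, the interval is 180 s / 3 = 60 s, so the messages land at 9:00, 9:01, 9:02, and 9:03.

```typescript
// Worked example (illustrative values)
const start = new Date("2025-06-01T09:00:00Z");
const end = new Date("2025-06-01T09:03:00Z");
for (let i = 0; i < 4; i++) {
  console.log(calculateDistributedTimestamp(start, end, i, 4).toISOString());
}
// 09:00:00.000Z, 09:01:00.000Z, 09:02:00.000Z, 09:03:00.000Z
```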
@ -74,79 +218,43 @@ export function parseTranscriptToMessages(
      continue;
    }

    // Try parsing timestamp + role pattern first
    const timestampRoleResult = parseTimestampRoleLine(trimmedLine);
    if (timestampRoleResult) {
      // Save previous message if exists
      order = saveCurrentMessage(currentMessage, messages, order);

      // Start new message with timestamp
      currentMessage = {
        role: timestampRoleResult.role,
        content: timestampRoleResult.content,
        timestamp: timestampRoleResult.timestamp,
      };
      continue;
    }

    // Try parsing role-only pattern
    const roleResult = parseRoleLine(trimmedLine);
    if (roleResult) {
      // Save previous message if exists
      order = saveCurrentMessage(currentMessage, messages, order);

      // Start new message without timestamp
      currentMessage = {
        role: roleResult.role,
        content: roleResult.content,
      };
      continue;
    }

    // Continue previous message (multi-line) or skip orphaned content
    if (currentMessage) {
      currentMessage.content += `\n${trimmedLine}`;
    }
  }

  // Save the last message
  saveCurrentMessage(currentMessage, messages, order);

  if (messages.length === 0) {
    return {
@ -155,57 +263,8 @@ export function parseTranscriptToMessages(
    };
  }

  // Calculate timestamps for all messages
  processMessageTimestamps(messages, startTime, endTime);

  return {
    success: true,
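For context on what the extracted helpers do, here is a minimal sketch of `parseTimestampRoleLine`, based on the timestamp-role regex and role normalization from the pre-refactor code; the exact return shape is an assumption inferred from the call sites above:

```typescript
// Sketch only: the refactored helper is assumed to wrap the original regex.
interface TimestampRoleResult {
  timestamp: string; // raw "DD.MM.YYYY HH:MM:SS" string, parsed later
  role: string;
  content: string;
}

function parseTimestampRoleLine(line: string): TimestampRoleResult | null {
  // Regex taken from the pre-refactor parseTranscriptToMessages.
  const match = line.match(
    /^\[(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}:\d{2})\]\s+(User|Assistant|System|user|assistant|system):\s*(.*)$/i
  );
  if (!match) return null;
  return {
    timestamp: match[1],
    // Normalize role casing, e.g. "user" -> "User".
    role: match[2].charAt(0).toUpperCase() + match[2].slice(1).toLowerCase(),
    content: match[3] || "",
  };
}
```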
@ -3,103 +3,108 @@ import { ProcessingStatusManager } from "./lib/processingStatusManager";

const prisma = new PrismaClient();

/**
 * Migrates CSV import stage for a session
 */
async function migrateCsvImportStage(
  sessionId: string,
  importId: string | null
) {
  await ProcessingStatusManager.completeStage(
    sessionId,
    ProcessingStage.CSV_IMPORT,
    {
      migratedFrom: "existing_session",
      importId,
    }
  );
}

/**
 * Migrates transcript fetch stage for a session
 */
async function migrateTranscriptFetchStage(
  sessionId: string,
  sessionImport: any,
  externalSessionId?: string
) {
  if (sessionImport?.rawTranscriptContent) {
    await ProcessingStatusManager.completeStage(
      sessionId,
      ProcessingStage.TRANSCRIPT_FETCH,
      {
        migratedFrom: "existing_transcript",
        contentLength: sessionImport.rawTranscriptContent.length,
      }
    );
  } else if (!sessionImport?.fullTranscriptUrl) {
    await ProcessingStatusManager.skipStage(
      sessionId,
      ProcessingStage.TRANSCRIPT_FETCH,
      "No transcript URL in original import"
    );
  } else {
    console.log(` - Transcript fetch pending for ${externalSessionId}`);
  }
}

/**
 * Migrates session creation stage for a session
 */
async function migrateSessionCreationStage(
  sessionId: string,
  messages: any[],
  sessionImport: any,
  externalSessionId?: string
) {
  if (messages.length > 0) {
    await ProcessingStatusManager.completeStage(
      sessionId,
      ProcessingStage.SESSION_CREATION,
      {
        migratedFrom: "existing_messages",
        messageCount: messages.length,
      }
    );
  } else if (sessionImport?.rawTranscriptContent) {
    console.log(
      ` - Session creation pending for ${externalSessionId} (has transcript but no messages)`
    );
  } else if (!sessionImport?.fullTranscriptUrl) {
    await ProcessingStatusManager.skipStage(
      sessionId,
      ProcessingStage.SESSION_CREATION,
      "No transcript content available"
    );
  }
}

/**
 * Checks if session has AI analysis data
 */
function hasAIAnalysisData(session: any): boolean {
  return !!(
    session.summary ||
    session.sentiment ||
    session.category ||
    session.language
  );
}

/**
 * Migrates AI analysis stage for a session
 */
async function migrateAIAnalysisStage(
  sessionId: string,
  session: any,
  messages: any[],
  externalSessionId?: string
) {
  const hasAIAnalysis = hasAIAnalysisData(session);

  if (hasAIAnalysis) {
    await ProcessingStatusManager.completeStage(
      sessionId,
      ProcessingStage.AI_ANALYSIS,
      {
        migratedFrom: "existing_ai_analysis",
@ -109,48 +114,78 @@ async function migrateToRefactoredSystem() {
        hasLanguage: !!session.language,
      }
    );
  } else if (messages.length > 0) {
    console.log(` - AI analysis pending for ${externalSessionId}`);
  }

  return hasAIAnalysis;
}

/**
 * Migrates question extraction stage for a session
 */
async function migrateQuestionExtractionStage(
  sessionId: string,
  sessionQuestions: any[],
  hasAIAnalysis: boolean,
  externalSessionId?: string
) {
  if (sessionQuestions.length > 0) {
    await ProcessingStatusManager.completeStage(
      sessionId,
      ProcessingStage.QUESTION_EXTRACTION,
      {
        migratedFrom: "existing_questions",
        questionCount: sessionQuestions.length,
      }
    );
  } else if (hasAIAnalysis) {
    console.log(` - Question extraction pending for ${externalSessionId}`);
  }
}

/**
 * Migrates a single session to the refactored processing system
 */
async function migrateSession(session: any) {
  const externalSessionId = session.import?.externalSessionId;
  console.log(`Migrating session ${externalSessionId || session.id}...`);

  await ProcessingStatusManager.initializeSession(session.id);

  // Migrate each stage
  await migrateCsvImportStage(session.id, session.importId);
  await migrateTranscriptFetchStage(
    session.id,
    session.import,
    externalSessionId
  );
  await migrateSessionCreationStage(
    session.id,
    session.messages,
    session.import,
    externalSessionId
  );

  const hasAIAnalysis = await migrateAIAnalysisStage(
    session.id,
    session,
    session.messages,
    externalSessionId
  );

  await migrateQuestionExtractionStage(
    session.id,
    session.sessionQuestions,
    hasAIAnalysis,
    externalSessionId
  );
}

/**
 * Displays the final migration status
 */
async function displayFinalStatus() {
  console.log("\n=== MIGRATION COMPLETE - FINAL STATUS ===");
  const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();

@ -172,6 +207,41 @@ async function migrateToRefactoredSystem() {
      `${stage}: ${completed} completed, ${pending} pending, ${skipped} skipped`
    );
  }
}

// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: Main orchestration function - complexity is needed for migration coordination
async function migrateToRefactoredSystem() {
  try {
    console.log("=== MIGRATING TO REFACTORED PROCESSING SYSTEM ===\n");

    const sessions = await prisma.session.findMany({
      include: {
        import: true,
        messages: true,
        sessionQuestions: true,
      },
      orderBy: { createdAt: "asc" },
    });

    console.log(`Found ${sessions.length} sessions to migrate...\n`);

    let migratedCount = 0;
    for (const session of sessions) {
      await migrateSession(session);
      migratedCount++;

      if (migratedCount % 10 === 0) {
        console.log(
          ` Migrated ${migratedCount}/${sessions.length} sessions...`
        );
      }
    }

    console.log(
      `\n✓ Successfully migrated ${migratedCount} sessions to the new processing system`
    );

    await displayFinalStatus();
  } catch (error) {
    console.error("Error migrating to refactored system:", error);
  } finally {
@ -95,6 +95,7 @@
  "devDependencies": {
    "@eslint/eslintrc": "^3.3.1",
    "@eslint/js": "^9.30.0",
    "@next/eslint-plugin-next": "^15.3.4",
    "@playwright/test": "^1.53.1",
    "@tailwindcss/postcss": "^4.1.11",
    "@testing-library/dom": "^10.4.0",
pnpm-lock.yaml (generated, 9534 lines): diff suppressed because it is too large.
@ -1,212 +1,208 @@
# Refactor AI Session Processing Pipeline

> This is a significant but valuable refactoring project. A detailed, well-structured prompt is key for getting a good result from a code-focused AI like Claude.

> **Project:** _LiveDash-Node_ (`~/Projects/livedash-node-max-branch`)
> **Objective:** _Refactor our AI session processing pipeline to use the OpenAI Batch API for cost savings and higher throughput. Implement a new internal admin API under /api/admin/legacy/* to monitor and manage this new asynchronous workflow._
> **Assignee:** Claude Code

## Context

Our current system processes AI analysis requests (like sentiment analysis, summarization, etc.) in a synchronous or simple asynchronous loop, likely via [processingScheduler.ts](../lib/processingScheduler.ts). This is inefficient and costly at scale. We are moving to OpenAI's Batch API, which is fully asynchronous and requires a stateful, multi-stage processing architecture.

The term "legacy" in the API path `/api/admin/legacy/*` is intentional. It refers to the fact that our current method of getting data (CSV imports) is the "legacy" workflow. In the future, we plan to introduce a new API for clients to submit session data directly. This admin API is for monitoring the processing of data from our legacy import system.

Please follow the phased plan below precisely.

---

## Phase 1: Database Schema Changes (`prisma/schema.prisma`)

First, we need to update our database schema to track the state of batch jobs and the individual requests within them.

1. Add the `AIBatchRequest` model and `AIBatchRequestStatus` enum. This table will track the status of each batch job submitted to OpenAI.

```prisma
// Add this new model to your schema.prisma
model AIBatchRequest {
  id        String  @id @default(cuid())
  companyId String
  company   Company @relation(fields: [companyId], references: [id])

  // OpenAI specific IDs
  openaiBatchId String  @unique
  inputFileId   String
  outputFileId  String?
  errorFileId   String?

  // Our internal status tracking
  status AIBatchRequestStatus @default(PENDING)

  // Timestamps
  createdAt   DateTime  @default(now())
  completedAt DateTime?
  processedAt DateTime? // When we finished processing the results

  // Relation to the individual requests included in this batch
  processingRequests AIProcessingRequest[]

  @@index([companyId, status])
}

enum AIBatchRequestStatus {
  PENDING     // We have created the batch in our DB, preparing to send to OpenAI
  UPLOADING   // Uploading the .jsonl file
  VALIDATING  // OpenAI is validating the file
  IN_PROGRESS // OpenAI is processing the batch
  FINALIZING  // OpenAI is finalizing the results
  COMPLETED   // OpenAI job is done, results are available for download
  PROCESSED   // We have successfully downloaded and processed all results
  FAILED      // The batch failed validation or expired
  CANCELLED   // The batch was cancelled
}
```
2. Update the `AIProcessingRequest` model and add the `AIRequestStatus` enum. We need to track the state of each individual request as it moves through the batching pipeline.

```prisma
// In your existing AIProcessingRequest model, add the new fields and enum.

model AIProcessingRequest {
  // ... all existing fields (id, sessionId, token counts, etc.)

  // === ADD THESE NEW FIELDS ===
  processingStatus AIRequestStatus @default(PENDING_BATCHING)
  batchId          String?
  batch            AIBatchRequest? @relation(fields: [batchId], references: [id])
  // ============================

  @@index([processingStatus]) // Add this index for efficient querying
}

enum AIRequestStatus {
  PENDING_BATCHING     // Default state: waiting to be picked up by the batch creator
  BATCHING_IN_PROGRESS // It has been assigned to a batch that is currently running
  PROCESSING_COMPLETE  // The batch finished and we successfully got a result for this request
  PROCESSING_FAILED    // The batch finished but this specific request failed
}
```

After modifying the schema, please run `pnpm prisma:generate`.
---

## Phase 2: Implement the Batch Processing Schedulers

The core of this refactor is to replace the existing logic in `lib/processingScheduler.ts` with a two-stage scheduler system. You can create new files for this logic (e.g., `lib/batchCreator.ts`, `lib/batchPoller.ts`) and integrate them into `lib/schedulers.ts`.

### Scheduler 1: Batch Creation (`lib/batchCreator.ts`)

This scheduler runs periodically (e.g., every 10 minutes) to bundle pending requests into a batch.

Functionality:

1. Query the database for `AIProcessingRequest` records with `processingStatus`: `PENDING_BATCHING`.
2. Group these requests by the AI model they need to use (e.g., `gpt-4-turbo`). The Batch API requires one model per batch file.
3. For each model group (see the sketch after this list):
   1. Generate a `.jsonl` string. Each line must be a valid OpenAI batch request.
   2. Crucially, use our internal `AIProcessingRequest.id` as the `custom_id` in each JSON line. This is how we will map results back.
   3. Upload the `.jsonl` content to OpenAI using `openai.files.create({ file: Buffer.from(jsonlContent), purpose: 'batch' })`.
   4. Create the batch job using `openai.batches.create()` with the returned `input_file_id`.
   5. In a single database transaction:
      1. Create a new `AIBatchRequest` record in our database, storing the `openaiBatchId`, `inputFileId`, and setting the initial status to `VALIDATING`.
      2. Update all the `AIProcessingRequest` records included in this batch to set their `processingStatus` to `BATCHING_IN_PROGRESS` and link them via the `batchId`.
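To make the expected flow concrete, here is a minimal sketch of the batch-creation step. It assumes the official `openai` Node SDK and the Phase 1 Prisma models; the helper name, the `prompt` field, and the `companyId` plumbing are illustrative, not prescribed.

```typescript
// lib/batchCreator.ts (sketch only, not the final implementation)
import OpenAI, { toFile } from "openai";
import { PrismaClient } from "@prisma/client";

const openai = new OpenAI();
const prisma = new PrismaClient();

// One .jsonl line per pending request; our internal id becomes custom_id
// so the poller can map results back to AIProcessingRequest rows.
function buildBatchLine(req: { id: string; prompt: string }, model: string): string {
  return JSON.stringify({
    custom_id: req.id,
    method: "POST",
    url: "/v1/chat/completions",
    body: { model, messages: [{ role: "user", content: req.prompt }] },
  });
}

async function createBatchForModel(
  model: string,
  companyId: string,
  pending: { id: string; prompt: string }[]
): Promise<void> {
  const jsonlContent = pending.map((r) => buildBatchLine(r, model)).join("\n");

  // Upload the .jsonl file, then create the batch job from it.
  const file = await openai.files.create({
    file: await toFile(Buffer.from(jsonlContent), "batch.jsonl"),
    purpose: "batch",
  });
  const batch = await openai.batches.create({
    input_file_id: file.id,
    endpoint: "/v1/chat/completions",
    completion_window: "24h",
  });

  // Record the batch and flip all included requests in one transaction.
  await prisma.$transaction(async (tx) => {
    const record = await tx.aIBatchRequest.create({
      data: {
        companyId,
        openaiBatchId: batch.id,
        inputFileId: file.id,
        status: "VALIDATING",
      },
    });
    await tx.aIProcessingRequest.updateMany({
      where: { id: { in: pending.map((r) => r.id) } },
      data: { processingStatus: "BATCHING_IN_PROGRESS", batchId: record.id },
    });
  });
}
```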
### Scheduler 2: Result Polling (`lib/batchPoller.ts`)

This scheduler runs more frequently (e.g., every 2 minutes) to check for and process completed jobs.

Functionality (a sketch follows this list):

1. Query our database for `AIBatchRequest` records with a status that is still in-flight (e.g., `VALIDATING`, `IN_PROGRESS`, `FINALIZING`).
2. For each active batch, call `openai.batches.retrieve(batch.openaiBatchId)` to get the latest status from OpenAI.
3. Update the status of our `AIBatchRequest` record to match the one from OpenAI.
4. If a batch's status becomes completed:
   1. Update its status in our DB and store the `output_file_id` and `error_file_id`.
   2. Download the content of the `output_file_id` from OpenAI.
   3. Parse the resulting .jsonl file line by line. For each line:
      1. Use the `custom_id` to find our original `AIProcessingRequest` record.
      2. If the line contains a response, parse the AI content and usage data. Update our `AIProcessingRequest` record with this data and set its `processingStatus` to `PROCESSING_COMPLETE`.
      3. If the line contains an error, log it and set the `processingStatus` to `PROCESSING_FAILED`.
   4. Do the same for the `error_file_id` if it exists.
   5. Once all results are processed, update the parent `AIBatchRequest` status to `PROCESSED` and set its `processedAt` timestamp.
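A minimal sketch of the polling loop, under the same assumptions as the batch-creation sketch (status mapping, usage-data persistence, and error-file handling are elided for brevity):

```typescript
// lib/batchPoller.ts (sketch only, not the final implementation)
import OpenAI from "openai";
import { PrismaClient } from "@prisma/client";

const openai = new OpenAI();
const prisma = new PrismaClient();

async function pollActiveBatches(): Promise<void> {
  const active = await prisma.aIBatchRequest.findMany({
    where: { status: { in: ["VALIDATING", "IN_PROGRESS", "FINALIZING"] } },
  });

  for (const batch of active) {
    const remote = await openai.batches.retrieve(batch.openaiBatchId);

    if (remote.status !== "completed") {
      // Mirror OpenAI's in-flight status into our record (mapping elided).
      continue;
    }

    // Download the results file; each line's custom_id is one of our request ids.
    const response = await openai.files.content(remote.output_file_id as string);
    const content = await response.text();

    for (const line of content.split("\n").filter(Boolean)) {
      const result = JSON.parse(line);
      await prisma.aIProcessingRequest.update({
        where: { id: result.custom_id },
        data: {
          processingStatus: result.error
            ? "PROCESSING_FAILED"
            : "PROCESSING_COMPLETE",
        },
      });
    }

    await prisma.aIBatchRequest.update({
      where: { id: batch.id },
      data: {
        status: "PROCESSED",
        processedAt: new Date(),
        outputFileId: remote.output_file_id,
        errorFileId: remote.error_file_id,
      },
    });
  }
}
```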
---

## Phase 3: Implement the Internal Admin API

Create a new set of internal API endpoints for monitoring and managing this process.

* Location: `app/api/admin/legacy/`
* Authentication: Protect all these endpoints with our most secure admin-level authentication middleware (e.g., from `lib/platform-auth.ts`). Access should be strictly limited.

### Endpoint 1: Get Summary

* Route: `GET` `/api/admin/legacy/summary`
* Description: Returns a count of all `AIProcessingRequest` records, grouped by `processingStatus`.
* Response:

```json
{
  "ok": true,
  "summary": {
    "pending_batching": 15231,
    "batching_in_progress": 2500,
    "processing_complete": 85432,
    "processing_failed": 78
  }
}
```

### Endpoint 2: List Requests

* Route: `GET` `/api/admin/legacy/requests`
* Description: Retrieves a paginated list of `AIProcessingRequest` records, filterable by `status`.
* Query Params: `status` (required), `limit` (optional), `cursor` (optional).
* Response:

```json
{
  "ok": true,
  "requests": [
    {
      "id": "...",
      "sessionId": "...",
      "status": "processing_failed", ...
    }
  ],
  "nextCursor": "..."
}
```

### Endpoint 3: Re-queue Failed Requests

* Route: `POST` `/api/admin/legacy/requests/requeue`
* Description: Resets the status of specified failed requests back to `PENDING_BATCHING` so they can be re-processed in a new batch.
* Request Body:

```json
{
  "requestIds": ["req_id_1", "req_id_2", ...]
}
```

* Response:

```json
{
  "ok": true,
  "requeuedCount": 2,
  "notFoundCount": 0
}
```

---

## Phase 4: Final Integration and Cleanup

1. Update `server.ts` and `lib/schedulers.ts`: Disable the old `processingScheduler` and enable the two new schedulers (`batchCreator`, `batchPoller`). Ensure they are controlled by environment variables (e.g., `BATCH_CREATION_ENABLED`, `BATCH_POLLING_ENABLED`).
2. Documentation: Add a section to `CLAUDE.md` or a new file in `docs/` explaining the new batch processing architecture and the purpose of the admin API endpoints.
3. Environment Variables: Add any new required environment variables to `.env.example` (an illustrative set follows).
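For example, the `.env.example` additions might look like this; the enable flags are named above, while the interval variable names and cron values are illustrative:

```
# OpenAI Batch API schedulers (*_INTERVAL names and values are illustrative)
BATCH_CREATION_ENABLED=true
BATCH_CREATION_INTERVAL="*/10 * * * *"
BATCH_POLLING_ENABLED=true
BATCH_POLLING_INTERVAL="*/2 * * * *"
```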
Please proceed with this refactoring plan. Implement robust logging throughout the new schedulers to ensure we can debug the pipeline effectively.
@ -1,11 +1,7 @@
# Refactoring Plan: Integrating tRPC for End-to-End Type Safety

> **Objective:** _Incrementally adopt `tRPC` to replace existing RESTful API endpoints, enhancing type safety, developer experience, and maintainability._
> **Assignee:** _Claude Code_

---
@ -50,8 +46,8 @@ Create a new file at `lib/trpc/server.ts` to initialize tRPC. This file will exp
```typescript
// lib/trpc/server.ts
import { initTRPC } from "@trpc/server";
import { db } from "@/lib/prisma"; // Assuming prisma client is here

// Avoid exporting the entire t-object since it's not very descriptive.
const t = initTRPC.create();

@ -67,8 +63,8 @@ Create a file for the main tRPC router at `lib/trpc/routers/_app.ts`. This route
```typescript
// lib/trpc/routers/_app.ts
import { router } from "../server";
import { userRouter } from "./user"; // Example sub-router

export const appRouter = router({
  user: userRouter,

@ -85,9 +81,9 @@ Create an example router for user-related endpoints at `lib/trpc/routers/user.ts
```typescript
// lib/trpc/routers/user.ts
import { router, procedure } from "../server";
import { z } from "zod";
import { db } from "@/lib/prisma";

export const userRouter = router({
  // Example query to get all users
@ -97,9 +93,7 @@ export const userRouter = router({
  }),

  // Example query to get a user by ID
  byId: procedure.input(z.object({ id: z.string() })).query(async ({ input }) => {
    const user = await db.user.findUnique({ where: { id: input.id } });
    return user;
  }),
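As a quick sanity check that inference flows end to end, the `byId` procedure can be consumed from a client component like this (component name and markup are illustrative):

```tsx
// Example consumer; the input and output types are inferred from the router.
"use client";

import { trpc } from "@/lib/trpc/client";

export function UserName({ id }: { id: string }) {
  const { data: user, isLoading } = trpc.user.byId.useQuery({ id });
  if (isLoading) return <span>Loading...</span>;
  return <span>{user?.name ?? "Unknown user"}</span>;
}
```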
@ -112,12 +106,12 @@ Create the entry point for all tRPC API calls at `app/api/trpc/[trpc]/route.ts`.
```typescript
// app/api/trpc/[trpc]/route.ts
import { fetchRequestHandler } from "@trpc/server/adapters/fetch";
import { appRouter } from "@/lib/trpc/routers/_app";

const handler = (req: Request) =>
  fetchRequestHandler({
    endpoint: "/api/trpc",
    req,
    router: appRouter,
    createContext: () => ({}), // We will add context later

@ -134,8 +128,8 @@ Create a file at `lib/trpc/client.ts` to configure the client-side hooks.
```typescript
// lib/trpc/client.ts
import { createTRPCReact } from "@trpc/react-query";
import { type AppRouter } from "@/lib/trpc/routers/_app";

export const trpc = createTRPCReact<AppRouter>({});
```

@ -146,13 +140,13 @@ We need a new provider that wraps our app in both a `QueryClientProvider` (from
```tsx
// lib/trpc/Provider.tsx
"use client";

import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { httpBatchLink } from "@trpc/client";
import React, { useState } from "react";
import { trpc } from "./client";
import { getBaseUrl } from "@/lib/utils"; // You might need to create this helper

export function TRPCProvider({ children }: { children: React.ReactNode }) {
  const [queryClient] = useState(() => new QueryClient({}));

@ -174,13 +168,15 @@ export function TRPCProvider({ children }: { children: React.ReactNode }) {
}
```

!!! note

    Note: You will need a `getBaseUrl` utility function to resolve the correct API URL on the client and server. You can place this in `lib/utils.ts`.

```typescript
// lib/utils.ts

export function getBaseUrl() {
  if (typeof window !== "undefined") return ""; // browser should use relative url
  if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url
  return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost
}

@ -192,19 +188,14 @@ Wrap the application with the new `TRPCProvider` in `app/providers.tsx`.
```tsx
// app/providers.tsx
"use client";

import { ThemeProvider } from "@/components/theme-provider";
import { TRPCProvider } from "@/lib/trpc/Provider"; // Import the new provider

export function Providers({ children }: { children: React.ReactNode }) {
  return (
    <ThemeProvider attribute="class" defaultTheme="system" enableSystem disableTransitionOnChange>
      <TRPCProvider>{children}</TRPCProvider> {/* Wrap with TRPCProvider */}
    </ThemeProvider>
  );

@ -217,9 +208,9 @@ Now you can replace a traditional `fetch` call with the new tRPC hook. For examp
```tsx
// app/dashboard/users/page.tsx (Example)
"use client";

import { trpc } from "@/lib/trpc/client";

export default function UsersPage() {
  const { data: users, isLoading, error } = trpc.user.list.useQuery();

@ -237,7 +228,9 @@ export default function UsersPage() {
      <h1>Users</h1>
      <ul>
        {users?.map((user) => (
          <li key={user.id}>
            {user.name} ({user.email})
          </li>
        ))}
      </ul>
    </div>

@ -254,4 +247,5 @@ export default function UsersPage() {
- **Optimistic UI:** For mutations, implement optimistic updates to provide a faster user experience.

---

This structured approach will ensure a smooth and successful integration of tRPC, leading to a more robust and maintainable codebase.
@ -9,34 +9,56 @@ async function checkDatabaseConfig() {
|
|||||||
|
|
||||||
// Check environment variables
|
// Check environment variables
|
||||||
console.log("📋 Environment Configuration:");
|
console.log("📋 Environment Configuration:");
|
||||||
console.log(` DATABASE_URL: ${process.env.DATABASE_URL ? '✅ Set' : '❌ Missing'}`);
|
console.log(
|
||||||
console.log(` USE_ENHANCED_POOLING: ${process.env.USE_ENHANCED_POOLING || 'false'}`);
|
` DATABASE_URL: ${process.env.DATABASE_URL ? "✅ Set" : "❌ Missing"}`
|
||||||
console.log(` DATABASE_CONNECTION_LIMIT: ${process.env.DATABASE_CONNECTION_LIMIT || 'default'}`);
|
);
|
||||||
console.log(` DATABASE_POOL_TIMEOUT: ${process.env.DATABASE_POOL_TIMEOUT || 'default'}`);
|
console.log(
|
||||||
|
` USE_ENHANCED_POOLING: ${process.env.USE_ENHANCED_POOLING || "false"}`
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
` DATABASE_CONNECTION_LIMIT: ${process.env.DATABASE_CONNECTION_LIMIT || "default"}`
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
` DATABASE_POOL_TIMEOUT: ${process.env.DATABASE_POOL_TIMEOUT || "default"}`
|
||||||
|
);
|
||||||
|
|
||||||
// Parse DATABASE_URL for connection details
|
// Parse DATABASE_URL for connection details
|
||||||
if (process.env.DATABASE_URL) {
|
if (process.env.DATABASE_URL) {
|
||||||
try {
|
try {
|
||||||
const dbUrl = new URL(process.env.DATABASE_URL);
|
const dbUrl = new URL(process.env.DATABASE_URL);
|
||||||
console.log(` Database Host: ${dbUrl.hostname}`);
|
console.log(` Database Host: ${dbUrl.hostname}`);
|
||||||
console.log(` Database Port: ${dbUrl.port || '5432'}`);
|
console.log(` Database Port: ${dbUrl.port || "5432"}`);
|
||||||
console.log(` Database Name: ${dbUrl.pathname.slice(1)}`);
|
console.log(` Database Name: ${dbUrl.pathname.slice(1)}`);
|
||||||
|
|
||||||
// Check for Neon-specific optimizations
|
// Check for Neon-specific optimizations
|
||||||
const searchParams = dbUrl.searchParams;
|
const searchParams = dbUrl.searchParams;
|
||||||
console.log(` SSL Mode: ${searchParams.get('sslmode') || 'not specified'}`);
|
console.log(
|
||||||
console.log(` Connection Limit: ${searchParams.get('connection_limit') || 'not specified'}`);
|
` SSL Mode: ${searchParams.get("sslmode") || "not specified"}`
|
||||||
console.log(` Pool Timeout: ${searchParams.get('pool_timeout') || 'not specified'}`);
|
);
|
||||||
|
console.log(
|
||||||
|
` Connection Limit: ${searchParams.get("connection_limit") || "not specified"}`
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
` Pool Timeout: ${searchParams.get("pool_timeout") || "not specified"}`
|
||||||
|
);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log(` ❌ Invalid DATABASE_URL format: ${error instanceof Error ? error.message : error}`);
|
console.log(
|
||||||
|
` ❌ Invalid DATABASE_URL format: ${error instanceof Error ? error.message : error}`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check scheduler intervals
|
// Check scheduler intervals
|
||||||
console.log("\n⏰ Scheduler Configuration:");
|
console.log("\n⏰ Scheduler Configuration:");
|
||||||
console.log(` CSV Import: ${process.env.CSV_IMPORT_INTERVAL || '*/15 * * * *'}`);
|
console.log(
|
||||||
console.log(` Import Processing: ${process.env.IMPORT_PROCESSING_INTERVAL || '*/5 * * * *'}`);
|
` CSV Import: ${process.env.CSV_IMPORT_INTERVAL || "*/15 * * * *"}`
|
||||||
console.log(` Session Processing: ${process.env.SESSION_PROCESSING_INTERVAL || '0 * * * *'}`);
|
);
|
||||||
|
console.log(
|
||||||
|
` Import Processing: ${process.env.IMPORT_PROCESSING_INTERVAL || "*/5 * * * *"}`
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
` Session Processing: ${process.env.SESSION_PROCESSING_INTERVAL || "0 * * * *"}`
|
||||||
|
);
|
||||||
|
|
||||||
// Test database connectivity
|
// Test database connectivity
|
||||||
console.log("\n🔌 Database Connectivity Test:");
|
console.log("\n🔌 Database Connectivity Test:");
|
||||||
@ -44,14 +66,16 @@ async function checkDatabaseConfig() {
|
|||||||
try {
|
try {
|
||||||
console.log(" Testing basic connection...");
|
console.log(" Testing basic connection...");
|
||||||
const isConnected = await checkDatabaseConnection();
|
const isConnected = await checkDatabaseConnection();
|
||||||
console.log(` Basic connection: ${isConnected ? '✅ Success' : '❌ Failed'}`);
|
console.log(
|
||||||
|
` Basic connection: ${isConnected ? "✅ Success" : "❌ Failed"}`
|
||||||
|
);
|
||||||
|
|
||||||
if (isConnected) {
|
if (isConnected) {
|
||||||
console.log(" Testing connection with retry logic...");
|
console.log(" Testing connection with retry logic...");
|
||||||
const retryResult = await withRetry(
|
const retryResult = await withRetry(
|
||||||
async () => {
|
async () => {
|
||||||
const result = await checkDatabaseConnection();
|
const result = await checkDatabaseConnection();
|
||||||
if (!result) throw new Error('Connection check failed');
|
if (!result) throw new Error("Connection check failed");
|
||||||
return result;
|
return result;
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -60,37 +84,56 @@ async function checkDatabaseConfig() {
|
|||||||
maxDelay: 5000,
|
maxDelay: 5000,
|
||||||
backoffMultiplier: 2,
|
backoffMultiplier: 2,
|
||||||
},
|
},
|
||||||
'connectivity test'
|
"connectivity test"
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
` Retry connection: ${retryResult ? "✅ Success" : "❌ Failed"}`
|
||||||
);
|
);
|
||||||
console.log(` Retry connection: ${retryResult ? '✅ Success' : '❌ Failed'}`);
|
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log(` ❌ Connection test failed: ${error instanceof Error ? error.message : error}`);
|
console.log(
|
||||||
|
` ❌ Connection test failed: ${error instanceof Error ? error.message : error}`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Recommendations
|
// Recommendations
|
||||||
console.log("\n💡 Recommendations:");
|
console.log("\n💡 Recommendations:");
|
||||||
|
|
||||||
if (!process.env.USE_ENHANCED_POOLING || process.env.USE_ENHANCED_POOLING === 'false') {
|
if (
|
||||||
|
!process.env.USE_ENHANCED_POOLING ||
|
||||||
|
process.env.USE_ENHANCED_POOLING === "false"
|
||||||
|
) {
|
||||||
console.log(" 🔧 Enable enhanced pooling: USE_ENHANCED_POOLING=true");
|
console.log(" 🔧 Enable enhanced pooling: USE_ENHANCED_POOLING=true");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!process.env.DATABASE_CONNECTION_LIMIT || Number.parseInt(process.env.DATABASE_CONNECTION_LIMIT) > 15) {
|
if (
|
||||||
console.log(" 🔧 Optimize connection limit for Neon: DATABASE_CONNECTION_LIMIT=15");
|
!process.env.DATABASE_CONNECTION_LIMIT ||
|
||||||
|
Number.parseInt(process.env.DATABASE_CONNECTION_LIMIT) > 15
|
||||||
|
) {
|
||||||
|
console.log(
|
||||||
|
" 🔧 Optimize connection limit for Neon: DATABASE_CONNECTION_LIMIT=15"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!process.env.DATABASE_POOL_TIMEOUT || Number.parseInt(process.env.DATABASE_POOL_TIMEOUT) < 30) {
|
if (
|
||||||
console.log(" 🔧 Increase pool timeout for cold starts: DATABASE_POOL_TIMEOUT=30");
|
!process.env.DATABASE_POOL_TIMEOUT ||
|
||||||
|
Number.parseInt(process.env.DATABASE_POOL_TIMEOUT) < 30
|
||||||
|
) {
|
||||||
|
console.log(
|
||||||
|
" 🔧 Increase pool timeout for cold starts: DATABASE_POOL_TIMEOUT=30"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for Neon-specific URL parameters
|
// Check for Neon-specific URL parameters
|
||||||
if (process.env.DATABASE_URL) {
|
if (process.env.DATABASE_URL) {
|
||||||
const dbUrl = new URL(process.env.DATABASE_URL);
|
const dbUrl = new URL(process.env.DATABASE_URL);
|
||||||
if (!dbUrl.searchParams.get('sslmode')) {
|
if (!dbUrl.searchParams.get("sslmode")) {
|
||||||
console.log(" 🔧 Add SSL mode to DATABASE_URL: ?sslmode=require");
|
console.log(" 🔧 Add SSL mode to DATABASE_URL: ?sslmode=require");
|
||||||
}
|
}
|
||||||
if (!dbUrl.searchParams.get('connection_limit')) {
|
if (!dbUrl.searchParams.get("connection_limit")) {
|
||||||
console.log(" 🔧 Add connection limit to DATABASE_URL: &connection_limit=15");
|
console.log(
|
||||||
|
" 🔧 Add connection limit to DATABASE_URL: &connection_limit=15"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
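For reference, here is a minimal sketch of a `withRetry` helper matching the option shape this script passes (`maxDelay`, `backoffMultiplier`, plus a label string) and the behaviour described for `lib/database-retry.ts`: exponential backoff and 3 retries by default. Option names not visible above, such as `initialDelay`, are assumptions, and the real helper also restricts retries to connection errors.

```typescript
// Hypothetical sketch; the real implementation lives in lib/database-retry.ts.
interface RetryOptions {
  maxRetries?: number; // assumed default: 3
  initialDelay?: number; // ms before the first retry (assumed name)
  maxDelay?: number; // cap on the backoff delay, as passed above
  backoffMultiplier?: number; // delay growth factor, as passed above
}

async function withRetry<T>(
  operation: () => Promise<T>,
  options: RetryOptions = {},
  label = "operation"
): Promise<T> {
  const {
    maxRetries = 3,
    initialDelay = 1000,
    maxDelay = 10000,
    backoffMultiplier = 2,
  } = options;

  let delay = initialDelay;
  for (let attempt = 0; ; attempt++) {
    try {
      return await operation();
    } catch (error) {
      // The real helper only retries connection-class errors here.
      if (attempt >= maxRetries) throw error;
      console.warn(
        `${label} failed (attempt ${attempt + 1}), retrying in ${delay}ms`
      );
      await new Promise((resolve) => setTimeout(resolve, delay));
      delay = Math.min(delay * backoffMultiplier, maxDelay);
    }
  }
}
```

With these defaults the delays grow 1s, 2s, 4s and are capped at 10s, matching the backoff schedule the fix document describes.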
tests/api/auth-routes.test.ts (new file, 458 lines added)
@@ -0,0 +1,458 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { POST as registerPOST } from "../../app/api/register/route";
import { POST as forgotPasswordPOST } from "../../app/api/forgot-password/route";
import { NextRequest } from "next/server";

// Mock bcrypt
vi.mock("bcryptjs", () => ({
  default: {
    hash: vi.fn().mockResolvedValue("hashed-password"),
    compare: vi.fn().mockResolvedValue(true),
  },
}));

// Mock crypto
vi.mock("node:crypto", () => ({
  default: {
    randomBytes: vi.fn().mockReturnValue({
      toString: vi.fn().mockReturnValue("random-token"),
    }),
  },
}));

// Mock prisma
vi.mock("../../lib/prisma", () => ({
  prisma: {
    user: {
      findUnique: vi.fn(),
      create: vi.fn(),
      update: vi.fn(),
    },
    company: {
      findUnique: vi.fn(),
    },
  },
}));

// Mock email service
vi.mock("../../lib/sendEmail", () => ({
  sendEmail: vi.fn().mockResolvedValue({ success: true }),
}));

// Mock rate limiter
vi.mock("../../lib/rateLimiter", () => ({
  InMemoryRateLimiter: vi.fn().mockImplementation(() => ({
    checkRateLimit: vi.fn().mockReturnValue({ allowed: true }),
  })),
  extractClientIP: vi.fn().mockReturnValue("192.168.1.1"),
}));

describe("Authentication API Routes", () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  afterEach(() => {
    vi.restoreAllMocks();
  });

  describe("POST /api/register", () => {
    it("should register a new user successfully", async () => {
      const { prisma } = await import("../../lib/prisma");

      const mockCompany = {
        id: "company1",
        name: "Test Company",
        status: "ACTIVE",
        csvUrl: "http://example.com/data.csv",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      const mockUser = {
        id: "user1",
        email: "test@example.com",
        name: "Test User",
        companyId: "company1",
        role: "USER",
        password: "hashed-password",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      vi.mocked(prisma.company.findUnique).mockResolvedValue(mockCompany);
      vi.mocked(prisma.user.findUnique).mockResolvedValue(null);
      vi.mocked(prisma.user.create).mockResolvedValue(mockUser);

      const request = new NextRequest("http://localhost:3000/api/register", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          name: "Test User",
          email: "test@example.com",
          password: "password123",
          companyId: "company1",
        }),
      });

      const response = await registerPOST(request);

      expect(response.status).toBe(201);
      const data = await response.json();
      expect(data.message).toBe("User created successfully");
      expect(data.user.email).toBe("test@example.com");
    });

    it("should return 400 for missing required fields", async () => {
      const request = new NextRequest("http://localhost:3000/api/register", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          email: "test@example.com",
          // Missing name, password, companyId
        }),
      });

      const response = await registerPOST(request);

      expect(response.status).toBe(400);
      const data = await response.json();
      expect(data.error).toBe("Missing required fields");
    });

    it("should return 400 for invalid email format", async () => {
      const request = new NextRequest("http://localhost:3000/api/register", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          name: "Test User",
          email: "invalid-email",
          password: "password123",
          companyId: "company1",
        }),
      });

      const response = await registerPOST(request);

      expect(response.status).toBe(400);
      const data = await response.json();
      expect(data.error).toBe("Invalid email format");
    });

    it("should return 400 for weak password", async () => {
      const request = new NextRequest("http://localhost:3000/api/register", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          name: "Test User",
          email: "test@example.com",
          password: "123", // Too short
          companyId: "company1",
        }),
      });

      const response = await registerPOST(request);

      expect(response.status).toBe(400);
      const data = await response.json();
      expect(data.error).toBe("Password must be at least 8 characters long");
    });

    it("should return 404 for non-existent company", async () => {
      const { prisma } = await import("../../lib/prisma");

      vi.mocked(prisma.company.findUnique).mockResolvedValue(null);

      const request = new NextRequest("http://localhost:3000/api/register", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          name: "Test User",
          email: "test@example.com",
          password: "password123",
          companyId: "non-existent",
        }),
      });

      const response = await registerPOST(request);

      expect(response.status).toBe(404);
      const data = await response.json();
      expect(data.error).toBe("Company not found");
    });

    it("should return 409 for existing user email", async () => {
      const { prisma } = await import("../../lib/prisma");

      const mockCompany = {
        id: "company1",
        name: "Test Company",
        status: "ACTIVE",
        csvUrl: "http://example.com/data.csv",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      const existingUser = {
        id: "existing-user",
        email: "test@example.com",
        name: "Existing User",
        companyId: "company1",
        role: "USER",
        password: "hashed-password",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      vi.mocked(prisma.company.findUnique).mockResolvedValue(mockCompany);
      vi.mocked(prisma.user.findUnique).mockResolvedValue(existingUser);

      const request = new NextRequest("http://localhost:3000/api/register", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          name: "Test User",
          email: "test@example.com",
          password: "password123",
          companyId: "company1",
        }),
      });

      const response = await registerPOST(request);

      expect(response.status).toBe(409);
      const data = await response.json();
      expect(data.error).toBe("User already exists");
    });

    it("should handle rate limiting", async () => {
      const { InMemoryRateLimiter } = await import("../../lib/rateLimiter");

      // Mock rate limiter to return not allowed
      const mockRateLimiter = {
        checkRateLimit: vi.fn().mockReturnValue({
          allowed: false,
          resetTime: Date.now() + 60000,
        }),
      };

      vi.mocked(InMemoryRateLimiter).mockImplementation(() => mockRateLimiter);

      const request = new NextRequest("http://localhost:3000/api/register", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          name: "Test User",
          email: "test@example.com",
          password: "password123",
          companyId: "company1",
        }),
      });

      const response = await registerPOST(request);

      expect(response.status).toBe(429);
      const data = await response.json();
      expect(data.error).toBe(
        "Too many registration attempts. Please try again later."
      );
    });
  });

  describe("POST /api/forgot-password", () => {
    it("should send password reset email for existing user", async () => {
      const { prisma } = await import("../../lib/prisma");
      const { sendEmail } = await import("../../lib/sendEmail");

      const existingUser = {
        id: "user1",
        email: "test@example.com",
        name: "Test User",
        companyId: "company1",
        role: "USER",
        password: "hashed-password",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      vi.mocked(prisma.user.findUnique).mockResolvedValue(existingUser);
      vi.mocked(prisma.user.update).mockResolvedValue({
        ...existingUser,
        resetToken: "random-token",
        resetTokenExpiry: new Date(Date.now() + 3600000),
      });

      const request = new NextRequest(
        "http://localhost:3000/api/forgot-password",
        {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify({
            email: "test@example.com",
          }),
        }
      );

      const response = await forgotPasswordPOST(request);

      expect(response.status).toBe(200);
      const data = await response.json();
      expect(data.message).toBe("Password reset email sent");
      expect(sendEmail).toHaveBeenCalled();
    });

    it("should return success even for non-existent users (security)", async () => {
      const { prisma } = await import("../../lib/prisma");

      vi.mocked(prisma.user.findUnique).mockResolvedValue(null);

      const request = new NextRequest(
        "http://localhost:3000/api/forgot-password",
        {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify({
            email: "nonexistent@example.com",
          }),
        }
      );

      const response = await forgotPasswordPOST(request);

      expect(response.status).toBe(200);
      const data = await response.json();
      expect(data.message).toBe("Password reset email sent");
    });

    it("should return 400 for invalid email", async () => {
      const request = new NextRequest(
        "http://localhost:3000/api/forgot-password",
        {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify({
            email: "invalid-email",
          }),
        }
      );

      const response = await forgotPasswordPOST(request);

      expect(response.status).toBe(400);
      const data = await response.json();
      expect(data.error).toBe("Invalid email address");
    });

    it("should return 400 for missing email", async () => {
      const request = new NextRequest(
        "http://localhost:3000/api/forgot-password",
        {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify({}),
        }
      );

      const response = await forgotPasswordPOST(request);

      expect(response.status).toBe(400);
      const data = await response.json();
      expect(data.error).toBe("Email is required");
    });

    it("should handle database errors gracefully", async () => {
      const { prisma } = await import("../../lib/prisma");

      vi.mocked(prisma.user.findUnique).mockRejectedValue(
        new Error("Database connection failed")
      );

      const request = new NextRequest(
        "http://localhost:3000/api/forgot-password",
        {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify({
            email: "test@example.com",
          }),
        }
      );

      const response = await forgotPasswordPOST(request);

      expect(response.status).toBe(500);
      const data = await response.json();
      expect(data.error).toBe("Internal server error");
    });

    it("should handle email sending failures gracefully", async () => {
      const { prisma } = await import("../../lib/prisma");
      const { sendEmail } = await import("../../lib/sendEmail");

      const existingUser = {
        id: "user1",
        email: "test@example.com",
        name: "Test User",
        companyId: "company1",
        role: "USER",
        password: "hashed-password",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      vi.mocked(prisma.user.findUnique).mockResolvedValue(existingUser);
      vi.mocked(prisma.user.update).mockResolvedValue({
        ...existingUser,
        resetToken: "random-token",
        resetTokenExpiry: new Date(Date.now() + 3600000),
      });
      vi.mocked(sendEmail).mockResolvedValue({
        success: false,
        error: "Email service unavailable",
      });

      const request = new NextRequest(
        "http://localhost:3000/api/forgot-password",
        {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify({
            email: "test@example.com",
          }),
        }
      );

      const response = await forgotPasswordPOST(request);

      expect(response.status).toBe(200);
      const data = await response.json();
      expect(data.message).toBe("Password reset email sent");
    });
  });
});
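The tests above only stub the limiter's surface: `checkRateLimit` returning `{ allowed, resetTime }` and an `extractClientIP` helper. As a rough sketch, assuming a fixed-window policy, an in-memory limiter with that interface could look like this; the constructor parameters and eviction behaviour of the real `lib/rateLimiter` are not shown in this diff:

```typescript
// Illustrative fixed-window limiter matching the mocked interface;
// constructor parameters and windowing policy are assumptions.
class InMemoryRateLimiter {
  private hits = new Map<string, { count: number; resetTime: number }>();

  constructor(
    private maxRequests: number,
    private windowMs: number
  ) {}

  checkRateLimit(key: string): { allowed: boolean; resetTime?: number } {
    const now = Date.now();
    const entry = this.hits.get(key);

    if (!entry || now >= entry.resetTime) {
      // Start a fresh window for this key.
      this.hits.set(key, { count: 1, resetTime: now + this.windowMs });
      return { allowed: true };
    }

    if (entry.count < this.maxRequests) {
      entry.count++;
      return { allowed: true };
    }

    return { allowed: false, resetTime: entry.resetTime };
  }
}
```

A limiter of this shape is what the 429 test exercises: once `allowed` comes back `false`, the register route is expected to answer with the "Too many registration attempts" error.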
tests/api/dashboard-metrics.test.ts (new file, 409 lines added)
@@ -0,0 +1,409 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { GET } from "../../app/api/dashboard/metrics/route";
import { NextRequest } from "next/server";

// Mock NextAuth
vi.mock("next-auth", () => ({
  getServerSession: vi.fn(),
}));

// Mock prisma
vi.mock("../../lib/prisma", () => ({
  prisma: {
    session: {
      count: vi.fn(),
      findMany: vi.fn(),
      aggregate: vi.fn(),
    },
    user: {
      findUnique: vi.fn(),
    },
    company: {
      findUnique: vi.fn(),
    },
  },
}));

// Mock auth options
vi.mock("../../lib/auth", () => ({
  authOptions: {},
}));

describe("/api/dashboard/metrics", () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  afterEach(() => {
    vi.restoreAllMocks();
  });

  describe("GET /api/dashboard/metrics", () => {
    it("should return 401 for unauthenticated users", async () => {
      const { getServerSession } = await import("next-auth");
      vi.mocked(getServerSession).mockResolvedValue(null);

      const request = new NextRequest(
        "http://localhost:3000/api/dashboard/metrics"
      );
      const response = await GET(request);

      expect(response.status).toBe(401);
      const data = await response.json();
      expect(data.error).toBe("Unauthorized");
    });

    it("should return 404 when user not found", async () => {
      const { getServerSession } = await import("next-auth");
      const { prisma } = await import("../../lib/prisma");

      vi.mocked(getServerSession).mockResolvedValue({
        user: { email: "test@example.com" },
        expires: "2024-12-31",
      });

      vi.mocked(prisma.user.findUnique).mockResolvedValue(null);

      const request = new NextRequest(
        "http://localhost:3000/api/dashboard/metrics"
      );
      const response = await GET(request);

      expect(response.status).toBe(404);
      const data = await response.json();
      expect(data.error).toBe("User not found");
    });

    it("should return 404 when company not found", async () => {
      const { getServerSession } = await import("next-auth");
      const { prisma } = await import("../../lib/prisma");

      vi.mocked(getServerSession).mockResolvedValue({
        user: { email: "test@example.com" },
        expires: "2024-12-31",
      });

      vi.mocked(prisma.user.findUnique).mockResolvedValue({
        id: "user1",
        email: "test@example.com",
        companyId: "company1",
        role: "ADMIN",
        password: "hashed",
        createdAt: new Date(),
        updatedAt: new Date(),
      });

      vi.mocked(prisma.company.findUnique).mockResolvedValue(null);

      const request = new NextRequest(
        "http://localhost:3000/api/dashboard/metrics"
      );
      const response = await GET(request);

      expect(response.status).toBe(404);
      const data = await response.json();
      expect(data.error).toBe("Company not found");
    });

    it("should return metrics data for valid requests", async () => {
      const { getServerSession } = await import("next-auth");
      const { prisma } = await import("../../lib/prisma");

      const mockUser = {
        id: "user1",
        email: "test@example.com",
        companyId: "company1",
        role: "ADMIN",
        password: "hashed",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      const mockCompany = {
        id: "company1",
        name: "Test Company",
        csvUrl: "http://example.com/data.csv",
        sentimentAlert: 0.5,
        status: "ACTIVE" as const,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      const mockSessions = [
        {
          id: "session1",
          sessionId: "s1",
          companyId: "company1",
          startTime: new Date("2024-01-01T10:00:00Z"),
          endTime: new Date("2024-01-01T10:30:00Z"),
          sentiment: "POSITIVE",
          messagesSent: 5,
          avgResponseTime: 2.5,
          tokens: 100,
          tokensEur: 0.002,
          language: "en",
          country: "US",
          category: "SUPPORT",
          createdAt: new Date(),
          updatedAt: new Date(),
        },
        {
          id: "session2",
          sessionId: "s2",
          companyId: "company1",
          startTime: new Date("2024-01-02T14:00:00Z"),
          endTime: new Date("2024-01-02T14:15:00Z"),
          sentiment: "NEGATIVE",
          messagesSent: 3,
          avgResponseTime: 1.8,
          tokens: 75,
          tokensEur: 0.0015,
          language: "es",
          country: "ES",
          category: "BILLING",
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];

      vi.mocked(getServerSession).mockResolvedValue({
        user: { email: "test@example.com" },
        expires: "2024-12-31",
      });

      vi.mocked(prisma.user.findUnique).mockResolvedValue(mockUser);
      vi.mocked(prisma.company.findUnique).mockResolvedValue(mockCompany);
      vi.mocked(prisma.session.findMany).mockResolvedValue(mockSessions);
      vi.mocked(prisma.session.count).mockResolvedValue(2);

      const request = new NextRequest(
        "http://localhost:3000/api/dashboard/metrics"
      );
      const response = await GET(request);

      expect(response.status).toBe(200);
      const data = await response.json();

      expect(data.metrics).toBeDefined();
      expect(data.company).toBeDefined();
      expect(data.metrics.totalSessions).toBe(2);
      expect(data.company.name).toBe("Test Company");
    });

    it("should handle date range filtering", async () => {
      const { getServerSession } = await import("next-auth");
      const { prisma } = await import("../../lib/prisma");

      const mockUser = {
        id: "user1",
        email: "test@example.com",
        companyId: "company1",
        role: "ADMIN",
        password: "hashed",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      const mockCompany = {
        id: "company1",
        name: "Test Company",
        csvUrl: "http://example.com/data.csv",
        sentimentAlert: 0.5,
        status: "ACTIVE" as const,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      vi.mocked(getServerSession).mockResolvedValue({
        user: { email: "test@example.com" },
        expires: "2024-12-31",
      });

      vi.mocked(prisma.user.findUnique).mockResolvedValue(mockUser);
      vi.mocked(prisma.company.findUnique).mockResolvedValue(mockCompany);
      vi.mocked(prisma.session.findMany).mockResolvedValue([]);
      vi.mocked(prisma.session.count).mockResolvedValue(0);

      const request = new NextRequest(
        "http://localhost:3000/api/dashboard/metrics?startDate=2024-01-01&endDate=2024-01-31"
      );
      const response = await GET(request);

      expect(response.status).toBe(200);
      expect(prisma.session.findMany).toHaveBeenCalledWith(
        expect.objectContaining({
          where: expect.objectContaining({
            companyId: "company1",
            startTime: expect.objectContaining({
              gte: expect.any(Date),
              lte: expect.any(Date),
            }),
          }),
        })
      );
    });

    it("should calculate metrics correctly", async () => {
      const { getServerSession } = await import("next-auth");
      const { prisma } = await import("../../lib/prisma");

      const mockUser = {
        id: "user1",
        email: "test@example.com",
        companyId: "company1",
        role: "ADMIN",
        password: "hashed",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      const mockCompany = {
        id: "company1",
        name: "Test Company",
        csvUrl: "http://example.com/data.csv",
        sentimentAlert: 0.5,
        status: "ACTIVE" as const,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      const mockSessions = [
        {
          id: "session1",
          sessionId: "s1",
          companyId: "company1",
          startTime: new Date("2024-01-01T10:00:00Z"),
          endTime: new Date("2024-01-01T10:30:00Z"),
          sentiment: "POSITIVE",
          messagesSent: 5,
          avgResponseTime: 2.0,
          tokens: 100,
          tokensEur: 0.002,
          language: "en",
          country: "US",
          category: "SUPPORT",
          createdAt: new Date(),
          updatedAt: new Date(),
        },
        {
          id: "session2",
          sessionId: "s2",
          companyId: "company1",
          startTime: new Date("2024-01-01T14:00:00Z"),
          endTime: new Date("2024-01-01T14:20:00Z"),
          sentiment: "NEGATIVE",
          messagesSent: 3,
          avgResponseTime: 3.0,
          tokens: 150,
          tokensEur: 0.003,
          language: "en",
          country: "US",
          category: "BILLING",
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];

      vi.mocked(getServerSession).mockResolvedValue({
        user: { email: "test@example.com" },
        expires: "2024-12-31",
      });

      vi.mocked(prisma.user.findUnique).mockResolvedValue(mockUser);
      vi.mocked(prisma.company.findUnique).mockResolvedValue(mockCompany);
      vi.mocked(prisma.session.findMany).mockResolvedValue(mockSessions);
      vi.mocked(prisma.session.count).mockResolvedValue(2);

      const request = new NextRequest(
        "http://localhost:3000/api/dashboard/metrics"
      );
      const response = await GET(request);

      expect(response.status).toBe(200);
      const data = await response.json();

      expect(data.metrics.totalSessions).toBe(2);
      expect(data.metrics.avgResponseTime).toBe(2.5); // (2.0 + 3.0) / 2
      expect(data.metrics.totalTokens).toBe(250); // 100 + 150
      expect(data.metrics.totalTokensEur).toBe(0.005); // 0.002 + 0.003
      expect(data.metrics.sentimentPositiveCount).toBe(1);
      expect(data.metrics.sentimentNegativeCount).toBe(1);
      expect(data.metrics.languages).toEqual({ en: 2 });
      expect(data.metrics.countries).toEqual({ US: 2 });
      expect(data.metrics.categories).toEqual({ SUPPORT: 1, BILLING: 1 });
    });

    it("should handle errors gracefully", async () => {
      const { getServerSession } = await import("next-auth");
      const { prisma } = await import("../../lib/prisma");

      vi.mocked(getServerSession).mockResolvedValue({
        user: { email: "test@example.com" },
        expires: "2024-12-31",
      });

      vi.mocked(prisma.user.findUnique).mockRejectedValue(
        new Error("Database error")
      );

      const request = new NextRequest(
        "http://localhost:3000/api/dashboard/metrics"
      );
      const response = await GET(request);

      expect(response.status).toBe(500);
      const data = await response.json();
      expect(data.error).toBe("Database error");
    });

    it("should return empty metrics for companies with no sessions", async () => {
      const { getServerSession } = await import("next-auth");
      const { prisma } = await import("../../lib/prisma");

      const mockUser = {
        id: "user1",
        email: "test@example.com",
        companyId: "company1",
        role: "ADMIN",
        password: "hashed",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      const mockCompany = {
        id: "company1",
        name: "Test Company",
        csvUrl: "http://example.com/data.csv",
        sentimentAlert: 0.5,
        status: "ACTIVE" as const,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      vi.mocked(getServerSession).mockResolvedValue({
        user: { email: "test@example.com" },
        expires: "2024-12-31",
      });

      vi.mocked(prisma.user.findUnique).mockResolvedValue(mockUser);
      vi.mocked(prisma.company.findUnique).mockResolvedValue(mockCompany);
      vi.mocked(prisma.session.findMany).mockResolvedValue([]);
      vi.mocked(prisma.session.count).mockResolvedValue(0);

      const request = new NextRequest(
        "http://localhost:3000/api/dashboard/metrics"
      );
      const response = await GET(request);

      expect(response.status).toBe(200);
      const data = await response.json();

      expect(data.metrics.totalSessions).toBe(0);
      expect(data.metrics.avgResponseTime).toBe(0);
      expect(data.metrics.totalTokens).toBe(0);
      expect(data.metrics.languages).toEqual({});
      expect(data.metrics.countries).toEqual({});
      expect(data.metrics.categories).toEqual({});
    });
  });
});
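The `should calculate metrics correctly` case pins down the aggregation arithmetic the metrics route must perform: a mean of `avgResponseTime`, sums of the token fields, and per-value counts for languages, countries, and categories. A minimal sketch of that reduction, using the field names from the mocked sessions (the route's actual implementation is not part of this diff):

```typescript
// Illustrative aggregation over the session rows the tests mock.
interface SessionRow {
  sentiment: string | null;
  avgResponseTime: number;
  tokens: number;
  tokensEur: number;
  language: string;
  country: string;
  category: string;
}

function computeMetrics(sessions: SessionRow[]) {
  // Increment a per-value counter, e.g. { en: 2 } or { SUPPORT: 1, BILLING: 1 }.
  const count = (acc: Record<string, number>, key: string) => {
    acc[key] = (acc[key] ?? 0) + 1;
    return acc;
  };

  const totalSessions = sessions.length;
  return {
    totalSessions,
    // e.g. (2.0 + 3.0) / 2 = 2.5 in the test fixture; 0 when empty
    avgResponseTime: totalSessions
      ? sessions.reduce((sum, s) => sum + s.avgResponseTime, 0) / totalSessions
      : 0,
    totalTokens: sessions.reduce((sum, s) => sum + s.tokens, 0),
    totalTokensEur: sessions.reduce((sum, s) => sum + s.tokensEur, 0),
    sentimentPositiveCount: sessions.filter((s) => s.sentiment === "POSITIVE")
      .length,
    sentimentNegativeCount: sessions.filter((s) => s.sentiment === "NEGATIVE")
      .length,
    languages: sessions.reduce(
      (acc, s) => count(acc, s.language),
      {} as Record<string, number>
    ),
    countries: sessions.reduce(
      (acc, s) => count(acc, s.country),
      {} as Record<string, number>
    ),
    categories: sessions.reduce(
      (acc, s) => count(acc, s.category),
      {} as Record<string, number>
    ),
  };
}
```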
tests/lib/importProcessor.test.ts (new file, 229 lines added)
@@ -0,0 +1,229 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { PrismaClient } from "@prisma/client";
import { processQueuedImports } from "../../lib/importProcessor";
import { ProcessingStatusManager } from "../../lib/processingStatusManager";

vi.mock("../../lib/prisma", () => ({
  prisma: new PrismaClient(),
}));

vi.mock("../../lib/processingStatusManager", () => ({
  ProcessingStatusManager: {
    initializeStage: vi.fn(),
    startStage: vi.fn(),
    completeStage: vi.fn(),
    failStage: vi.fn(),
    skipStage: vi.fn(),
  },
}));

describe("Import Processor", () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  afterEach(() => {
    vi.restoreAllMocks();
  });

  describe("processQueuedImports", () => {
    it("should process imports within specified limit", async () => {
      const mockSessionImports = [
        {
          id: "import1",
          companyId: "company1",
          externalSessionId: "session1",
          startTimeRaw: "2024-01-01 10:00:00",
          endTimeRaw: "2024-01-01 11:00:00",
          ipAddress: "192.168.1.1",
          countryCode: "US",
          language: "en",
          messagesSent: 5,
          sentimentRaw: "positive",
          escalatedRaw: "false",
          forwardedHrRaw: "false",
          fullTranscriptUrl: "http://example.com/transcript1",
          avgResponseTimeSeconds: 2.5,
          tokens: 100,
          tokensEur: 0.002,
          category: "SUPPORT",
          initialMessage: "Hello, I need help",
        },
      ];

      // Mock the prisma queries
      const prismaMock = {
        sessionImport: {
          findMany: vi.fn().mockResolvedValue(mockSessionImports),
        },
        session: {
          create: vi.fn().mockResolvedValue({
            id: "new-session-id",
            companyId: "company1",
            sessionId: "session1",
          }),
        },
      };

      // Replace the prisma import with our mock
      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      const result = await processQueuedImports(10);

      expect(prismaMock.sessionImport.findMany).toHaveBeenCalledWith({
        where: {
          processingStatus: {
            some: {
              stage: "CSV_IMPORT",
              status: "COMPLETED",
            },
            none: {
              stage: "SESSION_CREATION",
              status: "COMPLETED",
            },
          },
        },
        take: 10,
        orderBy: { createdAt: "asc" },
      });

      expect(result.processed).toBe(1);
      expect(result.total).toBe(1);
    });

    it("should handle processing errors gracefully", async () => {
      const mockSessionImports = [
        {
          id: "import1",
          companyId: "company1",
          externalSessionId: "session1",
          startTimeRaw: "invalid-date",
          endTimeRaw: "2024-01-01 11:00:00",
        },
      ];

      const prismaMock = {
        sessionImport: {
          findMany: vi.fn().mockResolvedValue(mockSessionImports),
        },
        session: {
          create: vi.fn().mockRejectedValue(new Error("Database error")),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      const result = await processQueuedImports(10);

      expect(ProcessingStatusManager.failStage).toHaveBeenCalled();
      expect(result.processed).toBe(0);
      expect(result.errors).toBe(1);
    });

    it("should correctly parse sentiment values", async () => {
      const testCases = [
        { sentimentRaw: "positive", expected: "POSITIVE" },
        { sentimentRaw: "negative", expected: "NEGATIVE" },
        { sentimentRaw: "neutral", expected: "NEUTRAL" },
        { sentimentRaw: "unknown", expected: "NEUTRAL" },
        { sentimentRaw: null, expected: "NEUTRAL" },
      ];

      for (const testCase of testCases) {
        const mockImport = {
          id: "import1",
          companyId: "company1",
          externalSessionId: "session1",
          sentimentRaw: testCase.sentimentRaw,
          startTimeRaw: "2024-01-01 10:00:00",
          endTimeRaw: "2024-01-01 11:00:00",
        };

        const prismaMock = {
          sessionImport: {
            findMany: vi.fn().mockResolvedValue([mockImport]),
          },
          session: {
            create: vi.fn().mockImplementation((data) => {
              expect(data.data.sentiment).toBe(testCase.expected);
              return Promise.resolve({ id: "session-id" });
            }),
          },
        };

        vi.doMock("../../lib/prisma", () => ({
          prisma: prismaMock,
        }));

        await processQueuedImports(1);
      }
    });

    it("should handle boolean string conversions", async () => {
      const mockImport = {
        id: "import1",
        companyId: "company1",
        externalSessionId: "session1",
        escalatedRaw: "true",
        forwardedHrRaw: "false",
        startTimeRaw: "2024-01-01 10:00:00",
        endTimeRaw: "2024-01-01 11:00:00",
      };

      const prismaMock = {
        sessionImport: {
          findMany: vi.fn().mockResolvedValue([mockImport]),
        },
        session: {
          create: vi.fn().mockImplementation((data) => {
            expect(data.data.escalated).toBe(true);
            expect(data.data.forwardedHr).toBe(false);
            return Promise.resolve({ id: "session-id" });
          }),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      await processQueuedImports(1);
    });

    it("should validate required fields", async () => {
      const mockImport = {
        id: "import1",
        companyId: null, // Invalid - missing required field
        externalSessionId: "session1",
        startTimeRaw: "2024-01-01 10:00:00",
        endTimeRaw: "2024-01-01 11:00:00",
      };

      const prismaMock = {
        sessionImport: {
          findMany: vi.fn().mockResolvedValue([mockImport]),
        },
        session: {
          create: vi.fn(),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      const result = await processQueuedImports(1);

      expect(ProcessingStatusManager.failStage).toHaveBeenCalledWith(
        expect.any(String),
        "SESSION_CREATION",
        expect.stringContaining("Missing required field")
      );
      expect(result.errors).toBe(1);
    });
  });
});
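The sentiment and boolean cases above imply two small normalization helpers inside the import processor. A sketch of the mapping they would have to implement to satisfy these assertions; the helper names are hypothetical:

```typescript
// Hypothetical helpers matching the behaviour the tests assert.
type Sentiment = "POSITIVE" | "NEGATIVE" | "NEUTRAL";

function parseSentiment(raw: string | null): Sentiment {
  switch (raw?.toLowerCase()) {
    case "positive":
      return "POSITIVE";
    case "negative":
      return "NEGATIVE";
    // "neutral", unrecognized values, and null all fall back to NEUTRAL
    default:
      return "NEUTRAL";
  }
}

function parseBooleanString(raw: string | null): boolean {
  // "true" -> true; "false", null, or anything else -> false
  return raw?.toLowerCase() === "true";
}
```

For example, `parseSentiment("unknown")` yields `"NEUTRAL"` and `parseBooleanString("false")` yields `false`, exactly the pairs the test table enumerates.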
tests/lib/processingScheduler.test.ts (new file, 362 lines added)
@@ -0,0 +1,362 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { PrismaClient } from "@prisma/client";
import { ProcessingScheduler } from "../../lib/processingScheduler";

vi.mock("../../lib/prisma", () => ({
  prisma: new PrismaClient(),
}));

vi.mock("../../lib/env", () => ({
  env: {
    OPENAI_API_KEY: "test-key",
    PROCESSING_BATCH_SIZE: "10",
    PROCESSING_INTERVAL_MS: "5000",
  },
}));

describe("Processing Scheduler", () => {
  let scheduler: ProcessingScheduler;

  beforeEach(() => {
    vi.clearAllMocks();
    scheduler = new ProcessingScheduler();
  });

  afterEach(() => {
    if (scheduler) {
      scheduler.stop();
    }
    vi.restoreAllMocks();
  });

  describe("Scheduler lifecycle", () => {
    it("should initialize with correct default settings", () => {
      expect(scheduler).toBeDefined();
      expect(scheduler.isRunning()).toBe(false);
    });

    it("should start and stop correctly", async () => {
      scheduler.start();
      expect(scheduler.isRunning()).toBe(true);

      scheduler.stop();
      expect(scheduler.isRunning()).toBe(false);
    });

    it("should not start multiple times", () => {
      scheduler.start();
      const firstStart = scheduler.isRunning();

      scheduler.start(); // Should not start again
      const secondStart = scheduler.isRunning();

      expect(firstStart).toBe(true);
      expect(secondStart).toBe(true);

      scheduler.stop();
    });
  });

  describe("Processing pipeline stages", () => {
    it("should process transcript fetch stage", async () => {
      const mockSessions = [
        {
          id: "session1",
          import: {
            fullTranscriptUrl: "http://example.com/transcript1",
            rawTranscriptContent: null,
          },
        },
      ];

      const prismaMock = {
        session: {
          findMany: vi.fn().mockResolvedValue(mockSessions),
          update: vi.fn().mockResolvedValue({}),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      // Mock fetch for transcript content
      global.fetch = vi.fn().mockResolvedValue({
        ok: true,
        text: () => Promise.resolve("Mock transcript content"),
      });

      await scheduler.processTranscriptFetch();

      expect(prismaMock.session.findMany).toHaveBeenCalled();
      expect(global.fetch).toHaveBeenCalledWith(
        "http://example.com/transcript1"
      );
    });

    it("should process AI analysis stage", async () => {
      const mockSessions = [
        {
          id: "session1",
          transcriptContent: "User: Hello\nAssistant: Hi there!",
          sentiment: null,
          summary: null,
        },
      ];

      const prismaMock = {
        session: {
          findMany: vi.fn().mockResolvedValue(mockSessions),
          update: vi.fn().mockResolvedValue({}),
        },
        aIProcessingRequest: {
          create: vi.fn().mockResolvedValue({ id: "request1" }),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      // Mock OpenAI API
      global.fetch = vi.fn().mockResolvedValue({
        ok: true,
        json: () =>
          Promise.resolve({
            choices: [
              {
                message: {
                  content: JSON.stringify({
                    sentiment: "POSITIVE",
                    summary: "Friendly greeting exchange",
                  }),
                },
              },
            ],
            usage: {
              prompt_tokens: 50,
              completion_tokens: 20,
              total_tokens: 70,
            },
          }),
      });

      await scheduler.processAIAnalysis();

      expect(prismaMock.session.findMany).toHaveBeenCalled();
      expect(prismaMock.aIProcessingRequest.create).toHaveBeenCalled();
    });

    it("should handle OpenAI API errors gracefully", async () => {
      const mockSessions = [
        {
          id: "session1",
          transcriptContent: "User: Hello",
        },
      ];

      const prismaMock = {
        session: {
          findMany: vi.fn().mockResolvedValue(mockSessions),
        },
        aIProcessingRequest: {
          create: vi.fn().mockResolvedValue({ id: "request1" }),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      // Mock failed OpenAI API call
      global.fetch = vi.fn().mockResolvedValue({
        ok: false,
        status: 429,
        text: () => Promise.resolve("Rate limit exceeded"),
      });

      await expect(scheduler.processAIAnalysis()).rejects.toThrow();
    });

    it("should process question extraction stage", async () => {
      const mockSessions = [
        {
          id: "session1",
          transcriptContent:
            "User: How do I reset my password?\nAssistant: You can reset it in settings.",
        },
      ];

      const prismaMock = {
        session: {
          findMany: vi.fn().mockResolvedValue(mockSessions),
          update: vi.fn().mockResolvedValue({}),
        },
        question: {
          upsert: vi.fn().mockResolvedValue({}),
        },
        aIProcessingRequest: {
          create: vi.fn().mockResolvedValue({ id: "request1" }),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      // Mock OpenAI API for question extraction
      global.fetch = vi.fn().mockResolvedValue({
        ok: true,
        json: () =>
          Promise.resolve({
            choices: [
              {
                message: {
                  content: JSON.stringify({
                    questions: ["How do I reset my password?"],
                  }),
                },
              },
            ],
            usage: {
              prompt_tokens: 30,
              completion_tokens: 15,
              total_tokens: 45,
            },
          }),
      });

      await scheduler.processQuestionExtraction();

      expect(prismaMock.session.findMany).toHaveBeenCalled();
      expect(prismaMock.question.upsert).toHaveBeenCalled();
    });
  });

  describe("Error handling", () => {
    it("should handle database connection errors", async () => {
      const prismaMock = {
        session: {
          findMany: vi
            .fn()
            .mockRejectedValue(new Error("Database connection failed")),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      await expect(scheduler.processTranscriptFetch()).rejects.toThrow(
        "Database connection failed"
      );
    });

    it("should handle invalid transcript URLs", async () => {
      const mockSessions = [
        {
          id: "session1",
          import: {
            fullTranscriptUrl: "invalid-url",
            rawTranscriptContent: null,
          },
        },
      ];

      const prismaMock = {
        session: {
          findMany: vi.fn().mockResolvedValue(mockSessions),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      global.fetch = vi.fn().mockRejectedValue(new Error("Invalid URL"));

      await expect(scheduler.processTranscriptFetch()).rejects.toThrow();
    });

    it("should handle malformed JSON responses from OpenAI", async () => {
      const mockSessions = [
        {
          id: "session1",
          transcriptContent: "User: Hello",
        },
      ];

      const prismaMock = {
        session: {
          findMany: vi.fn().mockResolvedValue(mockSessions),
        },
        aIProcessingRequest: {
          create: vi.fn().mockResolvedValue({ id: "request1" }),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      global.fetch = vi.fn().mockResolvedValue({
        ok: true,
        json: () =>
          Promise.resolve({
            choices: [
              {
                message: {
                  content: "Invalid JSON response",
                },
              },
            ],
            usage: { total_tokens: 10 },
          }),
      });

      await expect(scheduler.processAIAnalysis()).rejects.toThrow();
    });
  });

  describe("Rate limiting and batching", () => {
    it("should respect batch size limits", async () => {
      const mockSessions = Array.from({ length: 25 }, (_, i) => ({
        id: `session${i}`,
        transcriptContent: `Content ${i}`,
      }));

      const prismaMock = {
        session: {
          findMany: vi.fn().mockResolvedValue(mockSessions),
        },
      };

      vi.doMock("../../lib/prisma", () => ({
        prisma: prismaMock,
      }));

      await scheduler.processAIAnalysis();

      // Should only process up to batch size (10 by default)
      expect(prismaMock.session.findMany).toHaveBeenCalledWith(
        expect.objectContaining({
          take: 10,
        })
      );
    });

    it("should handle rate limiting gracefully", async () => {
      const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {});

      global.fetch = vi.fn().mockResolvedValue({
        ok: false,
        status: 429,
        text: () => Promise.resolve("Rate limit exceeded"),
      });

      await expect(scheduler.processAIAnalysis()).rejects.toThrow();

      consoleSpy.mockRestore();
    });
  });
});
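The lifecycle tests constrain only `start`, `stop`, and `isRunning`. A minimal interval-based skeleton that would pass them, assuming a plain `setInterval` loop; the real `ProcessingScheduler` also drives the pipeline stages and reads its batch size and interval from `lib/env`:

```typescript
// Skeleton consistent with the lifecycle assertions above; not the
// repository's implementation.
class ProcessingSchedulerSketch {
  private timer: ReturnType<typeof setInterval> | null = null;

  constructor(private intervalMs = 5000) {}

  isRunning(): boolean {
    return this.timer !== null;
  }

  start(): void {
    if (this.timer) return; // satisfies "should not start multiple times"
    this.timer = setInterval(() => {
      // In the real scheduler: processTranscriptFetch, processAIAnalysis,
      // processQuestionExtraction, each bounded by PROCESSING_BATCH_SIZE.
    }, this.intervalMs);
  }

  stop(): void {
    if (this.timer) {
      clearInterval(this.timer);
      this.timer = null;
    }
  }
}
```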
tests/lib/transcriptParser.test.ts (new file, 193 lines added)
@@ -0,0 +1,193 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { parseTranscriptContent } from "../../lib/transcriptParser";

describe("Transcript Parser", () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  describe("parseTranscriptContent", () => {
    it("should parse basic transcript with timestamps", () => {
      const transcript = `
[10:00:00] User: Hello, I need help with my account
[10:00:15] Assistant: I'd be happy to help you with your account. What specific issue are you experiencing?
[10:00:45] User: I can't log in to my dashboard
[10:01:00] Assistant: Let me help you troubleshoot that login issue.
      `.trim();

      const messages = parseTranscriptContent(transcript);

      expect(messages).toHaveLength(4);

      expect(messages[0]).toEqual({
        timestamp: new Date("1970-01-01T10:00:00.000Z"),
        role: "User",
        content: "Hello, I need help with my account",
        order: 0,
      });

      expect(messages[1]).toEqual({
        timestamp: new Date("1970-01-01T10:00:15.000Z"),
        role: "Assistant",
        content:
          "I'd be happy to help you with your account. What specific issue are you experiencing?",
        order: 1,
      });

      expect(messages[3].order).toBe(3);
    });

    it("should handle transcript without timestamps", () => {
      const transcript = `
User: Hello there
Assistant: Hi! How can I help you today?
User: I need support
Assistant: I'm here to help.
      `.trim();

      const messages = parseTranscriptContent(transcript);

      expect(messages).toHaveLength(4);
      expect(messages[0].timestamp).toBeNull();
      expect(messages[0].role).toBe("User");
      expect(messages[0].content).toBe("Hello there");
      expect(messages[0].order).toBe(0);
    });

    it("should handle mixed timestamp formats", () => {
      const transcript = `
[2024-01-01 10:00:00] User: Hello
10:00:15 Assistant: Hi there
[10:00:30] User: How are you?
Assistant: I'm doing well, thanks!
      `.trim();

      const messages = parseTranscriptContent(transcript);

      expect(messages).toHaveLength(4);
      expect(messages[0].timestamp).toEqual(
        new Date("2024-01-01T10:00:00.000Z")
      );
      expect(messages[1].timestamp).toEqual(
        new Date("1970-01-01T10:00:15.000Z")
      );
      expect(messages[2].timestamp).toEqual(
        new Date("1970-01-01T10:00:30.000Z")
      );
      expect(messages[3].timestamp).toBeNull();
    });

    it("should handle various role formats", () => {
      const transcript = `
Customer: I have a problem
Support Agent: What can I help with?
USER: My account is locked
ASSISTANT: Let me check that for you
System: Connection established
      `.trim();

      const messages = parseTranscriptContent(transcript);

      expect(messages).toHaveLength(5);
      expect(messages[0].role).toBe("User"); // Customer -> User
      expect(messages[1].role).toBe("Assistant"); // Support Agent -> Assistant
      expect(messages[2].role).toBe("User"); // USER -> User
      expect(messages[3].role).toBe("Assistant"); // ASSISTANT -> Assistant
      expect(messages[4].role).toBe("System"); // System -> System
    });

    it("should handle malformed transcript gracefully", () => {
      const transcript = `
This is not a proper transcript format
No colons here
: Empty role
User:
: Empty content
      `.trim();

      const messages = parseTranscriptContent(transcript);

      // Should still try to parse what it can
      expect(messages.length).toBeGreaterThanOrEqual(0);

      // Check that all messages have required fields
      messages.forEach((message, index) => {
        expect(message).toHaveProperty("role");
        expect(message).toHaveProperty("content");
        expect(message).toHaveProperty("order", index);
        expect(message).toHaveProperty("timestamp");
      });
    });

    it("should preserve message order correctly", () => {
      const transcript = `
User: First message
Assistant: Second message
User: Third message
Assistant: Fourth message
User: Fifth message
      `.trim();

      const messages = parseTranscriptContent(transcript);

      expect(messages).toHaveLength(5);
      messages.forEach((message, index) => {
        expect(message.order).toBe(index);
      });
    });

    it("should handle empty or whitespace-only transcript", () => {
      expect(parseTranscriptContent("")).toEqual([]);
      expect(parseTranscriptContent(" \n\n ")).toEqual([]);
      expect(parseTranscriptContent("\t\r\n")).toEqual([]);
    });

    it("should handle special characters in content", () => {
      const transcript = `
User: Hello! How are you? 😊
Assistant: I'm great! Thanks for asking. 🤖
User: Can you help with this: https://example.com/issue?id=123&type=urgent
Assistant: Absolutely! I'll check that URL for you.
      `.trim();

      const messages = parseTranscriptContent(transcript);

      expect(messages).toHaveLength(4);
      expect(messages[0].content).toBe("Hello! How are you? 😊");
      expect(messages[2].content).toBe(
        "Can you help with this: https://example.com/issue?id=123&type=urgent"
      );
    });

    it("should normalize role names consistently", () => {
      const transcript = `
customer: Hello
support: Hi there
CUSTOMER: How are you?
SUPPORT: Good thanks
Client: Great
Agent: Wonderful
      `.trim();

      const messages = parseTranscriptContent(transcript);

      expect(messages[0].role).toBe("User");
      expect(messages[1].role).toBe("Assistant");
      expect(messages[2].role).toBe("User");
      expect(messages[3].role).toBe("Assistant");
      expect(messages[4].role).toBe("User");
      expect(messages[5].role).toBe("Assistant");
    });

    it("should handle long content without truncation", () => {
      const longContent = "A".repeat(5000);
      const transcript = `User: ${longContent}`;

      const messages = parseTranscriptContent(transcript);

      expect(messages).toHaveLength(1);
      expect(messages[0].content).toBe(longContent);
      expect(messages[0].content.length).toBe(5000);
    });
  });
});
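For orientation, a simplified sketch of the per-line parsing these tests describe. It covers only the bracketed `[HH:MM:SS]` prefix and role normalization; the real `parseTranscriptContent` also accepts bare and date-prefixed timestamps, as the mixed-format test shows:

```typescript
// Simplified sketch; not the repository's implementation.
interface ParsedMessage {
  timestamp: Date | null;
  role: string;
  content: string;
  order: number;
}

// Role aliases the tests exercise: Customer/Client -> User,
// Support/Agent -> Assistant, System stays System.
const ROLE_MAP: Record<string, string> = {
  user: "User",
  customer: "User",
  client: "User",
  assistant: "Assistant",
  support: "Assistant",
  "support agent": "Assistant",
  agent: "Assistant",
  system: "System",
};

function parseLine(line: string, order: number): ParsedMessage | null {
  // Optional "[HH:MM:SS]" prefix, then "Role: content".
  const match = line.match(/^(?:\[(\d{2}:\d{2}:\d{2})\]\s*)?([^:]+):\s*(.+)$/);
  if (!match) return null;
  const [, time, rawRole, content] = match;
  return {
    // Time-only stamps anchor to the Unix epoch date, as the tests expect.
    timestamp: time ? new Date(`1970-01-01T${time}.000Z`) : null,
    role: ROLE_MAP[rawRole.trim().toLowerCase()] ?? rawRole.trim(),
    content: content.trim(),
    order,
  };
}
```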
@@ -25,7 +25,14 @@

    "strictNullChecks": true,
    "target": "es5"
  },
  "exclude": [
    "node_modules",
    "check-*.ts",
    "migrate-*.ts",
    "fix-*.ts",
    "debug-*.ts",
    "test-*.ts"
  ],
  "include": [
    "next-env.d.ts",
    "**/*.ts",