Mirror of https://github.com/kjanat/livedash-node.git (synced 2026-01-16 12:32:10 +01:00)
feat: implement comprehensive email system with rate limiting and extensive test suite
- Add robust email service with rate limiting and configuration management
- Implement shared rate limiter utility for consistent API protection (see the sketch below)
- Create comprehensive test suite for core processing pipeline
- Add API tests for dashboard metrics and authentication routes
- Fix date range picker infinite loop issue
- Improve session lookup in refresh sessions API
- Refactor session API routing with better code organization
- Update processing pipeline status monitoring
- Clean up leftover files and improve code formatting
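The shared rate limiter utility mentioned in the commit message is not part of the hunk shown below, so the following is only a minimal sketch of what such a utility might look like: an in-memory sliding-window limiter that an email service or API route could share. Every name here (SlidingWindowRateLimiter, check, emailLimiter) is an assumption for illustration, not the repository's actual API.

// Sketch only: the real rate limiter utility added by this commit is not shown in this diff,
// so the class and method names below are hypothetical.
interface RateLimitResult {
  allowed: boolean;
  remaining: number;
}

class SlidingWindowRateLimiter {
  private hits = new Map<string, number[]>();

  constructor(
    private readonly limit: number, // max requests per window
    private readonly windowMs: number // window length in milliseconds
  ) {}

  check(key: string): RateLimitResult {
    const now = Date.now();
    // Keep only timestamps still inside the window for this key.
    const recent = (this.hits.get(key) ?? []).filter(
      (t) => now - t < this.windowMs
    );
    if (recent.length >= this.limit) {
      this.hits.set(key, recent);
      return { allowed: false, remaining: 0 };
    }
    recent.push(now);
    this.hits.set(key, recent);
    return { allowed: true, remaining: this.limit - recent.length };
  }
}

// Example usage inside a hypothetical email-sending route:
const emailLimiter = new SlidingWindowRateLimiter(5, 60_000); // at most 5 sends per minute per key
if (!emailLimiter.check("company-123").allowed) {
  throw new Error("Rate limit exceeded for email sending");
}

A sliding window avoids the burst-at-boundary problem of fixed windows; whether the actual utility in this commit works that way is not visible in the diff below.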
@@ -3,11 +3,217 @@ import { ProcessingStatusManager } from "./lib/processingStatusManager";

const prisma = new PrismaClient();

/**
 * Migrates CSV import stage for a session
 */
async function migrateCsvImportStage(
  sessionId: string,
  importId: string | null
) {
  await ProcessingStatusManager.completeStage(
    sessionId,
    ProcessingStage.CSV_IMPORT,
    {
      migratedFrom: "existing_session",
      importId,
    }
  );
}

/**
 * Migrates transcript fetch stage for a session
 */
async function migrateTranscriptFetchStage(
  sessionId: string,
  sessionImport: any,
  externalSessionId?: string
) {
  if (sessionImport?.rawTranscriptContent) {
    await ProcessingStatusManager.completeStage(
      sessionId,
      ProcessingStage.TRANSCRIPT_FETCH,
      {
        migratedFrom: "existing_transcript",
        contentLength: sessionImport.rawTranscriptContent.length,
      }
    );
  } else if (!sessionImport?.fullTranscriptUrl) {
    await ProcessingStatusManager.skipStage(
      sessionId,
      ProcessingStage.TRANSCRIPT_FETCH,
      "No transcript URL in original import"
    );
  } else {
    console.log(` - Transcript fetch pending for ${externalSessionId}`);
  }
}

/**
 * Migrates session creation stage for a session
 */
async function migrateSessionCreationStage(
  sessionId: string,
  messages: any[],
  sessionImport: any,
  externalSessionId?: string
) {
  if (messages.length > 0) {
    await ProcessingStatusManager.completeStage(
      sessionId,
      ProcessingStage.SESSION_CREATION,
      {
        migratedFrom: "existing_messages",
        messageCount: messages.length,
      }
    );
  } else if (sessionImport?.rawTranscriptContent) {
    console.log(
      ` - Session creation pending for ${externalSessionId} (has transcript but no messages)`
    );
  } else if (!sessionImport?.fullTranscriptUrl) {
    await ProcessingStatusManager.skipStage(
      sessionId,
      ProcessingStage.SESSION_CREATION,
      "No transcript content available"
    );
  }
}

/**
 * Checks if session has AI analysis data
 */
function hasAIAnalysisData(session: any): boolean {
  return !!(
    session.summary ||
    session.sentiment ||
    session.category ||
    session.language
  );
}

/**
 * Migrates AI analysis stage for a session
 */
async function migrateAIAnalysisStage(
  sessionId: string,
  session: any,
  messages: any[],
  externalSessionId?: string
) {
  const hasAIAnalysis = hasAIAnalysisData(session);

  if (hasAIAnalysis) {
    await ProcessingStatusManager.completeStage(
      sessionId,
      ProcessingStage.AI_ANALYSIS,
      {
        migratedFrom: "existing_ai_analysis",
        hasSummary: !!session.summary,
        hasSentiment: !!session.sentiment,
        hasCategory: !!session.category,
        hasLanguage: !!session.language,
      }
    );
  } else if (messages.length > 0) {
    console.log(` - AI analysis pending for ${externalSessionId}`);
  }

  return hasAIAnalysis;
}

/**
 * Migrates question extraction stage for a session
 */
async function migrateQuestionExtractionStage(
  sessionId: string,
  sessionQuestions: any[],
  hasAIAnalysis: boolean,
  externalSessionId?: string
) {
  if (sessionQuestions.length > 0) {
    await ProcessingStatusManager.completeStage(
      sessionId,
      ProcessingStage.QUESTION_EXTRACTION,
      {
        migratedFrom: "existing_questions",
        questionCount: sessionQuestions.length,
      }
    );
  } else if (hasAIAnalysis) {
    console.log(` - Question extraction pending for ${externalSessionId}`);
  }
}

/**
 * Migrates a single session to the refactored processing system
 */
async function migrateSession(session: any) {
  const externalSessionId = session.import?.externalSessionId;
  console.log(`Migrating session ${externalSessionId || session.id}...`);

  await ProcessingStatusManager.initializeSession(session.id);

  // Migrate each stage
  await migrateCsvImportStage(session.id, session.importId);
  await migrateTranscriptFetchStage(
    session.id,
    session.import,
    externalSessionId
  );
  await migrateSessionCreationStage(
    session.id,
    session.messages,
    session.import,
    externalSessionId
  );

  const hasAIAnalysis = await migrateAIAnalysisStage(
    session.id,
    session,
    session.messages,
    externalSessionId
  );

  await migrateQuestionExtractionStage(
    session.id,
    session.sessionQuestions,
    hasAIAnalysis,
    externalSessionId
  );
}

/**
 * Displays the final migration status
 */
async function displayFinalStatus() {
  console.log("\n=== MIGRATION COMPLETE - FINAL STATUS ===");
  const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();

  const stages = [
    "CSV_IMPORT",
    "TRANSCRIPT_FETCH",
    "SESSION_CREATION",
    "AI_ANALYSIS",
    "QUESTION_EXTRACTION",
  ];

  for (const stage of stages) {
    const stageData = pipelineStatus.pipeline[stage] || {};
    const pending = stageData.PENDING || 0;
    const completed = stageData.COMPLETED || 0;
    const skipped = stageData.SKIPPED || 0;

    console.log(
      `${stage}: ${completed} completed, ${pending} pending, ${skipped} skipped`
    );
  }
}

// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: Main orchestration function - complexity is needed for migration coordination
async function migrateToRefactoredSystem() {
  try {
    console.log("=== MIGRATING TO REFACTORED PROCESSING SYSTEM ===\n");

    // Get all existing sessions
    const sessions = await prisma.session.findMany({
      include: {
        import: true,
@@ -20,123 +226,8 @@ async function migrateToRefactoredSystem() {
    console.log(`Found ${sessions.length} sessions to migrate...\n`);

    let migratedCount = 0;

    for (const session of sessions) {
      console.log(
        `Migrating session ${session.import?.externalSessionId || session.id}...`
      );

      // Initialize processing status for this session
      await ProcessingStatusManager.initializeSession(session.id);

      // Determine the current state of each stage based on existing data

      // 1. CSV_IMPORT - Always completed if session exists
      await ProcessingStatusManager.completeStage(
        session.id,
        ProcessingStage.CSV_IMPORT,
        {
          migratedFrom: "existing_session",
          importId: session.importId,
        }
      );

      // 2. TRANSCRIPT_FETCH - Check if transcript content exists
      if (session.import?.rawTranscriptContent) {
        await ProcessingStatusManager.completeStage(
          session.id,
          ProcessingStage.TRANSCRIPT_FETCH,
          {
            migratedFrom: "existing_transcript",
            contentLength: session.import.rawTranscriptContent.length,
          }
        );
      } else if (!session.import?.fullTranscriptUrl) {
        // No transcript URL - skip this stage
        await ProcessingStatusManager.skipStage(
          session.id,
          ProcessingStage.TRANSCRIPT_FETCH,
          "No transcript URL in original import"
        );
      } else {
        // Has URL but no content - mark as pending for retry
        console.log(
          ` - Transcript fetch pending for ${session.import.externalSessionId}`
        );
      }

      // 3. SESSION_CREATION - Check if messages exist
      if (session.messages.length > 0) {
        await ProcessingStatusManager.completeStage(
          session.id,
          ProcessingStage.SESSION_CREATION,
          {
            migratedFrom: "existing_messages",
            messageCount: session.messages.length,
          }
        );
      } else if (session.import?.rawTranscriptContent) {
        // Has transcript but no messages - needs reprocessing
        console.log(
          ` - Session creation pending for ${session.import.externalSessionId} (has transcript but no messages)`
        );
      } else {
        // No transcript content - skip or mark as pending based on transcript fetch status
        if (!session.import?.fullTranscriptUrl) {
          await ProcessingStatusManager.skipStage(
            session.id,
            ProcessingStage.SESSION_CREATION,
            "No transcript content available"
          );
        }
      }

      // 4. AI_ANALYSIS - Check if AI fields are populated
      const hasAIAnalysis =
        session.summary ||
        session.sentiment ||
        session.category ||
        session.language;
      if (hasAIAnalysis) {
        await ProcessingStatusManager.completeStage(
          session.id,
          ProcessingStage.AI_ANALYSIS,
          {
            migratedFrom: "existing_ai_analysis",
            hasSummary: !!session.summary,
            hasSentiment: !!session.sentiment,
            hasCategory: !!session.category,
            hasLanguage: !!session.language,
          }
        );
      } else {
        // No AI analysis - mark as pending if session creation is complete
        if (session.messages.length > 0) {
          console.log(
            ` - AI analysis pending for ${session.import?.externalSessionId}`
          );
        }
      }

      // 5. QUESTION_EXTRACTION - Check if questions exist
      if (session.sessionQuestions.length > 0) {
        await ProcessingStatusManager.completeStage(
          session.id,
          ProcessingStage.QUESTION_EXTRACTION,
          {
            migratedFrom: "existing_questions",
            questionCount: session.sessionQuestions.length,
          }
        );
      } else {
        // No questions - mark as pending if AI analysis is complete
        if (hasAIAnalysis) {
          console.log(
            ` - Question extraction pending for ${session.import?.externalSessionId}`
          );
        }
      }

      await migrateSession(session);
      migratedCount++;

      if (migratedCount % 10 === 0) {
@@ -150,28 +241,7 @@ async function migrateToRefactoredSystem() {
      `\n✓ Successfully migrated ${migratedCount} sessions to the new processing system`
    );

    // Show final status
    console.log("\n=== MIGRATION COMPLETE - FINAL STATUS ===");
    const pipelineStatus = await ProcessingStatusManager.getPipelineStatus();

    const stages = [
      "CSV_IMPORT",
      "TRANSCRIPT_FETCH",
      "SESSION_CREATION",
      "AI_ANALYSIS",
      "QUESTION_EXTRACTION",
    ];

    for (const stage of stages) {
      const stageData = pipelineStatus.pipeline[stage] || {};
      const pending = stageData.PENDING || 0;
      const completed = stageData.COMPLETED || 0;
      const skipped = stageData.SKIPPED || 0;

      console.log(
        `${stage}: ${completed} completed, ${pending} pending, ${skipped} skipped`
      );
    }
    await displayFinalStatus();
  } catch (error) {
    console.error("Error migrating to refactored system:", error);
  } finally {