refactor: comprehensive code quality improvements and dev environment fixes

- Convert ProcessingStatusManager from static class to individual functions
- Refactor processSingleImport function to reduce cognitive complexity
- Fix unused parameters in database-pool.ts event handlers
- Add missing DATABASE_URL configuration to env.ts
- Add pg package and @types/pg dependencies for PostgreSQL support
- Fix tsx command execution by updating package.json scripts to use pnpm exec
- Apply biome formatting fixes for import organization
This commit is contained in:
2025-06-29 21:56:29 +02:00
parent 8fd774422c
commit 5042a6c016
8 changed files with 3836 additions and 6878 deletions

View File

@ -33,19 +33,19 @@ const createConnectionPool = () => {
}); });
// Connection pool event handlers // Connection pool event handlers
pool.on("connect", (_client) => { pool.on("connect", () => {
console.log( console.log(
`Database connection established. Active connections: ${pool.totalCount}` `Database connection established. Active connections: ${pool.totalCount}`
); );
}); });
pool.on("acquire", (_client) => { pool.on("acquire", () => {
console.log( console.log(
`Connection acquired from pool. Waiting: ${pool.waitingCount}, Idle: ${pool.idleCount}` `Connection acquired from pool. Waiting: ${pool.waitingCount}, Idle: ${pool.idleCount}`
); );
}); });
pool.on("release", (_client) => { pool.on("release", () => {
console.log( console.log(
`Connection released to pool. Active: ${pool.totalCount - pool.idleCount}, Idle: ${pool.idleCount}` `Connection released to pool. Active: ${pool.totalCount - pool.idleCount}, Idle: ${pool.idleCount}`
); );
@ -55,7 +55,7 @@ const createConnectionPool = () => {
console.error("Database pool error:", err); console.error("Database pool error:", err);
}); });
pool.on("remove", (_client) => { pool.on("remove", () => {
console.log( console.log(
`Connection removed from pool. Total connections: ${pool.totalCount}` `Connection removed from pool. Total connections: ${pool.totalCount}`
); );

View File

@ -12,7 +12,7 @@ export interface RetryConfig {
export const DEFAULT_RETRY_CONFIG: RetryConfig = { export const DEFAULT_RETRY_CONFIG: RetryConfig = {
maxRetries: 3, maxRetries: 3,
initialDelay: 1000, // 1 second initialDelay: 1000, // 1 second
maxDelay: 10000, // 10 seconds maxDelay: 10000, // 10 seconds
backoffMultiplier: 2, backoffMultiplier: 2,
}; };
@ -21,86 +21,90 @@ export function isRetryableError(error: unknown): boolean {
if (error instanceof PrismaClientKnownRequestError) { if (error instanceof PrismaClientKnownRequestError) {
// Connection errors that are worth retrying // Connection errors that are worth retrying
const retryableCodes = [ const retryableCodes = [
'P1001', // Can't reach database server "P1001", // Can't reach database server
'P1002', // Database server was reached but timed out "P1002", // Database server was reached but timed out
'P1008', // Operations timed out "P1008", // Operations timed out
'P1017', // Server has closed the connection "P1017", // Server has closed the connection
]; ];
return retryableCodes.includes(error.code); return retryableCodes.includes(error.code);
} }
// Check for network-related errors // Check for network-related errors
if (error instanceof Error) { if (error instanceof Error) {
const retryableMessages = [ const retryableMessages = [
'ECONNREFUSED', "ECONNREFUSED",
'ECONNRESET', "ECONNRESET",
'ETIMEDOUT', "ETIMEDOUT",
'ENOTFOUND', "ENOTFOUND",
'EAI_AGAIN', "EAI_AGAIN",
'Can\'t reach database server', "Can't reach database server",
'Connection terminated', "Connection terminated",
'Connection lost', "Connection lost",
]; ];
return retryableMessages.some(msg => return retryableMessages.some((msg) => error.message.includes(msg));
error.message.includes(msg)
);
} }
return false; return false;
} }
// Calculate delay with exponential backoff // Calculate delay with exponential backoff
export function calculateDelay( export function calculateDelay(
attempt: number, attempt: number,
config: RetryConfig = DEFAULT_RETRY_CONFIG config: RetryConfig = DEFAULT_RETRY_CONFIG
): number { ): number {
const delay = config.initialDelay * Math.pow(config.backoffMultiplier, attempt - 1); const delay = config.initialDelay * config.backoffMultiplier ** (attempt - 1);
return Math.min(delay, config.maxDelay); return Math.min(delay, config.maxDelay);
} }
// Sleep utility // Sleep utility
export function sleep(ms: number): Promise<void> { export function sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms)); return new Promise((resolve) => setTimeout(resolve, ms));
} }
// Retry wrapper for database operations // Retry wrapper for database operations
export async function withRetry<T>( export async function withRetry<T>(
operation: () => Promise<T>, operation: () => Promise<T>,
config: RetryConfig = DEFAULT_RETRY_CONFIG, config: RetryConfig = DEFAULT_RETRY_CONFIG,
context: string = 'database operation' context = "database operation"
): Promise<T> { ): Promise<T> {
let lastError: unknown; let lastError: unknown;
for (let attempt = 1; attempt <= config.maxRetries; attempt++) { for (let attempt = 1; attempt <= config.maxRetries; attempt++) {
try { try {
return await operation(); return await operation();
} catch (error) { } catch (error) {
lastError = error; lastError = error;
// Don't retry if error is not retryable // Don't retry if error is not retryable
if (!isRetryableError(error)) { if (!isRetryableError(error)) {
console.error(`[${context}] Non-retryable error on attempt ${attempt}:`, error); console.error(
`[${context}] Non-retryable error on attempt ${attempt}:`,
error
);
throw error; throw error;
} }
// Don't retry on last attempt // Don't retry on last attempt
if (attempt === config.maxRetries) { if (attempt === config.maxRetries) {
console.error(`[${context}] Max retries (${config.maxRetries}) exceeded:`, error); console.error(
`[${context}] Max retries (${config.maxRetries}) exceeded:`,
error
);
break; break;
} }
const delay = calculateDelay(attempt, config); const delay = calculateDelay(attempt, config);
console.warn( console.warn(
`[${context}] Attempt ${attempt}/${config.maxRetries} failed, retrying in ${delay}ms:`, `[${context}] Attempt ${attempt}/${config.maxRetries} failed, retrying in ${delay}ms:`,
error instanceof Error ? error.message : error error instanceof Error ? error.message : error
); );
await sleep(delay); await sleep(delay);
} }
} }
throw lastError; throw lastError;
} }
@ -110,21 +114,21 @@ export async function checkDatabaseHealthWithRetry(
config: Partial<RetryConfig> = {} config: Partial<RetryConfig> = {}
): Promise<boolean> { ): Promise<boolean> {
const retryConfig = { ...DEFAULT_RETRY_CONFIG, ...config }; const retryConfig = { ...DEFAULT_RETRY_CONFIG, ...config };
try { try {
return await withRetry( return await withRetry(
async () => { async () => {
const isHealthy = await checkFunction(); const isHealthy = await checkFunction();
if (!isHealthy) { if (!isHealthy) {
throw new Error('Database health check failed'); throw new Error("Database health check failed");
} }
return true; return true;
}, },
retryConfig, retryConfig,
'database health check' "database health check"
); );
} catch (error) { } catch (error) {
console.error('Database health check failed after retries:', error); console.error("Database health check failed after retries:", error);
return false; return false;
} }
} }

View File

@ -103,6 +103,10 @@ export const env = {
5 5
), ),
// Database Configuration
DATABASE_URL: parseEnvValue(process.env.DATABASE_URL) || "",
DATABASE_URL_DIRECT: parseEnvValue(process.env.DATABASE_URL_DIRECT) || "",
// Database Connection Pooling // Database Connection Pooling
DATABASE_CONNECTION_LIMIT: parseIntWithDefault( DATABASE_CONNECTION_LIMIT: parseIntWithDefault(
process.env.DATABASE_CONNECTION_LIMIT, process.env.DATABASE_CONNECTION_LIMIT,
@ -123,6 +127,10 @@ export const env = {
export function validateEnv(): { valid: boolean; errors: string[] } { export function validateEnv(): { valid: boolean; errors: string[] } {
const errors: string[] = []; const errors: string[] = [];
if (!env.DATABASE_URL) {
errors.push("DATABASE_URL is required");
}
if (!env.NEXTAUTH_SECRET) { if (!env.NEXTAUTH_SECRET) {
errors.push("NEXTAUTH_SECRET is required"); errors.push("NEXTAUTH_SECRET is required");
} }

View File

@ -1,14 +1,20 @@
// SessionImport to Session processor // SessionImport to Session processor
import { ProcessingStage, SentimentCategory } from "@prisma/client"; import { ProcessingStage, SentimentCategory } from "@prisma/client";
import cron from "node-cron"; import cron from "node-cron";
import { withRetry } from "./database-retry.js";
import { getSchedulerConfig } from "./env"; import { getSchedulerConfig } from "./env";
import { prisma } from "./prisma.js"; import { prisma } from "./prisma.js";
import { ProcessingStatusManager } from "./processingStatusManager"; import {
completeStage,
failStage,
initializeSession,
skipStage,
startStage,
} from "./processingStatusManager.js";
import { import {
fetchTranscriptContent, fetchTranscriptContent,
isValidTranscriptUrl, isValidTranscriptUrl,
} from "./transcriptFetcher"; } from "./transcriptFetcher";
import { withRetry, isRetryableError } from "./database-retry.js";
interface ImportRecord { interface ImportRecord {
id: string; id: string;
@ -167,6 +173,160 @@ async function parseTranscriptIntoMessages(
); );
} }
/**
 * Create or update a Session record from ImportRecord
 */
async function createSession(importRecord: ImportRecord): Promise<string> {
  const startTime = parseEuropeanDate(importRecord.startTimeRaw);
  const endTime = parseEuropeanDate(importRecord.endTimeRaw);

  console.log(
    `[Import Processor] Processing ${importRecord.externalSessionId}: ${startTime.toISOString()} - ${endTime.toISOString()}`
  );

  // Fields written identically on both the update and create paths of the upsert.
  const sessionFields = {
    startTime,
    endTime,
    ipAddress: importRecord.ipAddress,
    country: importRecord.countryCode,
    fullTranscriptUrl: importRecord.fullTranscriptUrl,
    avgResponseTime: importRecord.avgResponseTimeSeconds,
    initialMsg: importRecord.initialMessage,
  };

  // Keyed on importId so re-processing the same import updates in place.
  const session = await prisma.session.upsert({
    where: { importId: importRecord.id },
    update: sessionFields,
    create: {
      companyId: importRecord.companyId,
      importId: importRecord.id,
      ...sessionFields,
    },
  });

  return session.id;
}
/**
 * Handle transcript fetching for a session
 */
async function handleTranscriptFetching(
  sessionId: string,
  importRecord: ImportRecord
): Promise<string | null> {
  let transcriptContent = importRecord.rawTranscriptContent;
  const transcriptUrl = importRecord.fullTranscriptUrl;

  if (!transcriptContent && transcriptUrl && isValidTranscriptUrl(transcriptUrl)) {
    // Content missing but a usable URL exists: fetch it now.
    await startStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH);
    console.log(
      `[Import Processor] Fetching transcript for ${importRecord.externalSessionId}...`
    );

    // CSV credentials double as transcript-endpoint credentials.
    const credentials = await prisma.company.findUnique({
      where: { id: importRecord.companyId },
      select: { csvUsername: true, csvPassword: true },
    });

    const fetchResult = await fetchTranscriptContent(
      transcriptUrl,
      credentials?.csvUsername || undefined,
      credentials?.csvPassword || undefined
    );

    if (!fetchResult.success) {
      console.log(
        `[Import Processor] ⚠️ Failed to fetch transcript for ${importRecord.externalSessionId}: ${fetchResult.error}`
      );
      await failStage(
        sessionId,
        ProcessingStage.TRANSCRIPT_FETCH,
        fetchResult.error || "Unknown error"
      );
    } else {
      transcriptContent = fetchResult.content;
      console.log(
        `[Import Processor] ✓ Fetched transcript for ${importRecord.externalSessionId} (${transcriptContent?.length} chars)`
      );
      // Persist the fetched content so later reruns skip the network call.
      await prisma.sessionImport.update({
        where: { id: importRecord.id },
        data: { rawTranscriptContent: transcriptContent },
      });
      await completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, {
        contentLength: transcriptContent?.length || 0,
        url: importRecord.fullTranscriptUrl,
      });
    }
  } else if (!transcriptUrl) {
    // Nothing to fetch — record the stage as intentionally skipped.
    await skipStage(
      sessionId,
      ProcessingStage.TRANSCRIPT_FETCH,
      "No transcript URL provided"
    );
  } else {
    // Content already present (or URL invalid); mark the stage done as-is.
    await completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, {
      contentLength: transcriptContent?.length || 0,
      source: "already_fetched",
    });
  }

  return transcriptContent;
}
/**
 * Handle session creation (message parsing)
 */
async function handleSessionCreation(
  sessionId: string,
  transcriptContent: string | null
): Promise<void> {
  await startStage(sessionId, ProcessingStage.SESSION_CREATION);

  // Only parse when there is transcript text to work with.
  if (transcriptContent) {
    await parseTranscriptIntoMessages(sessionId, transcriptContent);
  }

  const transcriptLength = transcriptContent?.length || 0;
  await completeStage(sessionId, ProcessingStage.SESSION_CREATION, {
    hasTranscript: !!transcriptContent,
    transcriptLength,
  });
}
/**
* Handle errors and mark appropriate stage as failed
*/
async function handleProcessingError(
sessionId: string | null,
error: unknown
): Promise<void> {
if (!sessionId) return;
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes("transcript") || errorMessage.includes("fetch")) {
await failStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, errorMessage);
} else if (
errorMessage.includes("message") ||
errorMessage.includes("parse")
) {
await failStage(sessionId, ProcessingStage.SESSION_CREATION, errorMessage);
} else {
await failStage(sessionId, ProcessingStage.CSV_IMPORT, errorMessage);
}
}
/** /**
* Process a single SessionImport record into a Session record * Process a single SessionImport record into a Session record
* Uses new unified processing status tracking * Uses new unified processing status tracking
@ -177,189 +337,22 @@ async function processSingleImport(
let sessionId: string | null = null; let sessionId: string | null = null;
try { try {
// Parse dates using European format parser sessionId = await createSession(importRecord);
const startTime = parseEuropeanDate(importRecord.startTimeRaw); await initializeSession(sessionId);
const endTime = parseEuropeanDate(importRecord.endTimeRaw); await completeStage(sessionId, ProcessingStage.CSV_IMPORT);
console.log( const transcriptContent = await handleTranscriptFetching(
`[Import Processor] Processing ${importRecord.externalSessionId}: ${startTime.toISOString()} - ${endTime.toISOString()}`
);
// Create or update Session record with MINIMAL processing
const session = await prisma.session.upsert({
where: {
importId: importRecord.id,
},
update: {
startTime,
endTime,
// Direct copies (minimal processing)
ipAddress: importRecord.ipAddress,
country: importRecord.countryCode, // Keep as country code
fullTranscriptUrl: importRecord.fullTranscriptUrl,
avgResponseTime: importRecord.avgResponseTimeSeconds,
initialMsg: importRecord.initialMessage,
},
create: {
companyId: importRecord.companyId,
importId: importRecord.id,
startTime,
endTime,
// Direct copies (minimal processing)
ipAddress: importRecord.ipAddress,
country: importRecord.countryCode, // Keep as country code
fullTranscriptUrl: importRecord.fullTranscriptUrl,
avgResponseTime: importRecord.avgResponseTimeSeconds,
initialMsg: importRecord.initialMessage,
},
});
sessionId = session.id;
// Initialize processing status for this session
await ProcessingStatusManager.initializeSession(sessionId);
// Mark CSV_IMPORT as completed
await ProcessingStatusManager.completeStage(
sessionId, sessionId,
ProcessingStage.CSV_IMPORT importRecord
);
// Handle transcript fetching
let transcriptContent = importRecord.rawTranscriptContent;
if (
!transcriptContent &&
importRecord.fullTranscriptUrl &&
isValidTranscriptUrl(importRecord.fullTranscriptUrl)
) {
await ProcessingStatusManager.startStage(
sessionId,
ProcessingStage.TRANSCRIPT_FETCH
);
console.log(
`[Import Processor] Fetching transcript for ${importRecord.externalSessionId}...`
);
// Get company credentials for transcript fetching
const company = await prisma.company.findUnique({
where: { id: importRecord.companyId },
select: { csvUsername: true, csvPassword: true },
});
const transcriptResult = await fetchTranscriptContent(
importRecord.fullTranscriptUrl,
company?.csvUsername || undefined,
company?.csvPassword || undefined
);
if (transcriptResult.success) {
transcriptContent = transcriptResult.content;
console.log(
`[Import Processor] ✓ Fetched transcript for ${importRecord.externalSessionId} (${transcriptContent?.length} chars)`
);
// Update the import record with the fetched content
await prisma.sessionImport.update({
where: { id: importRecord.id },
data: { rawTranscriptContent: transcriptContent },
});
await ProcessingStatusManager.completeStage(
sessionId,
ProcessingStage.TRANSCRIPT_FETCH,
{
contentLength: transcriptContent?.length || 0,
url: importRecord.fullTranscriptUrl,
}
);
} else {
console.log(
`[Import Processor] ⚠️ Failed to fetch transcript for ${importRecord.externalSessionId}: ${transcriptResult.error}`
);
await ProcessingStatusManager.failStage(
sessionId,
ProcessingStage.TRANSCRIPT_FETCH,
transcriptResult.error || "Unknown error"
);
}
} else if (!importRecord.fullTranscriptUrl) {
// No transcript URL available - skip this stage
await ProcessingStatusManager.skipStage(
sessionId,
ProcessingStage.TRANSCRIPT_FETCH,
"No transcript URL provided"
);
} else {
// Transcript already fetched
await ProcessingStatusManager.completeStage(
sessionId,
ProcessingStage.TRANSCRIPT_FETCH,
{
contentLength: transcriptContent?.length || 0,
source: "already_fetched",
}
);
}
// Handle session creation (parse messages)
await ProcessingStatusManager.startStage(
sessionId,
ProcessingStage.SESSION_CREATION
);
if (transcriptContent) {
await parseTranscriptIntoMessages(sessionId, transcriptContent);
}
await ProcessingStatusManager.completeStage(
sessionId,
ProcessingStage.SESSION_CREATION,
{
hasTranscript: !!transcriptContent,
transcriptLength: transcriptContent?.length || 0,
}
); );
await handleSessionCreation(sessionId, transcriptContent);
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error); await handleProcessingError(sessionId, error);
// Mark the current stage as failed if we have a sessionId
if (sessionId) {
// Determine which stage failed based on the error
if (
errorMessage.includes("transcript") ||
errorMessage.includes("fetch")
) {
await ProcessingStatusManager.failStage(
sessionId,
ProcessingStage.TRANSCRIPT_FETCH,
errorMessage
);
} else if (
errorMessage.includes("message") ||
errorMessage.includes("parse")
) {
await ProcessingStatusManager.failStage(
sessionId,
ProcessingStage.SESSION_CREATION,
errorMessage
);
} else {
// General failure - mark CSV_IMPORT as failed
await ProcessingStatusManager.failStage(
sessionId,
ProcessingStage.CSV_IMPORT,
errorMessage
);
}
}
return { return {
success: false, success: false,
error: errorMessage, error: error instanceof Error ? error.message : String(error),
}; };
} }
} }

View File

@ -7,10 +7,15 @@ import {
} from "@prisma/client"; } from "@prisma/client";
import cron from "node-cron"; import cron from "node-cron";
import fetch from "node-fetch"; import fetch from "node-fetch";
import { withRetry } from "./database-retry.js";
import { prisma } from "./prisma.js"; import { prisma } from "./prisma.js";
import { ProcessingStatusManager } from "./processingStatusManager"; import {
completeStage,
failStage,
getSessionsNeedingProcessing,
startStage,
} from "./processingStatusManager.js";
import { getSchedulerConfig } from "./schedulerConfig"; import { getSchedulerConfig } from "./schedulerConfig";
import { withRetry, isRetryableError } from "./database-retry.js";
const OPENAI_API_KEY = process.env.OPENAI_API_KEY; const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions"; const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
@ -516,10 +521,7 @@ async function processSingleSession(
try { try {
// Mark AI analysis as started // Mark AI analysis as started
await ProcessingStatusManager.startStage( await startStage(session.id, ProcessingStage.AI_ANALYSIS);
session.id,
ProcessingStage.AI_ANALYSIS
);
// Convert messages back to transcript format for OpenAI processing // Convert messages back to transcript format for OpenAI processing
const transcript = session.messages const transcript = session.messages
@ -569,34 +571,23 @@ async function processSingleSession(
}); });
// Mark AI analysis as completed // Mark AI analysis as completed
await ProcessingStatusManager.completeStage( await completeStage(session.id, ProcessingStage.AI_ANALYSIS, {
session.id, language: processedData.language,
ProcessingStage.AI_ANALYSIS, sentiment: processedData.sentiment,
{ category: processedData.category,
language: processedData.language, questionsCount: processedData.questions.length,
sentiment: processedData.sentiment, });
category: processedData.category,
questionsCount: processedData.questions.length,
}
);
// Start question extraction stage // Start question extraction stage
await ProcessingStatusManager.startStage( await startStage(session.id, ProcessingStage.QUESTION_EXTRACTION);
session.id,
ProcessingStage.QUESTION_EXTRACTION
);
// Process questions into separate tables // Process questions into separate tables
await processQuestions(session.id, processedData.questions); await processQuestions(session.id, processedData.questions);
// Mark question extraction as completed // Mark question extraction as completed
await ProcessingStatusManager.completeStage( await completeStage(session.id, ProcessingStage.QUESTION_EXTRACTION, {
session.id, questionsProcessed: processedData.questions.length,
ProcessingStage.QUESTION_EXTRACTION, });
{
questionsProcessed: processedData.questions.length,
}
);
return { return {
sessionId: session.id, sessionId: session.id,
@ -604,7 +595,7 @@ async function processSingleSession(
}; };
} catch (error) { } catch (error) {
// Mark AI analysis as failed // Mark AI analysis as failed
await ProcessingStatusManager.failStage( await failStage(
session.id, session.id,
ProcessingStage.AI_ANALYSIS, ProcessingStage.AI_ANALYSIS,
error instanceof Error ? error.message : String(error) error instanceof Error ? error.message : String(error)
@ -688,11 +679,10 @@ async function processUnprocessedSessionsInternal(
maxConcurrency = 5 maxConcurrency = 5
): Promise<void> { ): Promise<void> {
// Get sessions that need AI processing using the new status system // Get sessions that need AI processing using the new status system
const sessionsNeedingAI = const sessionsNeedingAI = await getSessionsNeedingProcessing(
await ProcessingStatusManager.getSessionsNeedingProcessing( ProcessingStage.AI_ANALYSIS,
ProcessingStage.AI_ANALYSIS, batchSize || 50
batchSize || 50 );
);
if (sessionsNeedingAI.length === 0) { if (sessionsNeedingAI.length === 0) {
process.stdout.write( process.stdout.write(

View File

@ -12,347 +12,339 @@ interface WhereClause {
} }
/** /**
* Centralized processing status management * Initialize processing status for a session with all stages set to PENDING
*/ */
export class ProcessingStatusManager { export async function initializeSession(sessionId: string): Promise<void> {
/** const stages = [
* Initialize processing status for a session with all stages set to PENDING ProcessingStage.CSV_IMPORT,
*/ ProcessingStage.TRANSCRIPT_FETCH,
static async initializeSession(sessionId: string): Promise<void> { ProcessingStage.SESSION_CREATION,
const stages = [ ProcessingStage.AI_ANALYSIS,
ProcessingStage.CSV_IMPORT, ProcessingStage.QUESTION_EXTRACTION,
ProcessingStage.TRANSCRIPT_FETCH, ];
ProcessingStage.SESSION_CREATION,
ProcessingStage.AI_ANALYSIS,
ProcessingStage.QUESTION_EXTRACTION,
];
// Create all processing status records for this session // Create all processing status records for this session
await prisma.sessionProcessingStatus.createMany({ await prisma.sessionProcessingStatus.createMany({
data: stages.map((stage) => ({ data: stages.map((stage) => ({
sessionId, sessionId,
stage, stage,
status: ProcessingStatus.PENDING, status: ProcessingStatus.PENDING,
})), })),
skipDuplicates: true, // In case some already exist skipDuplicates: true, // In case some already exist
}); });
} }
/** /**
* Start a processing stage * Start a processing stage
*/ */
static async startStage( export async function startStage(
sessionId: string, sessionId: string,
stage: ProcessingStage, stage: ProcessingStage,
metadata?: ProcessingMetadata metadata?: ProcessingMetadata
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.upsert({ await prisma.sessionProcessingStatus.upsert({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
}, },
update: { update: {
status: ProcessingStatus.IN_PROGRESS, status: ProcessingStatus.IN_PROGRESS,
startedAt: new Date(), startedAt: new Date(),
errorMessage: null, errorMessage: null,
metadata: metadata || null, metadata: metadata || null,
}, },
create: { create: {
sessionId, sessionId,
stage, stage,
status: ProcessingStatus.IN_PROGRESS, status: ProcessingStatus.IN_PROGRESS,
startedAt: new Date(), startedAt: new Date(),
metadata: metadata || null, metadata: metadata || null,
}, },
}); });
} }
/** /**
* Complete a processing stage successfully * Complete a processing stage successfully
*/ */
static async completeStage( export async function completeStage(
sessionId: string, sessionId: string,
stage: ProcessingStage, stage: ProcessingStage,
metadata?: ProcessingMetadata metadata?: ProcessingMetadata
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.upsert({ await prisma.sessionProcessingStatus.upsert({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
}, },
update: { update: {
status: ProcessingStatus.COMPLETED, status: ProcessingStatus.COMPLETED,
completedAt: new Date(), completedAt: new Date(),
errorMessage: null, errorMessage: null,
metadata: metadata || null, metadata: metadata || null,
}, },
create: { create: {
sessionId, sessionId,
stage, stage,
status: ProcessingStatus.COMPLETED, status: ProcessingStatus.COMPLETED,
startedAt: new Date(), startedAt: new Date(),
completedAt: new Date(), completedAt: new Date(),
metadata: metadata || null, metadata: metadata || null,
}, },
}); });
} }
/** /**
* Mark a processing stage as failed * Mark a processing stage as failed
*/ */
static async failStage( export async function failStage(
sessionId: string, sessionId: string,
stage: ProcessingStage, stage: ProcessingStage,
errorMessage: string, errorMessage: string,
metadata?: ProcessingMetadata metadata?: ProcessingMetadata
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.upsert({ await prisma.sessionProcessingStatus.upsert({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
}, },
update: { update: {
status: ProcessingStatus.FAILED, status: ProcessingStatus.FAILED,
completedAt: new Date(), completedAt: new Date(),
errorMessage, errorMessage,
retryCount: { increment: 1 }, retryCount: { increment: 1 },
metadata: metadata || null, metadata: metadata || null,
}, },
create: { create: {
sessionId, sessionId,
stage, stage,
status: ProcessingStatus.FAILED, status: ProcessingStatus.FAILED,
startedAt: new Date(), startedAt: new Date(),
completedAt: new Date(), completedAt: new Date(),
errorMessage, errorMessage,
retryCount: 1, retryCount: 1,
metadata: metadata || null, metadata: metadata || null,
}, },
}); });
} }
/** /**
* Skip a processing stage (e.g., no transcript URL available) * Skip a processing stage (e.g., no transcript URL available)
*/ */
static async skipStage( export async function skipStage(
sessionId: string, sessionId: string,
stage: ProcessingStage, stage: ProcessingStage,
reason: string reason: string
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.upsert({ await prisma.sessionProcessingStatus.upsert({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
}, },
update: { update: {
status: ProcessingStatus.SKIPPED, status: ProcessingStatus.SKIPPED,
completedAt: new Date(), completedAt: new Date(),
errorMessage: reason, errorMessage: reason,
}, },
create: { create: {
sessionId, sessionId,
stage, stage,
status: ProcessingStatus.SKIPPED, status: ProcessingStatus.SKIPPED,
startedAt: new Date(), startedAt: new Date(),
completedAt: new Date(), completedAt: new Date(),
errorMessage: reason, errorMessage: reason,
}, },
}); });
} }
/** /**
* Get processing status for a specific session * Get processing status for a specific session
*/ */
static async getSessionStatus(sessionId: string) { export async function getSessionStatus(sessionId: string) {
return await prisma.sessionProcessingStatus.findMany({ return await prisma.sessionProcessingStatus.findMany({
where: { sessionId }, where: { sessionId },
orderBy: { stage: "asc" }, orderBy: { stage: "asc" },
}); });
} }
/** /**
* Get sessions that need processing for a specific stage * Get sessions that need processing for a specific stage
*/ */
static async getSessionsNeedingProcessing( export async function getSessionsNeedingProcessing(
stage: ProcessingStage, stage: ProcessingStage,
limit = 50 limit = 50
) { ) {
return await prisma.sessionProcessingStatus.findMany({ return await prisma.sessionProcessingStatus.findMany({
where: { where: {
stage, stage,
status: ProcessingStatus.PENDING, status: ProcessingStatus.PENDING,
session: { session: {
company: { company: {
status: "ACTIVE", // Only process sessions from active companies status: "ACTIVE", // Only process sessions from active companies
}, },
}, },
}, },
include: { include: {
session: { session: {
select: { select: {
id: true, id: true,
companyId: true, companyId: true,
importId: true, importId: true,
startTime: true, startTime: true,
endTime: true, endTime: true,
fullTranscriptUrl: true, fullTranscriptUrl: true,
import: import:
stage === ProcessingStage.TRANSCRIPT_FETCH stage === ProcessingStage.TRANSCRIPT_FETCH
? { ? {
select: { select: {
id: true, id: true,
fullTranscriptUrl: true, fullTranscriptUrl: true,
externalSessionId: true, externalSessionId: true,
}, },
} }
: false, : false,
company: { company: {
select: { select: {
id: true, id: true,
csvUsername: true, csvUsername: true,
csvPassword: true, csvPassword: true,
}, },
}, },
}, },
}, },
}, },
take: limit, take: limit,
orderBy: { session: { createdAt: "asc" } }, orderBy: { session: { createdAt: "asc" } },
}); });
} }
/** /**
* Get pipeline status overview * Get pipeline status overview
*/ */
static async getPipelineStatus() { export async function getPipelineStatus() {
// Get counts by stage and status // Get counts by stage and status
const statusCounts = await prisma.sessionProcessingStatus.groupBy({ const statusCounts = await prisma.sessionProcessingStatus.groupBy({
by: ["stage", "status"], by: ["stage", "status"],
_count: { id: true }, _count: { id: true },
}); });
// Get total sessions // Get total sessions
const totalSessions = await prisma.session.count(); const totalSessions = await prisma.session.count();
// Organize the data // Organize the data
const pipeline: Record<string, Record<string, number>> = {}; const pipeline: Record<string, Record<string, number>> = {};
for (const { stage, status, _count } of statusCounts) { for (const { stage, status, _count } of statusCounts) {
if (!pipeline[stage]) { if (!pipeline[stage]) {
pipeline[stage] = {}; pipeline[stage] = {};
} }
pipeline[stage][status] = _count.id; pipeline[stage][status] = _count.id;
} }
return { return {
totalSessions, totalSessions,
pipeline, pipeline,
}; };
} }
/** /**
* Get sessions with failed processing * Get sessions with failed processing
*/ */
static async getFailedSessions(stage?: ProcessingStage) { export async function getFailedSessions(stage?: ProcessingStage) {
const where: WhereClause = { const where: WhereClause = {
status: ProcessingStatus.FAILED, status: ProcessingStatus.FAILED,
}; };
if (stage) { if (stage) {
where.stage = stage; where.stage = stage;
} }
return await prisma.sessionProcessingStatus.findMany({ return await prisma.sessionProcessingStatus.findMany({
where, where,
select: { select: {
id: true, id: true,
sessionId: true, sessionId: true,
stage: true, stage: true,
status: true, status: true,
startedAt: true, startedAt: true,
completedAt: true, completedAt: true,
errorMessage: true, errorMessage: true,
retryCount: true, retryCount: true,
session: { session: {
select: { select: {
id: true, id: true,
companyId: true, companyId: true,
startTime: true, startTime: true,
import: { import: {
select: { select: {
id: true, id: true,
externalSessionId: true, externalSessionId: true,
}, },
}, },
}, },
}, },
}, },
orderBy: { completedAt: "desc" }, orderBy: { completedAt: "desc" },
take: 100, // Limit failed sessions to prevent overfetching take: 100, // Limit failed sessions to prevent overfetching
}); });
} }
/** /**
* Reset a failed stage for retry * Reset a failed stage for retry
*/ */
static async resetStageForRetry( export async function resetStageForRetry(
sessionId: string, sessionId: string,
stage: ProcessingStage stage: ProcessingStage
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.update({ await prisma.sessionProcessingStatus.update({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
}, },
data: { data: {
status: ProcessingStatus.PENDING, status: ProcessingStatus.PENDING,
startedAt: null, startedAt: null,
completedAt: null, completedAt: null,
errorMessage: null, errorMessage: null,
}, },
}); });
} }
/** /**
* Check if a session has completed a specific stage * Check if a session has completed a specific stage
*/ */
static async hasCompletedStage( export async function hasCompletedStage(
sessionId: string, sessionId: string,
stage: ProcessingStage stage: ProcessingStage
): Promise<boolean> { ): Promise<boolean> {
const status = await prisma.sessionProcessingStatus.findUnique({ const status = await prisma.sessionProcessingStatus.findUnique({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
}, },
}); });
return status?.status === ProcessingStatus.COMPLETED; return status?.status === ProcessingStatus.COMPLETED;
} }
/** /**
* Check if a session is ready for a specific stage (previous stages completed) * Check if a session is ready for a specific stage (previous stages completed)
*/ */
static async isReadyForStage( export async function isReadyForStage(
sessionId: string, sessionId: string,
stage: ProcessingStage stage: ProcessingStage
): Promise<boolean> { ): Promise<boolean> {
const stageOrder = [ const stageOrder = [
ProcessingStage.CSV_IMPORT, ProcessingStage.CSV_IMPORT,
ProcessingStage.TRANSCRIPT_FETCH, ProcessingStage.TRANSCRIPT_FETCH,
ProcessingStage.SESSION_CREATION, ProcessingStage.SESSION_CREATION,
ProcessingStage.AI_ANALYSIS, ProcessingStage.AI_ANALYSIS,
ProcessingStage.QUESTION_EXTRACTION, ProcessingStage.QUESTION_EXTRACTION,
]; ];
const currentStageIndex = stageOrder.indexOf(stage); const currentStageIndex = stageOrder.indexOf(stage);
if (currentStageIndex === 0) return true; // First stage is always ready if (currentStageIndex === 0) return true; // First stage is always ready
// Check if all previous stages are completed // Check if all previous stages are completed
const previousStages = stageOrder.slice(0, currentStageIndex); const previousStages = stageOrder.slice(0, currentStageIndex);
for (const prevStage of previousStages) { for (const prevStage of previousStages) {
const isCompleted = await ProcessingStatusManager.hasCompletedStage( const isCompleted = await hasCompletedStage(sessionId, prevStage);
sessionId, if (!isCompleted) return false;
prevStage }
);
if (!isCompleted) return false; return true;
}
return true;
}
} }

View File

@ -5,7 +5,7 @@
"private": true, "private": true,
"scripts": { "scripts": {
"build": "next build", "build": "next build",
"dev": "tsx server.ts", "dev": "pnpm exec tsx server.ts",
"dev:next-only": "next dev --turbopack", "dev:next-only": "next dev --turbopack",
"format": "npx prettier --write .", "format": "npx prettier --write .",
"format:check": "npx prettier --check .", "format:check": "npx prettier --check .",
@ -17,12 +17,12 @@
"biome:lint": "biome lint .", "biome:lint": "biome lint .",
"prisma:generate": "prisma generate", "prisma:generate": "prisma generate",
"prisma:migrate": "prisma migrate dev", "prisma:migrate": "prisma migrate dev",
"prisma:seed": "tsx prisma/seed.ts", "prisma:seed": "pnpm exec tsx prisma/seed.ts",
"prisma:seed:platform": "tsx prisma/seed-platform.ts", "prisma:seed:platform": "pnpm exec tsx prisma/seed-platform.ts",
"prisma:push": "prisma db push", "prisma:push": "prisma db push",
"prisma:push:force": "prisma db push --force-reset", "prisma:push:force": "prisma db push --force-reset",
"prisma:studio": "prisma studio", "prisma:studio": "prisma studio",
"db:check": "tsx scripts/check-database-config.ts", "db:check": "pnpm exec tsx scripts/check-database-config.ts",
"start": "node server.mjs", "start": "node server.mjs",
"test": "concurrently 'vitest run' 'playwright test'", "test": "concurrently 'vitest run' 'playwright test'",
"test:coverage": "concurrently \"vitest run --coverage\" \"echo 'To add playwright coverage thingy'\"", "test:coverage": "concurrently \"vitest run --coverage\" \"echo 'To add playwright coverage thingy'\"",
@ -79,6 +79,7 @@
"next-themes": "^0.4.6", "next-themes": "^0.4.6",
"node-cron": "^4.1.1", "node-cron": "^4.1.1",
"node-fetch": "^3.3.2", "node-fetch": "^3.3.2",
"pg": "^8.16.3",
"react": "^19.1.0", "react": "^19.1.0",
"react-day-picker": "^9.7.0", "react-day-picker": "^9.7.0",
"react-dom": "^19.1.0", "react-dom": "^19.1.0",
@ -101,6 +102,7 @@
"@testing-library/react": "^16.3.0", "@testing-library/react": "^16.3.0",
"@types/node": "^24.0.6", "@types/node": "^24.0.6",
"@types/node-cron": "^3.0.11", "@types/node-cron": "^3.0.11",
"@types/pg": "^8.15.4",
"@types/react": "^19.1.8", "@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6", "@types/react-dom": "^19.1.6",
"@typescript-eslint/eslint-plugin": "^8.35.0", "@typescript-eslint/eslint-plugin": "^8.35.0",

9533
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff