refactor: comprehensive code quality improvements and dev environment fixes

- Convert ProcessingStatusManager from static class to individual functions
- Refactor processSingleImport function to reduce cognitive complexity
- Fix unused parameters in database-pool.ts event handlers
- Add missing DATABASE_URL configuration to env.ts
- Add pg package and @types/pg dependencies for PostgreSQL support
- Fix tsx command execution by updating package.json scripts to use pnpm exec
- Apply biome formatting fixes for import organization
This commit is contained in:
2025-06-29 21:56:29 +02:00
parent 8fd774422c
commit 5042a6c016
8 changed files with 3836 additions and 6878 deletions

View File

@@ -33,19 +33,19 @@ const createConnectionPool = () => {
}); });
// Connection pool event handlers // Connection pool event handlers
pool.on("connect", (_client) => { pool.on("connect", () => {
console.log( console.log(
`Database connection established. Active connections: ${pool.totalCount}` `Database connection established. Active connections: ${pool.totalCount}`
); );
}); });
pool.on("acquire", (_client) => { pool.on("acquire", () => {
console.log( console.log(
`Connection acquired from pool. Waiting: ${pool.waitingCount}, Idle: ${pool.idleCount}` `Connection acquired from pool. Waiting: ${pool.waitingCount}, Idle: ${pool.idleCount}`
); );
}); });
pool.on("release", (_client) => { pool.on("release", () => {
console.log( console.log(
`Connection released to pool. Active: ${pool.totalCount - pool.idleCount}, Idle: ${pool.idleCount}` `Connection released to pool. Active: ${pool.totalCount - pool.idleCount}, Idle: ${pool.idleCount}`
); );
@@ -55,7 +55,7 @@ const createConnectionPool = () => {
console.error("Database pool error:", err); console.error("Database pool error:", err);
}); });
pool.on("remove", (_client) => { pool.on("remove", () => {
console.log( console.log(
`Connection removed from pool. Total connections: ${pool.totalCount}` `Connection removed from pool. Total connections: ${pool.totalCount}`
); );

View File

@@ -21,10 +21,10 @@ export function isRetryableError(error: unknown): boolean {
if (error instanceof PrismaClientKnownRequestError) { if (error instanceof PrismaClientKnownRequestError) {
// Connection errors that are worth retrying // Connection errors that are worth retrying
const retryableCodes = [ const retryableCodes = [
'P1001', // Can't reach database server "P1001", // Can't reach database server
'P1002', // Database server was reached but timed out "P1002", // Database server was reached but timed out
'P1008', // Operations timed out "P1008", // Operations timed out
'P1017', // Server has closed the connection "P1017", // Server has closed the connection
]; ];
return retryableCodes.includes(error.code); return retryableCodes.includes(error.code);
@@ -33,19 +33,17 @@ export function isRetryableError(error: unknown): boolean {
// Check for network-related errors // Check for network-related errors
if (error instanceof Error) { if (error instanceof Error) {
const retryableMessages = [ const retryableMessages = [
'ECONNREFUSED', "ECONNREFUSED",
'ECONNRESET', "ECONNRESET",
'ETIMEDOUT', "ETIMEDOUT",
'ENOTFOUND', "ENOTFOUND",
'EAI_AGAIN', "EAI_AGAIN",
'Can\'t reach database server', "Can't reach database server",
'Connection terminated', "Connection terminated",
'Connection lost', "Connection lost",
]; ];
return retryableMessages.some(msg => return retryableMessages.some((msg) => error.message.includes(msg));
error.message.includes(msg)
);
} }
return false; return false;
@@ -56,20 +54,20 @@ export function calculateDelay(
attempt: number, attempt: number,
config: RetryConfig = DEFAULT_RETRY_CONFIG config: RetryConfig = DEFAULT_RETRY_CONFIG
): number { ): number {
const delay = config.initialDelay * Math.pow(config.backoffMultiplier, attempt - 1); const delay = config.initialDelay * config.backoffMultiplier ** (attempt - 1);
return Math.min(delay, config.maxDelay); return Math.min(delay, config.maxDelay);
} }
// Sleep utility // Sleep utility
export function sleep(ms: number): Promise<void> { export function sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms)); return new Promise((resolve) => setTimeout(resolve, ms));
} }
// Retry wrapper for database operations // Retry wrapper for database operations
export async function withRetry<T>( export async function withRetry<T>(
operation: () => Promise<T>, operation: () => Promise<T>,
config: RetryConfig = DEFAULT_RETRY_CONFIG, config: RetryConfig = DEFAULT_RETRY_CONFIG,
context: string = 'database operation' context = "database operation"
): Promise<T> { ): Promise<T> {
let lastError: unknown; let lastError: unknown;
@@ -81,13 +79,19 @@ export async function withRetry<T>(
// Don't retry if error is not retryable // Don't retry if error is not retryable
if (!isRetryableError(error)) { if (!isRetryableError(error)) {
console.error(`[${context}] Non-retryable error on attempt ${attempt}:`, error); console.error(
`[${context}] Non-retryable error on attempt ${attempt}:`,
error
);
throw error; throw error;
} }
// Don't retry on last attempt // Don't retry on last attempt
if (attempt === config.maxRetries) { if (attempt === config.maxRetries) {
console.error(`[${context}] Max retries (${config.maxRetries}) exceeded:`, error); console.error(
`[${context}] Max retries (${config.maxRetries}) exceeded:`,
error
);
break; break;
} }
@@ -116,15 +120,15 @@ export async function checkDatabaseHealthWithRetry(
async () => { async () => {
const isHealthy = await checkFunction(); const isHealthy = await checkFunction();
if (!isHealthy) { if (!isHealthy) {
throw new Error('Database health check failed'); throw new Error("Database health check failed");
} }
return true; return true;
}, },
retryConfig, retryConfig,
'database health check' "database health check"
); );
} catch (error) { } catch (error) {
console.error('Database health check failed after retries:', error); console.error("Database health check failed after retries:", error);
return false; return false;
} }
} }

View File

@@ -103,6 +103,10 @@ export const env = {
5 5
), ),
// Database Configuration
DATABASE_URL: parseEnvValue(process.env.DATABASE_URL) || "",
DATABASE_URL_DIRECT: parseEnvValue(process.env.DATABASE_URL_DIRECT) || "",
// Database Connection Pooling // Database Connection Pooling
DATABASE_CONNECTION_LIMIT: parseIntWithDefault( DATABASE_CONNECTION_LIMIT: parseIntWithDefault(
process.env.DATABASE_CONNECTION_LIMIT, process.env.DATABASE_CONNECTION_LIMIT,
@@ -123,6 +127,10 @@ export const env = {
export function validateEnv(): { valid: boolean; errors: string[] } { export function validateEnv(): { valid: boolean; errors: string[] } {
const errors: string[] = []; const errors: string[] = [];
if (!env.DATABASE_URL) {
errors.push("DATABASE_URL is required");
}
if (!env.NEXTAUTH_SECRET) { if (!env.NEXTAUTH_SECRET) {
errors.push("NEXTAUTH_SECRET is required"); errors.push("NEXTAUTH_SECRET is required");
} }

View File

@@ -1,14 +1,20 @@
// SessionImport to Session processor // SessionImport to Session processor
import { ProcessingStage, SentimentCategory } from "@prisma/client"; import { ProcessingStage, SentimentCategory } from "@prisma/client";
import cron from "node-cron"; import cron from "node-cron";
import { withRetry } from "./database-retry.js";
import { getSchedulerConfig } from "./env"; import { getSchedulerConfig } from "./env";
import { prisma } from "./prisma.js"; import { prisma } from "./prisma.js";
import { ProcessingStatusManager } from "./processingStatusManager"; import {
completeStage,
failStage,
initializeSession,
skipStage,
startStage,
} from "./processingStatusManager.js";
import { import {
fetchTranscriptContent, fetchTranscriptContent,
isValidTranscriptUrl, isValidTranscriptUrl,
} from "./transcriptFetcher"; } from "./transcriptFetcher";
import { withRetry, isRetryableError } from "./database-retry.js";
interface ImportRecord { interface ImportRecord {
id: string; id: string;
@@ -168,16 +174,9 @@ async function parseTranscriptIntoMessages(
} }
/** /**
* Process a single SessionImport record into a Session record * Create or update a Session record from ImportRecord
* Uses new unified processing status tracking
*/ */
async function processSingleImport( async function createSession(importRecord: ImportRecord): Promise<string> {
importRecord: ImportRecord
): Promise<{ success: boolean; error?: string }> {
let sessionId: string | null = null;
try {
// Parse dates using European format parser
const startTime = parseEuropeanDate(importRecord.startTimeRaw); const startTime = parseEuropeanDate(importRecord.startTimeRaw);
const endTime = parseEuropeanDate(importRecord.endTimeRaw); const endTime = parseEuropeanDate(importRecord.endTimeRaw);
@@ -185,7 +184,6 @@ async function processSingleImport(
`[Import Processor] Processing ${importRecord.externalSessionId}: ${startTime.toISOString()} - ${endTime.toISOString()}` `[Import Processor] Processing ${importRecord.externalSessionId}: ${startTime.toISOString()} - ${endTime.toISOString()}`
); );
// Create or update Session record with MINIMAL processing
const session = await prisma.session.upsert({ const session = await prisma.session.upsert({
where: { where: {
importId: importRecord.id, importId: importRecord.id,
@@ -193,9 +191,8 @@ async function processSingleImport(
update: { update: {
startTime, startTime,
endTime, endTime,
// Direct copies (minimal processing)
ipAddress: importRecord.ipAddress, ipAddress: importRecord.ipAddress,
country: importRecord.countryCode, // Keep as country code country: importRecord.countryCode,
fullTranscriptUrl: importRecord.fullTranscriptUrl, fullTranscriptUrl: importRecord.fullTranscriptUrl,
avgResponseTime: importRecord.avgResponseTimeSeconds, avgResponseTime: importRecord.avgResponseTimeSeconds,
initialMsg: importRecord.initialMessage, initialMsg: importRecord.initialMessage,
@@ -205,27 +202,24 @@ async function processSingleImport(
importId: importRecord.id, importId: importRecord.id,
startTime, startTime,
endTime, endTime,
// Direct copies (minimal processing)
ipAddress: importRecord.ipAddress, ipAddress: importRecord.ipAddress,
country: importRecord.countryCode, // Keep as country code country: importRecord.countryCode,
fullTranscriptUrl: importRecord.fullTranscriptUrl, fullTranscriptUrl: importRecord.fullTranscriptUrl,
avgResponseTime: importRecord.avgResponseTimeSeconds, avgResponseTime: importRecord.avgResponseTimeSeconds,
initialMsg: importRecord.initialMessage, initialMsg: importRecord.initialMessage,
}, },
}); });
sessionId = session.id; return session.id;
}
// Initialize processing status for this session /**
await ProcessingStatusManager.initializeSession(sessionId); * Handle transcript fetching for a session
*/
// Mark CSV_IMPORT as completed async function handleTranscriptFetching(
await ProcessingStatusManager.completeStage( sessionId: string,
sessionId, importRecord: ImportRecord
ProcessingStage.CSV_IMPORT ): Promise<string | null> {
);
// Handle transcript fetching
let transcriptContent = importRecord.rawTranscriptContent; let transcriptContent = importRecord.rawTranscriptContent;
if ( if (
@@ -233,16 +227,12 @@ async function processSingleImport(
importRecord.fullTranscriptUrl && importRecord.fullTranscriptUrl &&
isValidTranscriptUrl(importRecord.fullTranscriptUrl) isValidTranscriptUrl(importRecord.fullTranscriptUrl)
) { ) {
await ProcessingStatusManager.startStage( await startStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH);
sessionId,
ProcessingStage.TRANSCRIPT_FETCH
);
console.log( console.log(
`[Import Processor] Fetching transcript for ${importRecord.externalSessionId}...` `[Import Processor] Fetching transcript for ${importRecord.externalSessionId}...`
); );
// Get company credentials for transcript fetching
const company = await prisma.company.findUnique({ const company = await prisma.company.findUnique({
where: { id: importRecord.companyId }, where: { id: importRecord.companyId },
select: { csvUsername: true, csvPassword: true }, select: { csvUsername: true, csvPassword: true },
@@ -260,106 +250,109 @@ async function processSingleImport(
`[Import Processor] ✓ Fetched transcript for ${importRecord.externalSessionId} (${transcriptContent?.length} chars)` `[Import Processor] ✓ Fetched transcript for ${importRecord.externalSessionId} (${transcriptContent?.length} chars)`
); );
// Update the import record with the fetched content
await prisma.sessionImport.update({ await prisma.sessionImport.update({
where: { id: importRecord.id }, where: { id: importRecord.id },
data: { rawTranscriptContent: transcriptContent }, data: { rawTranscriptContent: transcriptContent },
}); });
await ProcessingStatusManager.completeStage( await completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, {
sessionId,
ProcessingStage.TRANSCRIPT_FETCH,
{
contentLength: transcriptContent?.length || 0, contentLength: transcriptContent?.length || 0,
url: importRecord.fullTranscriptUrl, url: importRecord.fullTranscriptUrl,
} });
);
} else { } else {
console.log( console.log(
`[Import Processor] ⚠️ Failed to fetch transcript for ${importRecord.externalSessionId}: ${transcriptResult.error}` `[Import Processor] ⚠️ Failed to fetch transcript for ${importRecord.externalSessionId}: ${transcriptResult.error}`
); );
await ProcessingStatusManager.failStage( await failStage(
sessionId, sessionId,
ProcessingStage.TRANSCRIPT_FETCH, ProcessingStage.TRANSCRIPT_FETCH,
transcriptResult.error || "Unknown error" transcriptResult.error || "Unknown error"
); );
} }
} else if (!importRecord.fullTranscriptUrl) { } else if (!importRecord.fullTranscriptUrl) {
// No transcript URL available - skip this stage await skipStage(
await ProcessingStatusManager.skipStage(
sessionId, sessionId,
ProcessingStage.TRANSCRIPT_FETCH, ProcessingStage.TRANSCRIPT_FETCH,
"No transcript URL provided" "No transcript URL provided"
); );
} else { } else {
// Transcript already fetched await completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, {
await ProcessingStatusManager.completeStage(
sessionId,
ProcessingStage.TRANSCRIPT_FETCH,
{
contentLength: transcriptContent?.length || 0, contentLength: transcriptContent?.length || 0,
source: "already_fetched", source: "already_fetched",
} });
);
} }
// Handle session creation (parse messages) return transcriptContent;
await ProcessingStatusManager.startStage( }
sessionId,
ProcessingStage.SESSION_CREATION /**
); * Handle session creation (message parsing)
*/
async function handleSessionCreation(
sessionId: string,
transcriptContent: string | null
): Promise<void> {
await startStage(sessionId, ProcessingStage.SESSION_CREATION);
if (transcriptContent) { if (transcriptContent) {
await parseTranscriptIntoMessages(sessionId, transcriptContent); await parseTranscriptIntoMessages(sessionId, transcriptContent);
} }
await ProcessingStatusManager.completeStage( await completeStage(sessionId, ProcessingStage.SESSION_CREATION, {
sessionId,
ProcessingStage.SESSION_CREATION,
{
hasTranscript: !!transcriptContent, hasTranscript: !!transcriptContent,
transcriptLength: transcriptContent?.length || 0, transcriptLength: transcriptContent?.length || 0,
} });
); }
/**
* Handle errors and mark appropriate stage as failed
*/
async function handleProcessingError(
sessionId: string | null,
error: unknown
): Promise<void> {
if (!sessionId) return;
return { success: true };
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error); const errorMessage = error instanceof Error ? error.message : String(error);
// Mark the current stage as failed if we have a sessionId if (errorMessage.includes("transcript") || errorMessage.includes("fetch")) {
if (sessionId) { await failStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH, errorMessage);
// Determine which stage failed based on the error
if (
errorMessage.includes("transcript") ||
errorMessage.includes("fetch")
) {
await ProcessingStatusManager.failStage(
sessionId,
ProcessingStage.TRANSCRIPT_FETCH,
errorMessage
);
} else if ( } else if (
errorMessage.includes("message") || errorMessage.includes("message") ||
errorMessage.includes("parse") errorMessage.includes("parse")
) { ) {
await ProcessingStatusManager.failStage( await failStage(sessionId, ProcessingStage.SESSION_CREATION, errorMessage);
sessionId,
ProcessingStage.SESSION_CREATION,
errorMessage
);
} else { } else {
// General failure - mark CSV_IMPORT as failed await failStage(sessionId, ProcessingStage.CSV_IMPORT, errorMessage);
await ProcessingStatusManager.failStage(
sessionId,
ProcessingStage.CSV_IMPORT,
errorMessage
);
}
} }
}
/**
* Process a single SessionImport record into a Session record
* Uses new unified processing status tracking
*/
async function processSingleImport(
importRecord: ImportRecord
): Promise<{ success: boolean; error?: string }> {
let sessionId: string | null = null;
try {
sessionId = await createSession(importRecord);
await initializeSession(sessionId);
await completeStage(sessionId, ProcessingStage.CSV_IMPORT);
const transcriptContent = await handleTranscriptFetching(
sessionId,
importRecord
);
await handleSessionCreation(sessionId, transcriptContent);
return { success: true };
} catch (error) {
await handleProcessingError(sessionId, error);
return { return {
success: false, success: false,
error: errorMessage, error: error instanceof Error ? error.message : String(error),
}; };
} }
} }

View File

@@ -7,10 +7,15 @@ import {
} from "@prisma/client"; } from "@prisma/client";
import cron from "node-cron"; import cron from "node-cron";
import fetch from "node-fetch"; import fetch from "node-fetch";
import { withRetry } from "./database-retry.js";
import { prisma } from "./prisma.js"; import { prisma } from "./prisma.js";
import { ProcessingStatusManager } from "./processingStatusManager"; import {
completeStage,
failStage,
getSessionsNeedingProcessing,
startStage,
} from "./processingStatusManager.js";
import { getSchedulerConfig } from "./schedulerConfig"; import { getSchedulerConfig } from "./schedulerConfig";
import { withRetry, isRetryableError } from "./database-retry.js";
const OPENAI_API_KEY = process.env.OPENAI_API_KEY; const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions"; const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
@@ -516,10 +521,7 @@ async function processSingleSession(
try { try {
// Mark AI analysis as started // Mark AI analysis as started
await ProcessingStatusManager.startStage( await startStage(session.id, ProcessingStage.AI_ANALYSIS);
session.id,
ProcessingStage.AI_ANALYSIS
);
// Convert messages back to transcript format for OpenAI processing // Convert messages back to transcript format for OpenAI processing
const transcript = session.messages const transcript = session.messages
@@ -569,34 +571,23 @@ async function processSingleSession(
}); });
// Mark AI analysis as completed // Mark AI analysis as completed
await ProcessingStatusManager.completeStage( await completeStage(session.id, ProcessingStage.AI_ANALYSIS, {
session.id,
ProcessingStage.AI_ANALYSIS,
{
language: processedData.language, language: processedData.language,
sentiment: processedData.sentiment, sentiment: processedData.sentiment,
category: processedData.category, category: processedData.category,
questionsCount: processedData.questions.length, questionsCount: processedData.questions.length,
} });
);
// Start question extraction stage // Start question extraction stage
await ProcessingStatusManager.startStage( await startStage(session.id, ProcessingStage.QUESTION_EXTRACTION);
session.id,
ProcessingStage.QUESTION_EXTRACTION
);
// Process questions into separate tables // Process questions into separate tables
await processQuestions(session.id, processedData.questions); await processQuestions(session.id, processedData.questions);
// Mark question extraction as completed // Mark question extraction as completed
await ProcessingStatusManager.completeStage( await completeStage(session.id, ProcessingStage.QUESTION_EXTRACTION, {
session.id,
ProcessingStage.QUESTION_EXTRACTION,
{
questionsProcessed: processedData.questions.length, questionsProcessed: processedData.questions.length,
} });
);
return { return {
sessionId: session.id, sessionId: session.id,
@@ -604,7 +595,7 @@
}; };
} catch (error) { } catch (error) {
// Mark AI analysis as failed // Mark AI analysis as failed
await ProcessingStatusManager.failStage( await failStage(
session.id, session.id,
ProcessingStage.AI_ANALYSIS, ProcessingStage.AI_ANALYSIS,
error instanceof Error ? error.message : String(error) error instanceof Error ? error.message : String(error)
@@ -688,8 +679,7 @@ async function processUnprocessedSessionsInternal(
maxConcurrency = 5 maxConcurrency = 5
): Promise<void> { ): Promise<void> {
// Get sessions that need AI processing using the new status system // Get sessions that need AI processing using the new status system
const sessionsNeedingAI = const sessionsNeedingAI = await getSessionsNeedingProcessing(
await ProcessingStatusManager.getSessionsNeedingProcessing(
ProcessingStage.AI_ANALYSIS, ProcessingStage.AI_ANALYSIS,
batchSize || 50 batchSize || 50
); );

View File

@@ -12,13 +12,9 @@ interface WhereClause {
} }
/** /**
* Centralized processing status management
*/
export class ProcessingStatusManager {
/**
* Initialize processing status for a session with all stages set to PENDING * Initialize processing status for a session with all stages set to PENDING
*/ */
static async initializeSession(sessionId: string): Promise<void> { export async function initializeSession(sessionId: string): Promise<void> {
const stages = [ const stages = [
ProcessingStage.CSV_IMPORT, ProcessingStage.CSV_IMPORT,
ProcessingStage.TRANSCRIPT_FETCH, ProcessingStage.TRANSCRIPT_FETCH,
@@ -36,16 +32,16 @@ export class ProcessingStatusManager {
})), })),
skipDuplicates: true, // In case some already exist skipDuplicates: true, // In case some already exist
}); });
} }
/** /**
* Start a processing stage * Start a processing stage
*/ */
static async startStage( export async function startStage(
sessionId: string, sessionId: string,
stage: ProcessingStage, stage: ProcessingStage,
metadata?: ProcessingMetadata metadata?: ProcessingMetadata
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.upsert({ await prisma.sessionProcessingStatus.upsert({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
@@ -64,16 +60,16 @@ export class ProcessingStatusManager {
metadata: metadata || null, metadata: metadata || null,
}, },
}); });
} }
/** /**
* Complete a processing stage successfully * Complete a processing stage successfully
*/ */
static async completeStage( export async function completeStage(
sessionId: string, sessionId: string,
stage: ProcessingStage, stage: ProcessingStage,
metadata?: ProcessingMetadata metadata?: ProcessingMetadata
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.upsert({ await prisma.sessionProcessingStatus.upsert({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
@@ -93,17 +89,17 @@ export class ProcessingStatusManager {
metadata: metadata || null, metadata: metadata || null,
}, },
}); });
} }
/** /**
* Mark a processing stage as failed * Mark a processing stage as failed
*/ */
static async failStage( export async function failStage(
sessionId: string, sessionId: string,
stage: ProcessingStage, stage: ProcessingStage,
errorMessage: string, errorMessage: string,
metadata?: ProcessingMetadata metadata?: ProcessingMetadata
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.upsert({ await prisma.sessionProcessingStatus.upsert({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
@@ -126,16 +122,16 @@ export class ProcessingStatusManager {
metadata: metadata || null, metadata: metadata || null,
}, },
}); });
} }
/** /**
* Skip a processing stage (e.g., no transcript URL available) * Skip a processing stage (e.g., no transcript URL available)
*/ */
static async skipStage( export async function skipStage(
sessionId: string, sessionId: string,
stage: ProcessingStage, stage: ProcessingStage,
reason: string reason: string
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.upsert({ await prisma.sessionProcessingStatus.upsert({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
@@ -154,25 +150,25 @@ export class ProcessingStatusManager {
errorMessage: reason, errorMessage: reason,
}, },
}); });
} }
/** /**
* Get processing status for a specific session * Get processing status for a specific session
*/ */
static async getSessionStatus(sessionId: string) { export async function getSessionStatus(sessionId: string) {
return await prisma.sessionProcessingStatus.findMany({ return await prisma.sessionProcessingStatus.findMany({
where: { sessionId }, where: { sessionId },
orderBy: { stage: "asc" }, orderBy: { stage: "asc" },
}); });
} }
/** /**
* Get sessions that need processing for a specific stage * Get sessions that need processing for a specific stage
*/ */
static async getSessionsNeedingProcessing( export async function getSessionsNeedingProcessing(
stage: ProcessingStage, stage: ProcessingStage,
limit = 50 limit = 50
) { ) {
return await prisma.sessionProcessingStatus.findMany({ return await prisma.sessionProcessingStatus.findMany({
where: { where: {
stage, stage,
@@ -215,12 +211,12 @@ export class ProcessingStatusManager {
take: limit, take: limit,
orderBy: { session: { createdAt: "asc" } }, orderBy: { session: { createdAt: "asc" } },
}); });
} }
/** /**
* Get pipeline status overview * Get pipeline status overview
*/ */
static async getPipelineStatus() { export async function getPipelineStatus() {
// Get counts by stage and status // Get counts by stage and status
const statusCounts = await prisma.sessionProcessingStatus.groupBy({ const statusCounts = await prisma.sessionProcessingStatus.groupBy({
by: ["stage", "status"], by: ["stage", "status"],
@@ -244,12 +240,12 @@ export class ProcessingStatusManager {
totalSessions, totalSessions,
pipeline, pipeline,
}; };
} }
/** /**
* Get sessions with failed processing * Get sessions with failed processing
*/ */
static async getFailedSessions(stage?: ProcessingStage) { export async function getFailedSessions(stage?: ProcessingStage) {
const where: WhereClause = { const where: WhereClause = {
status: ProcessingStatus.FAILED, status: ProcessingStatus.FAILED,
}; };
@@ -286,15 +282,15 @@ export class ProcessingStatusManager {
orderBy: { completedAt: "desc" }, orderBy: { completedAt: "desc" },
take: 100, // Limit failed sessions to prevent overfetching take: 100, // Limit failed sessions to prevent overfetching
}); });
} }
/** /**
* Reset a failed stage for retry * Reset a failed stage for retry
*/ */
static async resetStageForRetry( export async function resetStageForRetry(
sessionId: string, sessionId: string,
stage: ProcessingStage stage: ProcessingStage
): Promise<void> { ): Promise<void> {
await prisma.sessionProcessingStatus.update({ await prisma.sessionProcessingStatus.update({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
@@ -306,15 +302,15 @@ export class ProcessingStatusManager {
errorMessage: null, errorMessage: null,
}, },
}); });
} }
/** /**
* Check if a session has completed a specific stage * Check if a session has completed a specific stage
*/ */
static async hasCompletedStage( export async function hasCompletedStage(
sessionId: string, sessionId: string,
stage: ProcessingStage stage: ProcessingStage
): Promise<boolean> { ): Promise<boolean> {
const status = await prisma.sessionProcessingStatus.findUnique({ const status = await prisma.sessionProcessingStatus.findUnique({
where: { where: {
sessionId_stage: { sessionId, stage }, sessionId_stage: { sessionId, stage },
@@ -322,15 +318,15 @@ export class ProcessingStatusManager {
}); });
return status?.status === ProcessingStatus.COMPLETED; return status?.status === ProcessingStatus.COMPLETED;
} }
/** /**
* Check if a session is ready for a specific stage (previous stages completed) * Check if a session is ready for a specific stage (previous stages completed)
*/ */
static async isReadyForStage( export async function isReadyForStage(
sessionId: string, sessionId: string,
stage: ProcessingStage stage: ProcessingStage
): Promise<boolean> { ): Promise<boolean> {
const stageOrder = [ const stageOrder = [
ProcessingStage.CSV_IMPORT, ProcessingStage.CSV_IMPORT,
ProcessingStage.TRANSCRIPT_FETCH, ProcessingStage.TRANSCRIPT_FETCH,
@@ -346,13 +342,9 @@ export class ProcessingStatusManager {
const previousStages = stageOrder.slice(0, currentStageIndex); const previousStages = stageOrder.slice(0, currentStageIndex);
for (const prevStage of previousStages) { for (const prevStage of previousStages) {
const isCompleted = await ProcessingStatusManager.hasCompletedStage( const isCompleted = await hasCompletedStage(sessionId, prevStage);
sessionId,
prevStage
);
if (!isCompleted) return false; if (!isCompleted) return false;
} }
return true; return true;
}
} }

View File

@@ -5,7 +5,7 @@
"private": true, "private": true,
"scripts": { "scripts": {
"build": "next build", "build": "next build",
"dev": "tsx server.ts", "dev": "pnpm exec tsx server.ts",
"dev:next-only": "next dev --turbopack", "dev:next-only": "next dev --turbopack",
"format": "npx prettier --write .", "format": "npx prettier --write .",
"format:check": "npx prettier --check .", "format:check": "npx prettier --check .",
@@ -17,12 +17,12 @@
"biome:lint": "biome lint .", "biome:lint": "biome lint .",
"prisma:generate": "prisma generate", "prisma:generate": "prisma generate",
"prisma:migrate": "prisma migrate dev", "prisma:migrate": "prisma migrate dev",
"prisma:seed": "tsx prisma/seed.ts", "prisma:seed": "pnpm exec tsx prisma/seed.ts",
"prisma:seed:platform": "tsx prisma/seed-platform.ts", "prisma:seed:platform": "pnpm exec tsx prisma/seed-platform.ts",
"prisma:push": "prisma db push", "prisma:push": "prisma db push",
"prisma:push:force": "prisma db push --force-reset", "prisma:push:force": "prisma db push --force-reset",
"prisma:studio": "prisma studio", "prisma:studio": "prisma studio",
"db:check": "tsx scripts/check-database-config.ts", "db:check": "pnpm exec tsx scripts/check-database-config.ts",
"start": "node server.mjs", "start": "node server.mjs",
"test": "concurrently 'vitest run' 'playwright test'", "test": "concurrently 'vitest run' 'playwright test'",
"test:coverage": "concurrently \"vitest run --coverage\" \"echo 'To add playwright coverage thingy'\"", "test:coverage": "concurrently \"vitest run --coverage\" \"echo 'To add playwright coverage thingy'\"",
@@ -79,6 +79,7 @@
"next-themes": "^0.4.6", "next-themes": "^0.4.6",
"node-cron": "^4.1.1", "node-cron": "^4.1.1",
"node-fetch": "^3.3.2", "node-fetch": "^3.3.2",
"pg": "^8.16.3",
"react": "^19.1.0", "react": "^19.1.0",
"react-day-picker": "^9.7.0", "react-day-picker": "^9.7.0",
"react-dom": "^19.1.0", "react-dom": "^19.1.0",
@@ -101,6 +102,7 @@
"@testing-library/react": "^16.3.0", "@testing-library/react": "^16.3.0",
"@types/node": "^24.0.6", "@types/node": "^24.0.6",
"@types/node-cron": "^3.0.11", "@types/node-cron": "^3.0.11",
"@types/pg": "^8.15.4",
"@types/react": "^19.1.8", "@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6", "@types/react-dom": "^19.1.6",
"@typescript-eslint/eslint-plugin": "^8.35.0", "@typescript-eslint/eslint-plugin": "^8.35.0",

9533
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff