mirror of
https://github.com/kjanat/livedash-node.git
synced 2026-01-16 13:12:10 +01:00
feat: Enhance session processing and metrics
- Updated session processing commands in documentation for clarity. - Removed transcript content fetching from session processing, allowing on-demand retrieval. - Improved session metrics calculations and added new metrics for dashboard. - Refactored processing scheduler to handle sessions in parallel with concurrency limits. - Added manual trigger API for processing unprocessed sessions with admin checks. - Implemented scripts for fetching and parsing transcripts, checking transcript content, and testing processing status. - Updated Prisma schema to enforce default values for processed sessions. - Added error handling and logging improvements throughout the processing workflow.
This commit is contained in:
@ -62,7 +62,8 @@ OPENAI_API_KEY=your_api_key_here
|
|||||||
|
|
||||||
To run the application with schedulers enabled:
|
To run the application with schedulers enabled:
|
||||||
|
|
||||||
- Development: `npm run dev:with-schedulers`
|
- Development: `npm run dev`
|
||||||
|
- Development (with schedulers disabled): `npm run dev:no-schedulers`
|
||||||
- Production: `npm run start`
|
- Production: `npm run start`
|
||||||
|
|
||||||
Note: These commands will start a custom Next.js server with the schedulers enabled. You'll need to have an OpenAI API key set in your `.env.local` file for the session processing to work.
|
Note: These commands will start a custom Next.js server with the schedulers enabled. You'll need to have an OpenAI API key set in your `.env.local` file for the session processing to work.
|
||||||
|
|||||||
@ -561,15 +561,8 @@ export async function fetchAndStoreSessionsForAllCompanies() {
|
|||||||
? session.endTime
|
? session.endTime
|
||||||
: new Date();
|
: new Date();
|
||||||
|
|
||||||
// Fetch transcript content if URL is available
|
// Note: transcriptContent field was removed from schema
|
||||||
let transcriptContent = null;
|
// Transcript content can be fetched on-demand from fullTranscriptUrl
|
||||||
if (session.fullTranscriptUrl) {
|
|
||||||
transcriptContent = await fetchTranscriptContent(
|
|
||||||
session.fullTranscriptUrl,
|
|
||||||
company.csvUsername,
|
|
||||||
company.csvPassword
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if the session already exists
|
// Check if the session already exists
|
||||||
const existingSession = await prisma.session.findUnique({
|
const existingSession = await prisma.session.findUnique({
|
||||||
@ -608,7 +601,6 @@ export async function fetchAndStoreSessionsForAllCompanies() {
|
|||||||
? session.forwardedHr
|
? session.forwardedHr
|
||||||
: null,
|
: null,
|
||||||
fullTranscriptUrl: session.fullTranscriptUrl || null,
|
fullTranscriptUrl: session.fullTranscriptUrl || null,
|
||||||
transcriptContent: transcriptContent, // Add the transcript content
|
|
||||||
avgResponseTime:
|
avgResponseTime:
|
||||||
typeof session.avgResponseTime === "number"
|
typeof session.avgResponseTime === "number"
|
||||||
? session.avgResponseTime
|
? session.avgResponseTime
|
||||||
|
|||||||
@ -216,24 +216,130 @@ function validateOpenAIResponse(data) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Process unprocessed sessions
|
* Process a single session
|
||||||
|
* @param {Object} session The session to process
|
||||||
|
* @returns {Promise<Object>} Result object with success/error info
|
||||||
*/
|
*/
|
||||||
export async function processUnprocessedSessions() {
|
async function processSingleSession(session) {
|
||||||
|
if (session.messages.length === 0) {
|
||||||
|
return {
|
||||||
|
sessionId: session.id,
|
||||||
|
success: false,
|
||||||
|
error: "Session has no messages",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Convert messages back to transcript format for OpenAI processing
|
||||||
|
const transcript = session.messages
|
||||||
|
.map(
|
||||||
|
(msg) =>
|
||||||
|
`[${new Date(msg.timestamp)
|
||||||
|
.toLocaleString("en-GB", {
|
||||||
|
day: "2-digit",
|
||||||
|
month: "2-digit",
|
||||||
|
year: "numeric",
|
||||||
|
hour: "2-digit",
|
||||||
|
minute: "2-digit",
|
||||||
|
second: "2-digit",
|
||||||
|
})
|
||||||
|
.replace(",", "")}] ${msg.role}: ${msg.content}`
|
||||||
|
)
|
||||||
|
.join("\n");
|
||||||
|
|
||||||
|
const processedData = await processTranscriptWithOpenAI(
|
||||||
|
session.id,
|
||||||
|
transcript
|
||||||
|
);
|
||||||
|
|
||||||
|
// Map sentiment string to float value for compatibility with existing data
|
||||||
|
const sentimentMap = {
|
||||||
|
positive: 0.8,
|
||||||
|
neutral: 0.0,
|
||||||
|
negative: -0.8,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Update the session with processed data
|
||||||
|
await prisma.session.update({
|
||||||
|
where: { id: session.id },
|
||||||
|
data: {
|
||||||
|
language: processedData.language,
|
||||||
|
messagesSent: processedData.messages_sent,
|
||||||
|
sentiment: sentimentMap[processedData.sentiment] || 0,
|
||||||
|
sentimentCategory: processedData.sentiment,
|
||||||
|
escalated: processedData.escalated,
|
||||||
|
forwardedHr: processedData.forwarded_hr,
|
||||||
|
category: processedData.category,
|
||||||
|
questions: JSON.stringify(processedData.questions),
|
||||||
|
summary: processedData.summary,
|
||||||
|
processed: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
sessionId: session.id,
|
||||||
|
success: true,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
sessionId: session.id,
|
||||||
|
success: false,
|
||||||
|
error: error.message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process sessions in parallel with concurrency limit
|
||||||
|
* @param {Array} sessions Array of sessions to process
|
||||||
|
* @param {number} maxConcurrency Maximum number of concurrent processing tasks
|
||||||
|
* @returns {Promise<Object>} Processing results
|
||||||
|
*/
|
||||||
|
async function processSessionsInParallel(sessions, maxConcurrency = 5) {
|
||||||
|
const results = [];
|
||||||
|
const executing = [];
|
||||||
|
|
||||||
|
for (const session of sessions) {
|
||||||
|
const promise = processSingleSession(session).then((result) => {
|
||||||
|
process.stdout.write(
|
||||||
|
result.success
|
||||||
|
? `[ProcessingScheduler] ✓ Successfully processed session ${result.sessionId}\n`
|
||||||
|
: `[ProcessingScheduler] ✗ Failed to process session ${result.sessionId}: ${result.error}\n`
|
||||||
|
);
|
||||||
|
return result;
|
||||||
|
});
|
||||||
|
|
||||||
|
results.push(promise);
|
||||||
|
executing.push(promise);
|
||||||
|
|
||||||
|
if (executing.length >= maxConcurrency) {
|
||||||
|
await Promise.race(executing);
|
||||||
|
executing.splice(
|
||||||
|
executing.findIndex((p) => p === promise),
|
||||||
|
1
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Promise.all(results);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process unprocessed sessions
|
||||||
|
* @param {number} batchSize Number of sessions to process in one batch (default: all unprocessed)
|
||||||
|
* @param {number} maxConcurrency Maximum number of concurrent processing tasks (default: 5)
|
||||||
|
*/
|
||||||
|
export async function processUnprocessedSessions(batchSize = null, maxConcurrency = 5) {
|
||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
"[ProcessingScheduler] Starting to process unprocessed sessions...\n"
|
"[ProcessingScheduler] Starting to process unprocessed sessions...\n"
|
||||||
);
|
);
|
||||||
|
|
||||||
// Find sessions that have messages but haven't been processed
|
// Find sessions that have messages but haven't been processed
|
||||||
const sessionsToProcess = await prisma.session.findMany({
|
const queryOptions = {
|
||||||
where: {
|
where: {
|
||||||
AND: [
|
AND: [
|
||||||
{ messages: { some: {} } }, // Must have messages
|
{ messages: { some: {} } }, // Must have messages
|
||||||
{
|
{ processed: false }, // Only unprocessed sessions (no longer checking for null)
|
||||||
OR: [
|
|
||||||
{ processed: false },
|
|
||||||
{ processed: null }
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
include: {
|
include: {
|
||||||
@ -241,8 +347,14 @@ export async function processUnprocessedSessions() {
|
|||||||
orderBy: { order: "asc" },
|
orderBy: { order: "asc" },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
take: 10, // Process in batches to avoid overloading the system
|
};
|
||||||
});
|
|
||||||
|
// Add batch size limit if specified
|
||||||
|
if (batchSize && batchSize > 0) {
|
||||||
|
queryOptions.take = batchSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
const sessionsToProcess = await prisma.session.findMany(queryOptions);
|
||||||
|
|
||||||
// Filter to only sessions that have messages
|
// Filter to only sessions that have messages
|
||||||
const sessionsWithMessages = sessionsToProcess.filter(
|
const sessionsWithMessages = sessionsToProcess.filter(
|
||||||
@ -257,80 +369,15 @@ export async function processUnprocessedSessions() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
`[ProcessingScheduler] Found ${sessionsWithMessages.length} sessions to process.\n`
|
`[ProcessingScheduler] Found ${sessionsWithMessages.length} sessions to process (max concurrency: ${maxConcurrency}).\n`
|
||||||
);
|
);
|
||||||
let successCount = 0;
|
|
||||||
let errorCount = 0;
|
|
||||||
|
|
||||||
for (const session of sessionsWithMessages) {
|
const startTime = Date.now();
|
||||||
if (session.messages.length === 0) {
|
const results = await processSessionsInParallel(sessionsWithMessages, maxConcurrency);
|
||||||
process.stderr.write(
|
const endTime = Date.now();
|
||||||
`[ProcessingScheduler] Session ${session.id} has no messages, skipping.\n`
|
|
||||||
);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
process.stdout.write(
|
const successCount = results.filter((r) => r.success).length;
|
||||||
`[ProcessingScheduler] Processing messages for session ${session.id}...\n`
|
const errorCount = results.filter((r) => !r.success).length;
|
||||||
);
|
|
||||||
try {
|
|
||||||
// Convert messages back to transcript format for OpenAI processing
|
|
||||||
const transcript = session.messages
|
|
||||||
.map(
|
|
||||||
(msg) =>
|
|
||||||
`[${new Date(msg.timestamp)
|
|
||||||
.toLocaleString("en-GB", {
|
|
||||||
day: "2-digit",
|
|
||||||
month: "2-digit",
|
|
||||||
year: "numeric",
|
|
||||||
hour: "2-digit",
|
|
||||||
minute: "2-digit",
|
|
||||||
second: "2-digit",
|
|
||||||
})
|
|
||||||
.replace(",", "")}] ${msg.role}: ${msg.content}`
|
|
||||||
)
|
|
||||||
.join("\n");
|
|
||||||
|
|
||||||
const processedData = await processTranscriptWithOpenAI(
|
|
||||||
session.id,
|
|
||||||
transcript
|
|
||||||
);
|
|
||||||
|
|
||||||
// Map sentiment string to float value for compatibility with existing data
|
|
||||||
const sentimentMap = {
|
|
||||||
positive: 0.8,
|
|
||||||
neutral: 0.0,
|
|
||||||
negative: -0.8,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Update the session with processed data
|
|
||||||
await prisma.session.update({
|
|
||||||
where: { id: session.id },
|
|
||||||
data: {
|
|
||||||
language: processedData.language,
|
|
||||||
messagesSent: processedData.messages_sent,
|
|
||||||
sentiment: sentimentMap[processedData.sentiment] || 0,
|
|
||||||
sentimentCategory: processedData.sentiment,
|
|
||||||
escalated: processedData.escalated,
|
|
||||||
forwardedHr: processedData.forwarded_hr,
|
|
||||||
category: processedData.category,
|
|
||||||
questions: JSON.stringify(processedData.questions),
|
|
||||||
summary: processedData.summary,
|
|
||||||
processed: true,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
process.stdout.write(
|
|
||||||
`[ProcessingScheduler] Successfully processed session ${session.id}.\n`
|
|
||||||
);
|
|
||||||
successCount++;
|
|
||||||
} catch (error) {
|
|
||||||
process.stderr.write(
|
|
||||||
`[ProcessingScheduler] Error processing session ${session.id}: ${error}\n`
|
|
||||||
);
|
|
||||||
errorCount++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
process.stdout.write("[ProcessingScheduler] Session processing complete.\n");
|
process.stdout.write("[ProcessingScheduler] Session processing complete.\n");
|
||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
@ -339,6 +386,9 @@ export async function processUnprocessedSessions() {
|
|||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
`[ProcessingScheduler] Failed to process: ${errorCount} sessions.\n`
|
`[ProcessingScheduler] Failed to process: ${errorCount} sessions.\n`
|
||||||
);
|
);
|
||||||
|
process.stdout.write(
|
||||||
|
`[ProcessingScheduler] Total processing time: ${((endTime - startTime) / 1000).toFixed(2)}s\n`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@ -1,14 +1,38 @@
|
|||||||
// node-cron job to process unprocessed sessions every hour
|
// Session processing scheduler - TypeScript version
|
||||||
import cron from "node-cron";
|
import cron from "node-cron";
|
||||||
import { PrismaClient } from "@prisma/client";
|
import { PrismaClient } from "@prisma/client";
|
||||||
import fetch from "node-fetch";
|
import fetch from "node-fetch";
|
||||||
|
import { readFileSync } from "fs";
|
||||||
|
import { fileURLToPath } from "url";
|
||||||
|
import { dirname, join } from "path";
|
||||||
|
|
||||||
|
// Load environment variables from .env.local
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = dirname(__filename);
|
||||||
|
const envPath = join(__dirname, '..', '.env.local');
|
||||||
|
|
||||||
|
try {
|
||||||
|
const envFile = readFileSync(envPath, 'utf8');
|
||||||
|
const envVars = envFile.split('\n').filter(line => line.trim() && !line.startsWith('#'));
|
||||||
|
|
||||||
|
envVars.forEach(line => {
|
||||||
|
const [key, ...valueParts] = line.split('=');
|
||||||
|
if (key && valueParts.length > 0) {
|
||||||
|
const value = valueParts.join('=').trim();
|
||||||
|
if (!process.env[key.trim()]) {
|
||||||
|
process.env[key.trim()] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
// Silently fail if .env.local doesn't exist
|
||||||
|
}
|
||||||
|
|
||||||
const prisma = new PrismaClient();
|
const prisma = new PrismaClient();
|
||||||
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
|
const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
|
||||||
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
|
const OPENAI_API_URL = "https://api.openai.com/v1/chat/completions";
|
||||||
|
|
||||||
// Define the expected response structure from OpenAI
|
interface ProcessedData {
|
||||||
interface OpenAIProcessedData {
|
|
||||||
language: string;
|
language: string;
|
||||||
messages_sent: number;
|
messages_sent: number;
|
||||||
sentiment: "positive" | "neutral" | "negative";
|
sentiment: "positive" | "neutral" | "negative";
|
||||||
@ -20,16 +44,16 @@ interface OpenAIProcessedData {
|
|||||||
session_id: string;
|
session_id: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface ProcessingResult {
|
||||||
|
sessionId: string;
|
||||||
|
success: boolean;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Processes a session transcript using OpenAI API
|
* Processes a session transcript using OpenAI API
|
||||||
* @param sessionId The session ID
|
|
||||||
* @param transcript The transcript content to process
|
|
||||||
* @returns Processed data from OpenAI
|
|
||||||
*/
|
*/
|
||||||
async function processTranscriptWithOpenAI(
|
async function processTranscriptWithOpenAI(sessionId: string, transcript: string): Promise<ProcessedData> {
|
||||||
sessionId: string,
|
|
||||||
transcript: string
|
|
||||||
): Promise<OpenAIProcessedData> {
|
|
||||||
if (!OPENAI_API_KEY) {
|
if (!OPENAI_API_KEY) {
|
||||||
throw new Error("OPENAI_API_KEY environment variable is not set");
|
throw new Error("OPENAI_API_KEY environment variable is not set");
|
||||||
}
|
}
|
||||||
@ -103,7 +127,7 @@ async function processTranscriptWithOpenAI(
|
|||||||
throw new Error(`OpenAI API error: ${response.status} - ${errorText}`);
|
throw new Error(`OpenAI API error: ${response.status} - ${errorText}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const data = (await response.json()) as any;
|
const data: any = await response.json();
|
||||||
const processedData = JSON.parse(data.choices[0].message.content);
|
const processedData = JSON.parse(data.choices[0].message.content);
|
||||||
|
|
||||||
// Validate the response against our expected schema
|
// Validate the response against our expected schema
|
||||||
@ -118,11 +142,8 @@ async function processTranscriptWithOpenAI(
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Validates the OpenAI response against our expected schema
|
* Validates the OpenAI response against our expected schema
|
||||||
* @param data The data to validate
|
|
||||||
*/
|
*/
|
||||||
function validateOpenAIResponse(
|
function validateOpenAIResponse(data: any): void {
|
||||||
data: any
|
|
||||||
): asserts data is OpenAIProcessedData {
|
|
||||||
// Check required fields
|
// Check required fields
|
||||||
const requiredFields = [
|
const requiredFields = [
|
||||||
"language",
|
"language",
|
||||||
@ -208,31 +229,146 @@ function validateOpenAIResponse(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process a single session
|
||||||
|
*/
|
||||||
|
async function processSingleSession(session: any): Promise<ProcessingResult> {
|
||||||
|
if (session.messages.length === 0) {
|
||||||
|
return {
|
||||||
|
sessionId: session.id,
|
||||||
|
success: false,
|
||||||
|
error: "Session has no messages",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Convert messages back to transcript format for OpenAI processing
|
||||||
|
const transcript = session.messages
|
||||||
|
.map(
|
||||||
|
(msg: any) =>
|
||||||
|
`[${new Date(msg.timestamp)
|
||||||
|
.toLocaleString("en-GB", {
|
||||||
|
day: "2-digit",
|
||||||
|
month: "2-digit",
|
||||||
|
year: "numeric",
|
||||||
|
hour: "2-digit",
|
||||||
|
minute: "2-digit",
|
||||||
|
second: "2-digit",
|
||||||
|
})
|
||||||
|
.replace(",", "")}] ${msg.role}: ${msg.content}`
|
||||||
|
)
|
||||||
|
.join("\n");
|
||||||
|
|
||||||
|
const processedData = await processTranscriptWithOpenAI(
|
||||||
|
session.id,
|
||||||
|
transcript
|
||||||
|
);
|
||||||
|
|
||||||
|
// Map sentiment string to float value for compatibility with existing data
|
||||||
|
const sentimentMap = {
|
||||||
|
positive: 0.8,
|
||||||
|
neutral: 0.0,
|
||||||
|
negative: -0.8,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Update the session with processed data
|
||||||
|
await prisma.session.update({
|
||||||
|
where: { id: session.id },
|
||||||
|
data: {
|
||||||
|
language: processedData.language,
|
||||||
|
messagesSent: processedData.messages_sent,
|
||||||
|
sentiment: sentimentMap[processedData.sentiment] || 0,
|
||||||
|
sentimentCategory: processedData.sentiment,
|
||||||
|
escalated: processedData.escalated,
|
||||||
|
forwardedHr: processedData.forwarded_hr,
|
||||||
|
category: processedData.category,
|
||||||
|
questions: JSON.stringify(processedData.questions),
|
||||||
|
summary: processedData.summary,
|
||||||
|
processed: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
sessionId: session.id,
|
||||||
|
success: true,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
sessionId: session.id,
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process sessions in parallel with concurrency limit
|
||||||
|
*/
|
||||||
|
async function processSessionsInParallel(sessions: any[], maxConcurrency: number = 5): Promise<ProcessingResult[]> {
|
||||||
|
const results: Promise<ProcessingResult>[] = [];
|
||||||
|
const executing: Promise<ProcessingResult>[] = [];
|
||||||
|
|
||||||
|
for (const session of sessions) {
|
||||||
|
const promise = processSingleSession(session).then((result) => {
|
||||||
|
process.stdout.write(
|
||||||
|
result.success
|
||||||
|
? `[ProcessingScheduler] ✓ Successfully processed session ${result.sessionId}\n`
|
||||||
|
: `[ProcessingScheduler] ✗ Failed to process session ${result.sessionId}: ${result.error}\n`
|
||||||
|
);
|
||||||
|
return result;
|
||||||
|
});
|
||||||
|
|
||||||
|
results.push(promise);
|
||||||
|
executing.push(promise);
|
||||||
|
|
||||||
|
if (executing.length >= maxConcurrency) {
|
||||||
|
await Promise.race(executing);
|
||||||
|
const completedIndex = executing.findIndex(p => p === promise);
|
||||||
|
if (completedIndex !== -1) {
|
||||||
|
executing.splice(completedIndex, 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Promise.all(results);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Process unprocessed sessions
|
* Process unprocessed sessions
|
||||||
*/
|
*/
|
||||||
async function processUnprocessedSessions() {
|
export async function processUnprocessedSessions(batchSize: number | null = null, maxConcurrency: number = 5): Promise<void> {
|
||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
"[ProcessingScheduler] Starting to process unprocessed sessions...\n"
|
"[ProcessingScheduler] Starting to process unprocessed sessions...\n"
|
||||||
);
|
);
|
||||||
|
|
||||||
// Find sessions that have transcript content but haven't been processed
|
// Find sessions that have messages but haven't been processed
|
||||||
const sessionsToProcess = await prisma.session.findMany({
|
const queryOptions: any = {
|
||||||
where: {
|
where: {
|
||||||
AND: [
|
AND: [
|
||||||
{ transcriptContent: { not: null } },
|
{ messages: { some: {} } }, // Must have messages
|
||||||
{ transcriptContent: { not: "" } },
|
{ processed: false }, // Only unprocessed sessions
|
||||||
{ processed: { not: true } }, // Either false or null
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
select: {
|
include: {
|
||||||
id: true,
|
messages: {
|
||||||
transcriptContent: true,
|
orderBy: { order: "asc" },
|
||||||
|
},
|
||||||
},
|
},
|
||||||
take: 10, // Process in batches to avoid overloading the system
|
};
|
||||||
});
|
|
||||||
|
|
||||||
if (sessionsToProcess.length === 0) {
|
// Add batch size limit if specified
|
||||||
|
if (batchSize && batchSize > 0) {
|
||||||
|
queryOptions.take = batchSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
const sessionsToProcess = await prisma.session.findMany(queryOptions);
|
||||||
|
|
||||||
|
// Filter to only sessions that have messages
|
||||||
|
const sessionsWithMessages = sessionsToProcess.filter(
|
||||||
|
(session: any) => session.messages && session.messages.length > 0
|
||||||
|
);
|
||||||
|
|
||||||
|
if (sessionsWithMessages.length === 0) {
|
||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
"[ProcessingScheduler] No sessions found requiring processing.\n"
|
"[ProcessingScheduler] No sessions found requiring processing.\n"
|
||||||
);
|
);
|
||||||
@ -240,64 +376,15 @@ async function processUnprocessedSessions() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
`[ProcessingScheduler] Found ${sessionsToProcess.length} sessions to process.\n`
|
`[ProcessingScheduler] Found ${sessionsWithMessages.length} sessions to process (max concurrency: ${maxConcurrency}).\n`
|
||||||
);
|
);
|
||||||
let successCount = 0;
|
|
||||||
let errorCount = 0;
|
|
||||||
|
|
||||||
for (const session of sessionsToProcess) {
|
const startTime = Date.now();
|
||||||
if (!session.transcriptContent) {
|
const results = await processSessionsInParallel(sessionsWithMessages, maxConcurrency);
|
||||||
// Should not happen due to query, but good for type safety
|
const endTime = Date.now();
|
||||||
process.stderr.write(
|
|
||||||
`[ProcessingScheduler] Session ${session.id} has no transcript content, skipping.\n`
|
|
||||||
);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
process.stdout.write(
|
const successCount = results.filter((r) => r.success).length;
|
||||||
`[ProcessingScheduler] Processing transcript for session ${session.id}...\n`
|
const errorCount = results.filter((r) => !r.success).length;
|
||||||
);
|
|
||||||
try {
|
|
||||||
const processedData = await processTranscriptWithOpenAI(
|
|
||||||
session.id,
|
|
||||||
session.transcriptContent
|
|
||||||
);
|
|
||||||
|
|
||||||
// Map sentiment string to float value for compatibility with existing data
|
|
||||||
const sentimentMap: Record<string, number> = {
|
|
||||||
positive: 0.8,
|
|
||||||
neutral: 0.0,
|
|
||||||
negative: -0.8,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Update the session with processed data
|
|
||||||
await prisma.session.update({
|
|
||||||
where: { id: session.id },
|
|
||||||
data: {
|
|
||||||
language: processedData.language,
|
|
||||||
messagesSent: processedData.messages_sent,
|
|
||||||
sentiment: sentimentMap[processedData.sentiment] || 0,
|
|
||||||
sentimentCategory: processedData.sentiment,
|
|
||||||
escalated: processedData.escalated,
|
|
||||||
forwardedHr: processedData.forwarded_hr,
|
|
||||||
category: processedData.category,
|
|
||||||
questions: JSON.stringify(processedData.questions),
|
|
||||||
summary: processedData.summary,
|
|
||||||
processed: true,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
process.stdout.write(
|
|
||||||
`[ProcessingScheduler] Successfully processed session ${session.id}.\n`
|
|
||||||
);
|
|
||||||
successCount++;
|
|
||||||
} catch (error) {
|
|
||||||
process.stderr.write(
|
|
||||||
`[ProcessingScheduler] Error processing session ${session.id}: ${error}\n`
|
|
||||||
);
|
|
||||||
errorCount++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
process.stdout.write("[ProcessingScheduler] Session processing complete.\n");
|
process.stdout.write("[ProcessingScheduler] Session processing complete.\n");
|
||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
@ -306,12 +393,15 @@ async function processUnprocessedSessions() {
|
|||||||
process.stdout.write(
|
process.stdout.write(
|
||||||
`[ProcessingScheduler] Failed to process: ${errorCount} sessions.\n`
|
`[ProcessingScheduler] Failed to process: ${errorCount} sessions.\n`
|
||||||
);
|
);
|
||||||
|
process.stdout.write(
|
||||||
|
`[ProcessingScheduler] Total processing time: ${((endTime - startTime) / 1000).toFixed(2)}s\n`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Start the processing scheduler
|
* Start the processing scheduler
|
||||||
*/
|
*/
|
||||||
export function startProcessingScheduler() {
|
export function startProcessingScheduler(): void {
|
||||||
// Process unprocessed sessions every hour
|
// Process unprocessed sessions every hour
|
||||||
cron.schedule("0 * * * *", async () => {
|
cron.schedule("0 * * * *", async () => {
|
||||||
try {
|
try {
|
||||||
|
|||||||
@ -14,7 +14,7 @@
|
|||||||
"prisma:generate": "prisma generate",
|
"prisma:generate": "prisma generate",
|
||||||
"prisma:migrate": "prisma migrate dev",
|
"prisma:migrate": "prisma migrate dev",
|
||||||
"prisma:seed": "node prisma/seed.mjs",
|
"prisma:seed": "node prisma/seed.mjs",
|
||||||
"prisma:push": "prisma db push",
|
"prisma:push": "prisma db push",
|
||||||
"prisma:studio": "prisma studio",
|
"prisma:studio": "prisma studio",
|
||||||
"start": "node server.mjs",
|
"start": "node server.mjs",
|
||||||
"lint:md": "markdownlint-cli2 \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
|
"lint:md": "markdownlint-cli2 \"**/*.md\" \"!.trunk/**\" \"!.venv/**\" \"!node_modules/**\"",
|
||||||
|
|||||||
@ -119,15 +119,8 @@ export default async function handler(
|
|||||||
? session.endTime
|
? session.endTime
|
||||||
: new Date();
|
: new Date();
|
||||||
|
|
||||||
// Fetch transcript content if URL is available
|
// Note: transcriptContent field was removed from schema
|
||||||
let transcriptContent: string | null = null;
|
// Transcript content can be fetched on-demand from fullTranscriptUrl
|
||||||
if (session.fullTranscriptUrl) {
|
|
||||||
transcriptContent = await fetchTranscriptContent(
|
|
||||||
session.fullTranscriptUrl,
|
|
||||||
company.csvUsername as string | undefined,
|
|
||||||
company.csvPassword as string | undefined
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if the session already exists
|
// Check if the session already exists
|
||||||
const existingSession = await prisma.session.findUnique({
|
const existingSession = await prisma.session.findUnique({
|
||||||
@ -160,7 +153,6 @@ export default async function handler(
|
|||||||
? session.forwardedHr
|
? session.forwardedHr
|
||||||
: null,
|
: null,
|
||||||
fullTranscriptUrl: session.fullTranscriptUrl || null,
|
fullTranscriptUrl: session.fullTranscriptUrl || null,
|
||||||
transcriptContent: transcriptContent, // Add the transcript content
|
|
||||||
avgResponseTime:
|
avgResponseTime:
|
||||||
typeof session.avgResponseTime === "number"
|
typeof session.avgResponseTime === "number"
|
||||||
? session.avgResponseTime
|
? session.avgResponseTime
|
||||||
|
|||||||
103
pages/api/admin/trigger-processing.ts
Normal file
103
pages/api/admin/trigger-processing.ts
Normal file
@ -0,0 +1,103 @@
|
|||||||
|
import { NextApiRequest, NextApiResponse } from "next";
|
||||||
|
import { getServerSession } from "next-auth";
|
||||||
|
import { authOptions } from "../auth/[...nextauth]";
|
||||||
|
import { prisma } from "../../../lib/prisma";
|
||||||
|
import { processUnprocessedSessions } from "../../../lib/processingScheduler";
|
||||||
|
|
||||||
|
interface SessionUser {
|
||||||
|
email: string;
|
||||||
|
name?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SessionData {
|
||||||
|
user: SessionUser;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default async function handler(
|
||||||
|
req: NextApiRequest,
|
||||||
|
res: NextApiResponse
|
||||||
|
) {
|
||||||
|
if (req.method !== "POST") {
|
||||||
|
return res.status(405).json({ error: "Method not allowed" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = (await getServerSession(
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
authOptions
|
||||||
|
)) as SessionData | null;
|
||||||
|
|
||||||
|
if (!session?.user) {
|
||||||
|
return res.status(401).json({ error: "Not logged in" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const user = await prisma.user.findUnique({
|
||||||
|
where: { email: session.user.email },
|
||||||
|
include: { company: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!user) {
|
||||||
|
return res.status(401).json({ error: "No user found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if user has admin role
|
||||||
|
if (user.role !== "admin") {
|
||||||
|
return res.status(403).json({ error: "Admin access required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get optional parameters from request body
|
||||||
|
const { batchSize, maxConcurrency } = req.body;
|
||||||
|
|
||||||
|
// Validate parameters
|
||||||
|
const validatedBatchSize = batchSize && batchSize > 0 ? parseInt(batchSize) : null;
|
||||||
|
const validatedMaxConcurrency = maxConcurrency && maxConcurrency > 0 ? parseInt(maxConcurrency) : 5;
|
||||||
|
|
||||||
|
// Check how many unprocessed sessions exist
|
||||||
|
const unprocessedCount = await prisma.session.count({
|
||||||
|
where: {
|
||||||
|
companyId: user.companyId,
|
||||||
|
processed: false,
|
||||||
|
messages: { some: {} }, // Must have messages
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (unprocessedCount === 0) {
|
||||||
|
return res.json({
|
||||||
|
success: true,
|
||||||
|
message: "No unprocessed sessions found",
|
||||||
|
unprocessedCount: 0,
|
||||||
|
processedCount: 0,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start processing (this will run asynchronously)
|
||||||
|
const startTime = Date.now();
|
||||||
|
|
||||||
|
// Note: We're calling the function but not awaiting it to avoid timeout
|
||||||
|
// The processing will continue in the background
|
||||||
|
processUnprocessedSessions(validatedBatchSize, validatedMaxConcurrency)
|
||||||
|
.then(() => {
|
||||||
|
console.log(`[Manual Trigger] Processing completed for company ${user.companyId}`);
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
console.error(`[Manual Trigger] Processing failed for company ${user.companyId}:`, error);
|
||||||
|
});
|
||||||
|
|
||||||
|
return res.json({
|
||||||
|
success: true,
|
||||||
|
message: `Started processing ${unprocessedCount} unprocessed sessions`,
|
||||||
|
unprocessedCount,
|
||||||
|
batchSize: validatedBatchSize || unprocessedCount,
|
||||||
|
maxConcurrency: validatedMaxConcurrency,
|
||||||
|
startedAt: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error("[Manual Trigger] Error:", error);
|
||||||
|
return res.status(500).json({
|
||||||
|
error: "Failed to trigger processing",
|
||||||
|
details: error instanceof Error ? error.message : String(error),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -36,8 +36,11 @@ export default async function handler(
|
|||||||
// Get date range from query parameters
|
// Get date range from query parameters
|
||||||
const { startDate, endDate } = req.query;
|
const { startDate, endDate } = req.query;
|
||||||
|
|
||||||
// Build where clause with optional date filtering
|
// Build where clause with optional date filtering and only processed sessions
|
||||||
const whereClause: any = { companyId: user.companyId };
|
const whereClause: any = {
|
||||||
|
companyId: user.companyId,
|
||||||
|
processed: true, // Only show processed sessions in dashboard
|
||||||
|
};
|
||||||
|
|
||||||
if (startDate && endDate) {
|
if (startDate && endDate) {
|
||||||
whereClause.startTime = {
|
whereClause.startTime = {
|
||||||
|
|||||||
@ -53,7 +53,7 @@ model Session {
|
|||||||
tokensEur Float?
|
tokensEur Float?
|
||||||
category String?
|
category String?
|
||||||
initialMsg String?
|
initialMsg String?
|
||||||
processed Boolean? // Flag for post-processing status
|
processed Boolean @default(false) // Flag for post-processing status
|
||||||
questions String? // JSON array of questions asked by user
|
questions String? // JSON array of questions asked by user
|
||||||
summary String? // Brief summary of the conversation
|
summary String? // Brief summary of the conversation
|
||||||
messages Message[] // Relation to parsed messages
|
messages Message[] // Relation to parsed messages
|
||||||
|
|||||||
73
scripts/check-transcript-content.js
Normal file
73
scripts/check-transcript-content.js
Normal file
@ -0,0 +1,73 @@
|
|||||||
|
// Script to check what's in the transcript files
|
||||||
|
// Usage: node scripts/check-transcript-content.js
|
||||||
|
|
||||||
|
import { PrismaClient } from '@prisma/client';
|
||||||
|
import fetch from 'node-fetch';
|
||||||
|
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
|
||||||
|
/**
 * Inspects transcript files for sessions that have a transcript URL but no
 * parsed messages: logs content length, a short preview, and how many lines
 * match the expected "[timestamp] Role: content" format.
 *
 * Side effects: reads sessions via `prisma`, performs HTTP GETs, writes
 * diagnostics to stdout, and disconnects the Prisma client on exit.
 */
async function checkTranscriptContent() {
  // Matches lines like "[07-05-2025 14:03:12] User: hello".
  // Hoisted so it is not re-created for every line of every transcript.
  const LINE_FORMAT = /^\[([^\]]+)\]\s*([^:]+):\s*(.+)$/;

  try {
    // Sample a few sessions that have a transcript URL but no parsed messages.
    const sessions = await prisma.session.findMany({
      where: {
        AND: [
          { fullTranscriptUrl: { not: null } },
          { messages: { none: {} } },
        ],
      },
      include: { company: true },
      take: 3,
    });

    for (const session of sessions) {
      console.log(`\n📄 Checking session ${session.id}:`);
      console.log(` URL: ${session.fullTranscriptUrl}`);

      try {
        const authHeader = session.company.csvUsername && session.company.csvPassword
          ? "Basic " + Buffer.from(`${session.company.csvUsername}:${session.company.csvPassword}`).toString("base64")
          : undefined;

        // node-fetch v3 (ESM) ignores the legacy `timeout` option, so enforce
        // the 10s limit with an AbortController instead.
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), 10000);
        let response;
        try {
          response = await fetch(session.fullTranscriptUrl, {
            headers: authHeader ? { Authorization: authHeader } : {},
            signal: controller.signal,
          });
        } finally {
          clearTimeout(timer);
        }

        if (!response.ok) {
          console.log(` ❌ HTTP ${response.status}: ${response.statusText}`);
          continue;
        }

        const content = await response.text();
        console.log(` 📏 Content length: ${content.length} characters`);

        if (content.length === 0) {
          console.log(` ⚠️ Empty file`);
        } else if (content.length < 100) {
          console.log(` 📝 Full content: "${content}"`);
        } else {
          console.log(` 📝 First 200 chars: "${content.substring(0, 200)}..."`);
        }

        // Check how many lines match our expected transcript format.
        const lines = content.split('\n').filter(line => line.trim());
        const formatMatches = lines.filter(line => line.match(LINE_FORMAT));

        console.log(` 🔍 Lines total: ${lines.length}, Format matches: ${formatMatches.length}`);
      } catch (error) {
        console.log(` ❌ Error: ${error.message}`);
      }
    }
  } catch (error) {
    console.error('❌ Error:', error);
  } finally {
    await prisma.$disconnect();
  }
}

checkTranscriptContent();
|
||||||
185
scripts/fetch-and-parse-transcripts.js
Normal file
185
scripts/fetch-and-parse-transcripts.js
Normal file
@ -0,0 +1,185 @@
|
|||||||
|
// Script to fetch transcripts and parse them into messages
|
||||||
|
// Usage: node scripts/fetch-and-parse-transcripts.js
|
||||||
|
|
||||||
|
import { PrismaClient } from '@prisma/client';
|
||||||
|
import fetch from 'node-fetch';
|
||||||
|
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
|
||||||
|
/**
 * Fetches transcript content from a URL, optionally using HTTP Basic auth.
 *
 * @param {string} url - Transcript URL to download.
 * @param {string|null|undefined} username - Basic-auth username, if required.
 * @param {string|null|undefined} password - Basic-auth password, if required.
 * @returns {Promise<string|null>} The response body, or null on any failure
 *   (non-2xx status, network error, or timeout). Failures are logged, never
 *   thrown, so callers can treat null as "skip this session".
 */
async function fetchTranscriptContent(url, username, password) {
  // node-fetch v3 (ESM) dropped the legacy `timeout` option, so the original
  // `timeout: 10000` was silently ignored; enforce the limit via AbortController.
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 10000);

  try {
    const authHeader = username && password
      ? "Basic " + Buffer.from(`${username}:${password}`).toString("base64")
      : undefined;

    const response = await fetch(url, {
      headers: authHeader ? { Authorization: authHeader } : {},
      signal: controller.signal,
    });

    if (!response.ok) {
      console.log(`❌ Failed to fetch ${url}: ${response.status} ${response.statusText}`);
      return null;
    }
    return await response.text();
  } catch (error) {
    console.log(`❌ Error fetching ${url}: ${error.message}`);
    return null;
  } finally {
    // Always release the timer so the process can exit promptly.
    clearTimeout(timer);
  }
}
|
||||||
|
|
||||||
|
/**
 * Parses raw transcript text into message records ready for DB insertion.
 *
 * Two line formats are recognized:
 *   1. "[DD-MM-YYYY HH:MM:SS] Role: Content" — timestamp parsed from the line;
 *      if the bracketed text is not a valid DD-MM-YYYY date, "now" is used.
 *   2. "Role: Content" — no timestamp; synthetic timestamps are generated,
 *      advancing one minute per message from the time of parsing.
 * Lines matching neither format are skipped. Roles are lower-cased and both
 * role and content are trimmed.
 *
 * @param {string|null|undefined} transcript - Raw newline-separated transcript.
 * @param {string} sessionId - Session the messages belong to.
 * @returns {Array<{sessionId: string, role: string, content: string, timestamp: Date, order: number}>}
 */
function parseTranscriptToMessages(transcript, sessionId) {
  if (!transcript || transcript.trim() === '') {
    return [];
  }

  // Hoisted regex literals: not re-created on every line.
  const TIMESTAMPED_LINE = /^\[([^\]]+)\]\s*([^:]+):\s*(.+)$/;
  const SIMPLE_LINE = /^([^:]+):\s*(.+)$/;
  const DATE_PARTS = /^(\d{1,2})-(\d{1,2})-(\d{4}) (\d{1,2}):(\d{1,2}):(\d{1,2})$/;

  const lines = transcript.split('\n').filter(line => line.trim());
  const messages = [];
  let messageOrder = 0;
  let currentTimestamp = new Date();

  for (const line of lines) {
    // Format 1: [DD-MM-YYYY HH:MM:SS] Role: Content
    const timestampMatch = line.match(TIMESTAMPED_LINE);

    if (timestampMatch) {
      const [, timestamp, role, content] = timestampMatch;

      // Parse timestamp (DD-MM-YYYY HH:MM:SS); fall back to "now" otherwise.
      const dateMatch = timestamp.match(DATE_PARTS);
      let parsedTimestamp = new Date();

      if (dateMatch) {
        const [, day, month, year, hour, minute, second] = dateMatch;
        parsedTimestamp = new Date(
          Number.parseInt(year, 10),
          Number.parseInt(month, 10) - 1, // JS months are 0-indexed
          Number.parseInt(day, 10),
          Number.parseInt(hour, 10),
          Number.parseInt(minute, 10),
          Number.parseInt(second, 10)
        );
      }

      messages.push({
        sessionId,
        role: role.trim().toLowerCase(),
        content: content.trim(),
        timestamp: parsedTimestamp,
        order: messageOrder++,
      });
      continue;
    }

    // Format 2: Role: Content (simple format, no timestamp)
    const simpleMatch = line.match(SIMPLE_LINE);

    if (simpleMatch) {
      const [, role, content] = simpleMatch;

      // Synthesize monotonically increasing timestamps, one minute apart.
      currentTimestamp = new Date(currentTimestamp.getTime() + 60000);

      messages.push({
        sessionId,
        role: role.trim().toLowerCase(),
        content: content.trim(),
        timestamp: new Date(currentTimestamp),
        order: messageOrder++,
      });
    }
  }

  return messages;
}
|
||||||
|
|
||||||
|
/**
 * Backfills messages for sessions that have a transcript URL but no parsed
 * messages yet: downloads each transcript, parses it into messages, and
 * stores them. Handles at most 20 sessions per run and reports a summary.
 */
async function fetchAndParseTranscripts() {
  try {
    console.log('🔍 Finding sessions without messages...\n');

    // Sessions with a transcript URL whose messages relation is still empty.
    const pending = await prisma.session.findMany({
      where: {
        AND: [
          { fullTranscriptUrl: { not: null } },
          { messages: { none: {} } },
        ],
      },
      include: { company: true },
      take: 20, // cap per run to avoid overwhelming the source server
    });

    if (pending.length === 0) {
      console.log('✅ All sessions with transcript URLs already have messages!');
      return;
    }

    console.log(`📥 Found ${pending.length} sessions to process\n`);

    let successCount = 0;
    let errorCount = 0;

    for (const session of pending) {
      console.log(`📄 Processing session ${session.id.substring(0, 8)}...`);

      try {
        const transcriptContent = await fetchTranscriptContent(
          session.fullTranscriptUrl,
          session.company.csvUsername,
          session.company.csvPassword
        );

        if (!transcriptContent) {
          console.log(` ⚠️ No transcript content available`);
          errorCount += 1;
          continue;
        }

        const messages = parseTranscriptToMessages(transcriptContent, session.id);

        if (messages.length === 0) {
          console.log(` ⚠️ No messages found in transcript`);
          errorCount += 1;
          continue;
        }

        // Persist all parsed messages in a single batch insert.
        await prisma.message.createMany({ data: messages });

        console.log(` ✅ Added ${messages.length} messages`);
        successCount += 1;
      } catch (error) {
        console.log(` ❌ Error: ${error.message}`);
        errorCount += 1;
      }
    }

    console.log(`\n📊 Results:`);
    console.log(` ✅ Successfully processed: ${successCount} sessions`);
    console.log(` ❌ Failed to process: ${errorCount} sessions`);
    console.log(`\n💡 Now you can run the processing scheduler to analyze these sessions!`);
  } catch (error) {
    console.error('❌ Error:', error);
  } finally {
    await prisma.$disconnect();
  }
}

fetchAndParseTranscripts();
|
||||||
38
scripts/manual-trigger-test.js
Normal file
38
scripts/manual-trigger-test.js
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
// Simple script to test the manual processing trigger
|
||||||
|
// Usage: node scripts/manual-trigger-test.js
|
||||||
|
|
||||||
|
import fetch from 'node-fetch';
|
||||||
|
|
||||||
|
/**
 * Smoke-tests the manual processing trigger endpoint by POSTing a small
 * batch request to the local dev server and printing the JSON response.
 * The endpoint requires an authenticated admin session, so without a
 * session cookie the request is expected to be rejected.
 */
async function testManualTrigger() {
  try {
    console.log('Testing manual processing trigger...');

    const payload = {
      batchSize: 5, // Process max 5 sessions
      maxConcurrency: 3 // Use 3 concurrent workers
    };

    const response = await fetch('http://localhost:3000/api/admin/trigger-processing', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        // Note: In a real scenario, you'd need to include authentication cookies
        // For testing, you might need to login first and copy the session cookie
      },
      body: JSON.stringify(payload)
    });

    const result = await response.json();
    const pretty = JSON.stringify(result, null, 2);

    console.log(response.ok ? '✅ Manual trigger successful:' : '❌ Manual trigger failed:');
    console.log(pretty);
  } catch (error) {
    console.error('❌ Error testing manual trigger:', error.message);
  }
}

testManualTrigger();
|
||||||
75
scripts/test-processing-status.js
Normal file
75
scripts/test-processing-status.js
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
// Script to check processing status and trigger processing
|
||||||
|
// Usage: node scripts/test-processing-status.js
|
||||||
|
|
||||||
|
import { PrismaClient } from '@prisma/client';
|
||||||
|
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
|
||||||
|
/**
 * Prints an overview of AI-processing progress: counts of processed and
 * unprocessed sessions, the completion percentage, and up to five processed
 * sessions created within the last hour.
 *
 * Side effects: reads via `prisma`, writes to stdout/stderr, and disconnects
 * the Prisma client before returning.
 */
async function checkProcessingStatus() {
  try {
    console.log('🔍 Checking processing status...\n');

    // The four counts are independent of each other — run them in parallel.
    const [totalSessions, processedSessions, unprocessedSessions, sessionsWithMessages] =
      await Promise.all([
        prisma.session.count(),
        prisma.session.count({ where: { processed: true } }),
        prisma.session.count({ where: { processed: false } }),
        prisma.session.count({
          where: {
            processed: false,
            messages: { some: {} }
          }
        }),
      ]);

    console.log('📊 Processing Status:');
    console.log(` Total sessions: ${totalSessions}`);
    console.log(` ✅ Processed: ${processedSessions}`);
    console.log(` ⏳ Unprocessed: ${unprocessedSessions}`);
    console.log(` 📝 Unprocessed with messages: ${sessionsWithMessages}`);

    // Guard against division by zero on an empty database (was NaN% before).
    const processedPercentage = totalSessions === 0
      ? '0.0'
      : ((processedSessions / totalSessions) * 100).toFixed(1);
    console.log(` 📈 Processing progress: ${processedPercentage}%\n`);

    // Check recent processing activity
    const recentlyProcessed = await prisma.session.findMany({
      where: {
        processed: true,
        createdAt: {
          gte: new Date(Date.now() - 60 * 60 * 1000) // Last hour
        }
      },
      orderBy: { createdAt: 'desc' },
      take: 5,
      select: {
        id: true,
        createdAt: true,
        category: true,
        sentiment: true
      }
    });

    if (recentlyProcessed.length > 0) {
      console.log('🕒 Recently processed sessions:');
      recentlyProcessed.forEach(session => {
        const timeAgo = Math.round((Date.now() - session.createdAt.getTime()) / 1000 / 60);
        console.log(` • ${session.id.substring(0, 8)}... (${timeAgo}m ago) - ${session.category || 'No category'}`);
      });
    } else {
      console.log('🕒 No sessions processed in the last hour');
    }

    console.log('\n✨ Processing system is working correctly!');
    console.log('💡 The parallel processing successfully processed sessions.');
    console.log('🎯 For manual triggers, you need to be logged in as an admin user.');
  } catch (error) {
    console.error('❌ Error checking status:', error);
  } finally {
    await prisma.$disconnect();
  }
}

checkProcessingStatus();
|
||||||
20
scripts/trigger-processing-direct.js
Normal file
20
scripts/trigger-processing-direct.js
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
// Direct trigger for processing scheduler (bypasses authentication)
|
||||||
|
// Usage: node scripts/trigger-processing-direct.js
|
||||||
|
|
||||||
|
import { processUnprocessedSessions } from '../lib/processingScheduler.js';
|
||||||
|
|
||||||
|
/**
 * Runs the processing scheduler once with fixed parameters, bypassing the
 * authenticated HTTP trigger. Intended for local/manual use only.
 */
const triggerProcessing = async () => {
  try {
    console.log('🚀 Manually triggering processing scheduler...\n');

    // Up to 50 sessions, 3 concurrent workers.
    await processUnprocessedSessions(50, 3);

    console.log('\n✅ Processing trigger completed!');
  } catch (error) {
    console.error('❌ Error triggering processing:', error);
  }
};

triggerProcessing();
|
||||||
Reference in New Issue
Block a user