style: remove unnecessary whitespace in multiple files for cleaner code

Max Kowalski
2025-06-27 23:32:09 +02:00
parent 7e59567f73
commit 043aa03534
11 changed files with 73 additions and 73 deletions

View File

@ -44,7 +44,7 @@ export async function fetchAndParseCsv(
const res = await fetch(url, {
headers: authHeader ? { Authorization: authHeader } : {},
});
if (!res.ok) {
throw new Error(`Failed to fetch CSV: ${res.status} ${res.statusText}`);
}
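
For reference, a minimal sketch of the fetch pattern shown above, runnable on Node 18+. The buildBasicAuthHeader helper and its parameters are illustrative assumptions, not part of this commit:

// Sketch only: conditional Authorization header plus the non-OK check, as in
// fetchAndParseCsv above. buildBasicAuthHeader is a hypothetical helper.
function buildBasicAuthHeader(user?: string, pass?: string): string | undefined {
  if (!user || !pass) return undefined;
  return `Basic ${Buffer.from(`${user}:${pass}`).toString("base64")}`;
}

async function fetchCsvText(url: string, authHeader?: string): Promise<string> {
  const res = await fetch(url, {
    headers: authHeader ? { Authorization: authHeader } : {},
  });
  if (!res.ok) {
    throw new Error(`Failed to fetch CSV: ${res.status} ${res.statusText}`);
  }
  return res.text();
}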

View File

@ -8,22 +8,22 @@ import { dirname, join } from "path";
*/
function parseEnvValue(value: string | undefined): string {
if (!value) return '';
// Trim whitespace
let cleaned = value.trim();
// Remove inline comments (everything after #)
const commentIndex = cleaned.indexOf('#');
if (commentIndex !== -1) {
cleaned = cleaned.substring(0, commentIndex).trim();
}
// Remove surrounding quotes (both single and double)
if ((cleaned.startsWith('"') && cleaned.endsWith('"')) ||
(cleaned.startsWith("'") && cleaned.endsWith("'"))) {
cleaned = cleaned.slice(1, -1);
}
return cleaned;
}
@ -33,7 +33,7 @@ function parseEnvValue(value: string | undefined): string {
function parseIntWithDefault(value: string | undefined, defaultValue: number): number {
const cleaned = parseEnvValue(value);
if (!cleaned) return defaultValue;
const parsed = parseInt(cleaned, 10);
return isNaN(parsed) ? defaultValue : parsed;
}
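
For reference, the expected behaviour of the two helpers above (a sketch; it assumes they are exported from this module):

// Sketch only: how parseEnvValue and parseIntWithDefault handle typical .env input.
parseEnvValue('  "3000"  # default port ');  // -> "3000" (trimmed, comment stripped, quotes removed)
parseEnvValue(undefined);                    // -> ""
parseIntWithDefault("'42'", 10);             // -> 42
parseIntWithDefault("not-a-number", 10);     // -> 10 (falls back when parseInt yields NaN)
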
@ -137,7 +137,7 @@ export function logEnvConfig(): void {
console.log(` NEXTAUTH_URL: ${env.NEXTAUTH_URL}`);
console.log(` SCHEDULER_ENABLED: ${env.SCHEDULER_ENABLED}`);
console.log(` PORT: ${env.PORT}`);
if (env.SCHEDULER_ENABLED) {
console.log(' Scheduler intervals:');
console.log(` CSV Import: ${env.CSV_IMPORT_INTERVAL}`);

View File

@ -17,13 +17,13 @@ function parseEuropeanDate(dateStr: string): Date {
// Handle format: "DD.MM.YYYY HH:mm:ss"
const [datePart, timePart] = dateStr.trim().split(' ');
if (!datePart || !timePart) {
throw new Error(`Invalid date format: ${dateStr}. Expected format: DD.MM.YYYY HH:mm:ss`);
}
const [day, month, year] = datePart.split('.');
if (!day || !month || !year) {
throw new Error(`Invalid date part: ${datePart}. Expected format: DD.MM.YYYY`);
}
@ -31,11 +31,11 @@ function parseEuropeanDate(dateStr: string): Date {
// Convert to ISO format: YYYY-MM-DD HH:mm:ss
const isoDateStr = `${year}-${month.padStart(2, '0')}-${day.padStart(2, '0')} ${timePart}`;
const date = new Date(isoDateStr);
if (isNaN(date.getTime())) {
throw new Error(`Failed to parse date: ${dateStr} -> ${isoDateStr}`);
}
return date;
}
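
For reference, a worked example of the conversion above (sketch; the date value is illustrative):

// Sketch only: "27.06.2025 14:05:30"
//   datePart = "27.06.2025", timePart = "14:05:30"
//   isoDateStr = "2025-06-27 14:05:30"
//   new Date(isoDateStr) is then interpreted in the server's local timezone.
const example = parseEuropeanDate("27.06.2025 14:05:30");
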
@ -44,7 +44,7 @@ function parseEuropeanDate(dateStr: string): Date {
*/
function parseFallbackSentiment(sentimentRaw: string | null): SentimentCategory | null {
if (!sentimentRaw) return null;
const sentimentStr = sentimentRaw.toLowerCase();
if (sentimentStr.includes('positive')) {
return SentimentCategory.POSITIVE;
@ -83,7 +83,7 @@ async function parseTranscriptIntoMessages(sessionId: string, transcriptContent:
// Try to parse different formats:
// Format 1: "User: message" or "Assistant: message"
// Format 2: "[timestamp] User: message" or "[timestamp] Assistant: message"
let role = 'unknown';
let content = trimmedLine;
let timestamp: Date | null = null;
@ -136,7 +136,7 @@ async function parseTranscriptIntoMessages(sessionId: string, transcriptContent:
*/
async function processSingleImport(importRecord: any): Promise<{ success: boolean; error?: string }> {
let sessionId: string | null = null;
try {
// Parse dates using European format parser
const startTime = parseEuropeanDate(importRecord.startTimeRaw);
@ -183,12 +183,12 @@ async function processSingleImport(importRecord: any): Promise<{ success: boolea
// Handle transcript fetching
let transcriptContent = importRecord.rawTranscriptContent;
if (!transcriptContent && importRecord.fullTranscriptUrl && isValidTranscriptUrl(importRecord.fullTranscriptUrl)) {
await ProcessingStatusManager.startStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH);
console.log(`[Import Processor] Fetching transcript for ${importRecord.externalSessionId}...`);
// Get company credentials for transcript fetching
const company = await prisma.company.findUnique({
where: { id: importRecord.companyId },
@ -204,7 +204,7 @@ async function processSingleImport(importRecord: any): Promise<{ success: boolea
if (transcriptResult.success) {
transcriptContent = transcriptResult.content;
console.log(`[Import Processor] ✓ Fetched transcript for ${importRecord.externalSessionId} (${transcriptContent?.length} chars)`);
// Update the import record with the fetched content
await prisma.sessionImport.update({
where: { id: importRecord.id },
@ -232,7 +232,7 @@ async function processSingleImport(importRecord: any): Promise<{ success: boolea
// Handle session creation (parse messages)
await ProcessingStatusManager.startStage(sessionId, ProcessingStage.SESSION_CREATION);
if (transcriptContent) {
await parseTranscriptIntoMessages(sessionId, transcriptContent);
}
@ -245,7 +245,7 @@ async function processSingleImport(importRecord: any): Promise<{ success: boolea
return { success: true };
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
// Mark the current stage as failed if we have a sessionId
if (sessionId) {
// Determine which stage failed based on the error
@ -306,7 +306,7 @@ export async function processQueuedImports(batchSize: number = 50): Promise<void
// Process each import in this batch
for (const importRecord of unprocessedImports) {
const result = await processSingleImport(importRecord);
if (result.success) {
batchSuccessCount++;
totalSuccessCount++;
@ -334,7 +334,7 @@ export async function processQueuedImports(batchSize: number = 50): Promise<void
*/
export function startImportProcessingScheduler(): void {
const config = getSchedulerConfig();
if (!config.enabled) {
console.log('[Import Processing Scheduler] Disabled via configuration');
return;

View File

@ -345,8 +345,8 @@ export function sessionMetrics(
let sentimentPositiveCount = 0;
let sentimentNeutralCount = 0;
let sentimentNegativeCount = 0;
let totalTokens = 0;
let totalTokensEur = 0;
const totalTokens = 0;
const totalTokensEur = 0;
const wordCounts: { [key: string]: number } = {};
let alerts = 0;

View File

@ -91,23 +91,23 @@ async function recordAIProcessingRequest(
): Promise<void> {
const usage = openaiResponse.usage;
const model = openaiResponse.model;
// Get current pricing from database
const pricing = await getCurrentModelPricing(model);
// Fallback pricing if not found in database
const fallbackPricing = {
promptTokenCost: 0.00001, // $10.00 per 1M tokens (gpt-4-turbo rate)
completionTokenCost: 0.00003, // $30.00 per 1M tokens
};
const finalPricing = pricing || fallbackPricing;
const promptCost = usage.prompt_tokens * finalPricing.promptTokenCost;
const completionCost = usage.completion_tokens * finalPricing.completionTokenCost;
const totalCostUsd = promptCost + completionCost;
const totalCostEur = totalCostUsd * USD_TO_EUR_RATE;
await prisma.aIProcessingRequest.create({
data: {
sessionId,
@ -115,11 +115,11 @@ async function recordAIProcessingRequest(
model: openaiResponse.model,
serviceTier: openaiResponse.service_tier,
systemFingerprint: openaiResponse.system_fingerprint,
promptTokens: usage.prompt_tokens,
completionTokens: usage.completion_tokens,
totalTokens: usage.total_tokens,
// Detailed breakdown
cachedTokens: usage.prompt_tokens_details?.cached_tokens || null,
audioTokensPrompt: usage.prompt_tokens_details?.audio_tokens || null,
@ -127,11 +127,11 @@ async function recordAIProcessingRequest(
audioTokensCompletion: usage.completion_tokens_details?.audio_tokens || null,
acceptedPredictionTokens: usage.completion_tokens_details?.accepted_prediction_tokens || null,
rejectedPredictionTokens: usage.completion_tokens_details?.rejected_prediction_tokens || null,
promptTokenCost: finalPricing.promptTokenCost,
completionTokenCost: finalPricing.completionTokenCost,
totalCostEur,
processingType,
success: true,
completedAt: new Date(),
@ -178,14 +178,14 @@ async function processQuestions(sessionId: string, questions: string[]): Promise
for (let index = 0; index < questions.length; index++) {
const questionText = questions[index];
if (!questionText.trim()) continue; // Skip empty questions
// Find or create question
const question = await prisma.question.upsert({
where: { content: questionText.trim() },
create: { content: questionText.trim() },
update: {}
});
// Link to session
await prisma.sessionQuestion.create({
data: {
@ -202,7 +202,7 @@ async function processQuestions(sessionId: string, questions: string[]): Promise
*/
async function calculateMessagesSent(sessionId: string): Promise<number> {
const userMessageCount = await prisma.message.count({
where: {
where: {
sessionId,
role: { in: ['user', 'User'] } // Handle both cases
}
@ -218,7 +218,7 @@ async function calculateEndTime(sessionId: string, fallbackEndTime: Date): Promi
where: { sessionId },
orderBy: { timestamp: 'desc' }
});
return latestMessage?.timestamp || fallbackEndTime;
}
@ -291,10 +291,10 @@ async function processTranscriptWithOpenAI(sessionId: string, transcript: string
}
const openaiResponse: any = await response.json();
// Record the AI processing request for cost tracking
await recordAIProcessingRequest(sessionId, openaiResponse, 'session_analysis');
const processedData = JSON.parse(openaiResponse.choices[0].message.content);
// Validate the response against our expected schema
@ -304,11 +304,11 @@ async function processTranscriptWithOpenAI(sessionId: string, transcript: string
} catch (error) {
// Record failed request
await recordFailedAIProcessingRequest(
sessionId,
'session_analysis',
sessionId,
'session_analysis',
error instanceof Error ? error.message : String(error)
);
process.stderr.write(`Error processing transcript with OpenAI: ${error}\n`);
throw error;
}
@ -319,7 +319,7 @@ async function processTranscriptWithOpenAI(sessionId: string, transcript: string
*/
function validateOpenAIResponse(data: any): void {
const requiredFields = [
"language", "sentiment", "escalated", "forwarded_hr",
"language", "sentiment", "escalated", "forwarded_hr",
"category", "questions", "summary", "session_id"
];
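
The validation loop itself sits outside this hunk; a sketch of what a check over requiredFields and data presumably looks like (an assumption for illustration, not the committed code):

// Sketch only: throw on the first missing key.
for (const field of requiredFields) {
  if (!(field in data)) {
    throw new Error(`OpenAI response missing required field: ${field}`);
  }
}
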
@ -406,7 +406,7 @@ async function processSingleSession(session: any): Promise<ProcessingResult> {
// Calculate messagesSent from actual Message records
const messagesSent = await calculateMessagesSent(session.id);
// Calculate endTime from latest Message timestamp
const calculatedEndTime = await calculateEndTime(session.id, session.endTime);
@ -451,8 +451,8 @@ async function processSingleSession(session: any): Promise<ProcessingResult> {
} catch (error) {
// Mark AI analysis as failed
await ProcessingStatusManager.failStage(
session.id,
ProcessingStage.AI_ANALYSIS,
session.id,
ProcessingStage.AI_ANALYSIS,
error instanceof Error ? error.message : String(error)
);
@ -597,7 +597,7 @@ export async function getAIProcessingCosts(): Promise<{
*/
export function startProcessingScheduler(): void {
const config = getSchedulerConfig();
if (!config.enabled) {
console.log('[Processing Scheduler] Disabled via configuration');
return;
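
For reference, the fallback pricing in recordAIProcessingRequest above implies per-request costs along these lines (a sketch with illustrative token counts; the value of USD_TO_EUR_RATE is assumed, since the constant is defined outside the visible hunks):

// Sketch only: cost arithmetic as in recordAIProcessingRequest.
const promptTokens = 1_200;
const completionTokens = 400;
const promptTokenCost = 0.00001;      // $10.00 per 1M prompt tokens (fallback rate)
const completionTokenCost = 0.00003;  // $30.00 per 1M completion tokens (fallback rate)
const USD_TO_EUR_RATE = 0.92;         // assumed value, for illustration only

const totalCostUsd = promptTokens * promptTokenCost + completionTokens * completionTokenCost;
// 0.012 + 0.012 = 0.024 USD
const totalCostEur = totalCostUsd * USD_TO_EUR_RATE; // ≈ 0.022 EUR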

View File

@ -6,7 +6,7 @@ const prisma = new PrismaClient();
* Centralized processing status management
*/
export class ProcessingStatusManager {
/**
* Initialize processing status for a session with all stages set to PENDING
*/
@ -34,8 +34,8 @@ export class ProcessingStatusManager {
* Start a processing stage
*/
static async startStage(
sessionId: string,
stage: ProcessingStage,
sessionId: string,
stage: ProcessingStage,
metadata?: any
): Promise<void> {
await prisma.sessionProcessingStatus.upsert({
@ -62,8 +62,8 @@ export class ProcessingStatusManager {
* Complete a processing stage successfully
*/
static async completeStage(
sessionId: string,
stage: ProcessingStage,
sessionId: string,
stage: ProcessingStage,
metadata?: any
): Promise<void> {
await prisma.sessionProcessingStatus.upsert({
@ -91,8 +91,8 @@ export class ProcessingStatusManager {
* Mark a processing stage as failed
*/
static async failStage(
sessionId: string,
stage: ProcessingStage,
sessionId: string,
stage: ProcessingStage,
errorMessage: string,
metadata?: any
): Promise<void> {
@ -124,8 +124,8 @@ export class ProcessingStatusManager {
* Skip a processing stage (e.g., no transcript URL available)
*/
static async skipStage(
sessionId: string,
stage: ProcessingStage,
sessionId: string,
stage: ProcessingStage,
reason: string
): Promise<void> {
await prisma.sessionProcessingStatus.upsert({
@ -198,7 +198,7 @@ export class ProcessingStatusManager {
// Organize the data
const pipeline: Record<string, Record<string, number>> = {};
for (const { stage, status, _count } of statusCounts) {
if (!pipeline[stage]) {
pipeline[stage] = {};
@ -219,7 +219,7 @@ export class ProcessingStatusManager {
const where: any = {
status: ProcessingStatus.FAILED,
};
if (stage) {
where.stage = stage;
}
@ -284,7 +284,7 @@ export class ProcessingStatusManager {
// Check if all previous stages are completed
const previousStages = stageOrder.slice(0, currentStageIndex);
for (const prevStage of previousStages) {
const isCompleted = await this.hasCompletedStage(sessionId, prevStage);
if (!isCompleted) return false;
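
For reference, a typical stage lifecycle against the methods above, mirroring how processSingleImport in this commit drives them (sketch only):

// Sketch only: start a stage, then complete or fail it.
await ProcessingStatusManager.startStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH);
try {
  // ...fetch the transcript...
  await ProcessingStatusManager.completeStage(sessionId, ProcessingStage.TRANSCRIPT_FETCH);
} catch (error) {
  await ProcessingStatusManager.failStage(
    sessionId,
    ProcessingStage.TRANSCRIPT_FETCH,
    error instanceof Error ? error.message : String(error)
  );
}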

View File

@ -6,14 +6,14 @@ import { getSchedulerConfig } from "./schedulerConfig";
export function startCsvImportScheduler() {
const config = getSchedulerConfig();
if (!config.enabled) {
console.log('[CSV Import Scheduler] Disabled via configuration');
return;
}
console.log(`[CSV Import Scheduler] Starting with interval: ${config.csvImport.interval}`);
cron.schedule(config.csvImport.interval, async () => {
const companies = await prisma.company.findMany();
for (const company of companies) {
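
For reference, a sketch of how the env values logged by logEnvConfig earlier in this commit are assumed to feed this scheduler; the cron module is assumed to be node-cron, as suggested by the cron.schedule(...) call above:

// Sketch only, e.g. SCHEDULER_ENABLED=true and CSV_IMPORT_INTERVAL="*/15 * * * *".
import cron from "node-cron";
import { getSchedulerConfig } from "./schedulerConfig";

const config = getSchedulerConfig();
if (config.enabled) {
  cron.schedule(config.csvImport.interval, async () => {
    // fetch and queue each company's CSV export here
  });
}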

View File

@ -21,7 +21,7 @@ export interface SchedulerConfig {
*/
export function getSchedulerConfig(): SchedulerConfig {
const config = getEnvSchedulerConfig();
return {
enabled: config.enabled,
csvImport: {

View File

@ -76,7 +76,7 @@ export async function fetchTranscriptContent(
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
// Handle common network errors
if (errorMessage.includes('ENOTFOUND')) {
return {
@ -84,14 +84,14 @@ export async function fetchTranscriptContent(
error: 'Domain not found',
};
}
if (errorMessage.includes('ECONNREFUSED')) {
return {
success: false,
error: 'Connection refused',
};
}
if (errorMessage.includes('timeout')) {
return {
success: false,

View File

@ -62,7 +62,7 @@ export function parseTranscriptToMessages(
for (const line of lines) {
const trimmedLine = line.trim();
// Skip empty lines
if (!trimmedLine) {
continue;
@ -70,10 +70,10 @@ export function parseTranscriptToMessages(
// Check if line starts with a timestamp and role [DD.MM.YYYY HH:MM:SS] Role: content
const timestampRoleMatch = trimmedLine.match(/^\[(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}:\d{2})\]\s+(User|Assistant|System|user|assistant|system):\s*(.*)$/i);
// Check if line starts with just a role (User:, Assistant:, System:, etc.)
const roleMatch = trimmedLine.match(/^(User|Assistant|System|user|assistant|system):\s*(.*)$/i);
if (timestampRoleMatch) {
// Save previous message if exists
if (currentMessage) {
@ -90,7 +90,7 @@ export function parseTranscriptToMessages(
const timestamp = timestampRoleMatch[1];
const role = timestampRoleMatch[2].charAt(0).toUpperCase() + timestampRoleMatch[2].slice(1).toLowerCase();
const content = timestampRoleMatch[3] || '';
currentMessage = {
role,
content,
@ -111,7 +111,7 @@ export function parseTranscriptToMessages(
// Start new message without timestamp
const role = roleMatch[1].charAt(0).toUpperCase() + roleMatch[1].slice(1).toLowerCase();
const content = roleMatch[2] || '';
currentMessage = {
role,
content
@ -143,7 +143,7 @@ export function parseTranscriptToMessages(
// Calculate timestamps - use parsed timestamps if available, otherwise distribute across session duration
const hasTimestamps = messages.some(msg => (msg as any).timestamp);
if (hasTimestamps) {
// Use parsed timestamps from the transcript
messages.forEach((message, index) => {
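
For reference, how the two patterns above classify a line (sketch; regexes copied from the diff, sample lines are illustrative):

// Sketch only: the two recognised line formats.
const timestampRoleRe = /^\[(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}:\d{2})\]\s+(User|Assistant|System|user|assistant|system):\s*(.*)$/i;
const roleRe = /^(User|Assistant|System|user|assistant|system):\s*(.*)$/i;

timestampRoleRe.exec("[27.06.2025 14:05:30] User: Hello there");
// -> groups: "27.06.2025 14:05:30", "User", "Hello there"
roleRe.exec("assistant: Hi, how can I help?");
// -> groups: "assistant", "Hi, how can I help?" (the role is later normalised to "Assistant")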

View File

@ -123,8 +123,8 @@ export default async function handler(
where: { companyId: company.id }
});
res.json({
ok: true,
res.json({
ok: true,
imported: importedCount,
total: rawSessionData.length,
sessions: sessionCount,